gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.requests;
import org.apache.kafka.common.InvalidRecordException;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.protocol.ApiKeys;
import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.record.MemoryRecordsBuilder;
import org.apache.kafka.common.record.RecordBatch;
import org.apache.kafka.common.record.RecordVersion;
import org.apache.kafka.common.record.SimpleRecord;
import org.apache.kafka.common.record.TimestampType;
import org.junit.Test;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Unit tests for {@code ProduceRequest} and {@code ProduceRequest.Builder}: transactional and
 * idempotent flagging of the carried record sets, the allowed protocol-version window implied
 * by the record magic value, and the validation performed when a request is built for a
 * specific version.
 */
public class ProduceRequestTest {

    private final SimpleRecord simpleRecord = new SimpleRecord(System.currentTimeMillis(),
            "key".getBytes(),
            "value".getBytes());

    @Test
    public void shouldBeFlaggedAsTransactionalWhenTransactionalRecords() throws Exception {
        final MemoryRecords memoryRecords = MemoryRecords.withTransactionalRecords(0, CompressionType.NONE, 1L,
                (short) 1, 1, 1, simpleRecord);
        final ProduceRequest request = ProduceRequest.Builder.forCurrentMagic((short) -1,
                10, Collections.singletonMap(new TopicPartition("topic", 1), memoryRecords)).build();
        assertTrue(request.hasTransactionalRecords());
    }

    @Test
    public void shouldNotBeFlaggedAsTransactionalWhenNoRecords() throws Exception {
        final ProduceRequest request = createNonIdempotentNonTransactionalRecords();
        assertFalse(request.hasTransactionalRecords());
    }

    @Test
    public void shouldNotBeFlaggedAsIdempotentWhenRecordsNotIdempotent() throws Exception {
        final ProduceRequest request = createNonIdempotentNonTransactionalRecords();
        // Fixed: this previously re-asserted hasTransactionalRecords() (a copy/paste of the
        // test above), leaving the idempotent flag unverified for the non-idempotent case.
        assertFalse(request.hasIdempotentRecords());
    }

    @Test
    public void shouldBeFlaggedAsIdempotentWhenIdempotentRecords() throws Exception {
        final MemoryRecords memoryRecords = MemoryRecords.withIdempotentRecords(1, CompressionType.NONE, 1L,
                (short) 1, 1, 1, simpleRecord);
        final ProduceRequest request = ProduceRequest.Builder.forCurrentMagic((short) -1, 10,
                Collections.singletonMap(new TopicPartition("topic", 1), memoryRecords)).build();
        assertTrue(request.hasIdempotentRecords());
    }

    @Test
    public void testBuildWithOldMessageFormat() {
        ByteBuffer buffer = ByteBuffer.allocate(256);
        MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, RecordBatch.MAGIC_VALUE_V1, CompressionType.NONE,
                TimestampType.CREATE_TIME, 0L);
        builder.append(10L, null, "a".getBytes());
        Map<TopicPartition, MemoryRecords> produceData = new HashMap<>();
        produceData.put(new TopicPartition("test", 0), builder.build());
        // Magic v1 pins the request to produce version 2 exactly.
        ProduceRequest.Builder requestBuilder = ProduceRequest.Builder.forMagic(RecordBatch.MAGIC_VALUE_V1, (short) 1,
                5000, produceData, null);
        assertEquals(2, requestBuilder.oldestAllowedVersion());
        assertEquals(2, requestBuilder.latestAllowedVersion());
    }

    @Test
    public void testBuildWithCurrentMessageFormat() {
        ByteBuffer buffer = ByteBuffer.allocate(256);
        MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, RecordBatch.CURRENT_MAGIC_VALUE,
                CompressionType.NONE, TimestampType.CREATE_TIME, 0L);
        builder.append(10L, null, "a".getBytes());
        Map<TopicPartition, MemoryRecords> produceData = new HashMap<>();
        produceData.put(new TopicPartition("test", 0), builder.build());
        // Current magic allows everything from version 3 up to the latest produce version.
        ProduceRequest.Builder requestBuilder = ProduceRequest.Builder.forMagic(RecordBatch.CURRENT_MAGIC_VALUE,
                (short) 1, 5000, produceData, null);
        assertEquals(3, requestBuilder.oldestAllowedVersion());
        assertEquals(ApiKeys.PRODUCE.latestVersion(), requestBuilder.latestAllowedVersion());
    }

    @Test
    public void testV3AndAboveShouldContainOnlyOneRecordBatch() {
        // Two closed batches in a single partition payload is invalid for v3+.
        ByteBuffer buffer = ByteBuffer.allocate(256);
        MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, CompressionType.NONE, TimestampType.CREATE_TIME, 0L);
        builder.append(10L, null, "a".getBytes());
        builder.close();
        builder = MemoryRecords.builder(buffer, CompressionType.NONE, TimestampType.CREATE_TIME, 1L);
        builder.append(11L, "1".getBytes(), "b".getBytes());
        builder.append(12L, null, "c".getBytes());
        builder.close();
        buffer.flip();
        Map<TopicPartition, MemoryRecords> produceData = new HashMap<>();
        produceData.put(new TopicPartition("test", 0), MemoryRecords.readableRecords(buffer));
        ProduceRequest.Builder requestBuilder = ProduceRequest.Builder.forCurrentMagic((short) 1, 5000, produceData);
        assertThrowsInvalidRecordExceptionForAllVersions(requestBuilder);
    }

    @Test
    public void testV3AndAboveCannotHaveNoRecordBatches() {
        Map<TopicPartition, MemoryRecords> produceData = new HashMap<>();
        produceData.put(new TopicPartition("test", 0), MemoryRecords.EMPTY);
        ProduceRequest.Builder requestBuilder = ProduceRequest.Builder.forCurrentMagic((short) 1, 5000, produceData);
        assertThrowsInvalidRecordExceptionForAllVersions(requestBuilder);
    }

    @Test
    public void testV3AndAboveCannotUseMagicV0() {
        ByteBuffer buffer = ByteBuffer.allocate(256);
        MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, RecordBatch.MAGIC_VALUE_V0, CompressionType.NONE,
                TimestampType.NO_TIMESTAMP_TYPE, 0L);
        builder.append(10L, null, "a".getBytes());
        Map<TopicPartition, MemoryRecords> produceData = new HashMap<>();
        produceData.put(new TopicPartition("test", 0), builder.build());
        ProduceRequest.Builder requestBuilder = ProduceRequest.Builder.forCurrentMagic((short) 1, 5000, produceData);
        assertThrowsInvalidRecordExceptionForAllVersions(requestBuilder);
    }

    @Test
    public void testV3AndAboveCannotUseMagicV1() {
        ByteBuffer buffer = ByteBuffer.allocate(256);
        MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, RecordBatch.MAGIC_VALUE_V1, CompressionType.NONE,
                TimestampType.CREATE_TIME, 0L);
        builder.append(10L, null, "a".getBytes());
        Map<TopicPartition, MemoryRecords> produceData = new HashMap<>();
        produceData.put(new TopicPartition("test", 0), builder.build());
        ProduceRequest.Builder requestBuilder = ProduceRequest.Builder.forCurrentMagic((short) 1, 5000, produceData);
        assertThrowsInvalidRecordExceptionForAllVersions(requestBuilder);
    }

    @Test
    public void testV6AndBelowCannotUseZStdCompression() {
        ByteBuffer buffer = ByteBuffer.allocate(256);
        MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, RecordBatch.MAGIC_VALUE_V2, CompressionType.ZSTD,
                TimestampType.CREATE_TIME, 0L);
        builder.append(10L, null, "a".getBytes());
        Map<TopicPartition, MemoryRecords> produceData = new HashMap<>();
        produceData.put(new TopicPartition("test", 0), builder.build());
        // Can't create ProduceRequest instance with version within [3, 7)
        for (short version = 3; version < 7; version++) {
            ProduceRequest.Builder requestBuilder = new ProduceRequest.Builder(version, version, (short) 1, 5000, produceData, null);
            assertThrowsInvalidRecordExceptionForAllVersions(requestBuilder);
        }
        // Works fine with current version (>= 7)
        ProduceRequest.Builder.forCurrentMagic((short) 1, 5000, produceData);
    }

    @Test
    public void testMixedTransactionalData() {
        final long producerId = 15L;
        final short producerEpoch = 5;
        final int sequence = 10;
        final String transactionalId = "txnlId";
        final MemoryRecords nonTxnRecords = MemoryRecords.withRecords(CompressionType.NONE,
                new SimpleRecord("foo".getBytes()));
        final MemoryRecords txnRecords = MemoryRecords.withTransactionalRecords(CompressionType.NONE, producerId,
                producerEpoch, sequence, new SimpleRecord("bar".getBytes()));
        final Map<TopicPartition, MemoryRecords> recordsByPartition = new LinkedHashMap<>();
        recordsByPartition.put(new TopicPartition("foo", 0), txnRecords);
        recordsByPartition.put(new TopicPartition("foo", 1), nonTxnRecords);
        final ProduceRequest.Builder builder = ProduceRequest.Builder.forMagic(RecordVersion.current().value, (short) -1, 5000,
                recordsByPartition, transactionalId);
        final ProduceRequest request = builder.build();
        // One transactional partition is enough to flag the whole request.
        assertTrue(request.hasTransactionalRecords());
        assertTrue(request.hasIdempotentRecords());
    }

    @Test
    public void testMixedIdempotentData() {
        final long producerId = 15L;
        final short producerEpoch = 5;
        final int sequence = 10;
        final MemoryRecords nonTxnRecords = MemoryRecords.withRecords(CompressionType.NONE,
                new SimpleRecord("foo".getBytes()));
        final MemoryRecords txnRecords = MemoryRecords.withIdempotentRecords(CompressionType.NONE, producerId,
                producerEpoch, sequence, new SimpleRecord("bar".getBytes()));
        final Map<TopicPartition, MemoryRecords> recordsByPartition = new LinkedHashMap<>();
        recordsByPartition.put(new TopicPartition("foo", 0), txnRecords);
        recordsByPartition.put(new TopicPartition("foo", 1), nonTxnRecords);
        final ProduceRequest.Builder builder = ProduceRequest.Builder.forMagic(RecordVersion.current().value, (short) -1, 5000,
                recordsByPartition, null);
        final ProduceRequest request = builder.build();
        assertFalse(request.hasTransactionalRecords());
        assertTrue(request.hasIdempotentRecords());
    }

    /**
     * Asserts that building the request fails with {@link InvalidRecordException} for every
     * version the builder claims to support.
     */
    private void assertThrowsInvalidRecordExceptionForAllVersions(ProduceRequest.Builder builder) {
        // Fixed: the bound must be inclusive (<=) so the latest allowed version is exercised
        // too. With the previous exclusive bound, builders whose oldest and latest allowed
        // versions were equal (e.g. the per-version ZStd builders above) asserted nothing.
        for (short version = builder.oldestAllowedVersion(); version <= builder.latestAllowedVersion(); version++) {
            assertThrowsInvalidRecordException(builder, version);
        }
    }

    private void assertThrowsInvalidRecordException(ProduceRequest.Builder builder, short version) {
        try {
            builder.build(version).toStruct();
            fail("Builder did not raise " + InvalidRecordException.class.getName() + " as expected");
        } catch (RuntimeException e) {
            assertTrue("Unexpected exception type " + e.getClass().getName(),
                    InvalidRecordException.class.isAssignableFrom(e.getClass()));
        }
    }

    /** Builds a request whose records carry neither a producer id nor a transaction. */
    private ProduceRequest createNonIdempotentNonTransactionalRecords() {
        final MemoryRecords memoryRecords = MemoryRecords.withRecords(CompressionType.NONE, simpleRecord);
        return ProduceRequest.Builder.forCurrentMagic((short) -1, 10,
                Collections.singletonMap(new TopicPartition("topic", 1), memoryRecords)).build();
    }
}
| |
package com.silicornio.quepoconn;
import android.os.Handler;
import android.os.Message;
import com.silicornio.quepotranslator.QPTransManager;
import java.util.HashMap;
import java.util.Map;
import javax.net.ssl.SSLSocketFactory;
/**
* Created by SilicorniO
*/
/**
 * Manages queued network connections: configurations are pushed onto a queue and run by a
 * bounded pool of worker threads; results are delivered to an optional background listener
 * and/or a main-thread listener via a {@link Handler}.
 *
 * Thread-safety: {@code mExecutors} and {@code mQueue} are touched from caller threads
 * (addConn/destroy) and from worker threads (completion callback), so all access to them
 * is synchronized on this instance.
 *
 * Created by SilicorniO
 */
public class QPConnManager {

    /** Running executors mapped to their threads. Guarded by {@code this}. **/
    private Map<QPConnExecutor, Thread> mExecutors = new HashMap<>();

    /** Maximum number of executors at same time **/
    private int mNumMaxExecutors = 4;

    /** Queue of configs to execute. Guarded by {@code this}. **/
    private QPConnQueue mQueue = new QPConnQueue();

    /** Handler used to deliver events on the thread that created this manager.
     *  NOTE(review): assumed to be the main thread — confirm construction site. **/
    private MainHandler mMainHandler = new MainHandler();

    /** Configuration of the connection manager **/
    private QPConnConf mConf;

    /** Translator manager to use if we want to translate objects **/
    private QPTransManager mTransManager;

    /** Array of classes to avoid in translator manager **/
    private Class[] mTransAvoidClasses;

    /** SSL Socket Factory associated to this manager **/
    protected SSLSocketFactory sslSocketFactory;

    /** Flag to know if converting object it is necessary to send null values **/
    private boolean mSerializeNull = false;

    public QPConnManager(){
    }

    public QPConnManager(QPConnConf conf){
        mConf = conf;
    }

    /**
     * Set the translator manager to apply
     * @param transManager QPTransManager
     * @param transAvoidClasses Class[] array of classes to avoid from translations
     */
    public void setTranslatorManager(QPTransManager transManager, Class[] transAvoidClasses) {
        setTranslatorManager(transManager, transAvoidClasses, false);
    }

    /**
     * Set the translator manager to apply
     * @param transManager QPTransManager
     * @param transAvoidClasses Class[] array of classes to avoid from translations
     * @param serializeNull boolean TRUE to show null values, FALSE to hide it
     */
    public void setTranslatorManager(QPTransManager transManager, Class[] transAvoidClasses, boolean serializeNull) {
        mTransManager = transManager;
        mTransAvoidClasses = transAvoidClasses;
        mSerializeNull = serializeNull;
        if(mTransManager!=null){
            mTransManager.setTranslateNullElements(serializeNull);
        }
    }

    /**
     * @deprecated Misspelled name kept for backwards compatibility;
     *             use {@link #setTranslatorManager(QPTransManager, Class[])}.
     */
    @Deprecated
    public void seTranslatorManager(QPTransManager transManager, Class[] transAvoidClasses) {
        setTranslatorManager(transManager, transAvoidClasses, false);
    }

    /**
     * @deprecated Misspelled name kept for backwards compatibility;
     *             use {@link #setTranslatorManager(QPTransManager, Class[], boolean)}.
     */
    @Deprecated
    public void seTranslatorManager(QPTransManager transManager, Class[] transAvoidClasses, boolean serializeNull) {
        setTranslatorManager(transManager, transAvoidClasses, serializeNull);
    }

    /**
     * Set the SSL Socket Factory to use with connections
     * @param sslSocketFactory SSLSocketFactory
     */
    public void setSslSocketFactory(SSLSocketFactory sslSocketFactory) {
        this.sslSocketFactory = sslSocketFactory;
    }

    /**
     * Set the maximum number of executor at the same time
     * @param numMaxExecutors int maximum number, it must be bigger than 0
     */
    public void setNumMaxExecutors(int numMaxExecutors){
        if(numMaxExecutors>0) {
            mNumMaxExecutors = numMaxExecutors;
        }
    }

    //----- CONNECTIONS -----

    /**
     * Add a connection
     * @param connConfig QPConnConfig
     * @param responseBgListener QPResponseBgListener to call from background
     * @param responseListener QPResponseListener to call from main thread
     */
    public void addConn(QPConnConfig connConfig, QPResponseBgListener responseBgListener, QPResponseListener responseListener){
        //add responses to the config
        connConfig.responseBgListener = responseBgListener;
        connConfig.responseListener = responseListener;

        //add the config to the queue and try to execute it
        synchronized (this) {
            if (mQueue == null) {
                //manager was destroyed, silently drop (matches previous best-effort behavior)
                return;
            }
            mQueue.push(connConfig);
        }
        executeNextConn();
    }

    /**
     * Execute the next connection in the queue
     * @return boolean TRUE if another executor was started, FALSE if no config to execute
     *         or no space for more executors
     */
    private synchronized boolean executeNextConn(){
        //manager already destroyed
        if (mQueue == null || mExecutors == null) {
            return false;
        }

        //check if it is possible to add new executors
        if(mExecutors.size() >= mNumMaxExecutors){
            return false;
        }

        //get the next connection config to execute
        QPConnConfig configToExecute = mQueue.popFirst();
        if(configToExecute==null){
            return false;
        }

        //translate values, prepared for executing
        configToExecute.translateValues();

        //generate the request
        final QPConnRequest request = new QPConnRequest(configToExecute);
        if(!request.prepare(sslSocketFactory)){
            return false;
        }

        //create the executor and its worker thread
        final QPConnExecutor executor = new QPConnExecutor(request, mExecutorListener);
        Thread threadExecutor = new Thread(){
            @Override
            public void run() {
                //translate objects if necessary, then execute the connection
                request.translateValues(mTransManager, mTransAvoidClasses, mSerializeNull);
                executor.execute();
            }
        };

        //register the executor and start it
        mExecutors.put(executor, threadExecutor);
        threadExecutor.start();
        return true;
    }

    /**
     * Called when a execution finishes: frees the executor slot, dispatches the response
     * to the configured listeners and starts the next queued connection.
     */
    private QPConnExecutor.QPConnExecutorListener mExecutorListener = new QPConnExecutor.QPConnExecutorListener(){
        @Override
        public void onExecutionEnd(QPConnExecutor executor, QPConnResponse response) {
            //remove the executor from the list (the manager may have been destroyed meanwhile)
            synchronized (QPConnManager.this) {
                if (mExecutors == null) {
                    return;
                }
                mExecutors.remove(executor);
            }

            //translate values if necessary
            response.translateValues(mTransManager);

            //call to the listener in background if this connection has one configured
            if(response.config.responseBgListener!=null){
                response.config.responseBgListener.responseOnBackground(response, response.config);
            }

            //if config has a main listener configured we call it through the handler
            if(response.config.responseListener!=null) {
                Message msg = new Message();
                msg.obj = response;
                mMainHandler.sendMessage(msg);
            }

            //FIX: a finished execution frees a slot, so start the next queued connection.
            //Previously nothing was dequeued here, so configs queued while all executors
            //were busy waited until the next addConn() call.
            executeNextConn();
        }
    };

    /**
     * Handler used to send events over the main thread
     */
    private static class MainHandler extends Handler {
        @Override
        public void handleMessage(Message msg) {
            //get the response
            QPConnResponse response = (QPConnResponse) msg.obj;

            //call to the listener (it was checked before if it was null or not)
            response.config.responseListener.responseOnMainThread(response, response.config);
        }
    }

    /**
     * Destroy everything stopping all executions
     */
    public synchronized void destroy(){
        //destroy queue (null-guarded so a double destroy() no longer throws)
        if (mQueue != null) {
            mQueue.destroy();
            mQueue = null;
        }

        //destroy executors
        if (mExecutors != null) {
            for(QPConnExecutor executor : mExecutors.keySet()){
                executor.destroy();
            }
            mExecutors.clear();
            mExecutors = null;
        }
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.impl.persistence.entity;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.el.ELContext;
import org.activiti.engine.ActivitiException;
import org.activiti.engine.delegate.VariableScope;
import org.activiti.engine.impl.context.Context;
import org.activiti.engine.impl.interceptor.CommandContext;
import org.activiti.engine.impl.variable.VariableType;
import org.activiti.engine.impl.variable.VariableTypes;
/**
* @author Tom Baeyens
* @author Joram Barrez
* @author Tijs Rademakers
* @author Saeid Mirzaei
*/
public abstract class VariableScopeImpl extends AbstractEntity implements Serializable, VariableScope {
  private static final long serialVersionUID = 1L;

  // The cache used when fetching all variables. Must start as null: the lazy-loading
  // logic uses null to detect that the variables have not been fetched yet.
  protected Map<String, VariableInstanceEntity> variableInstances; // needs to be null, the logic depends on it for checking if vars were already fetched

  // The cache used when fetching/setting specific variables without a full fetch;
  // its values are the most recent ones and override the full-fetch cache on reads.
  protected Map<String, VariableInstanceEntity> usedVariablesCache = new HashMap<String, VariableInstanceEntity>();

  // Transient (non-persisted) variables; they shadow persistent variables on every read.
  // NOTE: the field name carries a historical typo ("Variabes"); kept for compatibility.
  protected Map<String, VariableInstance> transientVariabes;

  // Cached EL context for expression evaluation within this scope.
  protected ELContext cachedElContext;

  /** Loads every variable instance persisted for this scope from the data store. */
  protected abstract Collection<VariableInstanceEntity> loadVariableInstances();

  /** Returns the enclosing variable scope, or null when this is the root scope. */
  protected abstract VariableScopeImpl getParentVariableScope();

  /** Wires the given variable instance back to this owning scope. */
  protected abstract void initializeVariableInstanceBackPointer(VariableInstanceEntity variableInstance);
protected void ensureVariableInstancesInitialized() {
if (variableInstances == null) {
variableInstances = new HashMap<String, VariableInstanceEntity>();
CommandContext commandContext = Context.getCommandContext();
if (commandContext == null) {
throw new ActivitiException("lazy loading outside command context");
}
Collection<VariableInstanceEntity> variableInstancesList = loadVariableInstances();
for (VariableInstanceEntity variableInstance : variableInstancesList) {
variableInstances.put(variableInstance.getName(), variableInstance);
}
}
}
public Map<String, Object> getVariables() {
return collectVariables(new HashMap<String, Object>());
}
public Map<String, VariableInstance> getVariableInstances() {
return collectVariableInstances(new HashMap<String, VariableInstance>());
}
public Map<String, Object> getVariables(Collection<String> variableNames) {
return getVariables(variableNames, true);
}
public Map<String, VariableInstance> getVariableInstances(Collection<String> variableNames) {
return getVariableInstances(variableNames, true);
}
public Map<String, Object> getVariables(Collection<String> variableNames, boolean fetchAllVariables) {
Map<String, Object> requestedVariables = new HashMap<String, Object>();
Set<String> variableNamesToFetch = new HashSet<String>(variableNames);
// Transient variables 'shadow' any existing variables.
// The values in the fetch-cache will be more recent, so they can override any existing ones
for (String variableName : variableNames) {
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
requestedVariables.put(variableName, transientVariabes.get(variableName).getValue());
variableNamesToFetch.remove(variableName);
} else if (usedVariablesCache.containsKey(variableName)) {
requestedVariables.put(variableName, usedVariablesCache.get(variableName).getValue());
variableNamesToFetch.remove(variableName);
}
}
if (fetchAllVariables == true) {
// getVariables() will go up the execution hierarchy, no need to do
// it here also, the cached values will already be applied too
Map<String, Object> allVariables = getVariables();
for (String variableName : variableNamesToFetch) {
requestedVariables.put(variableName, allVariables.get(variableName));
}
return requestedVariables;
} else {
// Go up if needed
VariableScope parent = getParentVariableScope();
if (parent != null) {
requestedVariables.putAll(parent.getVariables(variableNamesToFetch, fetchAllVariables));
}
// Fetch variables on this scope
List<VariableInstanceEntity> variables = getSpecificVariables(variableNamesToFetch);
for (VariableInstanceEntity variable : variables) {
requestedVariables.put(variable.getName(), variable.getValue());
}
return requestedVariables;
}
}
public Map<String, VariableInstance> getVariableInstances(Collection<String> variableNames, boolean fetchAllVariables) {
Map<String, VariableInstance> requestedVariables = new HashMap<String, VariableInstance>();
Set<String> variableNamesToFetch = new HashSet<String>(variableNames);
// The values in the fetch-cache will be more recent, so they can override any existing ones
for (String variableName : variableNames) {
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
requestedVariables.put(variableName, transientVariabes.get(variableName));
variableNamesToFetch.remove(variableName);
} else if (usedVariablesCache.containsKey(variableName)) {
requestedVariables.put(variableName, usedVariablesCache.get(variableName));
variableNamesToFetch.remove(variableName);
}
}
if (fetchAllVariables == true) {
// getVariables() will go up the execution hierarchy, no need to do it here
// also, the cached values will already be applied too
Map<String, VariableInstance> allVariables = getVariableInstances();
for (String variableName : variableNamesToFetch) {
requestedVariables.put(variableName, allVariables.get(variableName));
}
return requestedVariables;
} else {
// Go up if needed
VariableScope parent = getParentVariableScope();
if (parent != null) {
requestedVariables.putAll(parent.getVariableInstances(variableNamesToFetch, fetchAllVariables));
}
// Fetch variables on this scope
List<VariableInstanceEntity> variables = getSpecificVariables(variableNamesToFetch);
for (VariableInstanceEntity variable : variables) {
requestedVariables.put(variable.getName(), variable);
}
return requestedVariables;
}
}
protected Map<String, Object> collectVariables(HashMap<String, Object> variables) {
ensureVariableInstancesInitialized();
VariableScopeImpl parentScope = getParentVariableScope();
if (parentScope != null) {
variables.putAll(parentScope.collectVariables(variables));
}
for (VariableInstanceEntity variableInstance : variableInstances.values()) {
variables.put(variableInstance.getName(), variableInstance.getValue());
}
for (String variableName : usedVariablesCache.keySet()) {
variables.put(variableName, usedVariablesCache.get(variableName).getValue());
}
if (transientVariabes != null) {
for (String variableName : transientVariabes.keySet()) {
variables.put(variableName, transientVariabes.get(variableName).getValue());
}
}
return variables;
}
protected Map<String, VariableInstance> collectVariableInstances(HashMap<String, VariableInstance> variables) {
ensureVariableInstancesInitialized();
VariableScopeImpl parentScope = getParentVariableScope();
if (parentScope != null) {
variables.putAll(parentScope.collectVariableInstances(variables));
}
for (VariableInstance variableInstance : variableInstances.values()) {
variables.put(variableInstance.getName(), variableInstance);
}
for (String variableName : usedVariablesCache.keySet()) {
variables.put(variableName, usedVariablesCache.get(variableName));
}
if (transientVariabes != null) {
variables.putAll(transientVariabes);
}
return variables;
}
public Object getVariable(String variableName) {
return getVariable(variableName, true);
}
public VariableInstance getVariableInstance(String variableName) {
return getVariableInstance(variableName, true);
}
/**
* The same operation as {@link VariableScopeImpl#getVariable(String)},
* but with an extra parameter to indicate whether or not all variables need to be fetched.
*
* Note that the default Activiti way (because of backwards compatibility) is to fetch all the variables
* when doing a get/set of variables. So this means 'true' is the default value for this method,
* and in fact it will simply delegate to {@link #getVariable(String)}.
* This can also be the most performant, if you're doing a lot of variable gets in the same transaction (eg in service tasks).
*
* In case 'false' is used, only the specific variable will be fetched.
*/
public Object getVariable(String variableName, boolean fetchAllVariables) {
Object value = null;
VariableInstance variable = getVariableInstance(variableName, fetchAllVariables);
if (variable != null) {
value = variable.getValue();
}
return value;
}
public VariableInstance getVariableInstance(String variableName, boolean fetchAllVariables) {
// Transient variable
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
return transientVariabes.get(variableName);
}
// Check the local single-fetch cache
if (usedVariablesCache.containsKey(variableName)) {
return usedVariablesCache.get(variableName);
}
if (fetchAllVariables == true) {
ensureVariableInstancesInitialized();
VariableInstanceEntity variableInstance = variableInstances.get(variableName);
if (variableInstance != null) {
return variableInstance;
}
// Go up the hierarchy
VariableScope parentScope = getParentVariableScope();
if (parentScope != null) {
return parentScope.getVariableInstance(variableName, true);
}
return null;
} else {
if (variableInstances != null && variableInstances.containsKey(variableName)) {
return variableInstances.get(variableName);
}
VariableInstanceEntity variable = getSpecificVariable(variableName);
if (variable != null) {
usedVariablesCache.put(variableName, variable);
return variable;
}
// Go up the hierarchy
VariableScope parentScope = getParentVariableScope();
if (parentScope != null) {
return parentScope.getVariableInstance(variableName, false);
}
return null;
}
}
  /** Loads a single named variable for this scope straight from the data store (no caching). */
  protected abstract VariableInstanceEntity getSpecificVariable(String variableName);
public Object getVariableLocal(String variableName) {
return getVariableLocal(variableName, true);
}
public VariableInstance getVariableInstanceLocal(String variableName) {
return getVariableInstanceLocal(variableName, true);
}
public Object getVariableLocal(String variableName, boolean fetchAllVariables) {
Object value = null;
VariableInstance variable = getVariableInstanceLocal(variableName, fetchAllVariables);
if (variable != null) {
value = variable.getValue();
}
return value;
}
public VariableInstance getVariableInstanceLocal(String variableName, boolean fetchAllVariables) {
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
return transientVariabes.get(variableName);
}
if (usedVariablesCache.containsKey(variableName)) {
return usedVariablesCache.get(variableName);
}
if (fetchAllVariables == true) {
ensureVariableInstancesInitialized();
VariableInstanceEntity variableInstance = variableInstances.get(variableName);
if (variableInstance != null) {
return variableInstance;
}
return null;
} else {
if (variableInstances != null && variableInstances.containsKey(variableName)) {
VariableInstanceEntity variable = variableInstances.get(variableName);
if (variable != null) {
return variableInstances.get(variableName);
}
}
VariableInstanceEntity variable = getSpecificVariable(variableName);
if (variable != null) {
usedVariablesCache.put(variableName, variable);
return variable;
}
return null;
}
}
public boolean hasVariables() {
if (transientVariabes != null && !transientVariabes.isEmpty()) {
return true;
}
ensureVariableInstancesInitialized();
if (!variableInstances.isEmpty()) {
return true;
}
VariableScope parentScope = getParentVariableScope();
if (parentScope != null) {
return parentScope.hasVariables();
}
return false;
}
public boolean hasVariablesLocal() {
if (transientVariabes != null && !transientVariabes.isEmpty()) {
return true;
}
ensureVariableInstancesInitialized();
return !variableInstances.isEmpty();
}
public boolean hasVariable(String variableName) {
if (hasVariableLocal(variableName)) {
return true;
}
VariableScope parentScope = getParentVariableScope();
if (parentScope != null) {
return parentScope.hasVariable(variableName);
}
return false;
}
public boolean hasVariableLocal(String variableName) {
if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
return true;
}
ensureVariableInstancesInitialized();
return variableInstances.containsKey(variableName);
}
protected Set<String> collectVariableNames(Set<String> variableNames) {
if (transientVariabes != null) {
variableNames.addAll(transientVariabes.keySet());
}
ensureVariableInstancesInitialized();
VariableScopeImpl parentScope = getParentVariableScope();
if (parentScope != null) {
variableNames.addAll(parentScope.collectVariableNames(variableNames));
}
for (VariableInstanceEntity variableInstance : variableInstances.values()) {
variableNames.add(variableInstance.getName());
}
return variableNames;
}
public Set<String> getVariableNames() {
return collectVariableNames(new HashSet<String>());
}
public Map<String, Object> getVariablesLocal() {
Map<String, Object> variables = new HashMap<String, Object>();
ensureVariableInstancesInitialized();
for (VariableInstanceEntity variableInstance : variableInstances.values()) {
variables.put(variableInstance.getName(), variableInstance.getValue());
}
for (String variableName : usedVariablesCache.keySet()) {
variables.put(variableName, usedVariablesCache.get(variableName).getValue());
}
if (transientVariabes != null) {
for (String variableName : transientVariabes.keySet()) {
variables.put(variableName, transientVariabes.get(variableName).getValue());
}
}
return variables;
}
public Map<String, VariableInstance> getVariableInstancesLocal() {
Map<String, VariableInstance> variables = new HashMap<String, VariableInstance>();
ensureVariableInstancesInitialized();
for (VariableInstanceEntity variableInstance : variableInstances.values()) {
variables.put(variableInstance.getName(), variableInstance);
}
for (String variableName : usedVariablesCache.keySet()) {
variables.put(variableName, usedVariablesCache.get(variableName));
}
if (transientVariabes != null) {
variables.putAll(transientVariabes);
}
return variables;
}
public Map<String, Object> getVariablesLocal(Collection<String> variableNames) {
return getVariablesLocal(variableNames, true);
}
public Map<String, VariableInstance> getVariableInstancesLocal(Collection<String> variableNames) {
return getVariableInstancesLocal(variableNames, true);
}
/**
 * Returns the requested local variables' values. Transient and cached entries
 * win over stored ones. When fetchAllVariables is true, all local variables are
 * loaded and names missing from them map to null; when false, only the still
 * unresolved names are fetched from storage and missing names are absent.
 */
public Map<String, Object> getVariablesLocal(Collection<String> variableNames, boolean fetchAllVariables) {
    Map<String, Object> requestedVariables = new HashMap<String, Object>();
    // The values in the fetch-cache will be more recent, so they can override any existing ones
    Set<String> variableNamesToFetch = new HashSet<String>(variableNames);
    for (String variableName : variableNames) {
        if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
            requestedVariables.put(variableName, transientVariabes.get(variableName).getValue());
            variableNamesToFetch.remove(variableName);
        } else if (usedVariablesCache.containsKey(variableName)) {
            requestedVariables.put(variableName, usedVariablesCache.get(variableName).getValue());
            variableNamesToFetch.remove(variableName);
        }
    }
    if (fetchAllVariables) { // redundant '== true' comparison removed
        Map<String, Object> allVariables = getVariablesLocal();
        for (String variableName : variableNamesToFetch) {
            requestedVariables.put(variableName, allVariables.get(variableName));
        }
    } else {
        List<VariableInstanceEntity> variables = getSpecificVariables(variableNamesToFetch);
        for (VariableInstanceEntity variable : variables) {
            requestedVariables.put(variable.getName(), variable.getValue());
        }
    }
    return requestedVariables;
}
/**
 * Returns the requested local variable instances. Mirrors
 * {@link #getVariablesLocal(Collection, boolean)} but keeps the instances
 * rather than unwrapping their values.
 */
public Map<String, VariableInstance> getVariableInstancesLocal(Collection<String> variableNames, boolean fetchAllVariables) {
    Map<String, VariableInstance> requestedVariables = new HashMap<String, VariableInstance>();
    // The values in the fetch-cache will be more recent, so they can override any existing ones
    Set<String> variableNamesToFetch = new HashSet<String>(variableNames);
    for (String variableName : variableNames) {
        if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
            requestedVariables.put(variableName, transientVariabes.get(variableName));
            variableNamesToFetch.remove(variableName);
        } else if (usedVariablesCache.containsKey(variableName)) {
            requestedVariables.put(variableName, usedVariablesCache.get(variableName));
            variableNamesToFetch.remove(variableName);
        }
    }
    if (fetchAllVariables) { // redundant '== true' comparison removed
        Map<String, VariableInstance> allVariables = getVariableInstancesLocal();
        for (String variableName : variableNamesToFetch) {
            requestedVariables.put(variableName, allVariables.get(variableName));
        }
    } else {
        List<VariableInstanceEntity> variables = getSpecificVariables(variableNamesToFetch);
        for (VariableInstanceEntity variable : variables) {
            requestedVariables.put(variable.getName(), variable);
        }
    }
    return requestedVariables;
}
/** Loads exactly the named variable instances for this scope from persistent storage. */
protected abstract List<VariableInstanceEntity> getSpecificVariables(Collection<String> variableNames);
/** Returns the names of all variables defined directly on this scope, transient ones included. */
public Set<String> getVariableNamesLocal() {
    ensureVariableInstancesInitialized();
    Set<String> names = new HashSet<String>(variableInstances.keySet());
    if (transientVariabes != null) {
        names.addAll(transientVariabes.keySet());
    }
    return names;
}
/** Returns a read-only view of the initialized variable instances on this scope. */
public Map<String, VariableInstanceEntity> getVariableInstanceEntities() {
    ensureVariableInstancesInitialized();
    return Collections.unmodifiableMap(variableInstances);
}
/** Returns the (mutable) cache of variable instances touched without a full fetch. */
public Map<String, VariableInstanceEntity> getUsedVariablesCache() {
    return usedVariablesCache;
}
/** Creates each entry of the given map as a brand-new local variable on this scope. */
public void createVariablesLocal(Map<String, ? extends Object> variables) {
    if (variables == null) {
        return;
    }
    for (Map.Entry<String, ? extends Object> entry : variables.entrySet()) {
        createVariableLocal(entry.getKey(), entry.getValue());
    }
}
/** Sets every entry of the map as a variable, placing each as high as possible in the hierarchy. */
public void setVariables(Map<String, ? extends Object> variables) {
    if (variables != null) {
        // Iterate entries directly — avoids a map lookup per key
        for (Map.Entry<String, ? extends Object> entry : variables.entrySet()) {
            setVariable(entry.getKey(), entry.getValue());
        }
    }
}
/** Sets every entry of the map as a variable on this scope (never a parent). */
public void setVariablesLocal(Map<String, ? extends Object> variables) {
    if (variables != null) {
        // Iterate entries directly — avoids a map lookup per key
        for (Map.Entry<String, ? extends Object> entry : variables.entrySet()) {
            setVariableLocal(entry.getKey(), entry.getValue());
        }
    }
}
/** Removes all variables stored on this scope, delegating up the hierarchy per name. */
public void removeVariables() {
    ensureVariableInstancesInitialized();
    // Snapshot the names first: removeVariable mutates variableInstances while we iterate
    for (String name : new HashSet<String>(variableInstances.keySet())) {
        removeVariable(name);
    }
}
/** Removes every variable defined directly on this scope. */
public void removeVariablesLocal() {
    // Snapshot the names: removeVariableLocal mutates the underlying collections
    for (String name : new ArrayList<String>(getVariableNamesLocal())) {
        removeVariableLocal(name);
    }
}
/** Removes each named variable, searching up the scope hierarchy. */
public void removeVariables(Collection<String> variableNames) {
    if (variableNames == null) {
        return;
    }
    for (String name : variableNames) {
        removeVariable(name);
    }
}
/** Removes each named variable from this scope only. */
public void removeVariablesLocal(Collection<String> variableNames) {
    if (variableNames == null) {
        return;
    }
    for (String name : variableNames) {
        removeVariableLocal(name);
    }
}
/** Sets a variable as high as possible in the hierarchy, fetching all variables first. */
public void setVariable(String variableName, Object value) {
    setVariable(variableName, value, getSourceActivityExecution(), true);
}
/**
 * The default {@link #setVariable(String, Object)} fetches all variables
 * (for historical and backwards compatible reasons) while setting the variables.
 *
 * Passing fetchAllVariables = true keeps that default behaviour (all variables
 * are fetched); passing false skips the bulk fetch and resolves only the one
 * variable being set.
 */
public void setVariable(String variableName, Object value, boolean fetchAllVariables) {
    setVariable(variableName, value, getSourceActivityExecution(), fetchAllVariables);
}
/**
 * Sets a variable as high as possible (highest parent).
 *
 * @param sourceExecution The execution where the variable was originally set, used for history data.
 * @param fetchAllVariables If true, all existing variables will be fetched when setting the variable.
 */
protected void setVariable(String variableName, Object value, ExecutionEntity sourceExecution, boolean fetchAllVariables) {
    if (fetchAllVariables) { // redundant '== true' comparison removed
        // If it's in the cache, it's more recent
        if (usedVariablesCache.containsKey(variableName)) {
            updateVariableInstance(usedVariablesCache.get(variableName), value, sourceExecution);
        }
        // If the variable exists on this scope, replace it
        if (hasVariableLocal(variableName)) {
            setVariableLocal(variableName, value, sourceExecution, true);
            return;
        }
        // Otherwise, go up the hierarchy (we're trying to put it as high as possible)
        VariableScopeImpl parentVariableScope = getParentVariableScope();
        if (parentVariableScope != null) {
            if (sourceExecution == null) {
                parentVariableScope.setVariable(variableName, value);
            } else {
                parentVariableScope.setVariable(variableName, value, sourceExecution, true);
            }
            return;
        }
        // We're as high as possible and the variable doesn't exist yet, so we're creating it
        if (sourceExecution != null) {
            createVariableLocal(variableName, value, sourceExecution);
        } else {
            createVariableLocal(variableName, value);
        }
    } else {
        // Check local cache first
        if (usedVariablesCache.containsKey(variableName)) {
            updateVariableInstance(usedVariablesCache.get(variableName), value, sourceExecution);
        } else if (variableInstances != null && variableInstances.containsKey(variableName)) {
            updateVariableInstance(variableInstances.get(variableName), value, sourceExecution);
        } else {
            // Not in local cache, check if defined on this scope; create it if it doesn't exist yet
            VariableInstanceEntity variable = getSpecificVariable(variableName);
            if (variable != null) {
                updateVariableInstance(variable, value, sourceExecution);
                usedVariablesCache.put(variableName, variable);
            } else {
                VariableScopeImpl parent = getParentVariableScope();
                if (parent != null) {
                    if (sourceExecution == null) {
                        parent.setVariable(variableName, value, fetchAllVariables);
                    } else {
                        parent.setVariable(variableName, value, sourceExecution, fetchAllVariables);
                    }
                    return;
                }
                variable = createVariableInstance(variableName, value, sourceExecution);
                usedVariablesCache.put(variableName, variable);
            }
        }
    }
}
/** Sets a variable on this scope (never a parent), fetching all variables first. Always returns null. */
public Object setVariableLocal(String variableName, Object value) {
    return setVariableLocal(variableName, value, getSourceActivityExecution(), true);
}
/**
 * The default {@link #setVariableLocal(String, Object)} fetches all variables (for historical and backwards compatible reasons) while setting the variables.
 *
 * Passing fetchAllVariables = true keeps that default behaviour (all variables are fetched); passing false skips the bulk fetch and resolves only the one variable being set.
 *
 */
public Object setVariableLocal(String variableName, Object value, boolean fetchAllVariables) {
    return setVariableLocal(variableName, value, getSourceActivityExecution(), fetchAllVariables);
}
/**
 * Sets a variable directly on this scope. With fetchAllVariables the full local
 * variable set is initialized first; otherwise only the single variable is
 * resolved (cache, then local map, then storage) and cached. Always returns null.
 */
public Object setVariableLocal(String variableName, Object value, ExecutionEntity sourceActivityExecution, boolean fetchAllVariables) {
    if (fetchAllVariables) { // redundant '== true' comparison removed
        // If it's in the cache, it's more recent
        if (usedVariablesCache.containsKey(variableName)) {
            updateVariableInstance(usedVariablesCache.get(variableName), value, sourceActivityExecution);
        }
        ensureVariableInstancesInitialized();
        VariableInstanceEntity variableInstance = variableInstances.get(variableName);
        if (variableInstance == null) {
            variableInstance = usedVariablesCache.get(variableName);
        }
        if (variableInstance == null) {
            createVariableLocal(variableName, value);
        } else {
            updateVariableInstance(variableInstance, value, sourceActivityExecution);
        }
        return null;
    } else {
        if (usedVariablesCache.containsKey(variableName)) {
            updateVariableInstance(usedVariablesCache.get(variableName), value, sourceActivityExecution);
        } else if (variableInstances != null && variableInstances.containsKey(variableName)) {
            updateVariableInstance(variableInstances.get(variableName), value, sourceActivityExecution);
        } else {
            // Not in any local cache: hit storage, then update or create
            VariableInstanceEntity variable = getSpecificVariable(variableName);
            if (variable != null) {
                updateVariableInstance(variable, value, sourceActivityExecution);
            } else {
                variable = createVariableInstance(variableName, value, sourceActivityExecution);
            }
            usedVariablesCache.put(variableName, variable);
        }
        return null;
    }
}
/** Creates a brand-new local variable using the current source activity execution for history. */
public void createVariableLocal(String variableName, Object value) {
    createVariableLocal(variableName, value, getSourceActivityExecution());
}
/**
 * Only called when a new variable is created on this variable scope. This method is also responsible for propagating the creation of this variable to the history.
 *
 * @throws ActivitiException if a variable with the same name already exists locally
 */
protected void createVariableLocal(String variableName, Object value, ExecutionEntity sourceActivityExecution) {
    ensureVariableInstancesInitialized();
    if (variableInstances.containsKey(variableName)) {
        throw new ActivitiException("variable '" + variableName + "' already exists. Use setVariableLocal if you want to overwrite the value");
    }
    createVariableInstance(variableName, value, sourceActivityExecution);
}
/** Removes the variable, searching up the hierarchy, using the current source activity execution for history. */
public void removeVariable(String variableName) {
    removeVariable(variableName, getSourceActivityExecution());
}
/** Removes the variable from whichever scope defines it, starting here and walking upward. */
protected void removeVariable(String variableName, ExecutionEntity sourceActivityExecution) {
    ensureVariableInstancesInitialized();
    if (variableInstances.containsKey(variableName)) {
        // Defined on this scope: remove it here and stop
        removeVariableLocal(variableName);
    } else {
        VariableScopeImpl parent = getParentVariableScope();
        if (parent == null) {
            return;
        }
        if (sourceActivityExecution != null) {
            parent.removeVariable(variableName, sourceActivityExecution);
        } else {
            parent.removeVariable(variableName);
        }
    }
}
/** Removes the variable from this scope only, using the current source activity execution for history. */
public void removeVariableLocal(String variableName) {
    removeVariableLocal(variableName, getSourceActivityExecution());
}
/** Hook for subclasses to report the execution that triggered a variable change; null by default. */
protected ExecutionEntity getSourceActivityExecution() {
    return null;
}
/** Removes the variable from this scope's instance map and deletes it from persistence/history. */
protected void removeVariableLocal(String variableName, ExecutionEntity sourceActivityExecution) {
    ensureVariableInstancesInitialized();
    VariableInstanceEntity removed = variableInstances.remove(variableName);
    if (removed == null) {
        return; // nothing stored locally under that name
    }
    deleteVariableInstanceForExplicitUserCall(removed, sourceActivityExecution);
}
/**
 * Deletes a variable instance on explicit user request and records both the
 * removal and a historic detail entry. The value is cleared after the delete —
 * presumably to detach the stored payload; confirm against entity manager semantics.
 */
protected void deleteVariableInstanceForExplicitUserCall(VariableInstanceEntity variableInstance, ExecutionEntity sourceActivityExecution) {
    Context.getCommandContext().getVariableInstanceEntityManager().delete(variableInstance);
    variableInstance.setValue(null);
    // Record historic variable deletion
    Context.getCommandContext().getHistoryManager().recordVariableRemoved(variableInstance);
    // Record historic detail
    Context.getCommandContext().getHistoryManager().recordHistoricDetailVariableCreate(variableInstance, sourceActivityExecution, isActivityIdUsedForDetails());
}
/**
 * Updates an existing variable instance's value, switching its type first when
 * the new value maps to a different VariableType, then records history.
 */
protected void updateVariableInstance(VariableInstanceEntity variableInstance, Object value, ExecutionEntity sourceActivityExecution) {
    // Always check if the type should be altered. It's possible that the
    // previous type is lower in the type
    // checking chain (e.g. serializable) and will return true on
    // isAbleToStore(), even though another type
    // higher in the chain is eligible for storage.
    VariableTypes variableTypes = Context.getProcessEngineConfiguration().getVariableTypes();
    VariableType newType = variableTypes.findVariableType(value);
    if (newType != null && !newType.equals(variableInstance.getType())) {
        // Type change: clear the old value before swapping types, then store the new value
        variableInstance.setValue(null);
        variableInstance.setType(newType);
        variableInstance.forceUpdate();
        variableInstance.setValue(value);
    } else {
        variableInstance.setValue(value);
    }
    Context.getCommandContext().getHistoryManager().recordHistoricDetailVariableCreate(variableInstance, sourceActivityExecution, isActivityIdUsedForDetails());
    Context.getCommandContext().getHistoryManager().recordVariableUpdate(variableInstance);
}
/**
 * Creates and persists a new variable instance for this scope, wires it back to
 * the scope, registers it in the local instance map (when initialized), and
 * records creation in history.
 */
protected VariableInstanceEntity createVariableInstance(String variableName, Object value, ExecutionEntity sourceActivityExecution) {
    VariableTypes variableTypes = Context.getProcessEngineConfiguration().getVariableTypes();
    VariableType type = variableTypes.findVariableType(value);
    VariableInstanceEntity variableInstance =
            Context.getCommandContext()
                    .getVariableInstanceEntityManager()
                    .create(variableName, type, value);
    initializeVariableInstanceBackPointer(variableInstance);
    Context.getCommandContext().getVariableInstanceEntityManager().insert(variableInstance);
    if (variableInstances != null) {
        variableInstances.put(variableName, variableInstance);
    }
    // Record historic variable
    Context.getCommandContext().getHistoryManager().recordVariableCreate(variableInstance);
    // Record historic detail
    Context.getCommandContext().getHistoryManager().recordHistoricDetailVariableCreate(variableInstance, sourceActivityExecution, isActivityIdUsedForDetails());
    return variableInstance;
}
/*
* Transient variables
*/
/** Stores every entry of the map as a transient variable directly on this scope. */
public void setTransientVariablesLocal(Map<String, Object> transientVariables) {
    // Iterate entries directly — avoids a map lookup per key
    for (Map.Entry<String, Object> entry : transientVariables.entrySet()) {
        setTransientVariableLocal(entry.getKey(), entry.getValue());
    }
}
/** Stores a single transient (non-persisted) variable directly on this scope. */
public void setTransientVariableLocal(String variableName, Object variableValue) {
    if (transientVariabes == null) {
        // Lazily created: most scopes never hold transient variables
        transientVariabes = new HashMap<String, VariableInstance>();
    }
    TransientVariableInstance instance = new TransientVariableInstance(variableName, variableValue);
    transientVariabes.put(variableName, instance);
}
/** Sets every entry of the map as a transient variable, placed as high as possible in the hierarchy. */
public void setTransientVariables(Map<String, Object> transientVariables) {
    // Iterate entries directly — avoids a map lookup per key
    for (Map.Entry<String, Object> entry : transientVariables.entrySet()) {
        setTransientVariable(entry.getKey(), entry.getValue());
    }
}
/** Sets a transient variable on the highest scope in the hierarchy (the root parent). */
public void setTransientVariable(String variableName, Object variableValue) {
    VariableScopeImpl parent = getParentVariableScope();
    if (parent == null) {
        setTransientVariableLocal(variableName, variableValue);
    } else {
        parent.setTransientVariable(variableName, variableValue);
    }
}
/**
 * Returns the value of the transient variable defined directly on this scope,
 * or null when it is not set locally.
 */
public Object getTransientVariableLocal(String variableName) {
    if (transientVariabes != null) {
        // Guard the lookup: the previous code called getValue() on a possibly-missing
        // entry and threw a NullPointerException for unknown names
        VariableInstance instance = transientVariabes.get(variableName);
        if (instance != null) {
            return instance.getValue();
        }
    }
    return null;
}
/** Returns a snapshot map of this scope's transient variable values (empty map when none are set). */
public Map<String, Object> getTransientVariablesLocal() {
    if (transientVariabes == null) {
        return Collections.emptyMap();
    }
    Map<String, Object> variables = new HashMap<String, Object>();
    // Iterate entries directly — avoids a map lookup per key
    for (Map.Entry<String, VariableInstance> entry : transientVariabes.entrySet()) {
        variables.put(entry.getKey(), entry.getValue().getValue());
    }
    return variables;
}
/** Returns the transient variable's value, checking this scope then parents; null when absent everywhere. */
public Object getTransientVariable(String variableName) {
    if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
        return transientVariabes.get(variableName).getValue();
    }
    VariableScopeImpl parent = getParentVariableScope();
    return parent == null ? null : parent.getTransientVariable(variableName);
}
/** Returns all transient variables visible from this scope, including parent scopes. */
public Map<String, Object> getTransientVariables() {
    return collectTransientVariables(new HashMap<String, Object>());
}
/**
 * Collects transient variables from the root scope downward: parent scopes
 * first, then this scope's entries (which therefore override the parents').
 */
protected Map<String, Object> collectTransientVariables(HashMap<String, Object> variables) {
    VariableScopeImpl parentScope = getParentVariableScope();
    if (parentScope != null) {
        // Bug fix: this previously recursed into collectVariables(), which mixed
        // persistent (non-transient) variables into the transient-variable result
        variables.putAll(parentScope.collectTransientVariables(variables));
    }
    if (transientVariabes != null) {
        for (Map.Entry<String, VariableInstance> entry : transientVariabes.entrySet()) {
            variables.put(entry.getKey(), entry.getValue().getValue());
        }
    }
    return variables;
}
/** Removes one transient variable from this scope, if any are stored. */
public void removeTransientVariableLocal(String variableName) {
    if (transientVariabes == null) {
        return;
    }
    transientVariabes.remove(variableName);
}
/** Drops every transient variable stored on this scope. */
public void removeTransientVariablesLocal() {
    if (transientVariabes == null) {
        return;
    }
    transientVariabes.clear();
}
/** Removes the transient variable from whichever scope defines it, walking upward from here. */
public void removeTransientVariable(String variableName) {
    if (transientVariabes != null && transientVariabes.containsKey(variableName)) {
        removeTransientVariableLocal(variableName);
    } else {
        VariableScopeImpl parent = getParentVariableScope();
        if (parent != null) {
            parent.removeTransientVariable(variableName);
        }
    }
}
/** Removes transient variables from this scope and from its direct parent scope. */
public void removeTransientVariables() {
    removeTransientVariablesLocal();
    VariableScopeImpl parent = getParentVariableScope();
    if (parent == null) {
        return;
    }
    // NOTE(review): only the direct parent's locals are cleared, not the whole
    // hierarchy — looks intentional but worth confirming
    parent.removeTransientVariablesLocal();
}
/**
 * Execution variable updates have activity instance ids, but historic task variable updates don't.
 * Subclasses for task scopes are expected to override this and return false.
 */
protected boolean isActivityIdUsedForDetails() {
    return true;
}
// getters and setters
// //////////////////////////////////////////////////////
/** Returns the cached EL context used for expression evaluation, or null if none was set. */
public ELContext getCachedElContext() {
    return cachedElContext;
}
public void setCachedElContext(ELContext cachedElContext) {
    this.cachedElContext = cachedElContext;
}
/** Typed variant of getVariable(String); throws ClassCastException if the value has another type. */
public <T> T getVariable(String variableName, Class<T> variableClass) {
    return variableClass.cast(getVariable(variableName));
}
/** Typed variant of getVariableLocal(String); throws ClassCastException if the value has another type. */
public <T> T getVariableLocal(String variableName, Class<T> variableClass) {
    return variableClass.cast(getVariableLocal(variableName));
}
}
| |
package com.jagex.runescape.screen.title;
import com.jagex.runescape.*;
import com.jagex.runescape.definition.GameFont;
import java.awt.*;
public class TitleScreen {
// Per-row horizontal sway offsets for the flame animation (256 entries, see constructor)
private final int[] anIntArray969;
private RSImageProducer loginBoxLeftBackgroundTile;
public boolean welcomeScreenRaised;
// Volatile flags coordinating the flame-drawing thread with the caller
public volatile boolean drawingFlames;
public volatile boolean currentlyDrawingFlames;
private IndexedImage titleBoxImage;
private IndexedImage titleButtonImage;
// Twelve rune images stamped into the flame noise pattern at random (see updateFlameShape)
private IndexedImage[] titleFlameRuneImages;
// Clean copies of the side backgrounds, restored before each flame frame is composited
private Sprite flameLeftBackground2;
private Sprite flameRightBackground2;
private RSImageProducer flameLeftBackground;
private RSImageProducer flameRightBackground;
// Flame simulation buffers; 32768 = 128 x 256 grid indexed as y + (x << 7)
private int[] titleFlames;
private int[] titleFlamesTemp;
// Off-screen producers for the static title-screen background tiles
private RSImageProducer topCentreBackgroundTile;
private RSImageProducer bottomCentreBackgroundTile;
private RSImageProducer bottomLeftBackgroundTile;
private RSImageProducer bottomRightBackgroundTile;
private RSImageProducer middleLeftBackgroundTile;
private RSImageProducer middleRightBackgroundTile;
// Read offset into titleFlames, advanced by 128 each simulation step
private int flameShapeIndex;
private GameFont fontSmall;
private GameFont fontBold;
private int[] flameStrengths;
private int[] anIntArray829;
private final FlameColours flameColours;
/** Initializes the flame state flags and buffers; image resources are loaded later via load(). */
public TitleScreen() {
    this.welcomeScreenRaised = false;
    this.drawingFlames = false;
    this.currentlyDrawingFlames = false;
    this.anIntArray969 = new int[256];
    this.flameColours = new FlameColours();
}
/**
 * Loads title-screen resources from the archive: the box and button images,
 * the twelve flame rune images, clean background copies of the flame side
 * panels, and the flame simulation buffers. The fontPlain parameter is kept
 * for interface compatibility but is unused (dead locals removed).
 */
public void load(final Component component, final Archive archive, final GameFont fontSmall, final GameFont fontPlain, final GameFont fontBold) {
    this.fontSmall = fontSmall;
    this.fontBold = fontBold;
    this.setupSprites(component, archive);
    this.titleBoxImage = new IndexedImage(archive, "titlebox", 0);
    this.titleButtonImage = new IndexedImage(archive, "titlebutton", 0);
    this.titleFlameRuneImages = new IndexedImage[12];
    for (int rune = 0; rune < 12; rune++) {
        this.titleFlameRuneImages[rune] = new IndexedImage(archive, "runes", rune);
    }
    // Keep unburnt copies of the side backgrounds (128 x 265 = 33920 pixels) so
    // each flame frame can composite over a clean image
    this.flameLeftBackground2 = new Sprite(128, 265);
    this.flameRightBackground2 = new Sprite(128, 265);
    System.arraycopy(this.flameLeftBackground.pixels, 0, this.flameLeftBackground2.pixels, 0, 33920);
    System.arraycopy(this.flameRightBackground.pixels, 0, this.flameRightBackground2.pixels, 0, 33920);
    this.titleFlames = new int[32768];
    this.titleFlamesTemp = new int[32768];
    this.flameStrengths = new int[32768];
    this.anIntArray829 = new int[32768];
    this.updateFlameShape(null);
}
/**
 * Renders the "RuneScape is loading" box with a progress bar (percentage
 * expected in 0-100, 3px of bar per percent) and a status line, then flushes
 * the tile to screen at (202, 171).
 */
public void drawLoadingText(final Graphics gameGraphics, final int percentage, final String text) {
    this.loginBoxLeftBackgroundTile.initDrawingArea();
    final int horizontalOffset = 360;
    final int verticalOffset1 = 200;
    final int verticalOffset2 = 20;
    this.fontBold.drawCentredText("RuneScape is loading - please wait...", horizontalOffset / 2,
            verticalOffset1 / 2 - 26 - verticalOffset2, 0xFFFFFF);
    final int loadingBarHeight = verticalOffset1 / 2 - 18 - verticalOffset2;
    // Outer (dark red) and inner (black) borders of the progress bar
    DrawingArea.drawUnfilledRectangle(horizontalOffset / 2 - 152, 304, 34, 0x8C1111, loadingBarHeight);
    DrawingArea.drawUnfilledRectangle(horizontalOffset / 2 - 151, 302, 32, 0, loadingBarHeight + 1);
    // Filled portion, then the remainder painted black
    DrawingArea.drawFilledRectangle(horizontalOffset / 2 - 150, loadingBarHeight + 2, percentage * 3, 30, 0x8C1111);
    DrawingArea.drawFilledRectangle((horizontalOffset / 2 - 150) + percentage * 3, loadingBarHeight + 2,
            300 - percentage * 3, 30, 0);
    this.fontBold.drawCentredText(text, horizontalOffset / 2, (verticalOffset1 / 2 + 5) - verticalOffset2, 0xFFFFFF);
    this.loginBoxLeftBackgroundTile.drawGraphics(171, gameGraphics, 202);
    if (this.welcomeScreenRaised) {
        this.welcomeScreenRaised = false;
        // Repaint the flame panels only if the flame thread isn't already doing it
        if (!this.currentlyDrawingFlames) {
            this.drawFlames(gameGraphics);
        }
        this.drawTiles(gameGraphics);
    }
}
/**
 * Creates the off-screen image producers for each region of the title screen.
 * The repeated create-then-clear sequence is factored into a helper; creation
 * order is preserved exactly.
 */
public void setupImageProducers(final Component gameComponent) {
    this.flameLeftBackground = this.createProducer(128, 265, gameComponent);
    this.flameRightBackground = this.createProducer(128, 265, gameComponent);
    this.topCentreBackgroundTile = this.createProducer(509, 171, gameComponent);
    this.bottomCentreBackgroundTile = this.createProducer(360, 132, gameComponent);
    this.loginBoxLeftBackgroundTile = this.createProducer(360, 200, gameComponent);
    this.bottomLeftBackgroundTile = this.createProducer(202, 238, gameComponent);
    this.bottomRightBackgroundTile = this.createProducer(203, 238, gameComponent);
    this.middleLeftBackgroundTile = this.createProducer(74, 94, gameComponent);
    this.middleRightBackgroundTile = this.createProducer(75, 94, gameComponent);
}

/** Creates one image producer and clears the drawing area it bound itself to. */
private RSImageProducer createProducer(final int width, final int height, final Component component) {
    final RSImageProducer producer = new RSImageProducer(width, height, component);
    DrawingArea.clear();
    return producer;
}
/** Releases every background image producer so they can be garbage-collected. */
public void clearImageProducers() {
    this.flameLeftBackground = null;
    this.flameRightBackground = null;
    this.topCentreBackgroundTile = null;
    this.middleLeftBackgroundTile = null;
    this.middleRightBackgroundTile = null;
    this.bottomLeftBackgroundTile = null;
    this.bottomCentreBackgroundTile = null;
    this.bottomRightBackgroundTile = null;
    this.loginBoxLeftBackgroundTile = null;
}
/** True when setupImageProducers has run (checked via one representative producer). */
public boolean imageProducersInitialised() {
    return this.topCentreBackgroundTile != null;
}
/** Releases all title-screen resources: images, flame buffers and image producers. */
public void nullLoader() {
    this.titleFlames = null;
    this.titleFlamesTemp = null;
    this.flameStrengths = null;
    this.anIntArray829 = null;
    this.titleBoxImage = null;
    this.titleButtonImage = null;
    this.titleFlameRuneImages = null;
    this.flameLeftBackground2 = null;
    this.flameRightBackground2 = null;
    this.clearImageProducers();
}
/**
 * Composites one frame of the flame effect over the clean background copies of
 * both side panels and pushes them to screen. Each flame pixel is alpha-blended
 * with the background using its strength (0-255); anIntArray969 supplies a
 * per-row horizontal sway. Leftover per-scanline System.out.println debug
 * output has been removed — it spammed stdout every frame.
 */
private void doFlamesDrawing(final Graphics gameGraphics) {
    final char c = '\u0100'; // 256 rows
    this.flameColours.changeColours();
    // Restore the left panel from its clean copy before blending this frame
    System.arraycopy(this.flameLeftBackground2.pixels, 0, this.flameLeftBackground.pixels, 0, 33920);
    int i1 = 0;
    int pos = 1152;
    for (int k1 = 1; k1 < c - 1; k1++) {
        // Sway offset shrinks towards the bottom of the panel
        final int l1 = (this.anIntArray969[k1] * (c - k1)) / c;
        int j2 = 22 + l1;
        if (j2 < 0) {
            j2 = 0;
        }
        i1 += j2;
        for (int l2 = j2; l2 < 128; l2++) {
            final int strength = this.flameStrengths[i1++];
            if (strength != 0) {
                final int off = 256 - strength;
                final int colour = this.flameColours.getCurrentColour(strength);
                final int bg = this.flameLeftBackground.pixels[pos];
                // Blend flame colour with the background in one pass over both channel pairs
                this.flameLeftBackground.pixels[pos++] = ((colour & 0xFF00ff) * strength + (bg & 0xFF00FF) * off & 0xFF00FF00)
                        + ((colour & 0xFF00) * strength + (bg & 0xFF00) * off & 0xFF0000) >> 8;
            } else {
                pos++;
            }
        }
        pos += j2;
    }
    this.flameLeftBackground.drawGraphics(0, gameGraphics, 0);
    // Same procedure, mirrored, for the right panel
    System.arraycopy(this.flameRightBackground2.pixels, 0, this.flameRightBackground.pixels, 0, 33920);
    i1 = 0;
    pos = 1176;
    for (int k2 = 1; k2 < c - 1; k2++) {
        final int i3 = (this.anIntArray969[k2] * (c - k2)) / c;
        final int k3 = 103 - i3;
        pos += i3;
        for (int i4 = 0; i4 < k3; i4++) {
            final int strength = this.flameStrengths[i1++];
            if (strength != 0) {
                final int off = 256 - strength;
                final int colour = this.flameColours.getCurrentColour(strength);
                final int bg = this.flameRightBackground.pixels[pos];
                this.flameRightBackground.pixels[pos++] = ((colour & 0xFF00FF) * strength + (bg & 0xFF00FF) * off & 0xFF00FF00)
                        + ((colour & 0xFF00) * strength + (bg & 0xFF00) * off & 0xFF0000) >> 8;
            } else {
                pos++;
            }
        }
        i1 += 128 - k3;
        pos += 128 - k3 - i3;
    }
    this.flameRightBackground.drawGraphics(0, gameGraphics, 637);
}
/**
 * Decompresses the full title background ("title.dat") and blits slices of it
 * into each background tile producer (offsets select which region of the big
 * image each tile shows). The sprite is then mirrored horizontally in place and
 * blitted a second time for the right-hand halves, and finally the logo is
 * drawn onto the top-centre tile.
 */
private void setupSprites(final Component component, final Archive archive) {
    final byte[] titleData = archive.decompressFile("title.dat");
    Sprite sprite = new Sprite(titleData, component);
    // First pass: left/original orientation, one draw per tile region
    this.flameLeftBackground.initDrawingArea();
    sprite.drawInverse(0, 0);
    this.flameRightBackground.initDrawingArea();
    sprite.drawInverse(-637, 0);
    this.topCentreBackgroundTile.initDrawingArea();
    sprite.drawInverse(-128, 0);
    this.bottomCentreBackgroundTile.initDrawingArea();
    sprite.drawInverse(-202, -371);
    this.loginBoxLeftBackgroundTile.initDrawingArea();
    sprite.drawInverse(-202, -171);
    this.bottomLeftBackgroundTile.initDrawingArea();
    sprite.drawInverse(0, -265);
    this.bottomRightBackgroundTile.initDrawingArea();
    sprite.drawInverse(-562, -265);
    this.middleLeftBackgroundTile.initDrawingArea();
    sprite.drawInverse(-128, -171);
    this.middleRightBackgroundTile.initDrawingArea();
    sprite.drawInverse(-562, -171);
    // Mirror the sprite horizontally, row by row, reusing its own pixel buffer
    final int[] modifiedPixels = new int[sprite.width];
    for (int row = 0; row < sprite.height; row++) {
        for (int column = 0; column < sprite.width; column++) {
            modifiedPixels[column] = sprite.pixels[(sprite.width - column - 1) + sprite.width * row];
        }
        System.arraycopy(modifiedPixels, 0, sprite.pixels, sprite.width * row, sprite.width);
    }
    // Second pass: mirrored orientation for the right-hand sides of each tile
    this.flameLeftBackground.initDrawingArea();
    sprite.drawInverse(382, 0);
    this.flameRightBackground.initDrawingArea();
    sprite.drawInverse(-255, 0);
    this.topCentreBackgroundTile.initDrawingArea();
    sprite.drawInverse(254, 0);
    this.bottomCentreBackgroundTile.initDrawingArea();
    sprite.drawInverse(180, -371);
    this.loginBoxLeftBackgroundTile.initDrawingArea();
    sprite.drawInverse(180, -171);
    this.bottomLeftBackgroundTile.initDrawingArea();
    sprite.drawInverse(382, -265);
    this.bottomRightBackgroundTile.initDrawingArea();
    sprite.drawInverse(-180, -265);
    this.middleLeftBackgroundTile.initDrawingArea();
    sprite.drawInverse(254, -171);
    this.middleRightBackgroundTile.initDrawingArea();
    sprite.drawInverse(-180, -171);
    // Overlay the logo, horizontally centred on the top tile
    sprite = new Sprite(archive, "logo", 0);
    this.topCentreBackgroundTile.initDrawingArea();
    sprite.drawImage(382 - sprite.width / 2 - 128, 18);
}
/** Pushes both flame side panels to screen (left at x=0, right at x=637). */
private void drawFlames(final Graphics gameGraphics) {
    this.flameLeftBackground.drawGraphics(0, gameGraphics, 0);
    this.flameRightBackground.drawGraphics(0, gameGraphics, 637);
}
/** Pushes the six static background tiles to their fixed screen positions. */
private void drawTiles(final Graphics gameGraphics) {
    this.topCentreBackgroundTile.drawGraphics(0, gameGraphics, 128);
    this.bottomCentreBackgroundTile.drawGraphics(371, gameGraphics, 202);
    this.bottomLeftBackgroundTile.drawGraphics(265, gameGraphics, 0);
    this.bottomRightBackgroundTile.drawGraphics(265, gameGraphics, 562);
    this.middleLeftBackgroundTile.drawGraphics(171, gameGraphics, 128);
    this.middleRightBackgroundTile.drawGraphics(171, gameGraphics, 562);
}
/**
 * Advances the flame simulation one step on the 128x256 strength grid
 * (indexed y + (x << 7)): seed the bottom row and random sparks, blur with the
 * four-neighbour average, subtract scrolling noise from titleFlames, and update
 * the per-row sway offsets from overlapping sine waves of the tick.
 */
private void updateFlameStrength(final int tick) {
    final char c = '\u0100'; // 256 rows
    // Randomly ignite ~50% of the cells along the bottom row (row index c-2)
    for (int j = 10; j < 117; j++) {
        final int k = (int) (Math.random() * 100D);
        if (k < 50) {
            this.flameStrengths[j + (c - 2 << 7)] = 255;
        }
    }
    // Scatter 100 weaker sparks in rows 128..255
    for (int i = 0; i < 100; i++) {
        final int y = (int) (Math.random() * 124D) + 2;
        final int x = (int) (Math.random() * 128D) + 128;
        final int pos = y + (x << 7);
        this.flameStrengths[pos] = 192;
    }
    // Four-neighbour box blur into the scratch buffer
    for (int x = 1; x < c - 1; x++) {
        for (int y = 1; y < 127; y++) {
            final int pos = y + (x << 7);
            this.anIntArray829[pos] = (this.flameStrengths[pos - 1] + this.flameStrengths[pos + 1] + this.flameStrengths[pos - 128] + this.flameStrengths[pos + 128]) / 4;
        }
    }
    // Scroll the noise pattern one row; re-randomise it (with a rune stamped in)
    // when the read offset wraps past the buffer
    this.flameShapeIndex += 128;
    if (this.flameShapeIndex > this.titleFlames.length) {
        this.flameShapeIndex -= this.titleFlames.length;
        final int image = (int) (Math.random() * 12D);
        this.updateFlameShape(this.titleFlameRuneImages[image]);
    }
    // Shift the blurred field up one row and damp it with the scrolling noise;
    // the index is masked, so flameShapeIndex reaching length exactly is safe
    for (int x = 1; x < c - 1; x++) {
        for (int y = 1; y < 127; y++) {
            final int pos = y + (x << 7);
            int i4 = this.anIntArray829[pos + 128] - this.titleFlames[pos + this.flameShapeIndex & this.titleFlames.length - 1] / 5;
            if (i4 < 0) {
                i4 = 0;
            }
            this.flameStrengths[pos] = i4;
        }
    }
    // Scroll the sway array and append a new offset built from three sine waves
    System.arraycopy(this.anIntArray969, 1, this.anIntArray969, 0, c - 1);
    this.anIntArray969[c - 1] = (int) (Math.sin(tick / 14D) * 16D + Math.sin(tick / 15D) * 14D + Math.sin(tick / 16D) * 12D);
}
/**
 * Flame animation loop, intended to run on its own thread: while
 * currentlyDrawingFlames is set, advance the simulation twice per frame
 * (original pacing), render, and sleep an adaptively-tuned interval. Sets
 * drawingFlames for the duration so callers can tell the loop is live.
 */
public void drawFlames2(final Graphics gameGraphics, final int tick) {
    this.drawingFlames = true;
    try {
        long startTime = System.currentTimeMillis();
        int currentLoop = 0;
        int interval = 20;
        while (this.currentlyDrawingFlames) {
            // Two simulation steps per rendered frame, matching the original pacing
            this.updateFlameStrength(tick);
            this.updateFlameStrength(tick);
            this.doFlamesDrawing(gameGraphics);
            if (++currentLoop > 10) {
                // Every 10 frames, retune the sleep interval towards the target frame time
                final long currentTime = System.currentTimeMillis();
                final int difference = (int) (currentTime - startTime) / 10 - interval;
                interval = 40 - difference;
                if (interval < 5) {
                    interval = 5;
                }
                currentLoop = 0;
                startTime = currentTime;
            }
            try {
                Thread.sleep(interval);
            } catch (final InterruptedException ignored) {
                // Interrupt is treated as an early wake-up; loop exit is
                // controlled solely by currentlyDrawingFlames (narrowed from a
                // blanket catch (Exception) that hid real failures)
            }
        }
    } catch (final Exception ignored) {
        // Best-effort: a rendering failure must not kill the client; the
        // drawingFlames flag below is still cleared
    }
    this.drawingFlames = false;
}
/**
 * Regenerates the scrolling noise pattern used to damp the flames: random
 * impulses smoothed by 20 passes of a four-neighbour blur (double-buffered via
 * titleFlamesTemp). If a rune image is given, its opaque pixels are punched to
 * zero so the rune's silhouette shows through the flames.
 */
private void updateFlameShape(final IndexedImage runeImage) {
    final int j = 256; // grid rows; columns are 128 (y + (x << 7) indexing)
    for (int pos = 0; pos < this.titleFlames.length; pos++) {
        this.titleFlames[pos] = 0;
    }
    // 5000 random impulses with random intensities
    for (int i = 0; i < 5000; i++) {
        final int pos = (int) (Math.random() * 128D * j);
        this.titleFlames[pos] = (int) (Math.random() * 256D);
    }
    // 20 smoothing passes, swapping the two buffers each pass
    for (int i = 0; i < 20; i++) {
        for (int x = 1; x < j - 1; x++) {
            for (int y = 1; y < 127; y++) {
                final int pos = y + (x << 7);
                this.titleFlamesTemp[pos] = (
                        this.titleFlames[pos - 1]
                                + this.titleFlames[pos + 1]
                                + this.titleFlames[pos - 128]
                                + this.titleFlames[pos + 128]
                ) / 4;
            }
        }
        final int[] temp = this.titleFlames;
        this.titleFlames = this.titleFlamesTemp;
        this.titleFlamesTemp = temp;
    }
    if (runeImage != null) {
        // Stamp the rune: opaque image pixels zero the noise at a 16,16 offset
        // (plus the image's own draw offsets)
        int imagePos = 0;
        for (int x = 0; x < runeImage.height; x++) {
            for (int y = 0; y < runeImage.width; y++) {
                if (runeImage.pixels[imagePos++] != 0) {
                    final int _y = y + 16 + runeImage.drawOffsetX;
                    final int _x = x + 16 + runeImage.drawOffsetY;
                    final int pos = _y + (_x << 7);
                    this.titleFlames[pos] = 0;
                }
            }
        }
    }
}
/**
 * Renders the login screen into the left login-box tile and flushes it to the
 * supplied graphics context.
 *
 * @param gameGraphics        destination graphics context
 * @param originalLoginScreen when true, the Login/Cancel buttons of the
 *                            credentials form (state 2) are suppressed
 * @param loginScreenState    which panel to draw: 0 = welcome screen,
 *                            2 = username/password form, 3 = "create account" info
 * @param statusString        status line shown on the welcome screen
 * @param message1            first message line on the credentials form (may be empty)
 * @param message2            second message line on the credentials form
 * @param enteredUsername     username typed so far
 * @param enteredPassword     password typed so far (rendered as asterisks)
 * @param tick                frame counter; drives the blinking text cursor
 * @param focus               which input field has focus: 0 = username, 1 = password
 */
public void drawLoginScreen(
        final Graphics gameGraphics,
        final boolean originalLoginScreen,
        final int loginScreenState,
        final String statusString,
        final String message1,
        final String message2,
        final String enteredUsername,
        final String enteredPassword,
        final int tick,
        final int focus
) {
    this.loginBoxLeftBackgroundTile.initDrawingArea();
    this.titleBoxImage.draw(0, 0);
    // Logical size of the login box; all positions below are derived from it.
    final int x = 360;
    final int y = 200;
    if (loginScreenState == 0) {
        // Welcome screen: status line, title, and New User / Existing User buttons.
        int _y = y / 2 + 80;
        this.fontSmall.drawCentredTextWithPotentialShadow(statusString, x / 2, _y, 0x75A9A9, true);
        _y = y / 2 - 20;
        this.fontBold.drawCentredTextWithPotentialShadow("Welcome to RuneScape", x / 2, _y, 0xFFFF00, true);
        _y += 30;
        int _x = x / 2 - 80;
        final int __y = y / 2 + 20;
        this.titleButtonImage.draw(_x - 73, __y - 20);
        this.fontBold.drawCentredTextWithPotentialShadow("New User", _x, __y + 5, 0xFFFFFF, true);
        _x = x / 2 + 80;
        this.titleButtonImage.draw(_x - 73, __y - 20);
        this.fontBold.drawCentredTextWithPotentialShadow("Existing User", _x, __y + 5, 0xFFFFFF, true);
    }
    if (loginScreenState == 2) {
        // Credentials form: optional message lines, then the two input fields.
        int _y = y / 2 - 40;
        if (message1.length() > 0) {
            // Two message lines stacked above the form.
            this.fontBold.drawCentredTextWithPotentialShadow(message1, x / 2, _y - 15, 0xFFFF00, true);
            this.fontBold.drawCentredTextWithPotentialShadow(message2, x / 2, _y, 0xFFFF00, true);
            _y += 30;
        } else {
            // Only one message line; centre it vertically where both would sit.
            this.fontBold.drawCentredTextWithPotentialShadow(message2, x / 2, _y - 7, 0xFFFF00, true);
            _y += 30;
        }
        // "@yel@|" appends a blinking yellow cursor to the focused field;
        // tick % 40 < 20 makes it visible for half of each 40-tick cycle.
        this.fontBold.drawTextWithPotentialShadow(
            "Username: " + enteredUsername + ((focus == 0) & (tick % 40 < 20) ? "@yel@|" : ""),
            x / 2 - 90, _y, 0xFFFFFF, true);
        _y += 15;
        this.fontBold.drawTextWithPotentialShadow(
            "Password: " + TextClass.asterisksForString(enteredPassword)
                + ((focus == 1) & (tick % 40 < 20) ? "@yel@|" : ""),
            x / 2 - 88, _y, 0xFFFFFF, true);
        _y += 15;
        if (!originalLoginScreen) {
            // Login / Cancel buttons (hidden on the original-style login screen).
            int _x = x / 2 - 80;
            final int __y = y / 2 + 50;
            this.titleButtonImage.draw(_x - 73, __y - 20);
            this.fontBold.drawCentredTextWithPotentialShadow("Login", _x, __y + 5, 0xFFFFFF, true);
            _x = x / 2 + 80;
            this.titleButtonImage.draw(_x - 73, __y - 20);
            this.fontBold.drawCentredTextWithPotentialShadow("Cancel", _x, __y + 5, 0xFFFFFF, true);
        }
    }
    if (loginScreenState == 3) {
        // "Create account" info panel: static instructions plus a Cancel button.
        this.fontBold.drawCentredTextWithPotentialShadow("Create a free account", x / 2, y / 2 - 60, 0xFFFF00, true);
        int _y = y / 2 - 35;
        this.fontBold.drawCentredTextWithPotentialShadow("To create a new account you need to", x / 2, _y, 0xFFFFFF,
            true);
        _y += 15;
        this.fontBold.drawCentredTextWithPotentialShadow("go back to the main RuneScape webpage", x / 2, _y, 0xFFFFFF,
            true);
        _y += 15;
        this.fontBold.drawCentredTextWithPotentialShadow("and choose the red 'create account'", x / 2, _y, 0xFFFFFF,
            true);
        _y += 15;
        this.fontBold.drawCentredTextWithPotentialShadow("button at the top right of that page.", x / 2, _y, 0xFFFFFF,
            true);
        _y += 15;
        final int _x = x / 2;
        final int __y = y / 2 + 50;
        this.titleButtonImage.draw(_x - 73, __y - 20);
        this.fontBold.drawCentredTextWithPotentialShadow("Cancel", _x, __y + 5, 0xFFFFFF, true);
    }
    // Flush the login box tile to screen.
    this.loginBoxLeftBackgroundTile.drawGraphics(171, gameGraphics, 202);
    if (this.welcomeScreenRaised) {
        // First frame after the welcome screen is raised: repaint all the
        // surrounding background tiles once, then clear the flag.
        this.welcomeScreenRaised = false;
        this.topCentreBackgroundTile.drawGraphics(0, gameGraphics, 128);
        this.bottomCentreBackgroundTile.drawGraphics(371, gameGraphics, 202);
        this.bottomLeftBackgroundTile.drawGraphics(265, gameGraphics, 0);
        this.bottomRightBackgroundTile.drawGraphics(265, gameGraphics, 562);
        this.middleLeftBackgroundTile.drawGraphics(171, gameGraphics, 128);
        this.middleRightBackgroundTile.drawGraphics(171, gameGraphics, 562);
    }
}
}
| |
package org.mockserver.serialization.serializers.request;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.junit.Test;
import org.mockserver.serialization.ObjectMapperFactory;
import org.mockserver.serialization.model.HttpRequestDTO;
import org.mockserver.model.Cookie;
import org.mockserver.model.Header;
import org.mockserver.model.JsonSchemaBody;
import org.mockserver.model.XmlSchemaBody;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.mockserver.character.Character.NEW_LINE;
import static org.mockserver.model.HttpRequest.request;
import static org.mockserver.model.JsonBody.json;
import static org.mockserver.model.JsonPathBody.jsonPath;
import static org.mockserver.model.JsonSchemaBody.jsonSchema;
import static org.mockserver.model.Parameter.param;
import static org.mockserver.model.ParameterBody.params;
import static org.mockserver.model.RegexBody.regex;
import static org.mockserver.model.XPathBody.xpath;
import static org.mockserver.model.XmlBody.xml;
import static org.mockserver.model.XmlSchemaBody.xmlSchema;
/**
 * Verifies that {@link HttpRequestDTO} serializes to the exact pretty-printed
 * JSON expected for each supported body matcher type (plain string, JSON,
 * JSON schema, JSONPath, XML, XML schema, XPath, regex, parameters).
 * Expected output is asserted byte-for-byte, so these tests pin the
 * serializer's field ordering and indentation as well as its content.
 */
public class HttpRequestDTOSerializerTest {

    // A request with no fields set serializes to an empty JSON object.
    @Test
    public void shouldReturnFormattedRequestWithNoFieldsSet() throws JsonProcessingException {
        assertThat(ObjectMapperFactory.createObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(
            new HttpRequestDTO(
                request()
            )),
            is("{ }"));
    }

    // All simple fields set; note the serializer emits "body" last.
    @Test
    public void shouldReturnFormattedRequestWithAllFieldsSet() throws JsonProcessingException {
        assertThat(ObjectMapperFactory.createObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(
            new HttpRequestDTO(
                request()
                    .withMethod("GET")
                    .withPath("/some/path")
                    .withQueryStringParameters(param("parameterOneName", "parameterOneValue"))
                    .withBody("some_body")
                    .withHeaders(new Header("name", "value"))
                    .withCookies(new Cookie("name", "[A-Z]{0,10}"))
                    .withKeepAlive(true)
                    .withSecure(true)
            )
            ),
            is("{" + NEW_LINE +
                "  \"method\" : \"GET\"," + NEW_LINE +
                "  \"path\" : \"/some/path\"," + NEW_LINE +
                "  \"queryStringParameters\" : {" + NEW_LINE +
                "    \"parameterOneName\" : [ \"parameterOneValue\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"headers\" : {" + NEW_LINE +
                "    \"name\" : [ \"value\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"cookies\" : {" + NEW_LINE +
                "    \"name\" : \"[A-Z]{0,10}\"" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"keepAlive\" : true," + NEW_LINE +
                "  \"secure\" : true," + NEW_LINE +
                "  \"body\" : \"some_body\"" + NEW_LINE +
                "}"));
    }

    // A JSON body serializes as a nested object with type "JSON" and the
    // original JSON re-escaped into the "json" field.
    @Test
    public void shouldReturnFormattedRequestWithJsonBodyInToString() throws JsonProcessingException {
        assertThat(ObjectMapperFactory.createObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(
            new HttpRequestDTO(
                request()
                    .withMethod("GET")
                    .withPath("/some/path")
                    .withQueryStringParameters(param("parameterOneName", "parameterOneValue"))
                    .withBody(json("{ \"key\": \"some_value\" }"))
                    .withHeaders(new Header("name", "value"))
                    .withCookies(new Cookie("name", "[A-Z]{0,10}"))
            )
            ),
            is("{" + NEW_LINE +
                "  \"method\" : \"GET\"," + NEW_LINE +
                "  \"path\" : \"/some/path\"," + NEW_LINE +
                "  \"queryStringParameters\" : {" + NEW_LINE +
                "    \"parameterOneName\" : [ \"parameterOneValue\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"headers\" : {" + NEW_LINE +
                "    \"name\" : [ \"value\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"cookies\" : {" + NEW_LINE +
                "    \"name\" : \"[A-Z]{0,10}\"" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"body\" : {" + NEW_LINE +
                "    \"type\" : \"JSON\"," + NEW_LINE +
                "    \"json\" : \"{ \\\"key\\\": \\\"some_value\\\" }\"" + NEW_LINE +
                "  }" + NEW_LINE +
                "}"));
    }

    // Checks the JsonSchemaBody both standalone and embedded in a request.
    @Test
    public void shouldReturnFormattedRequestWithJsonSchemaBodyInToString() throws JsonProcessingException {
        // Standalone JsonSchemaBody serialization.
        assertThat(ObjectMapperFactory.createObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(new JsonSchemaBody("{\"type\": \"object\", \"properties\": {\"id\": {\"type\": \"integer\"}}, \"required\": [\"id\"]}")),
            is("{" + NEW_LINE +
                "  \"type\" : \"JSON_SCHEMA\"," + NEW_LINE +
                "  \"jsonSchema\" : \"{\\\"type\\\": \\\"object\\\", \\\"properties\\\": {\\\"id\\\": {\\\"type\\\": \\\"integer\\\"}}, \\\"required\\\": [\\\"id\\\"]}\"" + NEW_LINE +
                "}"));
        // Same schema embedded as a request body.
        assertThat(ObjectMapperFactory.createObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(
            new HttpRequestDTO(
                request()
                    .withMethod("GET")
                    .withPath("/some/path")
                    .withQueryStringParameters(param("parameterOneName", "parameterOneValue"))
                    .withBody(jsonSchema("{\"type\": \"object\", \"properties\": {\"id\": {\"type\": \"integer\"}}, \"required\": [\"id\"]}"))
                    .withHeaders(new Header("name", "value"))
                    .withCookies(new Cookie("name", "[A-Z]{0,10}"))
            )
            ),
            is("{" + NEW_LINE +
                "  \"method\" : \"GET\"," + NEW_LINE +
                "  \"path\" : \"/some/path\"," + NEW_LINE +
                "  \"queryStringParameters\" : {" + NEW_LINE +
                "    \"parameterOneName\" : [ \"parameterOneValue\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"headers\" : {" + NEW_LINE +
                "    \"name\" : [ \"value\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"cookies\" : {" + NEW_LINE +
                "    \"name\" : \"[A-Z]{0,10}\"" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"body\" : {" + NEW_LINE +
                "    \"type\" : \"JSON_SCHEMA\"," + NEW_LINE +
                "    \"jsonSchema\" : \"{\\\"type\\\": \\\"object\\\", \\\"properties\\\": {\\\"id\\\": {\\\"type\\\": \\\"integer\\\"}}, \\\"required\\\": [\\\"id\\\"]}\"" + NEW_LINE +
                "  }" + NEW_LINE +
                "}"));
    }

    // A JSONPath body serializes with type "JSON_PATH" and the raw expression.
    @Test
    public void shouldReturnFormattedRequestWithJsonPathBodyInToString() throws JsonProcessingException {
        assertThat(ObjectMapperFactory.createObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(
            new HttpRequestDTO(
                request()
                    .withMethod("GET")
                    .withPath("/some/path")
                    .withQueryStringParameters(param("parameterOneName", "parameterOneValue"))
                    .withBody(jsonPath("$..book[?(@.price <= $['expensive'])]"))
                    .withHeaders(new Header("name", "value"))
                    .withCookies(new Cookie("name", "[A-Z]{0,10}"))
            )
            ),
            is("{" + NEW_LINE +
                "  \"method\" : \"GET\"," + NEW_LINE +
                "  \"path\" : \"/some/path\"," + NEW_LINE +
                "  \"queryStringParameters\" : {" + NEW_LINE +
                "    \"parameterOneName\" : [ \"parameterOneValue\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"headers\" : {" + NEW_LINE +
                "    \"name\" : [ \"value\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"cookies\" : {" + NEW_LINE +
                "    \"name\" : \"[A-Z]{0,10}\"" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"body\" : {" + NEW_LINE +
                "    \"type\" : \"JSON_PATH\"," + NEW_LINE +
                "    \"jsonPath\" : \"$..book[?(@.price <= $['expensive'])]\"" + NEW_LINE +
                "  }" + NEW_LINE +
                "}")
        );
    }

    // An XML body serializes with type "XML" and the raw markup.
    @Test
    public void shouldReturnFormattedRequestWithXmlBodyInToString() throws JsonProcessingException {
        assertThat(ObjectMapperFactory.createObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(
            new HttpRequestDTO(
                request()
                    .withMethod("GET")
                    .withPath("/some/path")
                    .withQueryStringParameters(param("parameterOneName", "parameterOneValue"))
                    .withBody(xml("<some><xml></xml></some>"))
                    .withHeaders(new Header("name", "value"))
                    .withCookies(new Cookie("name", "[A-Z]{0,10}"))
            )
            ),
            is("{" + NEW_LINE +
                "  \"method\" : \"GET\"," + NEW_LINE +
                "  \"path\" : \"/some/path\"," + NEW_LINE +
                "  \"queryStringParameters\" : {" + NEW_LINE +
                "    \"parameterOneName\" : [ \"parameterOneValue\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"headers\" : {" + NEW_LINE +
                "    \"name\" : [ \"value\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"cookies\" : {" + NEW_LINE +
                "    \"name\" : \"[A-Z]{0,10}\"" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"body\" : {" + NEW_LINE +
                "    \"type\" : \"XML\"," + NEW_LINE +
                "    \"xml\" : \"<some><xml></xml></some>\"" + NEW_LINE +
                "  }" + NEW_LINE +
                "}")
        );
    }

    // Checks the XmlSchemaBody both standalone and embedded in a request.
    // NOTE(review): the "schema" payload here is JSON-shaped test data, not
    // real XSD — the serializer treats it as an opaque string either way.
    @Test
    public void shouldReturnFormattedRequestWithXmlSchemaBodyInToString() throws JsonProcessingException {
        // Standalone XmlSchemaBody serialization.
        assertThat(ObjectMapperFactory.createObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(new XmlSchemaBody("{\"type\": \"object\", \"properties\": {\"id\": {\"type\": \"integer\"}}, \"required\": [\"id\"]}")),
            is("{" + NEW_LINE +
                "  \"type\" : \"XML_SCHEMA\"," + NEW_LINE +
                "  \"xmlSchema\" : \"{\\\"type\\\": \\\"object\\\", \\\"properties\\\": {\\\"id\\\": {\\\"type\\\": \\\"integer\\\"}}, \\\"required\\\": [\\\"id\\\"]}\"" + NEW_LINE +
                "}"));
        // Same schema embedded as a request body.
        assertThat(ObjectMapperFactory.createObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(
            new HttpRequestDTO(
                request()
                    .withMethod("GET")
                    .withPath("/some/path")
                    .withQueryStringParameters(param("parameterOneName", "parameterOneValue"))
                    .withBody(xmlSchema("{\"type\": \"object\", \"properties\": {\"id\": {\"type\": \"integer\"}}, \"required\": [\"id\"]}"))
                    .withHeaders(new Header("name", "value"))
                    .withCookies(new Cookie("name", "[A-Z]{0,10}"))
            )
            ),
            is("{" + NEW_LINE +
                "  \"method\" : \"GET\"," + NEW_LINE +
                "  \"path\" : \"/some/path\"," + NEW_LINE +
                "  \"queryStringParameters\" : {" + NEW_LINE +
                "    \"parameterOneName\" : [ \"parameterOneValue\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"headers\" : {" + NEW_LINE +
                "    \"name\" : [ \"value\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"cookies\" : {" + NEW_LINE +
                "    \"name\" : \"[A-Z]{0,10}\"" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"body\" : {" + NEW_LINE +
                "    \"type\" : \"XML_SCHEMA\"," + NEW_LINE +
                "    \"xmlSchema\" : \"{\\\"type\\\": \\\"object\\\", \\\"properties\\\": {\\\"id\\\": {\\\"type\\\": \\\"integer\\\"}}, \\\"required\\\": [\\\"id\\\"]}\"" + NEW_LINE +
                "  }" + NEW_LINE +
                "}"));
    }

    // An XPath body serializes with type "XPATH" and the raw expression.
    @Test
    public void shouldReturnFormattedRequestWithXPathBodyInToString() throws JsonProcessingException {
        assertThat(ObjectMapperFactory.createObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(
            new HttpRequestDTO(
                request()
                    .withMethod("GET")
                    .withPath("/some/path")
                    .withQueryStringParameters(param("parameterOneName", "parameterOneValue"))
                    .withBody(xpath("//some/xml/path"))
                    .withHeaders(new Header("name", "value"))
                    .withCookies(new Cookie("name", "[A-Z]{0,10}"))
            )
            ),
            is("{" + NEW_LINE +
                "  \"method\" : \"GET\"," + NEW_LINE +
                "  \"path\" : \"/some/path\"," + NEW_LINE +
                "  \"queryStringParameters\" : {" + NEW_LINE +
                "    \"parameterOneName\" : [ \"parameterOneValue\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"headers\" : {" + NEW_LINE +
                "    \"name\" : [ \"value\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"cookies\" : {" + NEW_LINE +
                "    \"name\" : \"[A-Z]{0,10}\"" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"body\" : {" + NEW_LINE +
                "    \"type\" : \"XPATH\"," + NEW_LINE +
                "    \"xpath\" : \"//some/xml/path\"" + NEW_LINE +
                "  }" + NEW_LINE +
                "}")
        );
    }

    // A regex body serializes with type "REGEX" and the raw pattern.
    @Test
    public void shouldReturnFormattedRequestWithRegexBodyInToString() throws JsonProcessingException {
        assertThat(ObjectMapperFactory.createObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(
            new HttpRequestDTO(
                request()
                    .withMethod("GET")
                    .withPath("/some/path")
                    .withQueryStringParameters(param("parameterOneName", "parameterOneValue"))
                    .withBody(regex("[a-z]{1,3}"))
                    .withHeaders(new Header("name", "value"))
                    .withCookies(new Cookie("name", "[A-Z]{0,10}"))
            )
            ),
            is("{" + NEW_LINE +
                "  \"method\" : \"GET\"," + NEW_LINE +
                "  \"path\" : \"/some/path\"," + NEW_LINE +
                "  \"queryStringParameters\" : {" + NEW_LINE +
                "    \"parameterOneName\" : [ \"parameterOneValue\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"headers\" : {" + NEW_LINE +
                "    \"name\" : [ \"value\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"cookies\" : {" + NEW_LINE +
                "    \"name\" : \"[A-Z]{0,10}\"" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"body\" : {" + NEW_LINE +
                "    \"type\" : \"REGEX\"," + NEW_LINE +
                "    \"regex\" : \"[a-z]{1,3}\"" + NEW_LINE +
                "  }" + NEW_LINE +
                "}")
        );
    }

    // A parameter body serializes with type "PARAMETERS" and a map of
    // parameter name to list of values.
    @Test
    public void shouldReturnFormattedRequestWithParameterBodyInToString() throws JsonProcessingException {
        assertThat(ObjectMapperFactory.createObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(
            new HttpRequestDTO(
                request()
                    .withMethod("GET")
                    .withPath("/some/path")
                    .withQueryStringParameters(param("parameterOneName", "parameterOneValue"))
                    .withBody(params(
                        param("queryStringParameterOneName", "queryStringParameterOneValueOne", "queryStringParameterOneValueTwo"),
                        param("queryStringParameterTwoName", "queryStringParameterTwoValue")
                    ))
                    .withHeaders(new Header("name", "value"))
                    .withCookies(new Cookie("name", "[A-Z]{0,10}"))
            )
            ),
            is("{" + NEW_LINE +
                "  \"method\" : \"GET\"," + NEW_LINE +
                "  \"path\" : \"/some/path\"," + NEW_LINE +
                "  \"queryStringParameters\" : {" + NEW_LINE +
                "    \"parameterOneName\" : [ \"parameterOneValue\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"headers\" : {" + NEW_LINE +
                "    \"name\" : [ \"value\" ]" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"cookies\" : {" + NEW_LINE +
                "    \"name\" : \"[A-Z]{0,10}\"" + NEW_LINE +
                "  }," + NEW_LINE +
                "  \"body\" : {" + NEW_LINE +
                "    \"type\" : \"PARAMETERS\"," + NEW_LINE +
                "    \"parameters\" : {" + NEW_LINE +
                "      \"queryStringParameterOneName\" : [ \"queryStringParameterOneValueOne\", \"queryStringParameterOneValueTwo\" ]," + NEW_LINE +
                "      \"queryStringParameterTwoName\" : [ \"queryStringParameterTwoValue\" ]" + NEW_LINE +
                "    }" + NEW_LINE +
                "  }" + NEW_LINE +
                "}")
        );
    }
}
| |
package desmoj.extensions.visualization2d.engine.model;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Point;
import desmoj.extensions.visualization2d.engine.modelGrafic.BackgroundElementGrafic;
import desmoj.extensions.visualization2d.engine.modelGrafic.Grafic;
/**
* The class describe background elements to give a structure of animation.
* A background element is a rectangle which includes optional text.
* The text position in the rectangle and text size and style are
* determined in the constructor of this class.
* With a createGrafic Method a BackgroundElementGrafic class is created.
* There are mainly 2 types:
* One with dynamic size has fixed corner points of its rectangle. By zooming
* its showed size is changed.
* And one with static size. This has a fixed middle point and fixed showed size.
* By zooming its showed size is not changed.
*
* When text is null, a rectangle without text is build.
* When background is null, the background of this element is transparent.
* When name is set, the element has a border with its name.
* With level is determined which element is located in front of an other.
*
* Elements with a low level are painted in front of elements with a high level.
* All Background elements are located in a background layer.
*
* See also in BackgroundElementGrafic.
*
* @version DESMO-J, Ver. 2.4.1 copyright (c) 2014
* @author christian.mueller@th-wildau.de
* For information about subproject: desmoj.extensions.visualization2d
* please have a look at:
* http://www.th-wildau.de/cmueller/Desmo-J/Visualization2d/
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
public class BackgroundElement implements Basic {

    // Anchor positions for the text inside the element's rectangle.
    public static final int TEXT_POSITION_TopLeft = 0;
    public static final int TEXT_POSITION_TopMiddle = 1;
    public static final int TEXT_POSITION_TopRight = 2;
    public static final int TEXT_POSITION_MiddleLeft = 3;
    public static final int TEXT_POSITION_Middle = 4;
    public static final int TEXT_POSITION_MiddleRight = 5;
    public static final int TEXT_POSITION_BottomLeft = 6;
    public static final int TEXT_POSITION_BottomMiddle = 7;
    public static final int TEXT_POSITION_BottomRight = 8;

    // Text styles.
    public static final int TEXT_Style_Plain = 0;
    public static final int TEXT_Style_Bold = 1;
    public static final int TEXT_Style_Italic = 2;

    // Text sizes.
    public static final int TEXT_Size_Normal = 10;
    public static final int TEXT_Size_Small = 8;
    public static final int TEXT_Size_Big = 20;

    private String id;
    private String name;          // optional; when set the element gets a border with its name
    private double level;         // Zeichenreihenfolge: low level paints in front of high level
    private String text;          // text shown in the rectangle; null means no text
    private String imageId;       // background image key; null when no image to paint
    private int textPosition;     // one of TEXT_POSITION_*
    private int textSize;         // one of TEXT_Size_*
    private int textStyle;        // one of TEXT_Style_*
    private Grafic grafic;        // created lazily by one of the createGrafic(...) methods
    private Model model;

    /**
     * Background element to paint a string; the text is anchored in the middle.
     *
     * @param model     used animation.model.Model
     * @param id        id of background element; when null the element is not
     *                  registered in the model
     * @param text      text to show. When null, no text is shown.
     * @param textSize  for values look at TEXT_Size_...
     * @param textStyle for values look at TEXT_Style_...
     * @param level     elements with low level are painted in front
     *                  of elements with high level
     * @param imageId   image for background, maybe null
     * @throws ModelException when imageId is set but unknown to the model
     */
    public BackgroundElement(Model model, String id, String text,
            int textSize, int textStyle, double level, String imageId) {
        // BUGFIX: this constructor previously ignored its textSize/textStyle
        // parameters and always assigned the defaults; delegate to the full
        // constructor so the documented parameters are honored.
        this(model, id, text, BackgroundElement.TEXT_POSITION_Middle,
                textSize, textStyle, level, imageId);
    }

    /**
     * Background element with all features.
     *
     * @param model        used animation.model.Model
     * @param id           id of background element; when null the element is not
     *                     registered in the model
     * @param text         text to show. When null, no text is shown.
     * @param textPosition for values look at TEXT_POSITION_...
     * @param textSize     for values look at TEXT_Size_...
     * @param textStyle    for values look at TEXT_Style_...
     * @param level        elements with low level are painted in front
     *                     of elements with high level
     * @param imageId      image for background, maybe null
     * @throws ModelException when imageId is set but unknown to the model
     */
    public BackgroundElement(Model model, String id, String text,
            int textPosition, int textSize, int textStyle, double level, String imageId) {
        this.model = model;
        this.id = id;
        this.name = null;
        this.level = level;
        this.text = text;
        this.imageId = imageId;
        this.textPosition = textPosition;
        this.textSize = textSize;
        this.textStyle = textStyle;
        this.grafic = null;
        // Validate before registering so the model never holds an element
        // that references an unknown image.
        checkImageId(imageId);
        if (this.id != null) model.getBackgroundElements().add(this);
    }

    /**
     * Rejects image ids that are unknown to the model.
     *
     * @param imageId image key to check; null is always accepted
     * @throws ModelException when imageId is set but unknown
     */
    private void checkImageId(String imageId) {
        if (imageId != null && !this.model.containsImageId(imageId)) {
            throw new ModelException("In BackgroundElement id: " + id +
                " imageId: " + imageId + " is unknown.");
        }
    }

    /** Get the model this element belongs to. */
    public Model getModel() {
        return this.model;
    }

    /**
     * get id of background element
     */
    public String getId() {
        return this.id;
    }

    /**
     * set name of background element
     * @param name
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * get name of background element
     */
    public String getName() {
        return this.name;
    }

    /**
     * Get level of background element.
     * Elements with low level are painted in front
     * of elements with high level
     * @return paint-order level
     */
    public double getLevel() {
        return this.level;
    }

    /**
     * Get text of background element.
     * @return text, may be null
     */
    public String getText() {
        return this.text;
    }

    /**
     * Get ImageId, may be null.
     * This id is a key in this.getModel().getImage(id)
     * @return image key, may be null
     */
    public String getImageId() {
        return this.imageId;
    }

    /**
     * Get text position of background element.
     * for values look at TEXT_POSITION_...
     * @return text anchor position
     */
    public int getTextPosition() {
        return this.textPosition;
    }

    /**
     * Get text size of background element.
     * for values look at TEXT_Size_...
     * @return text size
     */
    public int getTextSize() {
        return this.textSize;
    }

    /**
     * Get text style of background element.
     * for values look at TEXT_Style_...
     * @return text style
     */
    public int getTextStyle() {
        return this.textStyle;
    }

    /**
     * Updates and repaints text properties.
     *
     * @param text         new text, may be null
     * @param textPosition one of TEXT_POSITION_*
     * @param textSize     one of TEXT_Size_*
     * @param textStyle    one of TEXT_Style_*
     * @param foreground   new text color
     * @param background   new background color, null for transparent
     * @param imageId      new background image key, may be null
     * @throws ModelException when imageId is set but unknown to the model
     */
    public void setData(String text, int textPosition, int textSize,
            int textStyle, Color foreground, Color background, String imageId) {
        // Validate first so a bad imageId leaves the element unchanged.
        checkImageId(imageId);
        this.text = text;
        this.textPosition = textPosition;
        this.textSize = textSize;
        this.textStyle = textStyle;
        this.imageId = imageId;
        if (this.getGrafic() != null)
            ((BackgroundElementGrafic) this.getGrafic()).update(foreground, background);
    }

    /**
     * create grafic of background element with fixed corners;
     * its shown size changes when zooming
     *
     * @param viewId      id of the view to paint into
     * @param topLeft     top left corner
     * @param bottomRight bottom right corner
     * @param fg          text color
     * @param bg          background color. When null, the background is transparent
     * @return the created grafic
     */
    public Grafic createGrafic(String viewId, Point topLeft, Point bottomRight, Color fg, Color bg) {
        this.grafic = new BackgroundElementGrafic(this, viewId, topLeft, bottomRight,
            null, null, fg, bg);
        return this.grafic;
    }

    /**
     * create grafic of background element with fixed middle point and fixed size;
     * its shown size does not change when zooming
     *
     * @param viewId id of the view to paint into
     * @param middle middle point
     * @param fg     text color
     * @param bg     background color. When null, the background is transparent
     * @param size   shown width and height of the element
     * @return the created grafic
     */
    public Grafic createGrafic(String viewId, Point middle, Color fg, Color bg, Dimension size) {
        this.grafic = new BackgroundElementGrafic(this, viewId, null, null, middle,
            size, fg, bg);
        return this.grafic;
    }

    /**
     * create grafic of background element with fixed middle point.
     * The size is the size that the text needs to display.
     *
     * @param viewId id of the view to paint into
     * @param middle middle point
     * @param fg     text color
     * @param bg     background color. When null, the background is transparent
     * @return the created grafic
     */
    public Grafic createGrafic(String viewId, Point middle, Color fg, Color bg) {
        this.grafic = new BackgroundElementGrafic(this, viewId, null, null, middle,
            null, fg, bg);
        return this.grafic;
    }

    /**
     * get grafic, created before by one of the createGrafic(...) methods
     */
    public Grafic getGrafic() {
        return grafic;
    }
}
| |
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.provider.quarkus;
import static org.junit.Assert.assertEquals;
import static org.keycloak.quarkus.runtime.Environment.CLI_ARGS;
import java.io.File;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import io.quarkus.hibernate.orm.runtime.dialect.QuarkusH2Dialect;
import io.quarkus.runtime.LaunchMode;
import io.smallrye.config.SmallRyeConfig;
import org.eclipse.microprofile.config.ConfigProvider;
import org.eclipse.microprofile.config.spi.ConfigProviderResolver;
import org.hibernate.dialect.MariaDBDialect;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import org.keycloak.Config;
import org.keycloak.quarkus.runtime.configuration.KeycloakConfigSourceProvider;
import org.keycloak.quarkus.runtime.configuration.MicroProfileConfigProvider;
import io.quarkus.runtime.configuration.ConfigUtils;
import io.smallrye.config.SmallRyeConfigProviderResolver;
import org.keycloak.quarkus.runtime.Environment;
public class ConfigurationTest {
// Snapshot of the JVM system properties taken at class load; onAfter() removes
// any property a test added that is not in this snapshot.
private static final Properties SYSTEM_PROPERTIES = (Properties) System.getProperties().clone();
// Snapshot of the process environment at class load; onAfter() removes any
// variable a test injected via putEnvVar that is not in this snapshot.
private static final Map<String, String> ENVIRONMENT_VARIABLES = new HashMap<>(System.getenv());
// Separator used when packing multiple CLI arguments into the CLI_ARGS system property.
private static final String ARG_SEPARATOR = ";;";
@SuppressWarnings("unchecked")
public static void putEnvVar(String name, String value) {
    // Tests cannot set real environment variables, so this reaches into the
    // private backing map ("m") of the unmodifiable System.getenv() view via
    // reflection and inserts the entry there.
    Map<String, String> env = System.getenv();
    Field backingField = null;
    try {
        backingField = env.getClass().getDeclaredField("m");
        backingField.setAccessible(true);
        Map<String, String> writable = (Map<String, String>) backingField.get(env);
        writable.put(name, value);
    } catch (Exception cause) {
        throw new RuntimeException("Failed to update environment variables", cause);
    } finally {
        // Restore the field's accessibility flag regardless of outcome.
        if (backingField != null) {
            backingField.setAccessible(false);
        }
    }
}
@SuppressWarnings("unchecked")
public static void removeEnvVar(String name) {
    // Counterpart of putEnvVar: removes an entry from the private backing map
    // ("m") of the unmodifiable System.getenv() view via reflection.
    Map<String, String> env = System.getenv();
    Field backingField = null;
    try {
        backingField = env.getClass().getDeclaredField("m");
        backingField.setAccessible(true);
        Map<String, String> writable = (Map<String, String>) backingField.get(env);
        writable.remove(name);
    } catch (Exception cause) {
        throw new RuntimeException("Failed to update environment variables", cause);
    } finally {
        // Restore the field's accessibility flag regardless of outcome.
        if (backingField != null) {
            backingField.setAccessible(false);
        }
    }
}
@After
public void onAfter() {
    // Drop every system property that was not present in the class-load
    // snapshot (stringPropertyNames() returns a fresh set, so removal is safe).
    Properties current = System.getProperties();
    for (String propertyName : current.stringPropertyNames()) {
        if (!SYSTEM_PROPERTIES.containsKey(propertyName)) {
            current.remove(propertyName);
        }
    }
    // Likewise drop every environment variable injected during the test.
    for (String variableName : new HashMap<>(System.getenv()).keySet()) {
        if (!ENVIRONMENT_VARIABLES.containsKey(variableName)) {
            removeEnvVar(variableName);
        }
    }
    // Release the cached config so the next test builds a fresh one.
    ((SmallRyeConfigProviderResolver) ConfigProviderResolver.instance()).releaseConfig(ConfigProvider.getConfig());
}
@Test
public void testCamelCase() {
    // Camel-cased SPI scope/property names map to underscore-separated
    // segments in the KC_SPI_* environment variable.
    putEnvVar("KC_SPI_CAMEL_CASE_SCOPE_CAMEL_CASE_PROP", "foobar");
    initConfig();
    String value = Config.scope("camelCaseScope").get("camelCaseProp");
    // BUGFIX: JUnit's assertEquals contract is (expected, actual); the
    // arguments were reversed, which yields a misleading failure message.
    // Every other test in this class uses the correct order.
    assertEquals("foobar", value);
}
@Test
public void testEnvVarPriorityOverPropertiesFile() {
    // An environment variable must override the value from the properties file.
    putEnvVar("KC_SPI_HOSTNAME_DEFAULT_FRONTEND_URL", "http://envvar.unittest");
    String frontendUrl = initConfig("hostname", "default").get("frontendUrl");
    assertEquals("http://envvar.unittest", frontendUrl);
}
@Test
public void testSysPropPriorityOverEnvVar() {
    // A system property must win over an environment variable for the same key.
    putEnvVar("KC_SPI_HOSTNAME_DEFAULT_FRONTEND_URL", "http://envvar.unittest");
    System.setProperty("kc.spi.hostname.default.frontend-url", "http://propvar.unittest");
    String frontendUrl = initConfig("hostname", "default").get("frontendUrl");
    assertEquals("http://propvar.unittest", frontendUrl);
}
@Test
public void testCLIPriorityOverSysProp() {
    // A CLI argument must win over a system property for the same key.
    System.setProperty("kc.spi.hostname.default.frontend-url", "http://propvar.unittest");
    System.setProperty(CLI_ARGS, "--spi-hostname-default-frontend-url=http://cli.unittest");
    String frontendUrl = initConfig("hostname", "default").get("frontendUrl");
    assertEquals("http://cli.unittest", frontendUrl);
}
@Test
public void testDefaultValueFromProperty() {
    // The legacy "keycloak.frontendUrl" property supplies the default value.
    System.setProperty("keycloak.frontendUrl", "http://defaultvalueprop.unittest");
    String frontendUrl = initConfig("hostname", "default").get("frontendUrl");
    assertEquals("http://defaultvalueprop.unittest", frontendUrl);
}
@Test
public void testDefaultValue() {
    // With no overrides at all, the value comes from the bundled properties file.
    String frontendUrl = initConfig("hostname", "default").get("frontendUrl");
    assertEquals("http://filepropdefault.unittest", frontendUrl);
}
@Test
public void testKeycloakProfilePropertySubstitution() {
    // Selecting a Keycloak profile picks up the profile-specific property value.
    System.setProperty(Environment.PROFILE, "user-profile");
    String frontendUrl = initConfig("hostname", "default").get("frontendUrl");
    assertEquals("http://filepropprofile.unittest", frontendUrl);
}
@Test
public void testQuarkusProfilePropertyStillWorks() {
    // The plain Quarkus profile property selects profile-specific values too.
    System.setProperty("quarkus.profile", "user-profile");
    String frontendUrl = initConfig("hostname", "default").get("frontendUrl");
    assertEquals("http://filepropprofile.unittest", frontendUrl);
}
@Test
public void testCommandLineArguments() {
    // Multiple CLI arguments are packed into CLI_ARGS separated by ARG_SEPARATOR;
    // the SPI option among them must still be resolved.
    String packedArgs = "--spi-hostname-default-frontend-url=http://fromargs.unittest" + ARG_SEPARATOR + "--no-ssl";
    System.setProperty(CLI_ARGS, packedArgs);
    assertEquals("http://fromargs.unittest", initConfig("hostname", "default").get("frontendUrl"));
}
@Test
public void testSpiConfigurationUsingCommandLineArguments() {
    // Simple single-word SPI name.
    System.setProperty(CLI_ARGS, "--spi-hostname-default-frontend-url=http://spifull.unittest");
    assertEquals("http://spifull.unittest", initConfig("hostname", "default").get("frontendUrl"));
    // Multi-word SPI names resolve whether looked up with dashed names...
    System.setProperty(CLI_ARGS, "--spi-action-token-handler-verify-email-some-property=test");
    assertEquals("test", initConfig("action-token-handler", "verify-email").get("some-property"));
    // ...or with the equivalent camel-cased names.
    System.setProperty(CLI_ARGS, "--spi-action-token-handler-verify-email-some-property=test");
    assertEquals("test", initConfig("actionTokenHandler", "verifyEmail").get("someProperty"));
    // Multi-word SPI names work for provider-scoped options as well.
    System.setProperty(CLI_ARGS, "--spi-client-registration-openid-connect-static-jwk-url=http://c.jwk.url");
    assertEquals("http://c.jwk.url", initConfig("client-registration", "openid-connect").get("static-jwk-url"));
}
@Test
public void testPropertyMapping() {
    // --db/--db-url map onto the Quarkus Hibernate dialect and datasource URL.
    System.setProperty(CLI_ARGS, "--db=mariadb" + ARG_SEPARATOR + "--db-url=jdbc:mariadb://localhost/keycloak");
    SmallRyeConfig config = createConfig();
    String dialect = config.getConfigValue("quarkus.hibernate-orm.dialect").getValue();
    String jdbcUrl = config.getConfigValue("quarkus.datasource.jdbc.url").getValue();
    assertEquals(MariaDBDialect.class.getName(), dialect);
    assertEquals("jdbc:mariadb://localhost/keycloak", jdbcUrl);
}
@Test
public void testDatabaseUrlProperties() {
    // A complex JDBC URL (aurora scheme, query parameters) must pass through untouched.
    System.setProperty(CLI_ARGS, "--db=mariadb" + ARG_SEPARATOR + "--db-url=jdbc:mariadb:aurora://foo/bar?a=1&b=2");
    SmallRyeConfig config = createConfig();
    String dialect = config.getConfigValue("quarkus.hibernate-orm.dialect").getValue();
    String jdbcUrl = config.getConfigValue("quarkus.datasource.jdbc.url").getValue();
    assertEquals(MariaDBDialect.class.getName(), dialect);
    assertEquals("jdbc:mariadb:aurora://foo/bar?a=1&b=2", jdbcUrl);
}
@Test
public void testDatabaseDefaults() {
    // h2-file: file-backed H2 with AUTO_SERVER enabled by default.
    System.setProperty(CLI_ARGS, "--db=h2-file");
    SmallRyeConfig config = createConfig();
    assertEquals(QuarkusH2Dialect.class.getName(), config.getConfigValue("quarkus.hibernate-orm.dialect").getValue());
    assertEquals("jdbc:h2:file:~/data/keycloakdb;;AUTO_SERVER=TRUE", config.getConfigValue("quarkus.datasource.jdbc.url").getValue());
    // h2-mem: in-memory H2 with the same dialect and the "h2" db-kind.
    System.setProperty(CLI_ARGS, "--db=h2-mem");
    config = createConfig();
    assertEquals(QuarkusH2Dialect.class.getName(), config.getConfigValue("quarkus.hibernate-orm.dialect").getValue());
    assertEquals("jdbc:h2:mem:keycloakdb", config.getConfigValue("quarkus.datasource.jdbc.url").getValue());
    assertEquals("h2", config.getConfigValue("quarkus.datasource.db-kind").getValue());
}
/**
 * The versioned vendor alias ({@code postgres-10}) resolves to the matching
 * Hibernate dialect while the plain vendor name becomes the Quarkus db-kind.
 */
@Test
public void testDatabaseKindProperties() {
    System.setProperty(CLI_ARGS, "--db=postgres-10" + ARG_SEPARATOR + "--db-url=jdbc:postgresql://localhost/keycloak");
    final SmallRyeConfig config = createConfig();
    final String dialect = config.getConfigValue("quarkus.hibernate-orm.dialect").getValue();
    final String jdbcUrl = config.getConfigValue("quarkus.datasource.jdbc.url").getValue();
    final String dbKind = config.getConfigValue("quarkus.datasource.db-kind").getValue();
    assertEquals("io.quarkus.hibernate.orm.runtime.dialect.QuarkusPostgreSQL10Dialect", dialect);
    assertEquals("jdbc:postgresql://localhost/keycloak", jdbcUrl);
    assertEquals("postgresql", dbKind);
}
/**
 * {@code kc.db.url.*} system properties customize individual pieces (path,
 * trailing URL properties) of the default JDBC URL templates.
 */
@Test
public void testDatabaseProperties() {
    System.setProperty("kc.db.url.properties", ";;test=test;test1=test1");
    System.setProperty("kc.db.url.path", "test-dir");
    System.setProperty(CLI_ARGS, "--db=h2-file");
    final SmallRyeConfig h2Config = createConfig();
    assertEquals(QuarkusH2Dialect.class.getName(), h2Config.getConfigValue("quarkus.hibernate-orm.dialect").getValue());
    final String expectedH2Url =
            "jdbc:h2:file:test-dir" + File.separator + "data" + File.separator + "keycloakdb;;test=test;test1=test1";
    assertEquals(expectedH2Url, h2Config.getConfigValue("quarkus.datasource.jdbc.url").getValue());

    // Same mechanism with query-style URL properties on MariaDB.
    System.setProperty("kc.db.url.properties", "?test=test&test1=test1");
    System.setProperty(CLI_ARGS, "--db=mariadb");
    final SmallRyeConfig mariadbConfig = createConfig();
    assertEquals("jdbc:mariadb://localhost/keycloak?test=test&test1=test1", mariadbConfig.getConfigValue("quarkus.datasource.jdbc.url").getValue());
}
// KEYCLOAK-15632
/**
 * Property values may reference other {@code kc.*} properties; nested
 * references are resolved recursively and fall back to defaults when the
 * referenced property is unset.
 */
@Test
public void testNestedDatabaseProperties() {
    System.setProperty("kc.home.dir", "/tmp/kc/bin/../");
    SmallRyeConfig config = createConfig();
    assertEquals("jdbc:h2:file:/tmp/kc/bin/..//data/keycloakdb", config.getConfigValue("quarkus.datasource.foo").getValue());

    // No kc.prop* set yet: the default suffix chain is used.
    assertEquals("foo-def-suffix", config.getConfigValue("quarkus.datasource.bar").getValue());

    System.setProperty("kc.prop5", "val5");
    config = createConfig();
    assertEquals("foo-val5-suffix", config.getConfigValue("quarkus.datasource.bar").getValue());

    System.setProperty("kc.prop4", "val4");
    config = createConfig();
    assertEquals("foo-val4", config.getConfigValue("quarkus.datasource.bar").getValue());

    System.setProperty("kc.prop3", "val3");
    config = createConfig();
    assertEquals("foo-val3", config.getConfigValue("quarkus.datasource.bar").getValue());
}
/**
 * Cluster configuration resolution: the default config file, an explicit
 * {@code --cluster} override (honored regardless of the active profile), and
 * the {@code --cluster-stack} option.
 */
@Test
public void testClusterConfig() {
    // Cluster enabled by default, but disabled for the "dev" profile
    assertEquals("cluster-default.xml", initConfig("connectionsInfinispan", "quarkus").get("configFile"));

    // If explicitly set, then it is always used regardless of the profile
    System.clearProperty(Environment.PROFILE);
    System.setProperty(CLI_ARGS, "--cluster=foo");
    assertEquals("cluster-foo.xml", initConfig("connectionsInfinispan", "quarkus").get("configFile"));
    System.setProperty(Environment.PROFILE, "dev");
    assertEquals("cluster-foo.xml", initConfig("connectionsInfinispan", "quarkus").get("configFile"));

    System.setProperty(CLI_ARGS, "--cluster-stack=foo");
    assertEquals("foo", initConfig("connectionsInfinispan", "quarkus").get("stack"));
}
/**
 * Argument values containing commas (and quoted segments) are kept intact and
 * are not mistaken for additional CLI arguments.
 */
@Test
public void testCommaSeparatedArgValues() {
    System.setProperty(CLI_ARGS, "--spi-client-jpa-searchable-attributes=bar,foo");
    assertEquals("bar,foo", initConfig("client-jpa").get("searchable-attributes"));

    System.setProperty(CLI_ARGS, "--spi-client-jpa-searchable-attributes=bar,foo,foo bar");
    assertEquals("bar,foo,foo bar", initConfig("client-jpa").get("searchable-attributes"));

    System.setProperty(CLI_ARGS, "--spi-client-jpa-searchable-attributes=bar,foo, \"foo bar\"");
    assertEquals("bar,foo, \"foo bar\"", initConfig("client-jpa").get("searchable-attributes"));

    // A comma-containing value followed by a second, unrelated argument.
    System.setProperty(CLI_ARGS, "--spi-client-jpa-searchable-attributes=bar,foo, \"foo bar\"" + ARG_SEPARATOR + "--spi-hostname-default-frontend-url=http://foo.unittest");
    assertEquals("http://foo.unittest", initConfig("hostname-default").get("frontend-url"));
}
/**
 * Re-initializes the Keycloak {@code Config} facade from a freshly built
 * SmallRye config and returns the scope addressed by the given path segments.
 */
private Config.Scope initConfig(String... scope) {
Config.init(new MicroProfileConfigProvider(createConfig()));
return Config.scope(scope);
}
/**
 * Reloads the Keycloak config sources and builds a fresh SmallRye config,
 * presumably so that system-property changes made by the test (e.g. CLI_ARGS)
 * become visible — confirm against KeycloakConfigSourceProvider.
 */
private SmallRyeConfig createConfig() {
KeycloakConfigSourceProvider.reload();
return ConfigUtils.configBuilder(true, LaunchMode.NORMAL).build();
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/talent/v4beta1/common.proto
package com.google.cloud.talent.v4beta1;
/**
 * Generated descriptor holder for {@code google/cloud/talent/v4beta1/common.proto}.
 * <p>
 * Exposes the proto file's {@link com.google.protobuf.Descriptors.FileDescriptor}
 * plus one message descriptor / field-accessor-table pair per message type, for
 * use by the generated message classes in this package.
 * <p>
 * NOTE(review): this class is emitted by the protocol buffer compiler ("DO NOT
 * EDIT") — change the .proto source and regenerate rather than editing by hand.
 */
public final class CommonProto {
// Static holder only; never instantiated.
private CommonProto() {}
// common.proto defines no extensions, so registration is effectively a no-op.
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
}
// One Descriptor + FieldAccessorTable pair per message type declared in
// common.proto; all are assigned in the static initializer below.
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_talent_v4beta1_TimestampRange_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_talent_v4beta1_TimestampRange_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_talent_v4beta1_Location_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_talent_v4beta1_Location_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_talent_v4beta1_RequestMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_talent_v4beta1_RequestMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_talent_v4beta1_ResponseMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_talent_v4beta1_ResponseMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_talent_v4beta1_DeviceInfo_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_talent_v4beta1_DeviceInfo_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_talent_v4beta1_CustomAttribute_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_talent_v4beta1_CustomAttribute_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_talent_v4beta1_SpellingCorrection_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_talent_v4beta1_SpellingCorrection_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_talent_v4beta1_CompensationInfo_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_talent_v4beta1_CompensationInfo_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_talent_v4beta1_CompensationInfo_CompensationEntry_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_talent_v4beta1_CompensationInfo_CompensationEntry_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_talent_v4beta1_CompensationInfo_CompensationRange_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_talent_v4beta1_CompensationInfo_CompensationRange_fieldAccessorTable;
/** @return the file descriptor for common.proto (built in the static initializer). */
public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
static {
// Serialized FileDescriptorProto for common.proto, split across several
// string constants.
java.lang.String[] descriptorData = {
"\n(google/cloud/talent/v4beta1/common.pro"
+ "to\022\033google.cloud.talent.v4beta1\032\034google/"
+ "api/annotations.proto\032\037google/protobuf/t"
+ "imestamp.proto\032\036google/protobuf/wrappers"
+ ".proto\032\030google/type/latlng.proto\032\027google"
+ "/type/money.proto\032 google/type/postal_ad"
+ "dress.proto\"n\n\016TimestampRange\022.\n\nstart_t"
+ "ime\030\001 \001(\0132\032.google.protobuf.Timestamp\022,\n"
+ "\010end_time\030\002 \001(\0132\032.google.protobuf.Timest"
+ "amp\"\267\003\n\010Location\022I\n\rlocation_type\030\001 \001(\0162"
+ "2.google.cloud.talent.v4beta1.Location.L"
+ "ocationType\0222\n\016postal_address\030\002 \001(\0132\032.go"
+ "ogle.type.PostalAddress\022$\n\007lat_lng\030\003 \001(\013"
+ "2\023.google.type.LatLng\022\024\n\014radius_miles\030\004 "
+ "\001(\001\"\357\001\n\014LocationType\022\035\n\031LOCATION_TYPE_UN"
+ "SPECIFIED\020\000\022\013\n\007COUNTRY\020\001\022\027\n\023ADMINISTRATI"
+ "VE_AREA\020\002\022\033\n\027SUB_ADMINISTRATIVE_AREA\020\003\022\014"
+ "\n\010LOCALITY\020\004\022\017\n\013POSTAL_CODE\020\005\022\020\n\014SUB_LOC"
+ "ALITY\020\006\022\022\n\016SUB_LOCALITY_1\020\007\022\022\n\016SUB_LOCAL"
+ "ITY_2\020\010\022\020\n\014NEIGHBORHOOD\020\t\022\022\n\016STREET_ADDR"
+ "ESS\020\n\"\237\001\n\017RequestMetadata\022\016\n\006domain\030\001 \001("
+ "\t\022\022\n\nsession_id\030\002 \001(\t\022\017\n\007user_id\030\003 \001(\t\022\031"
+ "\n\021allow_missing_ids\030\004 \001(\010\022<\n\013device_info"
+ "\030\005 \001(\0132\'.google.cloud.talent.v4beta1.Dev"
+ "iceInfo\"&\n\020ResponseMetadata\022\022\n\nrequest_i"
+ "d\030\001 \001(\t\"\317\001\n\nDeviceInfo\022G\n\013device_type\030\001 "
+ "\001(\01622.google.cloud.talent.v4beta1.Device"
+ "Info.DeviceType\022\n\n\002id\030\002 \001(\t\"l\n\nDeviceTyp"
+ "e\022\033\n\027DEVICE_TYPE_UNSPECIFIED\020\000\022\007\n\003WEB\020\001\022"
+ "\016\n\nMOBILE_WEB\020\002\022\013\n\007ANDROID\020\003\022\007\n\003IOS\020\004\022\007\n"
+ "\003BOT\020\005\022\t\n\005OTHER\020\006\"Q\n\017CustomAttribute\022\025\n\r"
+ "string_values\030\001 \003(\t\022\023\n\013long_values\030\002 \003(\003"
+ "\022\022\n\nfilterable\030\003 \001(\010\"W\n\022SpellingCorrecti"
+ "on\022\021\n\tcorrected\030\001 \001(\010\022\026\n\016corrected_text\030"
+ "\002 \001(\t\022\026\n\016corrected_html\030\003 \001(\t\"\234\t\n\020Compen"
+ "sationInfo\022P\n\007entries\030\001 \003(\0132?.google.clo"
+ "ud.talent.v4beta1.CompensationInfo.Compe"
+ "nsationEntry\022k\n\"annualized_base_compensa"
+ "tion_range\030\002 \001(\0132?.google.cloud.talent.v"
+ "4beta1.CompensationInfo.CompensationRang"
+ "e\022l\n#annualized_total_compensation_range"
+ "\030\003 \001(\0132?.google.cloud.talent.v4beta1.Com"
+ "pensationInfo.CompensationRange\032\222\003\n\021Comp"
+ "ensationEntry\022L\n\004type\030\001 \001(\0162>.google.clo"
+ "ud.talent.v4beta1.CompensationInfo.Compe"
+ "nsationType\022L\n\004unit\030\002 \001(\0162>.google.cloud"
+ ".talent.v4beta1.CompensationInfo.Compens"
+ "ationUnit\022$\n\006amount\030\003 \001(\0132\022.google.type."
+ "MoneyH\000\022P\n\005range\030\004 \001(\0132?.google.cloud.ta"
+ "lent.v4beta1.CompensationInfo.Compensati"
+ "onRangeH\000\022\023\n\013description\030\005 \001(\t\022=\n\027expect"
+ "ed_units_per_year\030\006 \001(\0132\034.google.protobu"
+ "f.DoubleValueB\025\n\023compensation_amount\032o\n\021"
+ "CompensationRange\022,\n\020max_compensation\030\002 "
+ "\001(\0132\022.google.type.Money\022,\n\020min_compensat"
+ "ion\030\001 \001(\0132\022.google.type.Money\"\265\001\n\020Compen"
+ "sationType\022!\n\035COMPENSATION_TYPE_UNSPECIF"
+ "IED\020\000\022\010\n\004BASE\020\001\022\t\n\005BONUS\020\002\022\021\n\rSIGNING_BO"
+ "NUS\020\003\022\n\n\006EQUITY\020\004\022\022\n\016PROFIT_SHARING\020\005\022\017\n"
+ "\013COMMISSIONS\020\006\022\010\n\004TIPS\020\007\022\033\n\027OTHER_COMPEN"
+ "SATION_TYPE\020\010\"\234\001\n\020CompensationUnit\022!\n\035CO"
+ "MPENSATION_UNIT_UNSPECIFIED\020\000\022\n\n\006HOURLY\020"
+ "\001\022\t\n\005DAILY\020\002\022\n\n\006WEEKLY\020\003\022\013\n\007MONTHLY\020\004\022\n\n"
+ "\006YEARLY\020\005\022\014\n\010ONE_TIME\020\006\022\033\n\027OTHER_COMPENS"
+ "ATION_UNIT\020\007*y\n\013CompanySize\022\034\n\030COMPANY_S"
+ "IZE_UNSPECIFIED\020\000\022\010\n\004MINI\020\001\022\t\n\005SMALL\020\002\022\013"
+ "\n\007SMEDIUM\020\003\022\n\n\006MEDIUM\020\004\022\007\n\003BIG\020\005\022\n\n\006BIGG"
+ "ER\020\006\022\t\n\005GIANT\020\007*\342\001\n\nJobBenefit\022\033\n\027JOB_BE"
+ "NEFIT_UNSPECIFIED\020\000\022\016\n\nCHILD_CARE\020\001\022\n\n\006D"
+ "ENTAL\020\002\022\024\n\020DOMESTIC_PARTNER\020\003\022\022\n\016FLEXIBL"
+ "E_HOURS\020\004\022\013\n\007MEDICAL\020\005\022\022\n\016LIFE_INSURANCE"
+ "\020\006\022\022\n\016PARENTAL_LEAVE\020\007\022\023\n\017RETIREMENT_PLA"
+ "N\020\010\022\r\n\tSICK_DAYS\020\t\022\014\n\010VACATION\020\n\022\n\n\006VISI"
+ "ON\020\013*\216\002\n\nDegreeType\022\033\n\027DEGREE_TYPE_UNSPE"
+ "CIFIED\020\000\022\025\n\021PRIMARY_EDUCATION\020\001\022\035\n\031LOWER"
+ "_SECONDARY_EDUCATION\020\002\022\035\n\031UPPER_SECONDAR"
+ "Y_EDUCATION\020\003\022\034\n\030ADULT_REMEDIAL_EDUCATIO"
+ "N\020\004\022\034\n\030ASSOCIATES_OR_EQUIVALENT\020\005\022\033\n\027BAC"
+ "HELORS_OR_EQUIVALENT\020\006\022\031\n\025MASTERS_OR_EQU"
+ "IVALENT\020\007\022\032\n\026DOCTORAL_OR_EQUIVALENT\020\010*\334\001"
+ "\n\016EmploymentType\022\037\n\033EMPLOYMENT_TYPE_UNSP"
+ "ECIFIED\020\000\022\r\n\tFULL_TIME\020\001\022\r\n\tPART_TIME\020\002\022"
+ "\016\n\nCONTRACTOR\020\003\022\024\n\020CONTRACT_TO_HIRE\020\004\022\r\n"
+ "\tTEMPORARY\020\005\022\n\n\006INTERN\020\006\022\r\n\tVOLUNTEER\020\007\022"
+ "\014\n\010PER_DIEM\020\010\022\022\n\016FLY_IN_FLY_OUT\020\t\022\031\n\025OTH"
+ "ER_EMPLOYMENT_TYPE\020\n*q\n\010JobLevel\022\031\n\025JOB_"
+ "LEVEL_UNSPECIFIED\020\000\022\017\n\013ENTRY_LEVEL\020\001\022\017\n\013"
+ "EXPERIENCED\020\002\022\013\n\007MANAGER\020\003\022\014\n\010DIRECTOR\020\004"
+ "\022\r\n\tEXECUTIVE\020\005*\272\006\n\013JobCategory\022\034\n\030JOB_C"
+ "ATEGORY_UNSPECIFIED\020\000\022\032\n\026ACCOUNTING_AND_"
+ "FINANCE\020\001\022\035\n\031ADMINISTRATIVE_AND_OFFICE\020\002"
+ "\022\035\n\031ADVERTISING_AND_MARKETING\020\003\022\017\n\013ANIMA"
+ "L_CARE\020\004\022\032\n\026ART_FASHION_AND_DESIGN\020\005\022\027\n\023"
+ "BUSINESS_OPERATIONS\020\006\022\033\n\027CLEANING_AND_FA"
+ "CILITIES\020\007\022\023\n\017COMPUTER_AND_IT\020\010\022\020\n\014CONST"
+ "RUCTION\020\t\022\024\n\020CUSTOMER_SERVICE\020\n\022\r\n\tEDUCA"
+ "TION\020\013\022\034\n\030ENTERTAINMENT_AND_TRAVEL\020\014\022\030\n\024"
+ "FARMING_AND_OUTDOORS\020\r\022\016\n\nHEALTHCARE\020\016\022\023"
+ "\n\017HUMAN_RESOURCES\020\017\022\'\n#INSTALLATION_MAIN"
+ "TENANCE_AND_REPAIR\020\020\022\t\n\005LEGAL\020\021\022\016\n\nMANAG"
+ "EMENT\020\022\022\037\n\033MANUFACTURING_AND_WAREHOUSE\020\023"
+ "\022$\n MEDIA_COMMUNICATIONS_AND_WRITING\020\024\022\026"
+ "\n\022OIL_GAS_AND_MINING\020\025\022\036\n\032PERSONAL_CARE_"
+ "AND_SERVICES\020\026\022\027\n\023PROTECTIVE_SERVICES\020\027\022"
+ "\017\n\013REAL_ESTATE\020\030\022\036\n\032RESTAURANT_AND_HOSPI"
+ "TALITY\020\031\022\024\n\020SALES_AND_RETAIL\020\032\022\033\n\027SCIENC"
+ "E_AND_ENGINEERING\020\033\022\"\n\036SOCIAL_SERVICES_A"
+ "ND_NON_PROFIT\020\034\022!\n\035SPORTS_FITNESS_AND_RE"
+ "CREATION\020\035\022 \n\034TRANSPORTATION_AND_LOGISTI"
+ "CS\020\036*e\n\rPostingRegion\022\036\n\032POSTING_REGION_"
+ "UNSPECIFIED\020\000\022\027\n\023ADMINISTRATIVE_AREA\020\001\022\n"
+ "\n\006NATION\020\002\022\017\n\013TELECOMMUTE\020\003*j\n\nVisibilit"
+ "y\022\032\n\026VISIBILITY_UNSPECIFIED\020\000\022\020\n\014ACCOUNT"
+ "_ONLY\020\001\022\026\n\022SHARED_WITH_GOOGLE\020\002\022\026\n\022SHARE"
+ "D_WITH_PUBLIC\020\003*Z\n\020ContactInfoUsage\022\"\n\036C"
+ "ONTACT_INFO_USAGE_UNSPECIFIED\020\000\022\014\n\010PERSO"
+ "NAL\020\001\022\010\n\004WORK\020\002\022\n\n\006SCHOOL\020\003*q\n\020HtmlSanit"
+ "ization\022!\n\035HTML_SANITIZATION_UNSPECIFIED"
+ "\020\000\022\036\n\032HTML_SANITIZATION_DISABLED\020\001\022\032\n\026SI"
+ "MPLE_FORMATTING_ONLY\020\002*{\n\rCommuteMethod\022"
+ "\036\n\032COMMUTE_METHOD_UNSPECIFIED\020\000\022\013\n\007DRIVI"
+ "NG\020\001\022\013\n\007TRANSIT\020\002\022\013\n\007WALKING\020\003\022\013\n\007CYCLIN"
+ "G\020\004\022\026\n\022TRANSIT_ACCESSIBLE\020\005By\n\037com.googl"
+ "e.cloud.talent.v4beta1B\013CommonProtoP\001ZAg"
+ "oogle.golang.org/genproto/googleapis/clo"
+ "ud/talent/v4beta1;talent\242\002\003CTSb\006proto3"
};
// The assigner stores the built FileDescriptor into the static field above.
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
com.google.protobuf.WrappersProto.getDescriptor(),
com.google.type.LatLngProto.getDescriptor(),
com.google.type.MoneyProto.getDescriptor(),
com.google.type.PostalAddressProto.getDescriptor(),
},
assigner);
// Wire up each message's descriptor and field accessor table by its index
// in the file / nested-type lists.
internal_static_google_cloud_talent_v4beta1_TimestampRange_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_google_cloud_talent_v4beta1_TimestampRange_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_talent_v4beta1_TimestampRange_descriptor,
new java.lang.String[] {
"StartTime", "EndTime",
});
internal_static_google_cloud_talent_v4beta1_Location_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_google_cloud_talent_v4beta1_Location_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_talent_v4beta1_Location_descriptor,
new java.lang.String[] {
"LocationType", "PostalAddress", "LatLng", "RadiusMiles",
});
internal_static_google_cloud_talent_v4beta1_RequestMetadata_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_google_cloud_talent_v4beta1_RequestMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_talent_v4beta1_RequestMetadata_descriptor,
new java.lang.String[] {
"Domain", "SessionId", "UserId", "AllowMissingIds", "DeviceInfo",
});
internal_static_google_cloud_talent_v4beta1_ResponseMetadata_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_google_cloud_talent_v4beta1_ResponseMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_talent_v4beta1_ResponseMetadata_descriptor,
new java.lang.String[] {
"RequestId",
});
internal_static_google_cloud_talent_v4beta1_DeviceInfo_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_google_cloud_talent_v4beta1_DeviceInfo_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_talent_v4beta1_DeviceInfo_descriptor,
new java.lang.String[] {
"DeviceType", "Id",
});
internal_static_google_cloud_talent_v4beta1_CustomAttribute_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_google_cloud_talent_v4beta1_CustomAttribute_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_talent_v4beta1_CustomAttribute_descriptor,
new java.lang.String[] {
"StringValues", "LongValues", "Filterable",
});
internal_static_google_cloud_talent_v4beta1_SpellingCorrection_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_google_cloud_talent_v4beta1_SpellingCorrection_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_talent_v4beta1_SpellingCorrection_descriptor,
new java.lang.String[] {
"Corrected", "CorrectedText", "CorrectedHtml",
});
internal_static_google_cloud_talent_v4beta1_CompensationInfo_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_google_cloud_talent_v4beta1_CompensationInfo_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_talent_v4beta1_CompensationInfo_descriptor,
new java.lang.String[] {
"Entries", "AnnualizedBaseCompensationRange", "AnnualizedTotalCompensationRange",
});
internal_static_google_cloud_talent_v4beta1_CompensationInfo_CompensationEntry_descriptor =
internal_static_google_cloud_talent_v4beta1_CompensationInfo_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_talent_v4beta1_CompensationInfo_CompensationEntry_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_talent_v4beta1_CompensationInfo_CompensationEntry_descriptor,
new java.lang.String[] {
"Type",
"Unit",
"Amount",
"Range",
"Description",
"ExpectedUnitsPerYear",
"CompensationAmount",
});
internal_static_google_cloud_talent_v4beta1_CompensationInfo_CompensationRange_descriptor =
internal_static_google_cloud_talent_v4beta1_CompensationInfo_descriptor
.getNestedTypes()
.get(1);
internal_static_google_cloud_talent_v4beta1_CompensationInfo_CompensationRange_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_talent_v4beta1_CompensationInfo_CompensationRange_descriptor,
new java.lang.String[] {
"MaxCompensation", "MinCompensation",
});
// Referencing the dependency descriptors, presumably to ensure their classes
// are initialized — standard generated-code pattern; confirm against protoc output.
com.google.api.AnnotationsProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
com.google.protobuf.WrappersProto.getDescriptor();
com.google.type.LatLngProto.getDescriptor();
com.google.type.MoneyProto.getDescriptor();
com.google.type.PostalAddressProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
| |
package eu.f3rog.blade.compiler.state;
import android.os.Bundle;
import android.os.Parcelable;
import android.view.View;
import com.squareup.javapoet.AnnotationSpec;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.MethodSpec;
import java.util.ArrayList;
import java.util.List;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import blade.State;
import eu.f3rog.blade.compiler.ErrorMsg;
import eu.f3rog.blade.compiler.builder.annotation.WeaveBuilder;
import eu.f3rog.blade.compiler.builder.helper.BaseHelperModule;
import eu.f3rog.blade.compiler.builder.helper.HelperClassBuilder;
import eu.f3rog.blade.compiler.module.BundleUtils;
import eu.f3rog.blade.compiler.util.ProcessorError;
import eu.f3rog.blade.compiler.util.ProcessorUtils;
import eu.f3rog.blade.core.BundleWrapper;
import static eu.f3rog.blade.compiler.util.ProcessorUtils.addClassAsParameter;
import static eu.f3rog.blade.compiler.util.ProcessorUtils.cannotHaveAnnotation;
import static eu.f3rog.blade.compiler.util.ProcessorUtils.isActivitySubClass;
import static eu.f3rog.blade.compiler.util.ProcessorUtils.isFragmentSubClass;
import static eu.f3rog.blade.compiler.util.ProcessorUtils.isSubClassOf;
/**
 * Helper module implementing support for the {@link State} annotation.
 * <p>
 * Collects all {@code @State}-annotated fields of a processed class and
 * generates two static helper methods — {@code saveState(target, state)} and
 * {@code restoreState(target, state)} — that move those fields into and out of
 * a {@link Bundle}. Depending on the kind of the processed class
 * (Activity/Fragment, View or Presenter) calls to these helpers are woven into
 * the appropriate lifecycle methods.
 *
 * @author FrantisekGazo
 */
public final class StateHelperModule
        extends BaseHelperModule {

    private static final ClassName PRESENTER_CLASS_NAME = ClassName.get("blade.mvp", "IPresenter");

    /** Kind of the helped class; decides which lifecycle methods get woven. */
    private enum HelpedClassType {
        ACTIVITY_OR_FRAGMENT, VIEW, PRESENTER, OTHER
    }

    // Names of the generated helper methods.
    // (Fixed typo: constants were previously named *_SAVE_SATE / *_RESTORE_SATE.)
    private static final String METHOD_NAME_SAVE_STATE = "saveState";
    private static final String METHOD_NAME_RESTORE_STATE = "restoreState";

    // Names of the target-class lifecycle methods into which calls are woven.
    private static final String WEAVE_onSaveInstanceState = "onSaveInstanceState";
    private static final String WEAVE_onRestoreInstanceState = "onRestoreInstanceState";
    private static final String WEAVE_onCreate = "onCreate";
    private static final String WEAVE_onSaveState = "onSaveState";

    // Key format under which stateful fields are stored in the bundle.
    private static final String STATEFUL_ID_FORMAT = "<Stateful-%s>";

    /** {@code @State}-annotated fields collected from the processed class. */
    private final List<BundleUtils.BundledField> mStatefulFields = new ArrayList<>();
    private HelpedClassType mHelpedClassType;
    // Whether the processed View class itself declares the state methods; if so,
    // the woven implementations rename and chain to the existing ones.
    private boolean mHasSaveStateMethod;
    private boolean mHasRestoreStateMethod;

    @Override
    public void checkClass(final TypeElement e) throws ProcessorError {
        // Any class is supported; only the weaving strategy differs by kind.
        if (isActivitySubClass(e) || isFragmentSubClass(e)) {
            mHelpedClassType = HelpedClassType.ACTIVITY_OR_FRAGMENT;
        } else if (isSubClassOf(e, View.class)) {
            mHelpedClassType = HelpedClassType.VIEW;
            mHasSaveStateMethod = hasViewImplementedStateMethod(e, WEAVE_onSaveInstanceState);
            mHasRestoreStateMethod = hasViewImplementedStateMethod(e, WEAVE_onRestoreInstanceState);
        } else if (isSubClassOf(e, PRESENTER_CLASS_NAME)) {
            mHelpedClassType = HelpedClassType.PRESENTER;
        } else {
            mHelpedClassType = HelpedClassType.OTHER;
        }
    }

    /**
     * @return {@code true} if the given View class declares a method with the
     * given simple name (parameter lists are not inspected).
     */
    private boolean hasViewImplementedStateMethod(final TypeElement viewType, final String methodName) {
        final List<? extends Element> elements = viewType.getEnclosedElements();
        for (final Element e : elements) {
            if (e.getKind() == ElementKind.METHOD) {
                final String name = e.getSimpleName().toString();
                if (name.equals(methodName)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Registers a {@code @State}-annotated field.
     *
     * @throws ProcessorError if the field cannot carry the annotation
     *                        (e.g. wrong modifiers).
     */
    @Override
    public void add(final VariableElement e) throws ProcessorError {
        if (cannotHaveAnnotation(e)) {
            throw new ProcessorError(e, ErrorMsg.Invalid_field_with_annotation, State.class.getSimpleName());
        }
        BundleUtils.addBundledField(mStatefulFields, e, State.class, new ProcessorUtils.IGetter<State, Class<?>>() {
            @Override
            public Class<?> get(State a) {
                return a.value();
            }
        });
    }

    /**
     * Adds the save/restore helper methods to the generated helper class.
     *
     * @return {@code true} if anything was generated (i.e. at least one
     *         stateful field was collected).
     */
    @Override
    public boolean implement(final HelperClassBuilder builder) throws ProcessorError {
        if (!mStatefulFields.isEmpty()) {
            // add methods only if there is something stateful
            addSaveStateMethod(builder);
            addRestoreStateMethod(builder);
            return true;
        }
        return false;
    }

    /**
     * Generates {@code public static void saveState(Target target, Bundle state)}
     * which throws on a {@code null} bundle and stores every stateful field.
     */
    private void addSaveStateMethod(final HelperClassBuilder builder) {
        final String target = "target";
        final String state = "state";
        final MethodSpec.Builder method = MethodSpec.methodBuilder(METHOD_NAME_SAVE_STATE)
                .addModifiers(Modifier.PUBLIC, Modifier.STATIC);
        addClassAsParameter(method, builder.getArgClassName(), target);
        method.addParameter(Bundle.class, state);
        if (mHelpedClassType != HelpedClassType.OTHER) {
            // Weave a call into the target's lifecycle only for known class kinds.
            method.addAnnotation(weaveSave(builder.getClassName()));
        }
        final String bundleWrapper = "bundleWrapper";
        method.beginControlFlow("if ($N == null)", state)
                .addStatement("throw new $T($S)", IllegalArgumentException.class, "State cannot be null!")
                .endControlFlow()
                .addStatement("$T $N = $T.from($N)", BundleWrapper.class, bundleWrapper, BundleWrapper.class, state);
        BundleUtils.putToBundle(method, target, mStatefulFields, STATEFUL_ID_FORMAT, bundleWrapper);
        builder.getBuilder().addMethod(method.build());
    }

    /**
     * Generates {@code public static void restoreState(Target target, Bundle state)}
     * which is a no-op on a {@code null} bundle and restores every stateful field.
     */
    private void addRestoreStateMethod(final HelperClassBuilder builder) {
        final String target = "target";
        final String state = "state";
        final MethodSpec.Builder method = MethodSpec.methodBuilder(METHOD_NAME_RESTORE_STATE)
                .addModifiers(Modifier.PUBLIC, Modifier.STATIC);
        addClassAsParameter(method, builder.getArgClassName(), target);
        method.addParameter(Bundle.class, state);
        if (mHelpedClassType != HelpedClassType.OTHER) {
            method.addAnnotation(weaveRestore(builder.getClassName()));
        }
        final String bundleWrapper = "bundleWrapper";
        method.beginControlFlow("if ($N == null)", state)
                .addStatement("return")
                .endControlFlow()
                .addStatement("$T $N = $T.from($N)", BundleWrapper.class, bundleWrapper, BundleWrapper.class, state);
        BundleUtils.getFromBundle(method, target, mStatefulFields, STATEFUL_ID_FORMAT, bundleWrapper);
        builder.getBuilder().addMethod(method.build());
    }

    /**
     * Builds the weave annotation that injects a {@code saveState} call into the
     * appropriate save-lifecycle method of the helped class.
     *
     * NOTE(review): single quotes inside the woven statements (e.g. 'USER_STATE')
     * appear to be the weaver's own string-literal syntax — confirm before changing.
     */
    private AnnotationSpec weaveSave(final ClassName helperName) {
        switch (mHelpedClassType) {
            case ACTIVITY_OR_FRAGMENT:
                return WeaveBuilder.weave().method(WEAVE_onSaveInstanceState, Bundle.class)
                        .placed(WeaveBuilder.MethodWeaveType.AFTER_BODY)
                        .withStatement("%s.%s(this, $1);", helperName, METHOD_NAME_SAVE_STATE)
                        .build();
            case PRESENTER:
                return WeaveBuilder.weave().method(WEAVE_onSaveState, Object.class)
                        .placed(WeaveBuilder.MethodWeaveType.AFTER_BODY)
                        .withStatement("%s.%s(this, (%s) $1);", helperName, METHOD_NAME_SAVE_STATE, Bundle.class.getCanonicalName())
                        .build();
            case VIEW:
                if (mHasSaveStateMethod) {
                    // View already overrides onSaveInstanceState: rename the existing
                    // implementation and store its result under 'USER_STATE'.
                    return WeaveBuilder.weave().method(WEAVE_onSaveInstanceState)
                            .renameExistingTo(WEAVE_onSaveInstanceState + "_BladeState")
                            .withStatement("%s bundle = new %s();", Bundle.class.getName(), Bundle.class.getName())
                            .withStatement("bundle.putParcelable('USER_STATE', this.onSaveInstanceState_BladeState());")
                            .withStatement("%s.%s(this, bundle);", helperName, METHOD_NAME_SAVE_STATE)
                            .withStatement("return bundle;")
                            .build();
                } else {
                    // No override: wrap the superclass state under 'PARENT_STATE'.
                    return WeaveBuilder.weave().method(WEAVE_onSaveInstanceState)
                            .withStatement("%s bundle = new %s();", Bundle.class.getName(), Bundle.class.getName())
                            .withStatement("bundle.putParcelable('PARENT_STATE', super.onSaveInstanceState());")
                            .withStatement("%s.%s(this, bundle);", helperName, METHOD_NAME_SAVE_STATE)
                            .withStatement("return bundle;")
                            .build();
                }
            default:
                // OTHER is filtered out by the callers; reaching here is a bug.
                throw new IllegalStateException();
        }
    }

    /**
     * Builds the weave annotation that injects a {@code restoreState} call into
     * the appropriate restore-lifecycle method of the helped class.
     */
    private AnnotationSpec weaveRestore(final ClassName helperName) {
        switch (mHelpedClassType) {
            case ACTIVITY_OR_FRAGMENT:
                return WeaveBuilder.weave().method(WEAVE_onCreate, Bundle.class)
                        .withPriority(WeaveBuilder.WeavePriority.HIGHER)
                        .withStatement("%s.%s(this, $1);", helperName, METHOD_NAME_RESTORE_STATE)
                        .build();
            case PRESENTER:
                return WeaveBuilder.weave().method(WEAVE_onCreate, Object.class)
                        .withStatement("%s.%s(this, (%s) $1);", helperName, METHOD_NAME_RESTORE_STATE, Bundle.class.getCanonicalName())
                        .build();
            case VIEW:
                if (mHasRestoreStateMethod) {
                    // Mirror of weaveSave: unwrap 'USER_STATE' and chain to the
                    // renamed user implementation.
                    return WeaveBuilder.weave().method(WEAVE_onRestoreInstanceState, Parcelable.class)
                            .renameExistingTo(WEAVE_onRestoreInstanceState + "_BladeState")
                            .withStatement("if ($1 instanceof %s) {", Bundle.class.getName())
                            .withStatement("%s bundle = (%s) $1;", Bundle.class.getName(), Bundle.class.getName())
                            .withStatement("%s.%s(this, bundle);", helperName, METHOD_NAME_RESTORE_STATE)
                            .withStatement("this.onRestoreInstanceState_BladeState(bundle.getParcelable('USER_STATE'));")
                            .withStatement("} else {")
                            .withStatement("this.onRestoreInstanceState_BladeState($1);")
                            .withStatement("}")
                            .withStatement("return;")
                            .build();
                } else {
                    return WeaveBuilder.weave().method(WEAVE_onRestoreInstanceState, Parcelable.class)
                            .withStatement("if ($1 instanceof %s) {", Bundle.class.getName())
                            .withStatement("%s bundle = (%s) $1;", Bundle.class.getName(), Bundle.class.getName())
                            .withStatement("%s.%s(this, bundle);", helperName, METHOD_NAME_RESTORE_STATE)
                            .withStatement("super.onRestoreInstanceState(bundle.getParcelable('PARENT_STATE'));")
                            .withStatement("} else {")
                            .withStatement("super.onRestoreInstanceState($1);")
                            .withStatement("}")
                            .withStatement("return;")
                            .build();
                }
            default:
                throw new IllegalStateException();
        }
    }
}
| |
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.builder.impl;
import org.drools.compiler.compiler.PackageRegistry;
import org.drools.compiler.compiler.TypeDeclarationError;
import org.drools.compiler.lang.descr.AbstractClassTypeDeclarationDescr;
import org.drools.compiler.lang.descr.AnnotationDescr;
import org.drools.compiler.lang.descr.PatternDescr;
import org.drools.compiler.lang.descr.QualifiedName;
import org.drools.compiler.lang.descr.TypeDeclarationDescr;
import org.drools.compiler.lang.descr.TypeFieldDescr;
import org.drools.core.base.TypeResolver;
import org.drools.core.definitions.InternalKnowledgePackage;
import org.drools.core.factmodel.ClassDefinition;
import org.drools.core.factmodel.FieldDefinition;
import org.drools.core.factmodel.traits.Alias;
import org.drools.core.rule.TypeDeclaration;
import org.drools.core.util.HierarchySorter;
import org.drools.core.util.asm.ClassFieldInspector;
import org.kie.api.definition.type.Key;
import java.io.IOException;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * Orders declared types by their inheritance hierarchy and propagates inherited
 * fields from supertypes into subtype declarations, so that superclasses are
 * always fully processed before their subclasses during knowledge building.
 */
public class ClassHierarchyManager {

    protected KnowledgeBuilderImpl kbuilder;
    // Declarations linearized so a supertype always precedes its subtypes.
    protected List<AbstractClassTypeDeclarationDescr> sortedDescriptors;
    // Maps each declared type name to the names of its (declared) supertypes
    // and declared field types; used both for sorting and for parent lookup.
    protected Map<QualifiedName, Collection<QualifiedName>> taxonomy;

    public ClassHierarchyManager( Collection<AbstractClassTypeDeclarationDescr> unsortedDescrs, KnowledgeBuilderImpl kbuilder ) {
        this.kbuilder = kbuilder;
        this.sortedDescriptors = sortByHierarchy( unsortedDescrs, kbuilder );
    }

    public List<AbstractClassTypeDeclarationDescr> getSortedDescriptors() {
        return sortedDescriptors;
    }

    /**
     * Utility method to sort declared beans. Linearizes the hierarchy,
     * i.e. generates a sequence of declarations such that, if Sub is a subclass of
     * Sup, then the index of Sub will be > than the index of Sup in the
     * resulting collection. This ensures that superclasses are processed before
     * their subclasses.
     *
     * Side effect: populates {@code taxonomy}, reports duplicate and circular
     * declarations as builder errors, and breaks cycles by clearing the
     * offending descriptor's supertypes.
     */
    protected List<AbstractClassTypeDeclarationDescr> sortByHierarchy( Collection<AbstractClassTypeDeclarationDescr> unsortedDescrs, KnowledgeBuilderImpl kbuilder ) {

        taxonomy = new HashMap<QualifiedName, Collection<QualifiedName>>();
        Map<QualifiedName, AbstractClassTypeDeclarationDescr> cache = new HashMap<QualifiedName, AbstractClassTypeDeclarationDescr>();

        // Index every descriptor by its qualified name for quick lookup below.
        for (AbstractClassTypeDeclarationDescr tdescr : unsortedDescrs) {
            cache.put(tdescr.getType(), tdescr);
        }

        // First pass: record declared supertype edges, detecting duplicates and cycles.
        for (AbstractClassTypeDeclarationDescr tdescr : unsortedDescrs) {
            QualifiedName name = tdescr.getType();

            Collection<QualifiedName> supers = taxonomy.get(name);
            if (supers == null) {
                supers = new ArrayList<QualifiedName>();
                taxonomy.put(name, supers);
            } else {
                // Seeing the same qualified name twice means a duplicate declaration.
                kbuilder.addBuilderResult(new TypeDeclarationError(tdescr,
                                                                   "Found duplicate declaration for type " + tdescr.getType()));
            }

            boolean circular = false;
            for (QualifiedName sup : tdescr.getSuperTypes()) {
                // java.lang.Object has no meaningful supertype edge to record.
                if (!Object.class.getName().equals(name.getFullName())) {
                    if (!hasCircularDependency(tdescr.getType(), sup, taxonomy)) {
                        // Only link supertypes that are themselves declared in this batch;
                        // external (imported) supertypes don't participate in the sort.
                        if ( cache.containsKey( sup ) ) {
                            supers.add( sup );
                        }
                    } else {
                        circular = true;
                        kbuilder.addBuilderResult(new TypeDeclarationError(tdescr,
                                                                           "Found circular dependency for type " + tdescr.getTypeName()));
                        break;
                    }
                }
            }
            if (circular) {
                // Break the cycle so the sorter can still produce a total order.
                tdescr.getSuperTypes().clear();
            }
        }

        // Second pass: a field whose type is another declared type also induces an
        // ordering edge (the field's type must be built first); self-references are
        // marked recursive instead of being added as edges.
        for (AbstractClassTypeDeclarationDescr tdescr : unsortedDescrs) {
            for (TypeFieldDescr field : tdescr.getFields().values()) {
                QualifiedName name = tdescr.getType();
                QualifiedName typeName = new QualifiedName(field.getPattern().getObjectType());
                if (!hasCircularDependency(name, typeName, taxonomy)) {
                    if ( cache.containsKey( typeName ) ) {
                        taxonomy.get( name ).add( typeName );
                    }
                } else {
                    field.setRecursive( true );
                }
            }
        }

        // Topologically sort the taxonomy and map names back to their descriptors.
        List<QualifiedName> sorted = new HierarchySorter<QualifiedName>().sort(taxonomy);
        ArrayList list = new ArrayList( sorted.size() );
        for ( QualifiedName name : sorted ) {
            list.add( cache.get( name ) );
        }

        return list;
    }

    /**
     * Returns true if {@code name} is reachable from {@code typeName} by
     * following supertype edges in {@code taxonomy} (or if the two are equal),
     * i.e. adding an edge name -> typeName would create a cycle.
     */
    protected static boolean hasCircularDependency(QualifiedName name,
                                                   QualifiedName typeName,
                                                   Map<QualifiedName, Collection<QualifiedName>> taxonomy) {
        if (name.equals( typeName )) {
            return true;
        }
        Collection<QualifiedName> parents = taxonomy.get(typeName);
        if (parents != null) {
            if (parents.contains(name)) {
                return true;
            } else {
                // Depth-first walk up the recorded ancestry.
                for (QualifiedName ancestor : parents) {
                    if (hasCircularDependency(name, ancestor, taxonomy)) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /**
     * Completes {@code typeDescr} with the fields inherited from its supertypes.
     * If the class is not novel (it already exists on the classpath) and no
     * fields were declared, the locally available getter/setter pairs are
     * inspected and turned into field descriptors as well.
     */
    public void inheritFields( PackageRegistry pkgRegistry,
                               AbstractClassTypeDeclarationDescr typeDescr,
                               Collection<AbstractClassTypeDeclarationDescr> sortedTypeDescriptors,
                               List<TypeDefinition> unresolvedTypes,
                               Map<String,AbstractClassTypeDeclarationDescr> unprocessableDescrs ) {
        TypeDeclarationDescr tDescr = (TypeDeclarationDescr) typeDescr;
        boolean isNovel = TypeDeclarationUtils.isNovelClass( typeDescr, pkgRegistry );
        boolean inferFields = ! isNovel && typeDescr.getFields().isEmpty();

        for ( QualifiedName qname : tDescr.getSuperTypes() ) {
            //descriptor needs fields inherited from superclass
            // NOTE(review): mergeInheritedFields already iterates over all supertypes
            // internally, so this outer loop repeats the merge once per supertype.
            if ( mergeInheritedFields(tDescr, unresolvedTypes, unprocessableDescrs, pkgRegistry.getTypeResolver() ) ) {
                /*
                //descriptor also needs metadata from superclass - NO LONGER SINCE DROOLS 6.x
                for ( AbstractClassTypeDeclarationDescr descr : sortedTypeDescriptors ) {
                    // sortedTypeDescriptors are sorted by inheritance order, so we'll always find the superClass (if any) before the subclass
                    if ( qname.equals( descr.getType() ) ) {
                        typeDescr.getAnnotations().putAll( descr.getAnnotations() );
                        break;
                    } else if ( typeDescr.getType().equals( descr.getType() ) ) {
                        break;
                    }
                }
                */
            }
        }

        if ( inferFields ) {
            // not novel, but only an empty declaration was provided.
            // after inheriting the fields from supertypes, now we fill in the locally declared fields
            try {
                Class existingClass = TypeDeclarationUtils.getExistingDeclarationClass( typeDescr, pkgRegistry );
                ClassFieldInspector inspector = new ClassFieldInspector( existingClass );

                for (String name : inspector.getGetterMethods().keySet()) {
                    // classFieldAccessor requires both getter and setter
                    if (inspector.getSetterMethods().containsKey(name)) {
                        if (!inspector.isNonGetter(name) && !"class".equals(name)) {
                            TypeFieldDescr inheritedFlDescr = new TypeFieldDescr(
                                    name,
                                    new PatternDescr(
                                            inspector.getFieldTypes().get(name).getName()));
                            // Abstract getters have no concrete implementation to inherit.
                            inheritedFlDescr.setInherited(!Modifier.isAbstract( inspector.getGetterMethods().get( name ).getModifiers() ));

                            if (!tDescr.getFields().containsKey(inheritedFlDescr.getFieldName()))
                                tDescr.getFields().put(inheritedFlDescr.getFieldName(),
                                                       inheritedFlDescr);
                        }
                    }
                }
            } catch ( Exception e ) {
                // can't happen as we know that the class is not novel - that is, it has been resolved before
            }
        }
    }

    /**
     * In order to build a declared class, the fields inherited from its
     * superclass(es) are added to its declaration. Inherited descriptors are
     * marked as such to distinguish them from native ones. Various scenarios
     * are possible. (i) The superclass has been declared in the DRL as well :
     * the fields are cloned as inherited (ii) The superclass is imported
     * (external), but some of its fields have been tagged with metadata (iii)
     * The superclass is imported.
     *
     * The search for field descriptors is carried out in the order. (i) and
     * (ii+iii) are mutually exclusive. The search is as such: (i) The
     * superclass' declared fields are used to build the base class additional
     * fields (iii) The superclass is inspected to discover its (public) fields,
     * from which descriptors are generated (ii) Both (i) and (iii) are applied,
     * but the declared fields override the inspected ones
     *
     *
     *
     * @param typeDescr
     *            The base class descriptor, to be completed with the inherited
     *            fields descriptors
     * @return true if all went well
     */
    protected boolean mergeInheritedFields( TypeDeclarationDescr typeDescr,
                                            List<TypeDefinition> unresolvedTypes,
                                            Map<String,AbstractClassTypeDeclarationDescr> unprocessableDescrs,
                                            TypeResolver typeResolver ) {

        if (typeDescr.getSuperTypes().isEmpty())
            return false;

        boolean merge = false;

        // Iterate supertypes in reverse so earlier-declared supertypes win on conflicts.
        for (int j = typeDescr.getSuperTypes().size() - 1; j >= 0; j--) {
            QualifiedName qname = typeDescr.getSuperTypes().get(j);
            String simpleSuperTypeName = qname.getName();
            String superTypePackageName = qname.getNamespace();
            String fullSuper = qname.getFullName();

            merge = mergeFields( simpleSuperTypeName,
                                 superTypePackageName,
                                 fullSuper,
                                 typeDescr,
                                 unresolvedTypes,
                                 unprocessableDescrs,
                                 typeResolver ) || merge;
        }

        return merge;
    }

    /**
     * Merges the fields of one supertype into {@code typeDescr}'s field map.
     * The supertype's fields are gathered either from its DRL declaration (if
     * any) or by reflective inspection of the compiled class, then overlaid
     * with the locally declared fields. Returns false (and possibly parks the
     * descriptor in {@code unprocessableDescrs}) when the supertype cannot be
     * processed yet or a field is illegally redeclared with a new type.
     */
    protected boolean mergeFields( String simpleSuperTypeName,
                                   String superTypePackageName,
                                   String fullSuper,
                                   TypeDeclarationDescr typeDescr,
                                   List<TypeDefinition> unresolvedTypes,
                                   Map<String,AbstractClassTypeDeclarationDescr> unprocessableDescrs,
                                   TypeResolver resolver ) {

        // LinkedHashMap: super-fields must keep their declaration order, followed by local ones.
        Map<String, TypeFieldDescr> fieldMap = new LinkedHashMap<String, TypeFieldDescr>();
        boolean isNovel = TypeDeclarationUtils.isNovelClass( typeDescr, kbuilder.getPackageRegistry( typeDescr.getNamespace() ) );

        PackageRegistry registry = kbuilder.getPackageRegistry( superTypePackageName );
        InternalKnowledgePackage pack = null;
        if ( registry != null ) {
            pack = registry.getPackage();
        } else {
            // If there is no registry the type isn't a DRL-declared type, which is forbidden.
            // Avoid NPE JIRA-3041 when trying to access the registry. Avoid subsequent problems.
            // DROOLS-536 At this point, the declarations might exist, but the package might not have been processed yet
            if ( isNovel ) {
                unprocessableDescrs.put( typeDescr.getType().getFullName(), typeDescr );
                return false;
            }
        }

        // If the supertype itself could not be processed, defer this descriptor too.
        if ( unprocessableDescrs.containsKey( fullSuper ) ) {
            unprocessableDescrs.put( typeDescr.getType().getFullName(), typeDescr );
            return false;
        }

        // if a class is declared in DRL, its package can't be null? The default package is replaced by "defaultpkg"
        boolean isSuperClassTagged = false;
        boolean isSuperClassDeclared = true; //in the same package, or in a previous one

        if ( pack != null ) {
            // look for the supertype declaration in available packages
            TypeDeclaration superTypeDeclaration = pack.getTypeDeclaration( simpleSuperTypeName );

            if (superTypeDeclaration != null && superTypeDeclaration.getTypeClassDef() != null ) {
                ClassDefinition classDef = superTypeDeclaration.getTypeClassDef();
                // inherit fields
                for (org.kie.api.definition.type.FactField fld : classDef.getFields()) {
                    TypeFieldDescr inheritedFlDescr = buildInheritedFieldDescrFromDefinition(fld, typeDescr);
                    fieldMap.put(inheritedFlDescr.getFieldName(),
                                 inheritedFlDescr);
                }

                // new classes are already distinguished from tagged external classes
                isSuperClassTagged = !superTypeDeclaration.isNovel();
            }
            /*
            else {
                for ( TypeDefinition def : unresolvedTypes ) {
                    if ( def.getTypeClassName().equals( fullSuper ) ) {
                        TypeDeclarationDescr td = (TypeDeclarationDescr) def.typeDescr;
                        for ( TypeFieldDescr tf : td.getFields().values() ) {
                            fieldMap.put( tf.getFieldName(), tf.cloneAsInherited() );
                        }
                        isSuperClassDeclared = def.type.isNovel();
                        break;
                    }
                    isSuperClassDeclared = false;
                }
            }
            */
        } else {
            isSuperClassDeclared = false;
        }

        // look for the class externally
        if ( !isSuperClassDeclared || isSuperClassTagged ) {
            try {
                Class superKlass;
                if ( registry != null ) {
                    superKlass = registry.getTypeResolver().resolveType(fullSuper);
                } else {
                    // if the supertype has not been declared, and we have got so far, it means that this class is not novel
                    superKlass = resolver.resolveType( fullSuper );
                }
                ClassFieldInspector inspector = new ClassFieldInspector(superKlass);
                for (String name : inspector.getGetterMethods().keySet()) {
                    // classFieldAccessor requires both getter and setter
                    if (inspector.getSetterMethods().containsKey(name)) {
                        if (!inspector.isNonGetter(name) && !"class".equals(name)) {
                            TypeFieldDescr inheritedFlDescr = new TypeFieldDescr(
                                    name,
                                    new PatternDescr(
                                            inspector.getFieldTypes().get(name).getName()));
                            inheritedFlDescr.setInherited(!Modifier.isAbstract(inspector.getGetterMethods().get(name).getModifiers()));

                            if (!fieldMap.containsKey(inheritedFlDescr.getFieldName()))
                                fieldMap.put(inheritedFlDescr.getFieldName(),
                                             inheritedFlDescr);
                        }
                    }
                }
            } catch (ClassNotFoundException cnfe) {
                throw new RuntimeException("Unable to resolve Type Declaration superclass '" + fullSuper + "'");
            } catch ( IOException e ) {
                e.printStackTrace();
            }
        }

        // finally, locally declared fields are merged. The map swap ensures that super-fields are added in order, before the subclass' ones
        // notice that it is not possible to override a field changing its type
        for ( String fieldName : typeDescr.getFields().keySet() ) {
            if ( fieldMap.containsKey( fieldName ) ) {
                String type1 = fieldMap.get( fieldName ).getPattern().getObjectType();
                String type2 = typeDescr.getFields().get( fieldName ).getPattern().getObjectType();
                if (type2.lastIndexOf(".") < 0) {
                    try {
                        // Resolve simple names to fully qualified ones before comparing.
                        TypeResolver typeResolver = kbuilder.getPackageRegistry( typeDescr.getNamespace() ).getTypeResolver();
                        type1 = typeResolver.resolveType( type1 ).getName();
                        type2 = typeResolver.resolveType( type2 ).getName();
                        // now that we are at it... this will be needed later anyway
                        fieldMap.get( fieldName ).getPattern().setObjectType( type1 );
                        typeDescr.getFields().get( fieldName ).getPattern().setObjectType( type2 );
                    } catch (ClassNotFoundException cnfe) {
                        // will fail later
                    }
                }

                boolean clash = ! type1.equals(type2);
                TypeFieldDescr overriding = null;
                if ( clash ) {
                    // this may still be an override using a subclass of the original type
                    try {
                        Class<?> sup = resolver.resolveType( type1 );
                        Class<?> loc = resolver.resolveType( type2 );
                        if ( sup.isAssignableFrom( loc ) ) {
                            clash = false;
                            // mark as non inherited so that a new field is actually built
                            overriding = fieldMap.get( fieldName );
                        }
                    } catch ( ClassNotFoundException cnfe ) {
                        // not much to do
                    }
                }
                if ( clash ) {
                    kbuilder.addBuilderResult(new TypeDeclarationError(typeDescr,
                                                                       "Cannot redeclare field '" + fieldName + " from " + type1 + " to " + type2));
                    typeDescr.setType(null,null);
                    return false;
                } else {
                    // Local field wins, but inherits the super-field's default value,
                    // annotations and index where it does not set its own.
                    String initVal = fieldMap.get(fieldName).getInitExpr();
                    TypeFieldDescr fd = typeDescr.getFields().get(fieldName);
                    if (fd.getInitExpr() == null) {
                        fd.setInitExpr( initVal );
                    }
                    fd.setInherited( fieldMap.get( fieldName ).isInherited() );
                    fd.setOverriding( overriding );

                    for (String key : fieldMap.get(fieldName).getAnnotationNames()) {
                        if (fd.getAnnotation( key ) == null) {
                            fd.addAnnotation( fieldMap.get( fieldName ).getAnnotation( key ) );
                        }
                    }

                    if (fd.getIndex() < 0) {
                        fd.setIndex( fieldMap.get( fieldName ).getIndex() );
                    }
                }
            }
            fieldMap.put( fieldName,
                          typeDescr.getFields().get( fieldName ) );
        }

        typeDescr.setFields(fieldMap);

        return true;
    }

    /**
     * Builds a {@link TypeFieldDescr} marked as inherited from a compiled
     * supertype field definition, carrying over name, type, {@code @Key}
     * annotation, declaration index and (possibly @Alias-overridden) initial
     * expression.
     */
    protected TypeFieldDescr buildInheritedFieldDescrFromDefinition(org.kie.api.definition.type.FactField fld, TypeDeclarationDescr typeDescr) {
        PatternDescr fldType = new PatternDescr();
        TypeFieldDescr inheritedFldDescr = new TypeFieldDescr();
        inheritedFldDescr.setFieldName(fld.getName());
        fldType.setObjectType( ( (FieldDefinition) fld ).getTypeName() );
        inheritedFldDescr.setPattern(fldType);
        if (fld.isKey()) {
            AnnotationDescr keyAnnotation = new AnnotationDescr(Key.class.getCanonicalName());
            keyAnnotation.setFullyQualifiedName(Key.class.getCanonicalName());
            inheritedFldDescr.addAnnotation(keyAnnotation);
        }
        inheritedFldDescr.setIndex(((FieldDefinition) fld).getDeclIndex());
        inheritedFldDescr.setInherited(true);

        String initExprOverride = ((FieldDefinition) fld).getInitExpr();
        int overrideCount = 0;
        // only @aliasing local fields may override defaults.
        for (TypeFieldDescr localField : typeDescr.getFields().values()) {
            Alias alias = localField.getTypedAnnotation(Alias.class);
            // The alias value is quoted in the annotation; strip quotes before comparing.
            if (alias != null && fld.getName().equals(alias.value().replaceAll("\"", "")) && localField.getInitExpr() != null) {
                overrideCount++;
                initExprOverride = localField.getInitExpr();
            }
        }
        if (overrideCount > 1) {
            // however, only one is allowed
            initExprOverride = null;
        }
        inheritedFldDescr.setInitExpr(initExprOverride);
        return inheritedFldDescr;
    }

    /**
     * Adds {@code type} to {@code tgtPackage} with an order index strictly
     * greater than all of its parents' orders, preserving the hierarchy order
     * across packages.
     */
    public void addDeclarationToPackagePreservingOrder( TypeDeclaration type,
                                                        AbstractClassTypeDeclarationDescr typeDescr,
                                                        InternalKnowledgePackage tgtPackage,
                                                        Map<String, PackageRegistry> pkgRegistryMap ) {
        Collection<QualifiedName> parents = taxonomy.get( new QualifiedName( type.getFullName() ) );
        int index = getSortedDescriptors().indexOf( typeDescr );

        if ( parents != null && ! parents.isEmpty() ) {
            for ( QualifiedName parentName : parents ) {
                String nameSpace = parentName.getNamespace();
                String name = parentName.getName();

                PackageRegistry parentPkgRegistry = pkgRegistryMap.get( nameSpace );
                if ( parentPkgRegistry != null ) {
                    TypeDeclaration parentDeclaration = parentPkgRegistry.getPackage().getTypeDeclaration( name );
                    if ( parentDeclaration != null && parentDeclaration.getNature() == TypeDeclaration.Nature.DEFINITION ) {
                        index = Math.max( index, parentDeclaration.getOrder() );
                    }
                }
            }
        }

        type.setOrder( index + 1 );
        tgtPackage.addTypeDeclaration( type );
    }
}
| |
package mods.belgabor.amtweaker.mods.ss2.handlers;
import minetweaker.IUndoableAction;
import minetweaker.MineTweakerAPI;
import minetweaker.api.item.IIngredient;
import minetweaker.api.item.IItemStack;
import mods.belgabor.amtweaker.util.CommandLoggerBase;
import net.minecraft.item.ItemStack;
import net.minecraft.item.crafting.IRecipe;
import net.minecraft.item.crafting.ShapelessRecipes;
import net.minecraftforge.oredict.ShapedOreRecipe;
import net.minecraftforge.oredict.ShapelessOreRecipe;
import shift.sextiarysector.recipe.FurnaceCraftingManager;
import stanhebben.zenscript.annotations.ZenClass;
import stanhebben.zenscript.annotations.ZenMethod;
import java.util.ArrayList;
import static mods.belgabor.amtweaker.helpers.InputHelper.toObjects;
import static mods.belgabor.amtweaker.helpers.InputHelper.toShapedObjects;
import static mods.belgabor.amtweaker.helpers.InputHelper.toStack;
import static mods.belgabor.amtweaker.helpers.StackHelper.areEqual;
@ZenClass("mods.ss2.LargeFurnace")
/**
 * MineTweaker bindings for the SextiarySector2 Large Furnace: exposes
 * {@code addShaped}/{@code addShapeless} and the corresponding remove
 * methods as undoable script actions against the furnace crafting manager.
 */
public class LargeFurnace {

    // Recipe-type filters used by doRemove/findRecipe.
    private static final int TYPE_ANY = 0;
    private static final int TYPE_SHAPELESS = 1;
    private static final int TYPE_SHAPED = 2;

    /**
     * Registers a shapeless Large Furnace recipe.
     *
     * @param output the crafted result; must not be null
     * @param inputs the unordered ingredients; must be non-null and non-empty
     */
    @ZenMethod
    public static void addShapeless(IItemStack output, IIngredient[] inputs) {
        if (inputs == null) {
            MineTweakerAPI.getLogger().logError("Large Furnace: Input set must not be null!");
            return;
        }
        if (inputs.length == 0) {
            // fixed message: was "Input set must not empty!"
            MineTweakerAPI.getLogger().logError("Large Furnace: Input set must not be empty!");
            return;
        }
        if (output == null) {
            MineTweakerAPI.getLogger().logError("Large Furnace: Output must not be null!");
            return;
        }
        ShapelessOreRecipe recipe = constructSafely(toStack(output), toObjects(inputs));
        if (recipe == null) {
            MineTweakerAPI.getLogger().logError("Large Furnace: Illegal recipe.");
            return;
        }
        MineTweakerAPI.apply(new FurnaceAdd(output, recipe));
    }

    /**
     * Registers a shaped Large Furnace recipe.
     *
     * @param output the crafted result; must not be null
     * @param inputs the ingredient grid (rows of columns); must be non-null and non-empty
     */
    @ZenMethod
    public static void addShaped(IItemStack output, IIngredient[][] inputs) {
        if (inputs == null) {
            MineTweakerAPI.getLogger().logError("Large Furnace: Input set must not be null!");
            return;
        }
        if (inputs.length == 0) {
            // fixed message: was "Input set must not empty!"
            MineTweakerAPI.getLogger().logError("Large Furnace: Input set must not be empty!");
            return;
        }
        if (output == null) {
            MineTweakerAPI.getLogger().logError("Large Furnace: Output must not be null!");
            return;
        }
        ShapedOreRecipe recipe = constructSafelyShaped(toStack(output), toShapedObjects(inputs, true));
        if (recipe == null) {
            MineTweakerAPI.getLogger().logError("Large Furnace: Illegal recipe.");
            return;
        }
        MineTweakerAPI.apply(new FurnaceAdd(output, recipe));
    }

    /** Builds a shapeless ore-dictionary recipe, returning null on invalid input instead of throwing. */
    private static ShapelessOreRecipe constructSafely(ItemStack output, Object[] inputs) {
        try {
            return new ShapelessOreRecipe(output, inputs);
        } catch (RuntimeException e) {
            return null;
        }
    }

    /** Builds a shaped ore-dictionary recipe, returning null on invalid input instead of throwing. */
    private static ShapedOreRecipe constructSafelyShaped(ItemStack output, Object[] inputs) {
        try {
            return new ShapedOreRecipe(output, inputs);
        } catch (RuntimeException e) {
            return null;
        }
    }

    /**
     * Returns the last registered recipe of the given type whose output matches
     * {@code stack}, or null when none matches.
     */
    private static IRecipe findRecipe(ItemStack stack, int type) {
        final IRecipe[] recipe = {null};
        FurnaceCraftingManager.getInstance().getRecipeList().stream()
                .filter(xRecipe -> {
                    if (type == TYPE_SHAPELESS) {
                        return xRecipe instanceof ShapelessOreRecipe || xRecipe instanceof ShapelessRecipes;
                    } else if (type == TYPE_SHAPED) {
                        return xRecipe instanceof ShapedOreRecipe;
                    }
                    // the manager's list is untyped; keep only actual recipes
                    return xRecipe instanceof IRecipe;
                })
                .forEachOrdered(xRecipe -> {
                    if (areEqual(stack, ((IRecipe) xRecipe).getRecipeOutput())) {
                        recipe[0] = (IRecipe) xRecipe;
                    }
                });
        return recipe[0];
    }

    /** Undoable action that registers a single recipe with the furnace crafting manager. */
    private static class FurnaceAdd implements IUndoableAction {
        private final ItemStack output;
        private final IRecipe recipe;
        private boolean applied = false;

        public FurnaceAdd(IItemStack output, IRecipe recipe) {
            this.output = toStack(output);
            this.recipe = recipe;
        }

        @Override
        public void apply() {
            if (!applied) {
                FurnaceCraftingManager.getInstance().addRecipe(recipe);
                applied = true;
            }
        }

        @Override
        public boolean canUndo() {
            return true;
        }

        @Override
        public void undo() {
            if (applied) {
                FurnaceCraftingManager.getInstance().getRecipeList().remove(recipe);
                applied = false;
            }
        }

        @Override
        public String describe() {
            return "Adding Large Furnace recipe for " + CommandLoggerBase.getFullObjectDeclaration(output);
        }

        @Override
        public String describeUndo() {
            return "Removing Large Furnace recipe for " + CommandLoggerBase.getFullObjectDeclaration(output);
        }

        @Override
        public Object getOverrideKey() {
            return null;
        }
    }

    /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

    /** Removes all shapeless recipes producing {@code output}. */
    @ZenMethod
    public static void removeShapeless(IItemStack output) {
        doRemove(output, TYPE_SHAPELESS);
    }

    /** Removes all shaped recipes producing {@code output}. */
    @ZenMethod
    public static void removeShaped(IItemStack output) {
        doRemove(output, TYPE_SHAPED);
    }

    /** Removes all recipes (shaped or shapeless) producing {@code output}. */
    @ZenMethod
    public static void remove(IItemStack output) {
        doRemove(output, TYPE_ANY);
    }

    /** Validates the argument, checks a matching recipe exists, then applies an undoable removal. */
    private static void doRemove(IItemStack output, int type) {
        if (output == null) {
            MineTweakerAPI.getLogger().logError("Large Furnace: Output must not be null!");
            return;
        }
        ItemStack sOutput = toStack(output, true);
        if (findRecipe(sOutput, type) == null) {
            MineTweakerAPI.getLogger().logError("Large Furnace: No recipe for " + output.toString());
            return;
        }
        MineTweakerAPI.apply(new FurnaceRemove(sOutput, type));
    }

    /** Undoable action that removes every matching recipe and can re-add them on undo. */
    private static class FurnaceRemove implements IUndoableAction {
        private final ItemStack output;
        // Collected at construction time so undo can restore exactly what was removed.
        private final ArrayList<IRecipe> recipes = new ArrayList<>();
        private boolean applied = false;

        public FurnaceRemove(ItemStack output, int type) {
            this.output = output;
            FurnaceCraftingManager.getInstance().getRecipeList().stream().filter(xRecipe -> xRecipe instanceof IRecipe)
                    .filter(xRecipe -> {
                        if (type == TYPE_SHAPELESS) {
                            return xRecipe instanceof ShapelessOreRecipe || xRecipe instanceof ShapelessRecipes;
                        } else if (type == TYPE_SHAPED) {
                            return xRecipe instanceof ShapedOreRecipe;
                        }
                        return xRecipe instanceof IRecipe;
                    })
                    .forEachOrdered(xRecipe -> {
                        if (areEqual(output, ((IRecipe) xRecipe).getRecipeOutput())) {
                            recipes.add((IRecipe) xRecipe);
                        }
                    });
        }

        @Override
        public void apply() {
            if (!applied) {
                recipes.stream().forEachOrdered(recipe -> FurnaceCraftingManager.getInstance().getRecipeList().remove(recipe));
                applied = true;
            }
        }

        @Override
        public boolean canUndo() {
            return true;
        }

        @Override
        public void undo() {
            if (applied) {
                recipes.stream().forEachOrdered(recipe -> FurnaceCraftingManager.getInstance().addRecipe(recipe));
                applied = false;
            }
        }

        @Override
        public String describe() {
            return "Removing Large Furnace recipes for " + CommandLoggerBase.getFullObjectDeclaration(output);
        }

        @Override
        public String describeUndo() {
            return "Restoring Large Furnace recipes for " + CommandLoggerBase.getFullObjectDeclaration(output);
        }

        @Override
        public Object getOverrideKey() {
            return null;
        }
    }
}
| |
/*
* Copyright (c) 2013 by Gerrit Grunwald
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.hansolo.enzo.gauge;
import com.sun.javafx.css.converters.PaintConverter;
import eu.hansolo.enzo.common.Section;
import eu.hansolo.enzo.gauge.skin.SimpleGaugeSkin;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.DoubleProperty;
import javafx.beans.property.DoublePropertyBase;
import javafx.beans.property.IntegerProperty;
import javafx.beans.property.IntegerPropertyBase;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.ReadOnlyBooleanProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleDoubleProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.css.CssMetaData;
import javafx.css.Styleable;
import javafx.css.StyleableObjectProperty;
import javafx.css.StyleableProperty;
import javafx.scene.control.Control;
import javafx.scene.control.Skin;
import javafx.scene.paint.Color;
import javafx.scene.paint.Paint;
import javafx.util.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* Created by
* User: hansolo
* Date: 01.04.13
* Time: 17:10
*/
public class SimpleGauge extends Control {
public static final String STYLE_CLASS_BLUE_TO_RED_5 = "blue-to-red-5";
public static final String STYLE_CLASS_RED_TO_BLUE_5 = "red-to-blue-5";
public static final String STYLE_CLASS_GREEN_TO_DARKGREEN_6 = "green-to-darkgreen-6";
public static final String STYLE_CLASS_GREEN_TO_RED_6 = "green-to-red-6";
public static final String STYLE_CLASS_RED_TO_GREEN_6 = "red-to-green-6";
public static final String STYLE_CLASS_BLUE_TO_RED_6 = "blue-to-red-6";
public static final String STYLE_CLASS_PURPLE_TO_RED_6 = "purple-to-red-6";
public static final String STYLE_CLASS_GREEN_TO_RED_7 = "green-to-red-7";
public static final String STYLE_CLASS_RED_TO_GREEN_7 = "red-to-green-7";
public static final String STYLE_CLASS_GREEN_TO_RED_10 = "green-to-red-10";
public static final String STYLE_CLASS_RED_TO_GREEN_10 = "red-to-green-10";
public static final String STYLE_CLASS_PURPLE_TO_CYAN_10 = "purple-to-cyan-10";
// Default section colors
private static final Color DEFAULT_VALUE_TEXT_COLOR = Color.web("#ffffff");
private static final Color DEFAULT_TITLE_TEXT_COLOR = Color.web("#ffffff");
private static final Color DEFAULT_SECTION_TEXT_COLOR = Color.web("#ffffff");
private static final Color DEFAULT_SECTION_FILL_0 = Color.web("#f3622d");
private static final Color DEFAULT_SECTION_FILL_1 = Color.web("#fba71b");
private static final Color DEFAULT_SECTION_FILL_2 = Color.web("#57b757");
private static final Color DEFAULT_SECTION_FILL_3 = Color.web("#f5982b");
private static final Color DEFAULT_SECTION_FILL_4 = Color.web("#41a9c9");
private static final Color DEFAULT_SECTION_FILL_5 = Color.web("#4258c9");
private static final Color DEFAULT_SECTION_FILL_6 = Color.web("#9a42c8");
private static final Color DEFAULT_SECTION_FILL_7 = Color.web("#c84164");
private static final Color DEFAULT_SECTION_FILL_8 = Color.web("#888888");
private static final Color DEFAULT_SECTION_FILL_9 = Color.web("#aaaaaa");
// Default measured range color
private static final Color DEFAULT_RANGE_FILL = Color.rgb(0, 0, 0, 0.25);
private DoubleProperty value;
private double oldValue;
private DoubleProperty minValue;
private double exactMinValue;
private DoubleProperty maxValue;
private double exactMaxValue;
private double minMeasuredValue;
private double maxMeasuredValue;
private int _decimals;
private IntegerProperty decimals;
private String _unit;
private StringProperty unit;
private boolean _animated;
private BooleanProperty animated;
private double animationDuration;
private double _startAngle;
private DoubleProperty startAngle;
private double _angleRange;
private DoubleProperty angleRange;
private boolean _clockwise;
private BooleanProperty clockwise;
private boolean _autoScale;
private BooleanProperty autoScale;
private boolean _sectionTextVisible;
private BooleanProperty sectionTextVisible;
private boolean _sectionIconVisible;
private BooleanProperty sectionIconVisible;
private boolean _measuredRangeVisible;
private BooleanProperty measuredRangeVisible;
private Color _needleColor;
private ObjectProperty<Color> needleColor;
private ObservableList<Section> sections;
private double _majorTickSpace;
private DoubleProperty majorTickSpace;
private double _minorTickSpace;
private DoubleProperty minorTickSpace;
private String _title;
private StringProperty title;
// CSS styleable properties
private ObjectProperty<Paint> valueTextColor;
private ObjectProperty<Paint> titleTextColor;
private ObjectProperty<Paint> sectionTextColor;
private ObjectProperty<Paint> sectionFill0;
private ObjectProperty<Paint> sectionFill1;
private ObjectProperty<Paint> sectionFill2;
private ObjectProperty<Paint> sectionFill3;
private ObjectProperty<Paint> sectionFill4;
private ObjectProperty<Paint> sectionFill5;
private ObjectProperty<Paint> sectionFill6;
private ObjectProperty<Paint> sectionFill7;
private ObjectProperty<Paint> sectionFill8;
private ObjectProperty<Paint> sectionFill9;
private ObjectProperty<Paint> rangeFill;
// ******************** Constructors **************************************
/**
 * Creates a gauge with default state: range [0, 100], value 0, 270-degree
 * clockwise scale starting at 315 degrees, animation enabled (3000 ms).
 */
public SimpleGauge() {
    getStyleClass().add("simple-gauge");
    // value is always clamped into [minValue, maxValue] on every change.
    value = new DoublePropertyBase(0) {
        @Override protected void invalidated() {
            set(clamp(getMinValue(), getMaxValue(), get()));
        }
        @Override public Object getBean() { return this; }
        @Override public String getName() { return "value"; }
    };
    // Raising the minimum above the current value pushes the value up to it.
    minValue = new DoublePropertyBase(0) {
        @Override protected void invalidated() {
            if (getValue() < get()) setValue(get());
        }
        @Override public Object getBean() { return this; }
        @Override public String getName() { return "minValue"; }
    };
    // Lowering the maximum below the current value pulls the value down to it.
    maxValue = new DoublePropertyBase(100) {
        @Override protected void invalidated() {
            if (getValue() > get()) setValue(get());
        }
        @Override public Object getBean() { return this; }
        @Override public String getName() { return "maxValue"; }
    };
    oldValue              = 0;
    _decimals             = 0;
    _unit                 = "";
    _animated             = true;
    _startAngle           = 315;
    _angleRange           = 270;
    _clockwise            = true;
    _autoScale            = false;
    _needleColor          = Color.web("#5a615f");
    _sectionTextVisible   = false;
    _sectionIconVisible   = false;
    _measuredRangeVisible = false;
    sections              = FXCollections.observableArrayList();
    _majorTickSpace       = 10;
    _minorTickSpace       = 1;
    _title                = "";
    animationDuration     = 3000;
}
// ******************** Methods *******************************************
/** @return the current gauge value. */
public final double getValue() {
    return value.get();
}

/**
 * Updates the gauge value, remembering the previous one in {@code oldValue}
 * (the value property itself clamps into [min, max] on invalidation).
 */
public final void setValue(final double VALUE) {
    oldValue = value.get();
    value.set(VALUE);
}

/** @return the value property. */
public final DoubleProperty valueProperty() {
    return value;
}

/** @return the value the gauge held before the most recent setValue call. */
public final double getOldValue() {
    return oldValue;
}

/** @return the lower bound of the scale. */
public final double getMinValue() {
    return minValue.get();
}

/** Sets the lower bound of the scale. */
public final void setMinValue(final double MIN_VALUE) {
    minValue.set(MIN_VALUE);
}

/** @return the minValue property. */
public final DoubleProperty minValueProperty() {
    return minValue;
}

/** @return the upper bound of the scale. */
public final double getMaxValue() {
    return maxValue.get();
}

/** Sets the upper bound of the scale. */
public final void setMaxValue(final double MAX_VALUE) {
    maxValue.set(MAX_VALUE);
}

/** @return the maxValue property. */
public final DoubleProperty maxValueProperty() {
    return maxValue;
}

/** @return the smallest value measured since the last reset. */
public final double getMinMeasuredValue() {
    return minMeasuredValue;
}

/** Sets the smallest measured value, clamped into [minValue, maxValue]. */
public final void setMinMeasuredValue(final double MIN_MEASURED_VALUE) {
    minMeasuredValue = clamp(getMinValue(), getMaxValue(), MIN_MEASURED_VALUE);
}

/** @return the largest value measured since the last reset. */
public final double getMaxMeasuredValue() {
    return maxMeasuredValue;
}

/** Sets the largest measured value, clamped into [minValue, maxValue]. */
public final void setMaxMeasuredValue(final double MAX_MEASURED_VALUE) {
    maxMeasuredValue = clamp(getMinValue(), getMaxValue(), MAX_MEASURED_VALUE);
}

/** Collapses the measured range back onto the current value. */
public final void resetMinMaxMeasuredValues() {
    final double current = getValue();
    minMeasuredValue = current;
    maxMeasuredValue = current;
}
/** @return the number of decimals shown for the value (0..3, default 0) */
public final int getDecimals() {
    return null == decimals ? _decimals : decimals.get();
}
/**
 * Sets the number of decimals shown for the value; clamped to 0..3
 * (directly here for the backing field, or by the property's invalidation).
 * @param DECIMALS the desired number of decimals
 */
public final void setDecimals(final int DECIMALS) {
    if (null == decimals) {
        _decimals = clamp(0, 3, DECIMALS);
    } else {
        decimals.set(DECIMALS);
    }
}
/** @return the lazily created decimals property */
public final IntegerProperty decimalsProperty() {
    if (null == decimals) {
        decimals = new IntegerPropertyBase(_decimals) {
            @Override protected void invalidated() { set(clamp(0, 3, get())); }
            // Property.getBean() must return the bean that contains the
            // property (the gauge), not the property object itself.
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "decimals"; }
        };
    }
    return decimals;
}
/** @return the unit text shown next to the value (default "") */
public final String getUnit() {
    return null == unit ? _unit : unit.get();
}
/** @param UNIT the unit text shown next to the value */
public final void setUnit(final String UNIT) {
    if (null == unit) {
        _unit = UNIT;
    } else {
        unit.set(UNIT);
    }
}
/** @return the lazily created unit property */
public final StringProperty unitProperty() {
    if (null == unit) {
        unit = new SimpleStringProperty(this, "unit", _unit);
    }
    return unit;
}
/** @return true if value changes are animated (default true) */
public final boolean isAnimated() {
    return null == animated ? _animated : animated.get();
}
/** @param ANIMATED true to animate value changes */
public final void setAnimated(final boolean ANIMATED) {
    if (null == animated) {
        _animated = ANIMATED;
    } else {
        animated.set(ANIMATED);
    }
}
/** @return the lazily created animated property */
public final BooleanProperty animatedProperty() {
    if (null == animated) {
        animated = new SimpleBooleanProperty(this, "animated", _animated);
    }
    return animated;
}
/** @return the start angle of the scale in degrees (default 315) */
public double getStartAngle() {
    return null == startAngle ? _startAngle : startAngle.get();
}
/**
 * Sets the start angle of the scale; the value is clamped to 0..360
 * (directly here for the backing field, or by the property's invalidation).
 * @param START_ANGLE the new start angle in degrees
 */
public final void setStartAngle(final double START_ANGLE) {
    if (null == startAngle) {
        _startAngle = clamp(0, 360, START_ANGLE);
    } else {
        startAngle.set(START_ANGLE);
    }
}
/** @return the lazily created startAngle property */
public final DoubleProperty startAngleProperty() {
    if (null == startAngle) {
        startAngle = new DoublePropertyBase(_startAngle) {
            @Override protected void invalidated() {
                set(clamp(0d, 360d, get()));
            }
            // Property.getBean() must return the bean that contains the
            // property (the gauge), not the property object itself.
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "startAngle"; }
        };
    }
    return startAngle;
}
/** @return the animation duration in milliseconds (default 3000) */
public final double getAnimationDuration() {
    return animationDuration;
}
/** Sets the animation duration in milliseconds, clamped to 20..5000. */
public final void setAnimationDuration(final double ANIMATION_DURATION) {
    animationDuration = clamp(20, 5000, ANIMATION_DURATION);
}
/** @return the angular range of the scale in degrees (default 270) */
public final double getAngleRange() {
    return null == angleRange ? _angleRange : angleRange.get();
}
/**
 * Sets the angular range of the scale; the value is clamped to 0..360
 * (directly here for the backing field, or by the property's invalidation).
 * @param ANGLE_RANGE the new angular range in degrees
 */
public final void setAngleRange(final double ANGLE_RANGE) {
    if (null == angleRange) {
        _angleRange = clamp(0.0, 360.0, ANGLE_RANGE);
    } else {
        angleRange.set(ANGLE_RANGE);
    }
}
/** @return the lazily created angleRange property */
public final DoubleProperty angleRangeProperty() {
    if (null == angleRange) {
        angleRange = new DoublePropertyBase(_angleRange) {
            @Override protected void invalidated() { set(clamp(0d, 360d, get())); }
            // Property.getBean() must return the bean that contains the
            // property (the gauge), not the property object itself.
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "angleRange"; }
        };
    }
    return angleRange;
}
/** @return true if the scale runs clockwise (default true) */
public final boolean isClockwise() {
    return null == clockwise ? _clockwise : clockwise.get();
}
/** @param CLOCKWISE true for a clockwise scale */
public final void setClockwise(final boolean CLOCKWISE) {
    if (null == clockwise) {
        _clockwise = CLOCKWISE;
    } else {
        clockwise.set(CLOCKWISE);
    }
}
/** @return the lazily created clockwise property */
public final BooleanProperty clockwiseProperty() {
    if (null == clockwise) {
        clockwise = new SimpleBooleanProperty(this, "clockwise", _clockwise);
    }
    return clockwise;
}
/** @return true if the scale is automatically adjusted to "nice" values */
public final boolean isAutoScale() {
    return null == autoScale ? _autoScale : autoScale.get();
}
/**
 * Enables or disables auto scaling. When enabling, the exact user-defined
 * range is remembered first; when disabling, that exact range is restored.
 * @param AUTO_SCALE true to enable auto scaling
 */
public final void setAutoScale(final boolean AUTO_SCALE) {
    if (AUTO_SCALE) {
        // remember the exact range so it can be restored later
        exactMinValue = getMinValue();
        exactMaxValue = getMaxValue();
    } else {
        setMinValue(exactMinValue);
        setMaxValue(exactMaxValue);
    }
    if (null == autoScale) {
        _autoScale = AUTO_SCALE;
    } else {
        autoScale.set(AUTO_SCALE);
    }
}
/** @return the lazily created autoScale property */
public final BooleanProperty autoScaleProperty() {
    if (null == autoScale) {
        autoScale = new SimpleBooleanProperty(this, "autoScale", _autoScale);
    }
    return autoScale;
}
// Properties related to visualization
/** @return the needle color (default #5a615f) */
public final Color getNeedleColor() {
    return null == needleColor ? _needleColor : needleColor.get();
}
/** @param NEEDLE_COLOR the new needle color */
public final void setNeedleColor(final Color NEEDLE_COLOR) {
    if (null == needleColor) {
        _needleColor = NEEDLE_COLOR;
    } else {
        needleColor.set(NEEDLE_COLOR);
    }
}
/** @return the lazily created needleColor property */
public final ObjectProperty<Color> needleColorProperty() {
    if (null == needleColor) {
        needleColor = new SimpleObjectProperty<>(this, "needleColor", _needleColor);
    }
    return needleColor;
}
/** @return the observable list of gauge sections */
public final ObservableList<Section> getSections() {
    return sections;
}
/**
 * Replaces all sections with the given list.
 * @param SECTIONS the new sections
 */
public final void setSections(final List<Section> SECTIONS) {
    sections.setAll(SECTIONS);
}
/**
 * Replaces all sections with the given sections.
 * @param SECTIONS the new sections
 */
public final void setSections(final Section... SECTIONS) {
    setSections(Arrays.asList(SECTIONS));
}
/**
 * Adds the given section if not already present (prevents duplicates).
 * @param SECTION the section to add
 */
public final void addSection(final Section SECTION) {
    if (!sections.contains(SECTION)) sections.add(SECTION);
}
/**
 * Removes the given section if present. List.remove() is already a no-op
 * for absent elements, so no contains() pre-check is needed.
 * @param SECTION the section to remove
 */
public final void removeSection(final Section SECTION) {
    sections.remove(SECTION);
}
/** @return the distance between major tick marks in value units (default 10) */
public final double getMajorTickSpace() {
    return null == majorTickSpace ? _majorTickSpace : majorTickSpace.get();
}
/** @param MAJOR_TICK_SPACE the new distance between major tick marks */
public final void setMajorTickSpace(final double MAJOR_TICK_SPACE) {
    if (null == majorTickSpace) {
        _majorTickSpace = MAJOR_TICK_SPACE;
    } else {
        majorTickSpace.set(MAJOR_TICK_SPACE);
    }
}
/** @return the lazily created majorTickSpace property */
public final DoubleProperty majorTickSpaceProperty() {
    if (null == majorTickSpace) {
        majorTickSpace = new SimpleDoubleProperty(this, "majorTickSpace", _majorTickSpace);
    }
    return majorTickSpace;
}
/** @return the distance between minor tick marks in value units (default 1) */
public final double getMinorTickSpace() {
    return null == minorTickSpace ? _minorTickSpace : minorTickSpace.get();
}
/** @param MINOR_TICK_SPACE the new distance between minor tick marks */
public final void setMinorTickSpace(final double MINOR_TICK_SPACE) {
    if (null == minorTickSpace) {
        _minorTickSpace = MINOR_TICK_SPACE;
    } else {
        minorTickSpace.set(MINOR_TICK_SPACE);
    }
}
/** @return the lazily created minorTickSpace property */
public final DoubleProperty minorTickSpaceProperty() {
    if (null == minorTickSpace) {
        minorTickSpace = new SimpleDoubleProperty(this, "minorTickSpace", _minorTickSpace);
    }
    return minorTickSpace;
}
/** @return the title text (default "") */
public final String getTitle() {
    return null == title ? _title : title.get();
}
/** @param TITLE the new title text */
public final void setTitle(final String TITLE) {
    if (null == title) {
        _title = TITLE;
    } else {
        title.set(TITLE);
    }
}
/** @return the lazily created title property */
public final StringProperty titleProperty() {
    if (null == title) {
        title = new SimpleStringProperty(this, "title", _title);
    }
    return title;
}
/** @return true if section text is shown (default false) */
public final boolean isSectionTextVisible() {
    return null == sectionTextVisible ? _sectionTextVisible : sectionTextVisible.get();
}
/** @param SECTION_TEXT_VISIBLE true to show section text */
public final void setSectionTextVisible(final boolean SECTION_TEXT_VISIBLE) {
    if (null == sectionTextVisible) {
        _sectionTextVisible = SECTION_TEXT_VISIBLE;
    } else {
        sectionTextVisible.set(SECTION_TEXT_VISIBLE);
    }
}
/** @return the lazily created sectionTextVisible property */
public final BooleanProperty sectionTextVisibleProperty() {
    if (null == sectionTextVisible) {
        sectionTextVisible = new SimpleBooleanProperty(this, "sectionTextVisible", _sectionTextVisible);
    }
    return sectionTextVisible;
}
/** @return true if section icons are shown (default false) */
public final boolean isSectionIconVisible() {
    return null == sectionIconVisible ? _sectionIconVisible : sectionIconVisible.get();
}
/** @param SECTION_ICON_VISIBLE true to show section icons */
public final void setSectionIconVisible(final boolean SECTION_ICON_VISIBLE) {
    if (null == sectionIconVisible) {
        _sectionIconVisible = SECTION_ICON_VISIBLE;
    } else {
        sectionIconVisible.set(SECTION_ICON_VISIBLE);
    }
}
/** @return the lazily created sectionIconVisible property */
public final BooleanProperty sectionIconVisibleProperty() {
    if (null == sectionIconVisible) {
        sectionIconVisible = new SimpleBooleanProperty(this, "sectionIconVisible", _sectionIconVisible);
    }
    return sectionIconVisible;
}
/** @return true if the measured min/max range is visualized (default false) */
public final boolean isMeasuredRangeVisible() {
    return null == measuredRangeVisible ? _measuredRangeVisible : measuredRangeVisible.get();
}
/** @param MEASURED_RANGE_VISIBLE true to visualize the measured range */
public final void setMeasuredRangeVisible(final boolean MEASURED_RANGE_VISIBLE) {
    if (null == measuredRangeVisible) {
        _measuredRangeVisible = MEASURED_RANGE_VISIBLE;
    } else {
        measuredRangeVisible.set(MEASURED_RANGE_VISIBLE);
    }
}
// Exposed read-only, although backed by a writable SimpleBooleanProperty;
// external code is expected to mutate it only via the setter above.
/** @return the lazily created measuredRangeVisible property (read-only view) */
public final ReadOnlyBooleanProperty measuredRangeVisibleProperty() {
    if (null == measuredRangeVisible) {
        measuredRangeVisible = new SimpleBooleanProperty(this, "measuredRangeVisible", _measuredRangeVisible);
    }
    return measuredRangeVisible;
}
/** Clamps VALUE into [MIN_VALUE, MAX_VALUE]. */
private double clamp(final double MIN_VALUE, final double MAX_VALUE, final double VALUE) {
    return VALUE < MIN_VALUE ? MIN_VALUE : (VALUE > MAX_VALUE ? MAX_VALUE : VALUE);
}
/** Clamps VALUE into [MIN_VALUE, MAX_VALUE]. */
private int clamp(final int MIN_VALUE, final int MAX_VALUE, final int VALUE) {
    return VALUE < MIN_VALUE ? MIN_VALUE : (VALUE > MAX_VALUE ? MAX_VALUE : VALUE);
}
/** Clamps VALUE into [MIN_VALUE, MAX_VALUE]. */
private Duration clamp(final Duration MIN_VALUE, final Duration MAX_VALUE, final Duration VALUE) {
    return VALUE.lessThan(MIN_VALUE) ? MIN_VALUE : (VALUE.greaterThan(MAX_VALUE) ? MAX_VALUE : VALUE);
}
/**
 * Recomputes "nice" min/max values and major/minor tick spacings for the
 * current range. Only has an effect while autoScale is enabled.
 * Uses the public accessors instead of the property fields, because
 * majorTickSpace/minorTickSpace are created lazily and may still be null
 * here (direct field access risked a NullPointerException).
 */
public void calcAutoScale() {
    if (isAutoScale()) {
        final double maxNoOfMajorTicks = 10;
        final double maxNoOfMinorTicks = 10;
        final double niceRange = calcNiceNumber(getMaxValue() - getMinValue(), false);
        setMajorTickSpace(calcNiceNumber(niceRange / (maxNoOfMajorTicks - 1), true));
        final double niceMinValue = Math.floor(getMinValue() / getMajorTickSpace()) * getMajorTickSpace();
        final double niceMaxValue = Math.ceil(getMaxValue() / getMajorTickSpace()) * getMajorTickSpace();
        setMinorTickSpace(calcNiceNumber(getMajorTickSpace() / (maxNoOfMinorTicks - 1), true));
        setMinValue(niceMinValue);
        setMaxValue(niceMaxValue);
    }
}
/**
 * Returns a "nice" number approximately equal to RANGE: the fractional
 * part is snapped to 1, 2, 5 or 10 times a power of ten.
 * If ROUND is true the fraction is rounded to the nearest nice value,
 * otherwise the smallest nice value greater than or equal to the
 * fraction is chosen (ceiling).
 *
 * @param RANGE the value range (maxValue - minValue)
 * @param ROUND whether to round the result or ceil
 * @return a "nice" number to be used for the value range
 */
private double calcNiceNumber(final double RANGE, final boolean ROUND) {
    final double exponent = Math.floor(Math.log10(RANGE));
    final double fraction = RANGE / Math.pow(10, exponent);
    final double niceFraction = ROUND
        ? (fraction < 1.5 ? 1 : fraction < 3 ? 2 : fraction < 7 ? 5 : 10)
        : (Double.compare(fraction, 1) <= 0 ? 1
         : Double.compare(fraction, 2) <= 0 ? 2
         : Double.compare(fraction, 5) <= 0 ? 5
         : 10);
    return niceFraction * Math.pow(10, exponent);
}
/**
 * Validation hook; the range and section checks are currently disabled
 * (commented out), so this method is intentionally a no-op.
 */
private void validate() {
    /*
    if (getValue() < getMinValue()) setValue(getMinValue());
    if (getValue() > getMaxValue()) setValue(getMaxValue());
    for (Section section : sections) {
        if (section.getStart() < getMinValue()) section.setStart(getMinValue());
        if (section.getStart() > getMaxValue()) section.setStart(getMaxValue());
        if (section.getStop() < getMinValue()) section.setStop(getMinValue());
        if (section.getStop() > getMaxValue()) section.setStop(getMaxValue());
    }
    */
}
// ******************** CSS Stylable Properties ***************************
// All property accessors below follow the same lazy StyleableObjectProperty
// pattern. In every anonymous property, getBean() now returns the enclosing
// gauge: Property.getBean() is specified to return the bean that CONTAINS
// the property, not the property object itself.
/** @return the paint used for the value text (CSS "-value-text") */
public final Paint getValueTextColor() {
    return null == valueTextColor ? DEFAULT_VALUE_TEXT_COLOR : valueTextColor.get();
}
public final void setValueTextColor(Paint value) {
    valueTextColorProperty().set(value);
}
public final ObjectProperty<Paint> valueTextColorProperty() {
    if (null == valueTextColor) {
        valueTextColor = new StyleableObjectProperty<Paint>(DEFAULT_VALUE_TEXT_COLOR) {
            @Override public CssMetaData getCssMetaData() { return StyleableProperties.VALUE_TEXT_COLOR; }
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "valueTextColor"; }
        };
    }
    return valueTextColor;
}
/** @return the paint used for the title text (CSS "-title-text") */
public final Paint getTitleTextColor() {
    return null == titleTextColor ? DEFAULT_TITLE_TEXT_COLOR : titleTextColor.get();
}
public final void setTitleTextColor(Paint value) {
    titleTextColorProperty().set(value);
}
public final ObjectProperty<Paint> titleTextColorProperty() {
    if (null == titleTextColor) {
        titleTextColor = new StyleableObjectProperty<Paint>(DEFAULT_TITLE_TEXT_COLOR) {
            @Override public CssMetaData getCssMetaData() { return StyleableProperties.TITLE_TEXT_COLOR; }
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "titleTextColor"; }
        };
    }
    return titleTextColor;
}
/** @return the paint used for section text (CSS "-section-text") */
public final Paint getSectionTextColor() {
    return null == sectionTextColor ? DEFAULT_SECTION_TEXT_COLOR : sectionTextColor.get();
}
public final void setSectionTextColor(Paint value) {
    sectionTextColorProperty().set(value);
}
public final ObjectProperty<Paint> sectionTextColorProperty() {
    if (null == sectionTextColor) {
        sectionTextColor = new StyleableObjectProperty<Paint>(DEFAULT_SECTION_TEXT_COLOR) {
            @Override public CssMetaData getCssMetaData() { return StyleableProperties.SECTION_TEXT_COLOR; }
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "sectionTextColor"; }
        };
    }
    return sectionTextColor;
}
/** @return the fill of section 0 (CSS "-section-fill-0") */
public final Paint getSectionFill0() {
    return null == sectionFill0 ? DEFAULT_SECTION_FILL_0 : sectionFill0.get();
}
public final void setSectionFill0(Paint value) {
    sectionFill0Property().set(value);
}
public final ObjectProperty<Paint> sectionFill0Property() {
    if (null == sectionFill0) {
        sectionFill0 = new StyleableObjectProperty<Paint>(DEFAULT_SECTION_FILL_0) {
            @Override public CssMetaData getCssMetaData() { return StyleableProperties.SECTION_FILL_0; }
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "sectionFill0"; }
        };
    }
    return sectionFill0;
}
/** @return the fill of section 1 (CSS "-section-fill-1") */
public final Paint getSectionFill1() {
    return null == sectionFill1 ? DEFAULT_SECTION_FILL_1 : sectionFill1.get();
}
public final void setSectionFill1(Paint value) {
    sectionFill1Property().set(value);
}
public final ObjectProperty<Paint> sectionFill1Property() {
    if (null == sectionFill1) {
        sectionFill1 = new StyleableObjectProperty<Paint>(DEFAULT_SECTION_FILL_1) {
            @Override public CssMetaData getCssMetaData() { return StyleableProperties.SECTION_FILL_1; }
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "sectionFill1"; }
        };
    }
    return sectionFill1;
}
/** @return the fill of section 2 (CSS "-section-fill-2") */
public final Paint getSectionFill2() {
    return null == sectionFill2 ? DEFAULT_SECTION_FILL_2 : sectionFill2.get();
}
public final void setSectionFill2(Paint value) {
    sectionFill2Property().set(value);
}
public final ObjectProperty<Paint> sectionFill2Property() {
    if (null == sectionFill2) {
        sectionFill2 = new StyleableObjectProperty<Paint>(DEFAULT_SECTION_FILL_2) {
            @Override public CssMetaData getCssMetaData() { return StyleableProperties.SECTION_FILL_2; }
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "sectionFill2"; }
        };
    }
    return sectionFill2;
}
/** @return the fill of section 3 (CSS "-section-fill-3") */
public final Paint getSectionFill3() {
    return null == sectionFill3 ? DEFAULT_SECTION_FILL_3 : sectionFill3.get();
}
public final void setSectionFill3(Paint value) {
    sectionFill3Property().set(value);
}
public final ObjectProperty<Paint> sectionFill3Property() {
    if (null == sectionFill3) {
        sectionFill3 = new StyleableObjectProperty<Paint>(DEFAULT_SECTION_FILL_3) {
            @Override public CssMetaData getCssMetaData() { return StyleableProperties.SECTION_FILL_3; }
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "sectionFill3"; }
        };
    }
    return sectionFill3;
}
/** @return the fill of section 4 (CSS "-section-fill-4") */
public final Paint getSectionFill4() {
    return null == sectionFill4 ? DEFAULT_SECTION_FILL_4 : sectionFill4.get();
}
public final void setSectionFill4(Paint value) {
    sectionFill4Property().set(value);
}
public final ObjectProperty<Paint> sectionFill4Property() {
    if (null == sectionFill4) {
        sectionFill4 = new StyleableObjectProperty<Paint>(DEFAULT_SECTION_FILL_4) {
            @Override public CssMetaData getCssMetaData() { return StyleableProperties.SECTION_FILL_4; }
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "sectionFill4"; }
        };
    }
    return sectionFill4;
}
/** @return the fill of section 5 (CSS "-section-fill-5") */
public final Paint getSectionFill5() {
    return null == sectionFill5 ? DEFAULT_SECTION_FILL_5 : sectionFill5.get();
}
public final void setSectionFill5(Paint value) {
    sectionFill5Property().set(value);
}
public final ObjectProperty<Paint> sectionFill5Property() {
    if (null == sectionFill5) {
        sectionFill5 = new StyleableObjectProperty<Paint>(DEFAULT_SECTION_FILL_5) {
            @Override public CssMetaData getCssMetaData() { return StyleableProperties.SECTION_FILL_5; }
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "sectionFill5"; }
        };
    }
    return sectionFill5;
}
/** @return the fill of section 6 (CSS "-section-fill-6") */
public final Paint getSectionFill6() {
    return null == sectionFill6 ? DEFAULT_SECTION_FILL_6 : sectionFill6.get();
}
public final void setSectionFill6(Paint value) {
    sectionFill6Property().set(value);
}
public final ObjectProperty<Paint> sectionFill6Property() {
    if (null == sectionFill6) {
        sectionFill6 = new StyleableObjectProperty<Paint>(DEFAULT_SECTION_FILL_6) {
            @Override public CssMetaData getCssMetaData() { return StyleableProperties.SECTION_FILL_6; }
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "sectionFill6"; }
        };
    }
    return sectionFill6;
}
/** @return the fill of section 7 (CSS "-section-fill-7") */
public final Paint getSectionFill7() {
    return null == sectionFill7 ? DEFAULT_SECTION_FILL_7 : sectionFill7.get();
}
public final void setSectionFill7(Paint value) {
    sectionFill7Property().set(value);
}
public final ObjectProperty<Paint> sectionFill7Property() {
    if (null == sectionFill7) {
        sectionFill7 = new StyleableObjectProperty<Paint>(DEFAULT_SECTION_FILL_7) {
            @Override public CssMetaData getCssMetaData() { return StyleableProperties.SECTION_FILL_7; }
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "sectionFill7"; }
        };
    }
    return sectionFill7;
}
/** @return the fill of section 8 (CSS "-section-fill-8") */
public final Paint getSectionFill8() {
    return null == sectionFill8 ? DEFAULT_SECTION_FILL_8 : sectionFill8.get();
}
public final void setSectionFill8(Paint value) {
    sectionFill8Property().set(value);
}
public final ObjectProperty<Paint> sectionFill8Property() {
    if (null == sectionFill8) {
        sectionFill8 = new StyleableObjectProperty<Paint>(DEFAULT_SECTION_FILL_8) {
            @Override public CssMetaData getCssMetaData() { return StyleableProperties.SECTION_FILL_8; }
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "sectionFill8"; }
        };
    }
    return sectionFill8;
}
/** @return the fill of section 9 (CSS "-section-fill-9") */
public final Paint getSectionFill9() {
    return null == sectionFill9 ? DEFAULT_SECTION_FILL_9 : sectionFill9.get();
}
public final void setSectionFill9(Paint value) {
    sectionFill9Property().set(value);
}
public final ObjectProperty<Paint> sectionFill9Property() {
    if (null == sectionFill9) {
        sectionFill9 = new StyleableObjectProperty<Paint>(DEFAULT_SECTION_FILL_9) {
            @Override public CssMetaData getCssMetaData() { return StyleableProperties.SECTION_FILL_9; }
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "sectionFill9"; }
        };
    }
    return sectionFill9;
}
/** @return the fill used for the measured range indicator (CSS "-range-fill") */
public final Paint getRangeFill() {
    return null == rangeFill ? DEFAULT_RANGE_FILL : rangeFill.get();
}
public final void setRangeFill(Paint value) {
    rangeFillProperty().set(value);
}
public final ObjectProperty<Paint> rangeFillProperty() {
    if (null == rangeFill) {
        rangeFill = new StyleableObjectProperty<Paint>(DEFAULT_RANGE_FILL) {
            @Override public CssMetaData getCssMetaData() { return StyleableProperties.RANGE_FILL; }
            @Override public Object getBean() { return SimpleGauge.this; }
            @Override public String getName() { return "rangeFill"; }
        };
    }
    return rangeFill;
}
// ******************** Style related *************************************
/** Creates the default skin for this control. */
@Override protected Skin createDefaultSkin() {
    return new SimpleGaugeSkin(this);
}
/** @return the stylesheet shipped with this control (simplegauge.css) */
@Override public String getUserAgentStylesheet() {
    // NOTE(review): getResource() returns null if simplegauge.css is missing
    // from the classpath, which would surface here as an NPE - verify packaging.
    return getClass().getResource("simplegauge.css").toExternalForm();
}
/**
 * CSS metadata for the styleable properties of this gauge: which CSS
 * property name maps to which StyleableProperty, its converter and its
 * default value. Each entry is settable only while the corresponding
 * property is either not yet created or not bound.
 */
private static class StyleableProperties {
    private static final CssMetaData<SimpleGauge, Paint> VALUE_TEXT_COLOR =
        new CssMetaData<SimpleGauge, Paint>("-value-text", PaintConverter.getInstance(), DEFAULT_VALUE_TEXT_COLOR) {
            @Override public boolean isSettable(SimpleGauge gauge) {
                return null == gauge.valueTextColor || !gauge.valueTextColor.isBound();
            }
            @Override public StyleableProperty<Paint> getStyleableProperty(SimpleGauge gauge) {
                return (StyleableProperty) gauge.valueTextColorProperty();
            }
            @Override public Paint getInitialValue(SimpleGauge gauge) {
                return gauge.getValueTextColor();
            }
        };
    private static final CssMetaData<SimpleGauge, Paint> TITLE_TEXT_COLOR =
        new CssMetaData<SimpleGauge, Paint>("-title-text", PaintConverter.getInstance(), DEFAULT_TITLE_TEXT_COLOR) {
            @Override public boolean isSettable(SimpleGauge gauge) {
                return null == gauge.titleTextColor || !gauge.titleTextColor.isBound();
            }
            @Override public StyleableProperty<Paint> getStyleableProperty(SimpleGauge gauge) {
                return (StyleableProperty) gauge.titleTextColorProperty();
            }
            @Override public Paint getInitialValue(SimpleGauge gauge) {
                return gauge.getTitleTextColor();
            }
        };
    private static final CssMetaData<SimpleGauge, Paint> SECTION_TEXT_COLOR =
        new CssMetaData<SimpleGauge, Paint>("-section-text", PaintConverter.getInstance(), DEFAULT_SECTION_TEXT_COLOR) {
            @Override public boolean isSettable(SimpleGauge gauge) {
                return null == gauge.sectionTextColor || !gauge.sectionTextColor.isBound();
            }
            @Override public StyleableProperty<Paint> getStyleableProperty(SimpleGauge gauge) {
                return (StyleableProperty) gauge.sectionTextColorProperty();
            }
            @Override public Paint getInitialValue(SimpleGauge gauge) {
                return gauge.getSectionTextColor();
            }
        };
    private static final CssMetaData<SimpleGauge, Paint> SECTION_FILL_0 =
        new CssMetaData<SimpleGauge, Paint>("-section-fill-0", PaintConverter.getInstance(), DEFAULT_SECTION_FILL_0) {
            @Override public boolean isSettable(SimpleGauge gauge) {
                return null == gauge.sectionFill0 || !gauge.sectionFill0.isBound();
            }
            @Override public StyleableProperty<Paint> getStyleableProperty(SimpleGauge gauge) {
                return (StyleableProperty) gauge.sectionFill0Property();
            }
            @Override public Paint getInitialValue(SimpleGauge gauge) {
                return gauge.getSectionFill0();
            }
        };
    private static final CssMetaData<SimpleGauge, Paint> SECTION_FILL_1 =
        new CssMetaData<SimpleGauge, Paint>("-section-fill-1", PaintConverter.getInstance(), DEFAULT_SECTION_FILL_1) {
            @Override public boolean isSettable(SimpleGauge gauge) {
                return null == gauge.sectionFill1 || !gauge.sectionFill1.isBound();
            }
            @Override public StyleableProperty<Paint> getStyleableProperty(SimpleGauge gauge) {
                return (StyleableProperty) gauge.sectionFill1Property();
            }
            @Override public Paint getInitialValue(SimpleGauge gauge) {
                return gauge.getSectionFill1();
            }
        };
    private static final CssMetaData<SimpleGauge, Paint> SECTION_FILL_2 =
        new CssMetaData<SimpleGauge, Paint>("-section-fill-2", PaintConverter.getInstance(), DEFAULT_SECTION_FILL_2) {
            @Override public boolean isSettable(SimpleGauge gauge) {
                return null == gauge.sectionFill2 || !gauge.sectionFill2.isBound();
            }
            @Override public StyleableProperty<Paint> getStyleableProperty(SimpleGauge gauge) {
                return (StyleableProperty) gauge.sectionFill2Property();
            }
            @Override public Paint getInitialValue(SimpleGauge gauge) {
                return gauge.getSectionFill2();
            }
        };
    private static final CssMetaData<SimpleGauge, Paint> SECTION_FILL_3 =
        new CssMetaData<SimpleGauge, Paint>("-section-fill-3", PaintConverter.getInstance(), DEFAULT_SECTION_FILL_3) {
            @Override public boolean isSettable(SimpleGauge gauge) {
                return null == gauge.sectionFill3 || !gauge.sectionFill3.isBound();
            }
            @Override public StyleableProperty<Paint> getStyleableProperty(SimpleGauge gauge) {
                return (StyleableProperty) gauge.sectionFill3Property();
            }
            @Override public Paint getInitialValue(SimpleGauge gauge) {
                return gauge.getSectionFill3();
            }
        };
    private static final CssMetaData<SimpleGauge, Paint> SECTION_FILL_4 =
        new CssMetaData<SimpleGauge, Paint>("-section-fill-4", PaintConverter.getInstance(), DEFAULT_SECTION_FILL_4) {
            @Override public boolean isSettable(SimpleGauge gauge) {
                return null == gauge.sectionFill4 || !gauge.sectionFill4.isBound();
            }
            @Override public StyleableProperty<Paint> getStyleableProperty(SimpleGauge gauge) {
                return (StyleableProperty) gauge.sectionFill4Property();
            }
            @Override public Paint getInitialValue(SimpleGauge gauge) {
                return gauge.getSectionFill4();
            }
        };
    private static final CssMetaData<SimpleGauge, Paint> SECTION_FILL_5 =
        new CssMetaData<SimpleGauge, Paint>("-section-fill-5", PaintConverter.getInstance(), DEFAULT_SECTION_FILL_5) {
            @Override public boolean isSettable(SimpleGauge gauge) {
                return null == gauge.sectionFill5 || !gauge.sectionFill5.isBound();
            }
            @Override public StyleableProperty<Paint> getStyleableProperty(SimpleGauge gauge) {
                return (StyleableProperty) gauge.sectionFill5Property();
            }
            @Override public Paint getInitialValue(SimpleGauge gauge) {
                return gauge.getSectionFill5();
            }
        };
    private static final CssMetaData<SimpleGauge, Paint> SECTION_FILL_6 =
        new CssMetaData<SimpleGauge, Paint>("-section-fill-6", PaintConverter.getInstance(), DEFAULT_SECTION_FILL_6) {
            @Override public boolean isSettable(SimpleGauge gauge) {
                return null == gauge.sectionFill6 || !gauge.sectionFill6.isBound();
            }
            @Override public StyleableProperty<Paint> getStyleableProperty(SimpleGauge gauge) {
                return (StyleableProperty) gauge.sectionFill6Property();
            }
            @Override public Paint getInitialValue(SimpleGauge gauge) {
                return gauge.getSectionFill6();
            }
        };
    private static final CssMetaData<SimpleGauge, Paint> SECTION_FILL_7 =
        new CssMetaData<SimpleGauge, Paint>("-section-fill-7", PaintConverter.getInstance(), DEFAULT_SECTION_FILL_7) {
            @Override public boolean isSettable(SimpleGauge gauge) {
                return null == gauge.sectionFill7 || !gauge.sectionFill7.isBound();
            }
            @Override public StyleableProperty<Paint> getStyleableProperty(SimpleGauge gauge) {
                return (StyleableProperty) gauge.sectionFill7Property();
            }
            @Override public Paint getInitialValue(SimpleGauge gauge) {
                return gauge.getSectionFill7();
            }
        };
    private static final CssMetaData<SimpleGauge, Paint> SECTION_FILL_8 =
        new CssMetaData<SimpleGauge, Paint>("-section-fill-8", PaintConverter.getInstance(), DEFAULT_SECTION_FILL_8) {
            @Override public boolean isSettable(SimpleGauge gauge) {
                return null == gauge.sectionFill8 || !gauge.sectionFill8.isBound();
            }
            @Override public StyleableProperty<Paint> getStyleableProperty(SimpleGauge gauge) {
                return (StyleableProperty) gauge.sectionFill8Property();
            }
            @Override public Paint getInitialValue(SimpleGauge gauge) {
                return gauge.getSectionFill8();
            }
        };
    private static final CssMetaData<SimpleGauge, Paint> SECTION_FILL_9 =
        new CssMetaData<SimpleGauge, Paint>("-section-fill-9", PaintConverter.getInstance(), DEFAULT_SECTION_FILL_9) {
            @Override public boolean isSettable(SimpleGauge gauge) {
                return null == gauge.sectionFill9 || !gauge.sectionFill9.isBound();
            }
            @Override public StyleableProperty<Paint> getStyleableProperty(SimpleGauge gauge) {
                return (StyleableProperty) gauge.sectionFill9Property();
            }
            @Override public Paint getInitialValue(SimpleGauge gauge) {
                return gauge.getSectionFill9();
            }
        };
    private static final CssMetaData<SimpleGauge, Paint> RANGE_FILL =
        new CssMetaData<SimpleGauge, Paint>("-range-fill", PaintConverter.getInstance(), DEFAULT_RANGE_FILL) {
            @Override public boolean isSettable(SimpleGauge gauge) {
                return null == gauge.rangeFill || !gauge.rangeFill.isBound();
            }
            @Override public StyleableProperty<Paint> getStyleableProperty(SimpleGauge gauge) {
                return (StyleableProperty) gauge.rangeFillProperty();
            }
            @Override public Paint getInitialValue(SimpleGauge gauge) {
                return gauge.getRangeFill();
            }
        };
    // Combined, immutable list: superclass metadata plus the entries above.
    private static final List<CssMetaData<? extends Styleable, ?>> STYLEABLES;
    static {
        final List<CssMetaData<? extends Styleable, ?>> styleables = new ArrayList<>(Control.getClassCssMetaData());
        Collections.addAll(styleables,
                           VALUE_TEXT_COLOR,
                           TITLE_TEXT_COLOR,
                           SECTION_TEXT_COLOR,
                           SECTION_FILL_0,
                           SECTION_FILL_1,
                           SECTION_FILL_2,
                           SECTION_FILL_3,
                           SECTION_FILL_4,
                           SECTION_FILL_5,
                           SECTION_FILL_6,
                           SECTION_FILL_7,
                           SECTION_FILL_8,
                           SECTION_FILL_9,
                           RANGE_FILL);
        STYLEABLES = Collections.unmodifiableList(styleables);
    }
}
/** @return the CssMetaData associated with this class (including superclasses) */
public static List<CssMetaData<? extends Styleable, ?>> getClassCssMetaData() {
    return StyleableProperties.STYLEABLES;
}
/** {@inheritDoc} */
@Override public List<CssMetaData<? extends Styleable, ?>> getControlCssMetaData() {
    return getClassCssMetaData();
}
}
| |
package net.i2p.data.i2np;
/*
* free (adj.): unencumbered; not under the control of others
* Written by jrandom in 2003 and released into the public domain
* with no warranty of any kind, either expressed or implied.
* It probably won't make your computer catch on fire, or eat
* your children, but it might. Use at your own risk.
*
*/
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import net.i2p.data.DataFormatException;
import net.i2p.data.DataHelper;
import net.i2p.data.DataStructureImpl;
import net.i2p.data.Hash;
import net.i2p.data.SessionKey;
import net.i2p.data.TunnelId;
//import net.i2p.util.Log;
/**
* Contains the delivery instructions for garlic cloves.
* Generic "delivery instructions" are used both in tunnel messages
* and in garlic cloves, with slight differences.
* However,
* the tunnel message generator TrivialPreprocessor and reader FragmentHandler do not use this class,
* the reading and writing is handled inline there.
*
* @author jrandom
*/
public class DeliveryInstructions extends DataStructureImpl {
//private final static Log _log = new Log(DeliveryInstructions.class);
//private boolean _encrypted;
//private SessionKey _encryptionKey;
// Delivery mode of these instructions; -1 until set (see DELIVERY_MODE_*).
private int _deliveryMode;
public final static int DELIVERY_MODE_LOCAL = 0;
public final static int DELIVERY_MODE_DESTINATION = 1;
public final static int DELIVERY_MODE_ROUTER = 2;
public final static int DELIVERY_MODE_TUNNEL = 3;
private Hash _destinationHash;
private Hash _routerHash;
private TunnelId _tunnelId;
private boolean _delayRequested;
private long _delaySeconds;
// Wire-format mode values; numerically identical to the DELIVERY_MODE_* constants.
private final static int FLAG_MODE_LOCAL = 0;
private final static int FLAG_MODE_DESTINATION = 1;
private final static int FLAG_MODE_ROUTER = 2;
private final static int FLAG_MODE_TUNNEL = 3;
/** @deprecated unused */
private final static long FLAG_ENCRYPTED = 128;
// Bit masks within the single flag byte: mode bits (0x60) and delay bit (0x10).
private final static long FLAG_MODE = 96;
private final static long FLAG_DELAY = 16;
/**
 * Immutable local instructions, no options
 *
 * @since 0.9.9
 */
public static final DeliveryInstructions LOCAL = new LocalInstructions();
/**
 * Returns the shared immutable LOCAL instance when the flag byte is zero,
 * otherwise parses new instructions from the buffer.
 *
 * @since 0.9.20
 */
public static DeliveryInstructions create(byte[] data, int offset) throws DataFormatException {
    if (data[offset] != 0) {
        DeliveryInstructions instructions = new DeliveryInstructions();
        instructions.readBytes(data, offset);
        return instructions;
    }
    return LOCAL;
}
/** Creates empty instructions; the delivery mode is unset (-1) until configured. */
public DeliveryInstructions() {
    _deliveryMode = -1;
}
// NOTE(review): the javadoc marks these four accessors deprecated but the
// @Deprecated annotation itself is absent - consider adding it.
/**
 * For cloves only (not tunnels), default false, unused
 * @deprecated unused
 */
public boolean getEncrypted() { return /* _encrypted */ false; }
/**
 * For cloves only (not tunnels), default false, unused
 * @deprecated unused
 */
public void setEncrypted(boolean encrypted) { /* _encrypted = encrypted; */ }
/**
 * For cloves only (not tunnels), default null, unused
 * @deprecated unused
 */
public SessionKey getEncryptionKey() { return /* _encryptionKey */ null; }
/**
 * For cloves only (not tunnels), default null, unused
 * @deprecated unused
 */
public void setEncryptionKey(SessionKey key) { /* _encryptionKey = key; */ }
/** @return the delivery mode, one of DELIVERY_MODE_*; default -1 (unset) */
public int getDeliveryMode() { return _deliveryMode; }
/** @param mode 0-3 (see DELIVERY_MODE_*) */
public void setDeliveryMode(int mode) { _deliveryMode = mode; }
/** @return the destination hash; default null */
public Hash getDestination() { return _destinationHash; }
/** required for DESTINATION */
public void setDestination(Hash dest) { _destinationHash = dest; }
/** @return the router hash; default null */
public Hash getRouter() { return _routerHash; }
/** required for ROUTER or TUNNEL */
public void setRouter(Hash router) { _routerHash = router; }
/** @return the tunnel id; default null */
public TunnelId getTunnelId() { return _tunnelId; }
/** required for TUNNEL */
public void setTunnelId(TunnelId id) { _tunnelId = id; }
/**
 * default false, unused
 * @deprecated unused
 */
public boolean getDelayRequested() { return _delayRequested; }
/**
 * default false, unused
 * @deprecated unused
 */
public void setDelayRequested(boolean req) { _delayRequested = req; }
/**
 * default 0, unused
 * @deprecated unused
 */
public long getDelaySeconds() { return _delaySeconds; }
/**
 * default 0, unused
 * @deprecated unused
 */
public void setDelaySeconds(long seconds) { _delaySeconds = seconds; }
    /**
     * Deserializes from a stream: one flag byte, then the mode-dependent
     * fields (destination hash, router hash, and/or tunnel ID), then an
     * optional 4-byte delay. Mirrors writeBytes(OutputStream).
     *
     * @throws DataFormatException if the contained structures are invalid
     * @throws IOException on read error
     * @deprecated unused
     */
    public void readBytes(InputStream in) throws DataFormatException, IOException {
        long flags = DataHelper.readLong(in, 1);
        //if (_log.shouldLog(Log.DEBUG))
        //    _log.debug("Read flags: " + flags + " mode: " + flagMode(flags));
        /****
        if (flagEncrypted(flags)) {
            SessionKey k = new SessionKey();
            k.readBytes(in);
            setEncryptionKey(k);
            setEncrypted(true);
        } else {
            setEncrypted(false);
        }
        ****/
        setDeliveryMode(flagMode(flags));
        switch (flagMode(flags)) {
            case FLAG_MODE_LOCAL:
                break;
            case FLAG_MODE_DESTINATION:
                //Hash destHash = new Hash();
                //destHash.readBytes(in);
                Hash destHash = Hash.create(in);
                setDestination(destHash);
                break;
            case FLAG_MODE_ROUTER:
                //Hash routerHash = new Hash();
                //routerHash.readBytes(in);
                Hash routerHash = Hash.create(in);
                setRouter(routerHash);
                break;
            case FLAG_MODE_TUNNEL:
                // TUNNEL mode carries both the gateway router hash and the tunnel ID
                //Hash tunnelRouterHash = new Hash();
                //tunnelRouterHash.readBytes(in);
                Hash tunnelRouterHash = Hash.create(in);
                setRouter(tunnelRouterHash);
                TunnelId id = new TunnelId();
                id.readBytes(in);
                setTunnelId(id);
                break;
        }
        if (flagDelay(flags)) {
            long delay = DataHelper.readLong(in, 4);
            setDelayRequested(true);
            setDelaySeconds(delay);
        } else {
            setDelayRequested(false);
        }
    }
    /**
     * Deserializes from a byte array, same wire format as
     * readBytes(InputStream).
     *
     * @param data   source buffer
     * @param offset index of the flag byte
     * @return the number of bytes consumed
     * @throws DataFormatException if the contained structures are invalid
     */
    public int readBytes(byte data[], int offset) throws DataFormatException {
        int cur = offset;
        long flags = DataHelper.fromLong(data, cur, 1);
        cur++;
        //if (_log.shouldLog(Log.DEBUG))
        //    _log.debug("Read flags: " + flags + " mode: " + flagMode(flags));
        /****
        if (flagEncrypted(flags)) {
            byte kd[] = new byte[SessionKey.KEYSIZE_BYTES];
            System.arraycopy(data, cur, kd, 0, SessionKey.KEYSIZE_BYTES);
            cur += SessionKey.KEYSIZE_BYTES;
            setEncryptionKey(new SessionKey(kd));
            setEncrypted(true);
        } else {
            setEncrypted(false);
        }
        ****/
        setDeliveryMode(flagMode(flags));
        switch (flagMode(flags)) {
            case FLAG_MODE_LOCAL:
                break;
            case FLAG_MODE_DESTINATION:
                //byte destHash[] = new byte[Hash.HASH_LENGTH];
                //System.arraycopy(data, cur, destHash, 0, Hash.HASH_LENGTH);
                Hash dh = Hash.create(data, cur);
                cur += Hash.HASH_LENGTH;
                setDestination(dh);
                break;
            case FLAG_MODE_ROUTER:
                //byte routerHash[] = new byte[Hash.HASH_LENGTH];
                //System.arraycopy(data, cur, routerHash, 0, Hash.HASH_LENGTH);
                Hash rh = Hash.create(data, cur);
                cur += Hash.HASH_LENGTH;
                setRouter(rh);
                break;
            case FLAG_MODE_TUNNEL:
                // TUNNEL mode carries both the gateway router hash and the 4-byte tunnel ID
                //byte tunnelRouterHash[] = new byte[Hash.HASH_LENGTH];
                //System.arraycopy(data, cur, tunnelRouterHash, 0, Hash.HASH_LENGTH);
                Hash trh = Hash.create(data, cur);
                cur += Hash.HASH_LENGTH;
                setRouter(trh);
                setTunnelId(new TunnelId(DataHelper.fromLong(data, cur, 4)));
                cur += 4;
                break;
        }
        if (flagDelay(flags)) {
            long delay = DataHelper.fromLong(data, cur, 4);
            cur += 4;
            setDelayRequested(true);
            setDelaySeconds(delay);
        } else {
            setDelayRequested(false);
        }
        return cur - offset;
    }
/**
* For cloves only (not tunnels), default false, unused
* @deprecated unused
*/
/****
private static boolean flagEncrypted(long flags) {
return (0 != (flags & FLAG_ENCRYPTED));
}
****/
/** high bits */
private static int flagMode(long flags) {
long v = flags & FLAG_MODE;
v >>>= 5;
return (int)v;
}
/** unused */
private static boolean flagDelay(long flags) {
return (0 != (flags & FLAG_DELAY));
}
private long getFlags() {
long val = 0L;
/****
if (getEncrypted())
val = val | FLAG_ENCRYPTED;
****/
long fmode = 0;
switch (getDeliveryMode()) {
case FLAG_MODE_LOCAL:
break;
case FLAG_MODE_DESTINATION:
fmode = FLAG_MODE_DESTINATION << 5;
break;
case FLAG_MODE_ROUTER:
fmode = FLAG_MODE_ROUTER << 5;
break;
case FLAG_MODE_TUNNEL:
fmode = FLAG_MODE_TUNNEL << 5;
break;
}
val = val | fmode;
if (getDelayRequested())
val = val | FLAG_DELAY;
//if (_log.shouldLog(Log.DEBUG))
// _log.debug("getFlags() = " + val);
return val;
}
    /**
     * Computes the length in bytes of everything after the flag byte:
     * a hash for DESTINATION/ROUTER/TUNNEL, plus 4 bytes of tunnel ID for
     * TUNNEL, plus 4 bytes of delay when requested.
     *
     * @throws IllegalStateException if a field required by the current mode is unset
     */
    private int getAdditionalInfoSize() {
        int additionalSize = 0;
        /****
        if (getEncrypted()) {
            if (_encryptionKey == null) throw new IllegalStateException("Encryption key is not set");
            additionalSize += SessionKey.KEYSIZE_BYTES;
        }
        ****/
        switch (getDeliveryMode()) {
            case FLAG_MODE_LOCAL:
                //if (_log.shouldLog(Log.DEBUG))
                //    _log.debug("mode = local");
                break;
            case FLAG_MODE_DESTINATION:
                if (_destinationHash == null) throw new IllegalStateException("Destination hash is not set");
                additionalSize += Hash.HASH_LENGTH;
                break;
            case FLAG_MODE_ROUTER:
                if (_routerHash == null) throw new IllegalStateException("Router hash is not set");
                additionalSize += Hash.HASH_LENGTH;
                break;
            case FLAG_MODE_TUNNEL:
                if ( (_routerHash == null) || (_tunnelId == null) ) throw new IllegalStateException("Router hash or tunnel ID is not set");
                additionalSize += Hash.HASH_LENGTH;
                additionalSize += 4; // tunnelId
                break;
        }
        if (getDelayRequested()) {
            additionalSize += 4;
        }
        return additionalSize;
    }
private byte[] getAdditionalInfo() {
int additionalSize = getAdditionalInfoSize();
byte rv[] = new byte[additionalSize];
int offset = 0;
offset += getAdditionalInfo(rv, offset);
if (offset != additionalSize)
//_log.log(Log.CRIT, "wtf, additionalSize = " + additionalSize + ", offset = " + offset);
throw new IllegalStateException("wtf, additionalSize = " + additionalSize + ", offset = " + offset);
return rv;
}
    /**
     * Serializes the variable-length portion into rv starting at offset, in
     * the same order readBytes() expects: hash, then tunnel ID (TUNNEL only),
     * then the 4-byte delay when requested.
     *
     * @return the number of bytes written
     * @throws IllegalStateException if a field required by the current mode is unset
     */
    private int getAdditionalInfo(byte rv[], int offset) {
        int origOffset = offset;
        /****
        if (getEncrypted()) {
            if (_encryptionKey == null) throw new IllegalStateException("Encryption key is not set");
            System.arraycopy(_encryptionKey.getData(), 0, rv, offset, SessionKey.KEYSIZE_BYTES);
            offset += SessionKey.KEYSIZE_BYTES;
            if (_log.shouldLog(Log.DEBUG))
                _log.debug("IsEncrypted");
        } else {
            if (_log.shouldLog(Log.DEBUG))
                _log.debug("Is NOT Encrypted");
        }
        ****/
        switch (getDeliveryMode()) {
            case FLAG_MODE_LOCAL:
                //if (_log.shouldLog(Log.DEBUG))
                //    _log.debug("mode = local");
                break;
            case FLAG_MODE_DESTINATION:
                if (_destinationHash == null) throw new IllegalStateException("Destination hash is not set");
                System.arraycopy(_destinationHash.getData(), 0, rv, offset, Hash.HASH_LENGTH);
                offset += Hash.HASH_LENGTH;
                //if (_log.shouldLog(Log.DEBUG))
                //    _log.debug("mode = destination, hash = " + _destinationHash);
                break;
            case FLAG_MODE_ROUTER:
                if (_routerHash == null) throw new IllegalStateException("Router hash is not set");
                System.arraycopy(_routerHash.getData(), 0, rv, offset, Hash.HASH_LENGTH);
                offset += Hash.HASH_LENGTH;
                //if (_log.shouldLog(Log.DEBUG))
                //    _log.debug("mode = router, routerHash = " + _routerHash);
                break;
            case FLAG_MODE_TUNNEL:
                if ( (_routerHash == null) || (_tunnelId == null) ) throw new IllegalStateException("Router hash or tunnel ID is not set");
                System.arraycopy(_routerHash.getData(), 0, rv, offset, Hash.HASH_LENGTH);
                offset += Hash.HASH_LENGTH;
                DataHelper.toLong(rv, offset, 4, _tunnelId.getTunnelId());
                offset += 4;
                //if (_log.shouldLog(Log.DEBUG))
                //    _log.debug("mode = tunnel, tunnelId = " + _tunnelId.getTunnelId()
                //               + ", routerHash = " + _routerHash);
                break;
        }
        if (getDelayRequested()) {
            //if (_log.shouldLog(Log.DEBUG))
            //    _log.debug("delay requested: " + getDelaySeconds());
            DataHelper.toLong(rv, offset, 4, getDelaySeconds());
            offset += 4;
        } else {
            //if (_log.shouldLog(Log.DEBUG))
            //    _log.debug("delay NOT requested");
        }
        return offset - origOffset;
    }
    /**
     * Writes the full instructions (flag byte plus additional info) to the stream.
     *
     * @throws DataFormatException if the delivery mode is unset or out of range
     * @throws IOException on write error
     * @deprecated unused
     */
    public void writeBytes(OutputStream out) throws DataFormatException, IOException {
        if ( (_deliveryMode < 0) || (_deliveryMode > FLAG_MODE_TUNNEL) ) throw new DataFormatException("Invalid data: mode = " + _deliveryMode);
        long flags = getFlags();
        //if (_log.shouldLog(Log.DEBUG))
        //    _log.debug("Write flags: " + flags + " mode: " + getDeliveryMode()
        //               + " =?= " + flagMode(flags));
        byte additionalInfo[] = getAdditionalInfo();
        DataHelper.writeLong(out, 1, flags);
        // NOTE(review): getAdditionalInfo() never returns null (it throws on
        // bad state), so this guard is effectively dead - kept for safety
        if (additionalInfo != null) {
            out.write(additionalInfo);
            out.flush();
        }
    }
    /**
     * Writes the full instructions (flag byte plus additional info) into the
     * target buffer.
     *
     * @return the number of bytes written to the target
     * @throws IllegalStateException if the delivery mode is unset or out of range
     */
    public int writeBytes(byte target[], int offset) {
        if ( (_deliveryMode < 0) || (_deliveryMode > FLAG_MODE_TUNNEL) ) throw new IllegalStateException("Invalid data: mode = " + _deliveryMode);
        long flags = getFlags();
        //if (_log.shouldLog(Log.DEBUG))
        //    _log.debug("Write flags: " + flags + " mode: " + getDeliveryMode()
        //               + " =?= " + flagMode(flags));
        int origOffset = offset;
        DataHelper.toLong(target, offset, 1, flags);
        offset++;
        offset += getAdditionalInfo(target, offset);
        return offset - origOffset;
    }
public int getSize() {
return 1 // flags
+ getAdditionalInfoSize();
}
@Override
public boolean equals(Object obj) {
if ( (obj == null) || !(obj instanceof DeliveryInstructions))
return false;
DeliveryInstructions instr = (DeliveryInstructions)obj;
return (getDelayRequested() == instr.getDelayRequested()) &&
(getDelaySeconds() == instr.getDelaySeconds()) &&
(getDeliveryMode() == instr.getDeliveryMode()) &&
//(getEncrypted() == instr.getEncrypted()) &&
DataHelper.eq(getDestination(), instr.getDestination()) &&
DataHelper.eq(getEncryptionKey(), instr.getEncryptionKey()) &&
DataHelper.eq(getRouter(), instr.getRouter()) &&
DataHelper.eq(getTunnelId(), instr.getTunnelId());
}
    /**
     * Consistent with equals(): built from the delay seconds, mode, and the
     * destination/key/router/tunnel fields compared there.
     */
    @Override
    public int hashCode() {
        return (int)getDelaySeconds() +
               getDeliveryMode() +
               DataHelper.hashCode(getDestination()) +
               DataHelper.hashCode(getEncryptionKey()) +
               DataHelper.hashCode(getRouter()) +
               DataHelper.hashCode(getTunnelId());
    }
    /** Multi-line debug dump of all fields; an unset mode prints no mode line. */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder(128);
        buf.append("[DeliveryInstructions: ");
        buf.append("\n\tDelivery mode: ");
        switch (getDeliveryMode()) {
            case DELIVERY_MODE_LOCAL:
                buf.append("local");
                break;
            case DELIVERY_MODE_DESTINATION:
                buf.append("destination");
                break;
            case DELIVERY_MODE_ROUTER:
                buf.append("router");
                break;
            case DELIVERY_MODE_TUNNEL:
                buf.append("tunnel");
                break;
        }
        buf.append("\n\tDelay requested: ").append(getDelayRequested());
        buf.append("\n\tDelay seconds: ").append(getDelaySeconds());
        buf.append("\n\tDestination: ").append(getDestination());
        //buf.append("\n\tEncrypted: ").append(getEncrypted());
        buf.append("\n\tEncryption key: ").append(getEncryptionKey());
        buf.append("\n\tRouter: ").append(getRouter());
        buf.append("\n\tTunnelId: ").append(getTunnelId());
        return buf.toString();
    }
    /**
     * An immutable local delivery instructions with no options
     * for efficiency. All setters and readers throw RuntimeException;
     * only the writers (a single zero byte) and getSize() are functional.
     *
     * @since 0.9.9
     */
    private static final class LocalInstructions extends DeliveryInstructions {
        //private static final byte flag = DELIVERY_MODE_LOCAL << 5; // 0
        @Override
        public void setEncrypted(boolean encrypted) {
            throw new RuntimeException("immutable");
        }
        @Override
        public void setEncryptionKey(SessionKey key) {
            throw new RuntimeException("immutable");
        }
        @Override
        public int getDeliveryMode() { return DELIVERY_MODE_LOCAL; }
        @Override
        public void setDeliveryMode(int mode) {
            throw new RuntimeException("immutable");
        }
        @Override
        public void setDestination(Hash dest) {
            throw new RuntimeException("immutable");
        }
        @Override
        public void setRouter(Hash router) {
            throw new RuntimeException("immutable");
        }
        @Override
        public void setTunnelId(TunnelId id) {
            throw new RuntimeException("immutable");
        }
        @Override
        public void setDelayRequested(boolean req) {
            throw new RuntimeException("immutable");
        }
        @Override
        public void setDelaySeconds(long seconds) {
            throw new RuntimeException("immutable");
        }
        @Override
        public void readBytes(InputStream in) throws DataFormatException, IOException {
            throw new RuntimeException("immutable");
        }
        @Override
        public int readBytes(byte data[], int offset) throws DataFormatException {
            throw new RuntimeException("immutable");
        }
        // The entire serialized form of local instructions is a single zero byte.
        @Override
        public void writeBytes(OutputStream out) throws DataFormatException, IOException {
            out.write((byte) 0);
        }
        @Override
        public int writeBytes(byte target[], int offset) {
            target[offset] = 0;
            return 1;
        }
        @Override
        public int getSize() {
            return 1;
        }
        @Override
        public String toString() {
            return "[DeliveryInstructions: " +
                   "\n\tDelivery mode: " +
                   "local]";
        }
    }
}
| |
/*
* MIT License
*
* Copyright (c) 2014-18, mcarvalho (gamboa.pt)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package htmlflow.test.views;
import htmlflow.DynamicHtml;
import htmlflow.test.model.Stock;
import org.xmlet.htmlapifaster.EnumHttpEquivType;
import org.xmlet.htmlapifaster.EnumMediaType;
import org.xmlet.htmlapifaster.EnumRelType;
import org.xmlet.htmlapifaster.EnumTypeContentType;
import org.xmlet.htmlapifaster.EnumTypeScriptType;
/**
 * HtmlFlow dynamic views over a sequence of {@link Stock}: one template that
 * binds data-dependent output with {@code dynamic()}, and a deliberately wrong
 * variant that uses {@code of()} for the same content (see the inline note),
 * kept to exercise that failure mode in unit tests.
 */
public class HtmlDynamicStocks {
    /** View whose data-dependent parts are bound with dynamic(). */
    public static DynamicHtml<Iterable<Stock>> stocksViewOk = DynamicHtml.view(HtmlDynamicStocks::templateStocksOk);
    /** View that misuses of() for data-dependent content; for unit tests only. */
    public static DynamicHtml<Iterable<Stock>> stocksViewWrong = DynamicHtml.view(HtmlDynamicStocks::templateStocksWrong);
    /**
     * Renders the stock table, marking every data-dependent element with
     * dynamic() so it is re-evaluated on each rendering.
     */
    private static void templateStocksOk(DynamicHtml<Iterable<Stock>> view, Iterable<Stock> stocks) {
        view
            .html()
                .head()
                    .title().text("Stock Prices").__()
                    .meta()
                        .attrHttpEquiv(EnumHttpEquivType.CONTENT_TYPE)
                        .attrContent("text/html; charset=UTF-8")
                    .__()
                    .meta()
                        .addAttr("http-equiv", "Content-Style-Type")
                        .attrContent("text/CSS")
                    .__()
                    .meta()
                        .addAttr("http-equiv", "Content-Script-Type")
                        .attrContent("text/javascript")
                    .__()
                    .link()
                        .addAttr("rel", "shortcut icon")
                        .attrHref("/images/favicon.ico")
                    .__()
                    .link()
                        .attrRel(EnumRelType.STYLESHEET)
                        .attrType(EnumTypeContentType.TEXT_CSS)
                        .attrHref("/CSS/style.CSS")
                        .attrMedia(EnumMediaType.ALL)
                    .__()
                    .script()
                        .attrType(EnumTypeScriptType.TEXT_JAVASCRIPT)
                        .attrSrc("/js/util.js")
                    .__()
                .__() // head
                .body()
                    .h1().text("Stock Prices").__()
                    .table()
                        .thead()
                            .tr()
                                .th().text("#").__()
                                .th().text("symbol").__()
                                .th().text("name").__()
                                .th().text("price").__()
                                .th().text("change").__()
                                .th().text("ratio").__()
                            .__() // tr
                        .__() // thead
                        .tbody()
                            .of(tbody -> stocks.forEach(stock ->
                                tbody
                                    .tr()
                                        .dynamic(tr -> tr.attrClass(stock.getIndex() % 2 == 0 ? "even" : "odd"))
                                        .td()
                                            .dynamic(td -> td.text(stock.getIndex()))
                                        .__()
                                        .td()
                                            .a().dynamic(a -> a.attrHref("/stocks/" + stock.getSymbol()).text(stock.getSymbol())).__()
                                        .__()
                                        .td()
                                            .a().dynamic(a -> a.attrHref(stock.getUrl()).text(stock.getName())).__()
                                        .__()
                                        .td()
                                            .strong().dynamic(strong -> strong.text(stock.getPrice())).__()
                                        .__()
                                        .td()
                                            .dynamic(td -> {
                                                double change = stock.getChange();
                                                if (change < 0) {
                                                    td.attrClass("minus");
                                                }
                                                td.text(change);
                                            })
                                        .__()
                                        .td()
                                            .dynamic(td -> {
                                                double ratio = stock.getRatio();
                                                if (ratio < 0) {
                                                    td.attrClass("minus");
                                                }
                                                td.text(ratio);
                                            })
                                        .__()
                                    .__()))
                        .__() // tbody
                    .__() // table
                .__() // body
            .__(); // html
    }
    /**
     * Same table as templateStocksOk, but intentionally binds the
     * data-dependent elements with of() instead of dynamic() — the wrong
     * usage that the unit tests assert against.
     */
    private static void templateStocksWrong(DynamicHtml<Iterable<Stock>> view, Iterable<Stock> stocks) {
        view
            .html()
                .head()
                    .title().text("Stock Prices").__()
                    .meta()
                        .attrHttpEquiv(EnumHttpEquivType.CONTENT_TYPE)
                        .attrContent("text/html; charset=UTF-8")
                    .__()
                    .meta()
                        .addAttr("http-equiv", "Content-Style-Type")
                        .attrContent("text/CSS")
                    .__()
                    .meta()
                        .addAttr("http-equiv", "Content-Script-Type")
                        .attrContent("text/javascript")
                    .__()
                    .link()
                        .addAttr("rel", "shortcut icon")
                        .attrHref("/images/favicon.ico")
                    .__()
                    .link()
                        .attrRel(EnumRelType.STYLESHEET)
                        .attrType(EnumTypeContentType.TEXT_CSS)
                        .attrHref("/CSS/style.CSS")
                        .attrMedia(EnumMediaType.ALL)
                    .__()
                    .script()
                        .attrType(EnumTypeScriptType.TEXT_JAVASCRIPT)
                        .attrSrc("/js/util.js")
                    .__()
                .__() // head
                .body()
                    .h1().text("Stock Prices").__()
                    .table()
                        .thead()
                            .tr()
                                .th().text("#").__()
                                .th().text("symbol").__()
                                .th().text("name").__()
                                .th().text("price").__()
                                .th().text("change").__()
                                .th().text("ratio").__()
                            .__() // tr
                        .__() // thead
                        .tbody()
                            .of(tbody -> stocks.forEach(stock ->
                                tbody
                                    .tr()
                                    // !!!!! In the following it is the wrong use with of() for the unit test !!!
                                    .of(tr -> tr.attrClass(stock.getIndex() % 2 == 0 ? "even" : "odd"))
                                        .td()
                                            .of(td -> td.text(stock.getIndex()))
                                        .__()
                                        .td()
                                            .a().of(a -> a.attrHref("/stocks/" + stock.getSymbol()).text(stock.getSymbol())).__()
                                        .__()
                                        .td()
                                            .a().of(a -> a.attrHref(stock.getUrl()).text(stock.getName())).__()
                                        .__()
                                        .td()
                                            .strong().of(strong -> strong.text(stock.getPrice())).__()
                                        .__()
                                        .td()
                                            .of(td -> {
                                                double change = stock.getChange();
                                                if (change < 0) {
                                                    td.attrClass("minus");
                                                }
                                                td.text(change);
                                            })
                                        .__()
                                        .td()
                                            .of(td -> {
                                                double ratio = stock.getRatio();
                                                if (ratio < 0) {
                                                    td.attrClass("minus");
                                                }
                                                td.text(ratio);
                                            })
                                        .__()
                                    .__()))
                        .__() // tbody
                    .__() // table
                .__() // body
            .__(); // html
    }
}
| |
package parser;
/**
 * Compact child-list representation of a dependency tree, stored CSR-style:
 * the children of head i occupy edges[startIndex(i) .. endIndex(i)-1] in
 * increasing order. Optionally maintains subtree spans (left/right) for
 * higher-order features and per-modifier non-projectivity counts.
 */
public class DependencyArcList {
    /** Number of tokens, including the root at index 0. */
    public int n;
    /** CSR arrays: st[i] is the start of head i's child segment within edges. */
    public int[] st, edges;
    /** Subtree spans: token i dominates the half-open range [left[i], right[i]). Null unless useHO. */
    public int[] left, right; // span
    /** Per-modifier counts of crossing arcs; null when projectivity is not tracked. */
    public int[] nonproj; // non-proj

    /**
     * Allocates empty storage for n tokens.
     *
     * @param n     number of tokens
     * @param useHO also allocate the span arrays for higher-order features
     */
    public DependencyArcList(int n, boolean useHO)
    {
        this.n = n;
        st = new int[n];
        edges = new int[n];
        if (useHO) {
            left = new int[n];
            right = new int[n];
        }
    }

    /**
     * Builds the child lists (and optionally the spans) from a head array.
     *
     * @param heads heads[i] is the head of token i; entry 0 (the root) is not read
     * @param useHO also build the span arrays
     */
    public DependencyArcList(int[] heads, boolean useHO)
    {
        n = heads.length;
        st = new int[n];
        edges = new int[n];
        constructDepTreeArcList(heads);
        if (useHO) {
            left = new int[n];
            right = new int[n];
            constructSpan();
        }
    }

    /**
     * Reuses this object for a sentence of n tokens (n >= 1), reallocating
     * only when the existing arrays are too small.
     */
    public void resize(int n, boolean useHO)
    {
        if (n > st.length) {
            st = new int[n];
            edges = new int[n];
            if (useHO) {
                left = new int[n];
                right = new int[n];
            }
        }
        this.n = n;
        edges[n - 1] = 0;
    }

    /** @return the index into edges of head i's first child */
    public int startIndex(int i)
    {
        return st[i];
    }

    /** @return the index into edges one past head i's last child */
    public int endIndex(int i)
    {
        return (i >= n-1) ? n-1 : st[i+1];
    }

    /** @return the modifier stored at position i of the edges array */
    public int get(int i)
    {
        return edges[i];
    }

    /**
     * Fills st/edges via a counting sort of modifiers by head, so each head's
     * children are contiguous and in increasing order.
     */
    public void constructDepTreeArcList(int[] heads)
    {
        // count the children of each head
        for (int i = 0; i < n; ++i)
            st[i] = 0;
        for (int i = 1; i < n; ++i) {
            int j = heads[i];
            ++st[j];
        }
        // prefix sums: st[i] temporarily marks the end of head i's segment
        for (int i = 1; i < n; ++i)
            st[i] += st[i-1];
        //Utils.Assert(st[n-1] == n-1);
        // scatter modifiers from the back so each segment ends up sorted ascending
        for (int i = n-1; i > 0; --i) {
            int j = heads[i];
            --st[j];
            edges[st[j]] = i;
        }
    }

    /**
     * Incrementally updates all maintained structures after re-attaching
     * modifier m from oldH to newH. heads must already reflect the new tree.
     */
    public void update(int m, int oldH, int newH, int[] heads) {
        updateDepTreeArcList(m, oldH, newH);
        if (left != null && right != null)
            updateDepSpan(m, oldH, newH, heads);
        if (nonproj != null)
            constructNonproj(heads);
    }

    /**
     * Moves modifier m from oldH's child segment to newH's, shifting the
     * intervening segments by one slot and keeping each segment sorted.
     */
    public void updateDepTreeArcList(int m, int oldH, int newH) {
        if (oldH == newH)
            return;
        // update the head of m from oldH to newH
        if (oldH < newH) {
            int end = endIndex(oldH);
            int pos = startIndex(oldH);
            for (; pos < end; ++pos)
                if (edges[pos] == m)
                    break;
            //Utils.Assert(pos < end);
            // update oldH: close the gap left by m
            for (; pos < end - 1; ++pos)
                edges[pos] = edges[pos + 1];
            // update oldH + 1 to newH - 1: shift each segment left by one
            for (int i = oldH + 1; i < newH; ++i) {
                --st[i];
                end = endIndex(i);
                for (; pos < end - 1; ++pos)
                    edges[pos] = edges[pos + 1];
            }
            // update newH: insert m at its sorted position
            st[newH]--;
            end = endIndex(newH);
            while (pos < end - 1 && edges[pos + 1] < m) {
                edges[pos] = edges[pos + 1];
                ++pos;
            }
            edges[pos] = m;
        }
        else {
            int start = startIndex(oldH);
            int pos = endIndex(oldH) - 1;
            for (; pos >= start; --pos)
                if (edges[pos] == m)
                    break;
            //Utils.Assert(pos >= start);
            // update oldH: close the gap left by m
            for (; pos > start; --pos)
                edges[pos] = edges[pos - 1];
            ++st[oldH];
            // update oldH - 1 to newH + 1: shift each segment right by one
            for (int i = oldH - 1; i > newH; --i) {
                start = startIndex(i);
                for (; pos > start; --pos)
                    edges[pos] = edges[pos - 1];
                ++st[i];
            }
            // update newH: insert m at its sorted position
            start = startIndex(newH);
            while (pos > start && edges[pos - 1] > m) {
                edges[pos] = edges[pos - 1];
                --pos;
            }
            edges[pos] = m;
        }
    }

    /**
     * Repairs the spans after moving m from oldH to newH: grows the spans on
     * the new ancestor chain, then recomputes any span on the old chain that
     * had been determined by m's subtree. Assumes the child lists are updated.
     */
    public void updateDepSpan(int m, int oldH, int newH, int[] heads) {
        if (oldH == newH)
            return;
        int tmpH = newH;
        while (tmpH != -1) {
            left[tmpH] = Math.min(left[tmpH], left[m]);
            right[tmpH] = Math.max(right[tmpH], right[m]);
            tmpH = heads[tmpH];
        }
        // assume that child list is updated
        tmpH = oldH;
        while (tmpH != -1) {
            if (left[tmpH] == left[m]) {
                left[tmpH] = tmpH;
                int start = startIndex(tmpH);
                int end = endIndex(tmpH);
                for (int i = start; i < end; ++i)
                    left[tmpH] = Math.min(left[tmpH], left[edges[i]]);
            }
            if (right[tmpH] == right[m]) {
                right[tmpH] = tmpH + 1;
                int start = startIndex(tmpH);
                int end = endIndex(tmpH);
                for (int i = start; i < end; ++i)
                    right[tmpH] = Math.max(right[tmpH], right[edges[i]]);
            }
            tmpH = heads[tmpH];
        }
    }

    /**
     * @return true if par is an ancestor of ch (or ch itself), walking the
     *         head chain up to the root at 0
     */
    private boolean isAncestorOf(int[] heads, int par, int ch)
    {
        while (ch != 0) {
            if (ch == par) return true;
            ch = heads[ch];
        }
        return false;
    }

    /** Recursively computes the span of id from its children's spans. */
    private void constructSpan(int id) {
        left[id] = id;
        right[id] = id + 1;
        int st = startIndex(id);
        int ed = endIndex(id);
        for (int p = st; p < ed; ++p) {
            int cid = get(p);
            if (right[cid] == 0)
                constructSpan(cid);
            if (left[cid] < left[id])
                left[id] = left[cid];
            if (right[cid] > right[id])
                right[id] = right[cid];
        }
    }

    /** Computes all spans; right[i] == 0 marks "not yet visited". Assumes child lists exist. */
    public void constructSpan() {
        // assume that child list is constructed
        for (int i = 0; i < n; ++i) {
            left[i] = 0;
            right[i] = 0;
        }
        for (int i = 0; i < n; ++i)
            if (right[i] == 0)
                constructSpan(i);
    }

    /**
     * For each modifier m, counts the tokens strictly inside the arc (m, heads[m])
     * whose own head lies outside it — i.e. the arcs crossing (m, heads[m]).
     */
    public void constructNonproj(int[] heads) {
        for (int i = 0; i < n; ++i) {
            nonproj[i] = 0;
        }
        for (int m = 0; m < n; ++m) {
            int h = heads[m];
            int sm = m < h ? m : h;
            int la = m > h ? m : h;
            for (int tm = sm + 1; tm < la; ++tm) {
                // head
                int th = heads[tm];
                if (th < sm || th > la) {
                    nonproj[m]++;
                }
            }
        }
    }
}
| |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.banners;
import android.app.PendingIntent;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.text.TextUtils;
import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;
import org.chromium.chrome.browser.EmptyTabObserver;
import org.chromium.chrome.browser.Tab;
import org.chromium.chrome.browser.TabObserver;
import org.chromium.content.browser.ContentView;
import org.chromium.content_public.browser.WebContents;
import org.chromium.ui.R;
/**
* Manages an AppBannerView for a Tab and its ContentView.
*
* The AppBannerManager manages a single AppBannerView, dismissing it when the user navigates to a
* new page or creating a new one when it detects that the current webpage is requesting a banner to
* be built. The actual observation of the WebContents (which triggers the automatic creation and
* removal of banners, among other things) is done by the native-side AppBannerManager.
*
* This Java-side class owns its native-side counterpart, which is basically used to grab resources
* from the network.
*/
@JNINamespace("banners")
public class AppBannerManager implements AppBannerView.Observer, AppDetailsDelegate.Observer {
    private static final String TAG = "AppBannerManager";
    /** Retrieves information about a given package. */
    private static AppDetailsDelegate sAppDetailsDelegate;
    /** Pointer to the native side AppBannerManager; owned by this object (freed in onDestroyed). */
    private final long mNativePointer;
    /** Tab that the AppBannerView/AppBannerManager is owned by. */
    private final Tab mTab;
    /** ContentView that the AppBannerView/AppBannerManager is currently attached to. */
    private ContentView mContentView;
    /** Current banner being shown. */
    private AppBannerView mBannerView;
    /** Data about the app being advertised. */
    private AppData mAppData;
    /**
     * Checks if app banners are enabled.
     * @return True if banners are enabled, false otherwise.
     */
    public static boolean isEnabled() {
        return nativeIsEnabled();
    }
    /**
     * Sets the delegate that provides information about a given package.
     * @param delegate Delegate to use. Previously set ones are destroyed.
     */
    public static void setAppDetailsDelegate(AppDetailsDelegate delegate) {
        if (sAppDetailsDelegate != null) sAppDetailsDelegate.destroy();
        sAppDetailsDelegate = delegate;
    }
    /**
     * Constructs an AppBannerManager for the given tab.
     * @param tab Tab that the AppBannerManager will be attached to.
     */
    public AppBannerManager(Tab tab) {
        mNativePointer = nativeInit();
        mTab = tab;
        mTab.addObserver(createTabObserver());
        updatePointers();
    }
    /**
     * Creates a TabObserver for monitoring a Tab, used to react to changes in the ContentView
     * or to trigger its own destruction.
     * @return TabObserver that can be used to monitor a Tab.
     */
    private TabObserver createTabObserver() {
        return new EmptyTabObserver() {
            @Override
            public void onWebContentsSwapped(Tab tab, boolean didStartLoad,
                    boolean didFinishLoad) {
                updatePointers();
            }
            @Override
            public void onContentChanged(Tab tab) {
                updatePointers();
            }
            @Override
            public void onDestroyed(Tab tab) {
                // Frees the native counterpart; mNativePointer must not be used afterwards.
                nativeDestroy(mNativePointer);
                mContentView = null;
                resetState();
            }
        };
    }
    /**
     * Updates which ContentView and WebContents the AppBannerView is monitoring.
     */
    private void updatePointers() {
        // NOTE(review): the guard is redundant — assigning an identical reference
        // is harmless and getContentView() is called twice here.
        if (mContentView != mTab.getContentView()) mContentView = mTab.getContentView();
        nativeReplaceWebContents(mNativePointer, mTab.getWebContents());
    }
    /**
     * Grabs package information for the banner asynchronously.
     * @param url URL for the page that is triggering the banner.
     * @param packageName Name of the package that is being advertised.
     */
    @CalledByNative
    private void prepareBanner(String url, String packageName) {
        // Get rid of whatever banner is there currently.
        if (mBannerView != null) dismissCurrentBanner(AppBannerMetricsIds.DISMISS_ERROR);
        // Bail if no delegate is registered or the user already navigated away.
        if (sAppDetailsDelegate == null || !isBannerForCurrentPage(url)) return;
        int iconSize = AppBannerView.getIconSize(mContentView.getContext());
        sAppDetailsDelegate.getAppDetailsAsynchronously(this, url, packageName, iconSize);
    }
    /**
     * Called when data about the package has been retrieved, which includes the url for the app's
     * icon but not the icon Bitmap itself. Kicks off a background task to retrieve it.
     * @param data Data about the app. Null if the task failed.
     */
    @Override
    public void onAppDetailsRetrieved(AppData data) {
        if (data == null || !isBannerForCurrentPage(data.siteUrl())) return;
        mAppData = data;
        String imageUrl = data.imageUrl();
        // No icon URL, or the native fetch could not be started: abandon the banner.
        if (TextUtils.isEmpty(imageUrl) || !nativeFetchIcon(mNativePointer, imageUrl)) resetState();
    }
    /**
     * Called when all the data required to show a banner has finally been retrieved.
     * Creates the banner and shows it, as long as the banner is still meant for the current page.
     * @param imageUrl URL of the icon.
     * @param appIcon Bitmap containing the icon itself.
     * @return Whether or not the banner was created.
     */
    @CalledByNative
    private boolean createBanner(String imageUrl, Bitmap appIcon) {
        if (mAppData == null || !isBannerForCurrentPage(mAppData.siteUrl())) return false;
        // The fetched icon must match the one requested in onAppDetailsRetrieved().
        if (!TextUtils.equals(mAppData.imageUrl(), imageUrl)) {
            resetState();
            return false;
        }
        mAppData.setIcon(new BitmapDrawable(mContentView.getContext().getResources(), appIcon));
        mBannerView = AppBannerView.create(mContentView, this, mAppData);
        return true;
    }
    /**
     * Dismisses whatever banner is currently being displayed. This is treated as an automatic
     * dismissal and not one that blocks the banner from appearing in the future.
     * @param dismissalType What triggered the dismissal.
     */
    @CalledByNative
    private void dismissCurrentBanner(int dismissalType) {
        if (mBannerView != null) mBannerView.dismiss(dismissalType);
        resetState();
    }
    @Override
    public void onBannerRemoved(AppBannerView banner) {
        // Ignore callbacks from stale banners that were already replaced.
        if (mBannerView != banner) return;
        resetState();
    }
    @Override
    public void onBannerBlocked(AppBannerView banner, String url, String packageName) {
        if (mBannerView != banner) return;
        nativeBlockBanner(mNativePointer, url, packageName);
    }
    @Override
    public void onBannerDismissEvent(AppBannerView banner, int eventType) {
        if (mBannerView != banner) return;
        nativeRecordDismissEvent(eventType);
    }
    @Override
    public void onBannerInstallEvent(AppBannerView banner, int eventType) {
        if (mBannerView != banner) return;
        nativeRecordInstallEvent(eventType);
    }
    @Override
    public boolean onFireIntent(AppBannerView banner, PendingIntent intent) {
        if (mBannerView != banner) return false;
        return mTab.getWindowAndroid().showIntent(intent, banner, R.string.low_memory_error);
    }
    /**
     * Resets all of the state, killing off any running tasks.
     */
    private void resetState() {
        if (mBannerView != null) {
            mBannerView.destroy();
            mBannerView = null;
        }
        mAppData = null;
    }
    /**
     * Checks to see if the banner is for the currently displayed page.
     * @param bannerUrl URL that requested a banner.
     * @return True if the user is still on the same page.
     */
    private boolean isBannerForCurrentPage(String bannerUrl) {
        return mContentView != null && TextUtils.equals(mContentView.getUrl(), bannerUrl);
    }
    private static native boolean nativeIsEnabled();
    private native long nativeInit();
    private native void nativeDestroy(long nativeAppBannerManager);
    private native void nativeReplaceWebContents(long nativeAppBannerManager,
            WebContents webContents);
    private native void nativeBlockBanner(
            long nativeAppBannerManager, String url, String packageName);
    private native boolean nativeFetchIcon(long nativeAppBannerManager, String imageUrl);
    // UMA tracking.
    private static native void nativeRecordDismissEvent(int metric);
    private static native void nativeRecordInstallEvent(int metric);
}
| |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ui;
import com.intellij.openapi.util.Iconable;
import com.intellij.ui.icons.RowIcon;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.*;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
public interface IconManager {
  /** @return the installed manager; a no-op {@code DummyIconManager} until {@link #activate} runs */
  static @NotNull IconManager getInstance() {
    return IconManagerHelper.instance;
  }
  // Icon Loader is quite heavy, better to not instantiate class unless required
  static void activate(@Nullable IconManager impl) throws Throwable {
    IconManagerHelper.activate(impl);
  }
  @TestOnly
  static void deactivate() {
    IconManagerHelper.deactivate();
  }
  /** @return the shared placeholder icon */
  @NotNull Icon getStubIcon();
  /** Loads an icon by resource path relative to the given class. */
  @NotNull Icon getIcon(@NotNull String path, @NotNull Class<?> aClass);
  /**
   * Path must be specified without a leading slash, in a format for {@link ClassLoader#getResourceAsStream(String)}
   */
  @ApiStatus.Internal
  @NotNull Icon loadRasterizedIcon(@NotNull String path, @NotNull ClassLoader classLoader, long cacheKey, int flags);
  /**
   * @deprecated Method just for backward compatibility (old generated icon classes).
   */
  @Deprecated
  @ApiStatus.Internal
  @ApiStatus.ScheduledForRemoval(inVersion = "2021.3")
  default @NotNull Icon loadRasterizedIcon(@NotNull String path, @NotNull Class<?> aClass, long cacheKey, int flags) {
    // strip a legacy leading slash and delegate to the ClassLoader-based overload
    return loadRasterizedIcon(path.startsWith("/") ? path.substring(1) : path, aClass.getClassLoader(), cacheKey, flags);
  }
  /** Default implementation returns the icon unchanged. */
  default @NotNull Icon createEmptyIcon(@NotNull Icon icon) {
    return icon;
  }
  /** Default implementation returns the icon unchanged. */
  default @NotNull Icon createOffsetIcon(@NotNull Icon icon) {
    return icon;
  }
  @NotNull Icon createLayered(Icon @NotNull ... icons);
  /** Default implementation ignores the color and returns the source unchanged. */
  default @NotNull Icon colorize(Graphics2D g, @NotNull Icon source, @NotNull Color color) {
    return source;
  }
  @NotNull <T> Icon createDeferredIcon(@Nullable Icon base, T param, @NotNull Function<? super T, ? extends Icon> iconProducer);
  @NotNull RowIcon createLayeredIcon(@NotNull Iconable instance, Icon icon, int flags);
  /** Convenience overload defaulting to TOP alignment. */
  default @NotNull RowIcon createRowIcon(int iconCount) {
    return createRowIcon(iconCount, RowIcon.Alignment.TOP);
  }
  @NotNull RowIcon createRowIcon(int iconCount, RowIcon.Alignment alignment);
  @NotNull RowIcon createRowIcon(Icon @NotNull ... icons);
  void registerIconLayer(int flagMask, @NotNull Icon icon);
  @NotNull Icon tooltipOnlyIfComposite(@NotNull Icon icon);
}
/**
 * Holds the process-wide {@link IconManager} instance. Starts with the no-op
 * {@link DummyIconManager} and swaps in a real implementation on activation.
 */
final class IconManagerHelper {
  private static final AtomicBoolean isActivated = new AtomicBoolean();
  static volatile IconManager instance = DummyIconManager.INSTANCE;

  /**
   * Installs {@code impl}, or reflectively instantiates CoreIconManager when
   * {@code impl} is null. Only the first successful call takes effect.
   *
   * @throws Throwable whatever the reflective construction throws
   */
  static void activate(@Nullable IconManager impl) throws Throwable {
    // Only one caller may perform activation; later calls are no-ops.
    if (!isActivated.compareAndSet(false, true)) {
      return;
    }
    if (impl != null) {
      instance = impl;
      return;
    }
    try {
      // Load reflectively to avoid a hard compile-time dependency on CoreIconManager.
      Class<?> implClass = IconManagerHelper.class.getClassLoader().loadClass("com.intellij.ui.CoreIconManager");
      instance = (IconManager)MethodHandles.lookup().findConstructor(implClass, MethodType.methodType(void.class)).invoke();
    }
    catch (Throwable e) {
      // Activation failed — restore the flag so a later attempt can retry instead
      // of being silently skipped while the dummy manager stays installed.
      isActivated.set(false);
      throw e;
    }
  }

  static void deactivate() {
    if (isActivated.compareAndSet(true, false)) {
      instance = DummyIconManager.INSTANCE;
    }
  }
}
/**
 * No-op {@link IconManager} used before a real implementation is activated.
 * All factory methods return lightweight placeholder icons that paint nothing.
 */
final class DummyIconManager implements IconManager {
  static final IconManager INSTANCE = new DummyIconManager();

  private DummyIconManager() {
  }

  @Override
  public @NotNull Icon getStubIcon() {
    return DummyIcon.INSTANCE;
  }

  @Override
  public @NotNull Icon getIcon(@NotNull String path, @NotNull Class<?> aClass) {
    return new DummyIcon(path);
  }

  @Override
  public @NotNull Icon loadRasterizedIcon(@NotNull String path, @NotNull ClassLoader classLoader, long cacheKey, int flags) {
    return new DummyIcon(path);
  }

  @Override
  public @NotNull RowIcon createLayeredIcon(@NotNull Iconable instance, Icon icon, int flags) {
    // The second slot is intentionally left null; DummyRowIcon tolerates null entries.
    Icon[] icons = new Icon[2];
    icons[0] = icon;
    return new DummyRowIcon(icons);
  }

  @Override
  public void registerIconLayer(int flagMask, @NotNull Icon icon) {
    // no-op: the dummy manager keeps no layer registry
  }

  @Override
  public @NotNull Icon tooltipOnlyIfComposite(@NotNull Icon icon) {
    return icon;
  }

  @Override
  public @NotNull <T> Icon createDeferredIcon(@Nullable Icon base, T param, @NotNull Function<? super T, ? extends Icon> iconProducer) {
    // FIX: the method is declared @NotNull but previously returned a possibly-null
    // base; fall back to the shared placeholder to honor the contract.
    return base == null ? DummyIcon.INSTANCE : base;
  }

  @Override
  public @NotNull RowIcon createRowIcon(int iconCount, RowIcon.Alignment alignment) {
    return new DummyRowIcon(iconCount);
  }

  @Override
  public @NotNull Icon createLayered(Icon @NotNull ... icons) {
    return new DummyRowIcon(icons);
  }

  @Override
  public @NotNull RowIcon createRowIcon(Icon @NotNull ... icons) {
    return new DummyRowIcon(icons);
  }

  /** Fixed-size (16x16) placeholder icon identified only by its path; paints nothing. */
  private static class DummyIcon implements Icon {
    static final DummyIcon INSTANCE = new DummyIcon("<DummyIcon>");

    private final String path;

    private DummyIcon(@NotNull String path) {
      this.path = path;
    }

    @Override
    public void paintIcon(Component c, Graphics g, int x, int y) {
    }

    @Override
    public int getIconWidth() {
      return 16;
    }

    @Override
    public int getIconHeight() {
      return 16;
    }

    @Override
    public int hashCode() {
      return path.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
      // FIX: compare paths by value, not by reference. The previous identity
      // comparison (== on String) made equal-path icons unequal even though
      // they shared a hash code.
      return this == obj || (obj instanceof DummyIcon && ((DummyIcon)obj).path.equals(path));
    }

    @Override
    public String toString() {
      return path;
    }
  }

  /** Placeholder row icon; slots may contain nulls. */
  private static final class DummyRowIcon extends DummyIcon implements RowIcon {
    private Icon[] icons;

    DummyRowIcon(int iconCount) {
      super("<DummyRowIcon>");
      icons = new Icon[iconCount];
    }

    DummyRowIcon(Icon[] icons) {
      super("<DummyRowIcon>");
      this.icons = icons;
    }

    @Override
    public int getIconCount() {
      return icons == null ? 0 : icons.length;
    }

    @Override
    public Icon getIcon(int index) {
      return icons[index];
    }

    @Override
    public void setIcon(Icon icon, int i) {
      if (icons == null) {
        icons = new Icon[4];
      }
      icons[i] = icon;
    }

    @Override
    public @NotNull Icon getDarkIcon(boolean isDark) {
      return this;
    }

    @Override
    public Icon @NotNull [] getAllIcons() {
      // Null slots (e.g. from createLayeredIcon) are filtered out.
      List<Icon> list = new ArrayList<>();
      for (Icon element : icons) {
        if (element != null) {
          list.add(element);
        }
      }
      return list.toArray(new Icon[0]);
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      return o instanceof DummyRowIcon && Arrays.equals(icons, ((DummyRowIcon)o).icons);
    }

    @Override
    public int hashCode() {
      // FIX: keep hashCode consistent with the Arrays.equals-based equals and
      // avoid the NPE the old icons[0].hashCode() threw on a null first slot
      // (createLayeredIcon can put null into icons[0]).
      return Arrays.hashCode(icons);
    }

    @Override
    public String toString() {
      return "Row icon. myIcons=" + Arrays.asList(icons);
    }
  }
}
| |
package voldemort.store.readonly.fetcher;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.text.NumberFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.GZIPInputStream;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.log4j.Logger;
import voldemort.server.protocol.admin.AsyncOperationStatus;
import voldemort.store.readonly.checksum.CheckSum;
import voldemort.store.readonly.checksum.CheckSum.CheckSumType;
/**
 * Single-threaded {@link FetchStrategy}: copies each file of an HDFS store
 * directory to a local destination, computing a per-file checksum while
 * streaming and retrying each file up to the fetcher's configured maximum
 * number of attempts.
 */
public class BasicFetchStrategy implements FetchStrategy {
private final FileSystem fs;
private final HdfsCopyStats stats;
// Shared copy buffer; sized by the constructor's bufferSize argument.
private final byte[] buffer;
private final HdfsFetcher fetcher;
private final int bufferSize;
// May be null (tests / old ReadOnlyStoreManagementServlet path) — see progress reporting below.
private final AsyncOperationStatus status;
private static final Logger logger = Logger.getLogger(BasicFetchStrategy.class);
public BasicFetchStrategy(HdfsFetcher fetcher,
FileSystem fs,
HdfsCopyStats stats,
AsyncOperationStatus status,
int bufferSize) {
this.fs = fs;
this.stats = stats;
this.status = status;
this.buffer = new byte[bufferSize];
this.bufferSize = bufferSize;
this.fetcher = fetcher;
}
/**
 * Copies every file of {@code directory} into {@code dest} and returns a map
 * from each file to its computed checksum. Files whose copy produced no
 * checksum (no checksum type configured) are omitted from the map.
 */
@Override
public Map<HdfsFile, byte[]> fetch(HdfsDirectory directory, File dest) throws IOException {
Map<HdfsFile, byte[]> fileCheckSumMap = new HashMap<HdfsFile, byte[]>(directory.getFiles().size());
CheckSumType checkSumType = directory.getCheckSumType();
for (HdfsFile file : directory.getFiles()) {
String fileName = file.getDiskFileName();
File copyLocation = new File(dest, fileName);
byte[] fileCheckSum = copyFileWithCheckSum(file, copyLocation, checkSumType);
if(fileCheckSum != null) {
fileCheckSumMap.put(file, fileCheckSum);
}
}
return fileCheckSumMap;
}
/**
 * Function to copy a file from the given filesystem with a checksum of type
 * 'checkSumType' computed and returned. In case an error occurs during such
 * a copy, we do a retry for a maximum of NUM_RETRIES
 *
 * @param source
 * Source path of the file to copy
 * @param dest
 * Destination path of the file on the local machine
 * @param checkSumType
 * Type of the Checksum to be computed for this file; may be null,
 * in which case no checksum is computed and null is returned
 * @return A Checksum (generator) of type checkSumType which contains the
 * computed checksum of the copied file
 * @throws IOException if every attempt fails; the last failure is rethrown
 */
private byte[] copyFileWithCheckSum(HdfsFile source, File dest, CheckSumType checkSumType)
throws IOException {
// Remains null unless a checksum type is configured and a copy completes.
byte[] checkSum = null;
logger.debug("Starting copy of " + source + " to " + dest);
// Check if its Gzip compressed
boolean isCompressed = source.isCompressed();
FilterInputStream input = null;
OutputStream output = null;
long startTimeMS = System.currentTimeMillis();
int previousAttempt = 0;
// Retry loop: each attempt restarts the copy from scratch (streams are
// reopened and the checksum generator is recreated per attempt).
for (int attempt = 1; attempt <= fetcher.getMaxAttempts(); attempt++) {
boolean success = false;
long totalBytesRead = 0;
// Distinguishes open-failures from mid-copy failures in the error logs below.
boolean fsOpened = false;
try {
CheckSum fileCheckSumGenerator = null;
// Create a per file checksum generator
if (checkSumType != null) {
fileCheckSumGenerator = CheckSum.getInstance(checkSumType);
}
logger.info("Starting attempt #" + attempt + "/" + fetcher.getMaxAttempts() +
" to fetch remote file: " + source + " to local destination: " + dest);
input = new ThrottledInputStream(fs.open(source.getPath()), fetcher.getThrottler(), stats);
if (isCompressed) {
// We are already bounded by the "hdfs.fetcher.buffer.size"
// specified in the Voldemort config, the default value of
// which is 64K. Using the same as the buffer size for
// GZIPInputStream as well.
input = new GZIPInputStream(input, this.bufferSize);
}
fsOpened = true;
output = new BufferedOutputStream(new FileOutputStream(dest));
int read;
while (true) {
read = input.read(buffer);
if (read < 0) {
break;
} else {
output.write(buffer, 0, read);
}
// Update the per file checksum
if(fileCheckSumGenerator != null) {
fileCheckSumGenerator.update(buffer, 0, read);
}
stats.recordBytesWritten(read);
totalBytesRead += read;
// Progress is logged on the first pass of each attempt and then
// whenever enough bytes have moved since the last report.
boolean reportIntervalPassed = stats.getBytesTransferredSinceLastReport() > fetcher.getReportingIntervalBytes();
if (attempt != previousAttempt || reportIntervalPassed) {
previousAttempt = attempt;
NumberFormat format = NumberFormat.getNumberInstance();
format.setMaximumFractionDigits(2);
String message = stats.getTotalBytesTransferred() / (1024 * 1024) + " MB copied at "
+ format.format(stats.getBytesTransferredPerSecond() / (1024 * 1024)) + " MB/sec"
+ ", " + format.format(stats.getPercentCopied()) + " % complete"
+ ", attempt: #" + attempt + "/" + fetcher.getMaxAttempts()
+ ", current file: " + dest.getName();
if (this.status == null) {
// This is to accommodate tests and the old ReadOnlyStoreManagementServlet code path
// FIXME: Delete this when we get rid of the old code which does not use status
logger.info(message);
} else {
this.status.setStatus(message);
// status.toString() is more detailed than just the message. We print the whole
// thing so that server-side logs are very similar to client (BnP) -side logs.
logger.info(this.status.toString());
}
if (reportIntervalPassed) {
stats.reset();
}
}
}
if(fileCheckSumGenerator != null) {
checkSum = fileCheckSumGenerator.getCheckSum();
}
stats.reportFileDownloaded(dest,
startTimeMS,
source.getSize(),
System.currentTimeMillis() - startTimeMS,
attempt,
totalBytesRead,
checkSum);
logger.info("Completed copy of " + source + " to " + dest);
success = true;
} catch (IOException e) {
if(!fsOpened) {
logger.error("Error while opening the file stream to " + source, e);
} else {
logger.error("Error while copying file " + source + " after " + totalBytesRead + " bytes.", e);
}
if(e.getCause() != null) {
logger.error("Cause of error ", e.getCause());
}
if(attempt < fetcher.getMaxAttempts()) {
logger.info("Will retry copying after " + fetcher.getRetryDelayMs() + " ms")
;
sleepForRetryDelayMs();
} else {
// Out of attempts: record the failure and propagate the last error.
stats.reportFileError(dest, fetcher.getMaxAttempts(), startTimeMS, e);
logger.info("Fetcher giving up copy after " + fetcher.getMaxAttempts() + " attempts");
throw e;
}
} finally {
// Streams are closed after every attempt (success or failure);
// the loop only terminates early once a copy fully succeeded.
IOUtils.closeQuietly(output);
IOUtils.closeQuietly(input);
if(success) {
break;
}
}
}
return checkSum;
}
/** Sleeps for the fetcher's configured retry delay; interruption is logged, not propagated. */
private void sleepForRetryDelayMs() {
if (fetcher.getRetryDelayMs() > 0) {
try {
Thread.sleep(fetcher.getRetryDelayMs());
} catch (InterruptedException ie) {
logger.error("Fetcher interrupted while waiting to retry", ie);
}
}
}
/** Single-file variant: copies {@code file} to {@code dest} and returns its checksum (or null). */
@Override
public byte[] fetch(HdfsFile file, File dest, CheckSumType checkSumType) throws IOException {
return copyFileWithCheckSum(file, dest, checkSumType);
}
}
| |
/*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config.policy.elasticagents;
import com.thoughtworks.go.config.elastic.ClusterProfile;
import com.thoughtworks.go.config.elastic.ElasticProfile;
import com.thoughtworks.go.config.policy.Allow;
import com.thoughtworks.go.config.policy.Result;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
/**
 * Verifies how an "elastic_agent_profile" Allow directive resolves "view" and
 * "administer" permissions for elastic agent profiles and for the cluster
 * profiles that contain them.
 */
class ElasticAgentProfilesAllowDirectiveTest {
    @Nested
    class shouldDefinePermissions {
        @Test
        void forViewOfAllElasticAgentProfiles() {
            Allow directive = new Allow("view", "elastic_agent_profile", "*");

            // View is granted on all profiles and their clusters; administer is skipped.
            assertThat(directive.apply("view", ElasticProfile.class, "*", null)).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("view", ClusterProfile.class, "*", null)).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("administer", ElasticProfile.class, "*", null)).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ClusterProfile.class, "*", null)).isEqualTo(Result.SKIP);
        }

        @Test
        void forAdministerOfAllElasticAgentProfiles() {
            Allow directive = new Allow("administer", "elastic_agent_profile", "*");

            // Administer grants view as well; cluster administer stays skipped.
            assertThat(directive.apply("view", ElasticProfile.class, "*", null)).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("view", ClusterProfile.class, "*", null)).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("administer", ElasticProfile.class, "*", null)).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("administer", ClusterProfile.class, "*", null)).isEqualTo(Result.SKIP);
        }

        @Test
        void forViewOfAllElasticAgentProfiles_usingWildcardAllowAllElasticAgentProfilesPattern() {
            Allow directive = new Allow("view", "elastic_agent_profile", "*:*");

            // "*:*" behaves exactly like the bare "*" pattern.
            assertThat(directive.apply("view", ElasticProfile.class, "*", null)).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("view", ClusterProfile.class, "*", null)).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("administer", ElasticProfile.class, "*", null)).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ClusterProfile.class, "*", null)).isEqualTo(Result.SKIP);
        }

        @Test
        void forAdministerOfAllElasticAgentProfiles_usingWildcardAllowAllElasticAgentProfilesPattern() {
            Allow directive = new Allow("administer", "elastic_agent_profile", "*:*");

            assertThat(directive.apply("view", ElasticProfile.class, "*", null)).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("view", ClusterProfile.class, "*", null)).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("administer", ElasticProfile.class, "*", null)).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("administer", ClusterProfile.class, "*", null)).isEqualTo(Result.SKIP);
        }

        @Test
        void forViewOfAllElasticAgentProfilesWithinCluster() {
            Allow directive = new Allow("view", "elastic_agent_profile", "team1_*:*");

            // Profiles inside matching clusters are viewable...
            assertThat(directive.apply("view", ElasticProfile.class, "*", "team1_uat")).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("view", ElasticProfile.class, "*", "team2_uat")).isEqualTo(Result.SKIP);
            // ...and so are the matching cluster profiles themselves.
            assertThat(directive.apply("view", ClusterProfile.class, "team1_uat", null)).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("view", ClusterProfile.class, "team2_uat", null)).isEqualTo(Result.SKIP);
            // A view directive never grants administer.
            assertThat(directive.apply("administer", ElasticProfile.class, "*", "team1_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ElasticProfile.class, "*", "team2_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ClusterProfile.class, "team1_uat", null)).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ClusterProfile.class, "team2_uat", null)).isEqualTo(Result.SKIP);
        }

        @Test
        void forAdministerOfAllElasticAgentProfilesWithinCluster() {
            Allow directive = new Allow("administer", "elastic_agent_profile", "team1_*:*");

            assertThat(directive.apply("view", ElasticProfile.class, "*", "team1_uat")).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("view", ElasticProfile.class, "*", "team2_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("view", ClusterProfile.class, "team1_uat", null)).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("view", ClusterProfile.class, "team2_uat", null)).isEqualTo(Result.SKIP);
            // Administer covers profiles in matching clusters, but never the clusters themselves.
            assertThat(directive.apply("administer", ElasticProfile.class, "*", "team1_uat")).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("administer", ElasticProfile.class, "*", "team2_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ClusterProfile.class, "team1_uat", null)).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ClusterProfile.class, "team2_uat", null)).isEqualTo(Result.SKIP);
        }

        @Test
        void forViewOfSpecificElasticAgentProfilesWithinCluster() {
            Allow directive = new Allow("view", "elastic_agent_profile", "team1_*:agent1_*");

            // Only agent1_* profiles inside team1_* clusters are viewable.
            assertThat(directive.apply("view", ElasticProfile.class, "agent1_high_mem", "team1_uat")).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("view", ElasticProfile.class, "agent1_high_mem", "team2_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("view", ElasticProfile.class, "agent2_high_mem", "team1_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("view", ElasticProfile.class, "agent2_high_mem", "team2_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("view", ClusterProfile.class, "team1_uat", null)).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("view", ClusterProfile.class, "team2_uat", null)).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ElasticProfile.class, "agent1_high_mem", "team1_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ElasticProfile.class, "agent1_high_mem", "team2_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ElasticProfile.class, "agent2_high_mem", "team1_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ElasticProfile.class, "agent2_high_mem", "team2_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ClusterProfile.class, "team1_uat", null)).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ClusterProfile.class, "team2_uat", null)).isEqualTo(Result.SKIP);
        }

        @Test
        void forAdministerOfSpecificElasticAgentProfilesWithinCluster() {
            Allow directive = new Allow("administer", "elastic_agent_profile", "team1_*:agent1_*");

            assertThat(directive.apply("view", ElasticProfile.class, "agent1_high_mem", "team1_uat")).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("view", ElasticProfile.class, "agent1_high_mem", "team2_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("view", ElasticProfile.class, "agent2_high_mem", "team1_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("view", ElasticProfile.class, "agent2_high_mem", "team2_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("view", ClusterProfile.class, "team1_uat", null)).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("view", ClusterProfile.class, "team2_uat", null)).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ElasticProfile.class, "agent1_high_mem", "team1_uat")).isEqualTo(Result.ALLOW);
            assertThat(directive.apply("administer", ElasticProfile.class, "agent1_high_mem", "team2_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ElasticProfile.class, "agent2_high_mem", "team1_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ElasticProfile.class, "agent2_high_mem", "team2_uat")).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ClusterProfile.class, "team1_uat", null)).isEqualTo(Result.SKIP);
            assertThat(directive.apply("administer", ClusterProfile.class, "team2_uat", null)).isEqualTo(Result.SKIP);
        }
    }
}
| |
package com.maxifier.guice.jpa;
import com.intellij.codeHighlighting.HighlightDisplayLevel;
import com.intellij.codeInspection.*;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiElementFilter;
import com.intellij.psi.util.PsiTreeUtil;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import static com.maxifier.guice.jpa.GuiceJPAInspection.*;
/**
* Created by: Aleksey Didik
* Date: 3/16/11
* Time: 4:30 PM
* <p/>
* Copyright (c) 1999-2011 Maxifier Ltd. All Rights Reserved.
* Code proprietary and confidential.
* Use is subject to license terms.
*
* @author Aleksey Didik
*/
public class EntityManagerInspection extends AbstractDBInspection {
private static final String EM_TYPE = "EntityManager";
private static final String EM_FULL_TYPE = "javax.persistence.EntityManager";
private static final String TRANSACTION_ATTRIBUTE = "transaction";
private static final String DB_FIX_ACTIONS_FAMILY = "@DB fix actions";
/** Unique inspection id. */
@Nonnull
@Override
public String getID() {
return "EntityManagerInspection";
}
// Same value as getID(); presumably kept so older settings still resolve — TODO confirm.
@Override
public String getAlternativeID() {
return "EntityManagerInspection";
}
@Nonnull
@Override
public String getGroupDisplayName() {
return INSPECTIONS_GROUP_NAME;
}
/** Human-readable name shown in the inspections list. */
@Nonnull
@Override
public String getDisplayName() {
return "EntityManager usage";
}
@Nonnull
@Override
public String getShortName() {
return "entity-manager";
}
/** Enabled out of the box. */
@Override
public boolean isEnabledByDefault() {
return true;
}
/** Violations are reported at ERROR severity by default. */
@Nonnull
@Override
public HighlightDisplayLevel getDefaultLevel() {
return HighlightDisplayLevel.ERROR;
}
/**
 * Entry point: collects every class declared in the file and runs the
 * EntityManager checks on each, returning the accumulated problems.
 */
@Override
public ProblemDescriptor[] checkFile(@Nonnull PsiFile file, @Nonnull InspectionManager manager, boolean isOnTheFly) {
    List<ProblemDescriptor> problemDescriptors = new ArrayList<ProblemDescriptor>();
    PsiElement[] classes = PsiTreeUtil.collectElements(file, new PsiClassesFilter());
    for (PsiElement psiClass : classes) {
        checkClass((PsiClass) psiClass, manager, problemDescriptors, isOnTheFly);
    }
    // Idiom: toArray(new T[0]) instead of pre-sizing the destination array.
    return problemDescriptors.toArray(new ProblemDescriptor[0]);
}
/**
 * Runs all EntityManager-related checks on a single class: field modifiers
 * (via checkEMField), usage outside @DB methods, transaction necessity, raw
 * transaction access, and @DB annotations without any EntityManager usage.
 */
private void checkClass(PsiClass psiClass, InspectionManager inspectionManager, List<ProblemDescriptor> problemDescriptors, boolean onTheFly) {
PsiIdentifier nameIdentifier = psiClass.getNameIdentifier();
if (nameIdentifier == null) {
// No name identifier (e.g. anonymous class) — nothing to anchor problems on.
return;
}
final PsiField emField = checkEMField(psiClass, inspectionManager, problemDescriptors, onTheFly);
//check is emField injected
boolean isEmInjected = isEmInjected(psiClass, emField);
if (emField != null && isEmInjected) {
checkUsageWithoutDB(psiClass, inspectionManager, problemDescriptors, emField, onTheFly);
checkNecessityOfTransaction(psiClass, emField, inspectionManager, problemDescriptors, onTheFly);
checkRawTransactionUsage(psiClass, emField, inspectionManager, problemDescriptors, onTheFly);
}
//check annotated but without usages
checkAnnotatedWithoutUsages(psiClass, inspectionManager, problemDescriptors, emField, onTheFly);
}
/**
 * Stub: always reports the EntityManager field as injected. The commented-out
 * sketch below shows the intended constructor-injection analysis — not yet
 * implemented.
 */
private boolean isEmInjected(PsiClass psiClass, PsiField emField) {
// PsiMethod[] constructors = psiClass.getConstructors();
// for (PsiMethod constructor : constructors) {
// PsiElement[] fieldUsages = getFieldUsages(emField, constructor);
//
// }
return true;
}
/**
 * Flags direct transaction management (getTransaction()/joinTransaction())
 * inside @DB-annotated methods; transactions must be requested declaratively
 * via @DB(transaction=REQUIRED) instead.
 */
private void checkRawTransactionUsage(PsiClass psiClass,
PsiField emField,
InspectionManager inspectionManager,
List<ProblemDescriptor> problemDescriptors,
boolean onTheFly) {
for (PsiMethod psiMethod : psiClass.getMethods()) {
PsiAnnotation dbAnnotation = getAnnotation(psiMethod, DB_NAME);
if (dbAnnotation != null) {
for (PsiElement emFieldUsage : getFieldUsages(emField, psiMethod)) {
// Text-based heuristic match; see checkMethodCall.
if (checkMethodCall(emFieldUsage, "getTransaction", "joinTransaction")) {
problemDescriptors.add(inspectionManager.createProblemDescriptor(
emFieldUsage,
"Usage of getTransaction() or joinTransaction() methods is disallowed for @DB annotated methods," +
" use @DB(transaction=REQUIRED) instead.",
onTheFly,
LocalQuickFix.EMPTY_ARRAY,
ProblemHighlightType.GENERIC_ERROR_OR_WARNING
));
}
}
}
}
}
/**
 * Flags @DB annotations on methods that never touch the EntityManager field
 * (or when the class has no such field at all — emField may be null here);
 * offers a quick fix that deletes the superfluous annotation.
 */
private void checkAnnotatedWithoutUsages(PsiClass psiClass,
InspectionManager inspectionManager,
List<ProblemDescriptor> problemDescriptors,
PsiField emField,
boolean onTheFly) {
for (PsiMethod psiMethod : psiClass.getMethods()) {
PsiAnnotation dbAnnotation = getAnnotation(psiMethod, DB_NAME);
if (dbAnnotation != null) {
if (emField == null || getFieldUsages(emField, psiMethod).length == 0) {
problemDescriptors.add(inspectionManager.createProblemDescriptor(
dbAnnotation,
"@DB annotation is not required for this method",
onTheFly,
new LocalQuickFix[]{new DeleteAnnotationFixAction(dbAnnotation)},
ProblemHighlightType.LIKE_UNUSED_SYMBOL
));
}
}
}
}
/**
 * Verifies that @DB(transaction=...) matches the actual EntityManager usage:
 * mutating calls (persist/remove/merge) require transaction support, and a
 * declared transaction with no mutating calls is flagged as unnecessary.
 */
private void checkNecessityOfTransaction(PsiClass psiClass,
                                         final PsiField emField,
                                         InspectionManager inspectionManager,
                                         List<ProblemDescriptor> problemDescriptors,
                                         boolean onTheFly) {
    for (PsiMethod psiMethod : psiClass.getMethods()) {
        PsiAnnotation dbAnnotation = getAnnotation(psiMethod, DB_NAME);
        if (dbAnnotation == null) {
            continue;
        }
        PsiAnnotationMemberValue transactionAttribute = dbAnnotation.findAttributeValue(TRANSACTION_ATTRIBUTE);
        // findAttributeValue falls back to the annotation's declared default,
        // hence the suppressed null warning.
        @SuppressWarnings({"ConstantConditions"})
        boolean isTransaction = !transactionAttribute.getText().contains("NOT_REQUIRED");
        PsiElement[] methodCalls = getFieldUsages(emField, psiMethod);
        boolean trRequired = false;
        for (PsiElement methodCall : methodCalls) {
            if (checkMethodCall(methodCall, "persist", "remove", "merge")) {
                trRequired = true;
                if (!isTransaction) {
                    problemDescriptors.add(inspectionManager.createProblemDescriptor(
                            methodCall.getParent(),
                            "Transaction support is required for this usage of EntityManager",
                            onTheFly,
                            new LocalQuickFix[]{new AddTransactionRequiredFixAction(dbAnnotation)},
                            ProblemHighlightType.GENERIC_ERROR_OR_WARNING
                    ));
                }
            }
        }
        if (!trRequired && isTransaction) {
            problemDescriptors.add(inspectionManager.createProblemDescriptor(
                    dbAnnotation.getParameterList(),
                    "Transaction support is not required for this method",
                    // BUG FIX: was hard-coded `true`; pass the caller's onTheFly
                    // flag like every other descriptor in this inspection.
                    onTheFly,
                    new LocalQuickFix[]{new DeleteTransactionRequiredFixAction(dbAnnotation)},
                    ProblemHighlightType.LIKE_UNUSED_SYMBOL
            ));
        }
    }
}
/**
 * Flags methods that use the EntityManager field but are not annotated with
 * @DB; offers a quick fix that adds the annotation.
 */
private void checkUsageWithoutDB(PsiClass psiClass,
InspectionManager inspectionManager,
List<ProblemDescriptor> problemDescriptors,
final PsiField emField,
boolean onTheFly) {
for (PsiMethod psiMethod : psiClass.getMethods()) {
// Skip @DB methods and abstract/interface methods (no body to inspect).
if (getAnnotation(psiMethod, DB_NAME) == null && psiMethod.getBody() != null) {
PsiElement[] emFieldRefs = getFieldUsages(emField, psiMethod);
if (emFieldRefs.length != 0) {
//noinspection ConstantConditions
problemDescriptors.add(inspectionManager.createProblemDescriptor(
psiMethod.getNameIdentifier(),
"Method which use EntityManager should be annotated with @DB annotation",
onTheFly,
new LocalQuickFix[]{new AddAnnotationFixAction(psiMethod)},
ProblemHighlightType.GENERIC_ERROR_OR_WARNING
));
}
}
}
}
/**
 * Validates the EntityManager field's modifiers: it must not be static and
 * should be final; quick fixes remove/add the respective modifier.
 */
private void checkFieldModifiers(PsiField psiField,
                                 InspectionManager inspectionManager,
                                 List<ProblemDescriptor> problemDescriptors,
                                 boolean onTheFly) {
    PsiModifierList modifierList = psiField.getModifierList();
    if (modifierList != null) {
        if (modifierList.hasModifierProperty(STATIC_MODIFIER)) {
            PsiElement staticModifier = getModifier(modifierList, STATIC_MODIFIER);
            problemDescriptors.add(
                    inspectionManager.createProblemDescriptor(
                            staticModifier,
                            "EntityManager field should not be static",
                            new DeleteModifierFixAction(staticModifier),
                            ProblemHighlightType.GENERIC_ERROR_OR_WARNING,
                            onTheFly
                    )
            );
        }
        if (!modifierList.hasModifierProperty(FINAL_MODIFIER)) {
            problemDescriptors.add(
                    inspectionManager.createProblemDescriptor(
                            psiField,
                            "EntityManager field should be final",
                            new AddModifierFixAction(modifierList, FINAL_MODIFIER),
                            ProblemHighlightType.GENERIC_ERROR_OR_WARNING,
                            // BUG FIX: was hard-coded `true`; pass the caller's
                            // onTheFly flag as the sibling descriptor above does.
                            onTheFly
                    )
            );
        }
    }
}
/**
 * Finds the class' EntityManager field (searching inherited fields too via
 * getAllFields) and, when the field is declared in this very class, validates
 * its modifiers.
 *
 * @return the first EntityManager field found, or null when the class has none
 */
private PsiField checkEMField(PsiClass psiClass,
InspectionManager inspectionManager,
List<ProblemDescriptor> problemDescriptors,
boolean onTheFly) {
for (PsiField psiField : psiClass.getAllFields()) {
// Matches both the simple and fully-qualified type text.
String typeName = psiField.getType().getCanonicalText();
if (typeName.equals(EM_TYPE) || typeName.equals(EM_FULL_TYPE)) {
//noinspection ConstantConditions
if (psiField.getContainingClass().equals(psiClass)) {
checkFieldModifiers(psiField, inspectionManager, problemDescriptors, onTheFly);
}
return psiField;
}
}
// //add error that we need to have EM field
// //noinspection ConstantConditions
// problemDescriptors.add(
// inspectionManager.createProblemDescriptor(
// psiClass.getNameIdentifier(),
// "Class with @DB annotated methods should have an EntityManager field",
// true,
// LocalQuickFix.EMPTY_ARRAY,
// ProblemHighlightType.GENERIC_ERROR_OR_WARNING
//
// )
// );
return null;
}
/** Collects all references to {@code emField} inside the method's body. */
private PsiElement[] getFieldUsages(final PsiField emField, PsiMethod psiMethod) {
return PsiTreeUtil.collectElements(psiMethod.getBody(), new PsiRefToFilter(emField));
}
/**
 * Reports whether the element's text contains a call to any of the supplied
 * method names, i.e. the name immediately followed by an opening parenthesis.
 * This is a purely textual check on the element's source representation.
 */
private boolean checkMethodCall(PsiElement fieldRef, String... methods) {
    final String callText = fieldRef.getText();
    for (int i = 0; i < methods.length; i++) {
        if (callText.contains(methods[i] + "(")) {
            return true;
        }
    }
    return false;
}
/**
 * Quick fix that adds a plain {@code @DB} annotation to the offending method.
 */
private static class AddAnnotationFixAction extends IntentionAndQuickFixAction {
    // Method the @DB annotation is added to when the fix is applied.
    private final PsiMethod psiMethod;

    AddAnnotationFixAction(PsiMethod psiMethod) {
        this.psiMethod = psiMethod;
    }

    @Nonnull
    @Override
    public String getName() {
        return "Add @DB annotation to method";
    }

    @Nonnull
    @Override
    public String getFamilyName() {
        return DB_FIX_ACTIONS_FAMILY;
    }

    @Override
    public void applyFix(Project project, PsiFile psiFile, @Nullable Editor editor) {
        psiMethod.getModifierList().addAnnotation("DB");
    }
}
/**
 * Quick fix that replaces an existing {@code @DB} annotation with one that
 * carries the {@code transaction = REQUIRED} attribute. The annotation text
 * is shortened when the file already statically imports the REQUIRED constant.
 */
private static class AddTransactionRequiredFixAction extends IntentionAndQuickFixAction {
    // Existing @DB annotation to be replaced.
    private final PsiAnnotation psiAnnotation;

    AddTransactionRequiredFixAction(PsiAnnotation psiAnnotation) {
        this.psiAnnotation = psiAnnotation;
    }

    @Nonnull
    @Override
    public String getName() {
        return "Add transaction required attribute to @DB";
    }

    @Nonnull
    @Override
    public String getFamilyName() {
        return DB_FIX_ACTIONS_FAMILY;
    }

    @Override
    public void applyFix(Project project, PsiFile psiFile, @Nullable Editor editor) {
        // Capture the owner before deleting, then re-add the annotation with
        // the transaction attribute on the same modifier list.
        PsiAnnotationOwner modList = psiAnnotation.getOwner();
        psiAnnotation.delete();
        // Use the short constant form only when a matching static import exists.
        PsiElement[] psiElements = PsiTreeUtil.collectElements(psiFile, new RequiredStaticImportFilter());
        if (psiElements.length == 0) {
            modList.addAnnotation("DB(transaction = DB.Transaction.REQUIRED)");
        } else {
            modList.addAnnotation("DB(transaction = REQUIRED)");
        }
    }

    /**
     * Accepts static import statements whose imported member is named
     * {@code REQUIRED}.
     */
    private static class RequiredStaticImportFilter implements PsiElementFilter {
        @Override
        public boolean isAccepted(PsiElement element) {
            if (element instanceof PsiImportStaticStatement) {
                PsiImportStaticStatement importStatement = (PsiImportStaticStatement) element;
                String referenceName = importStatement.getReferenceName();
                if (referenceName != null && referenceName.equals("REQUIRED")) {
                    return true;
                }
            }
            return false;
        }
    }
}
/**
 * Quick fix that strips the {@code transaction} attribute from an existing
 * {@code @DB} annotation by replacing it with a bare {@code @DB}.
 */
private static class DeleteTransactionRequiredFixAction extends IntentionAndQuickFixAction {
    // Attributed @DB annotation to be replaced with a bare @DB.
    private final PsiAnnotation psiAnnotation;

    DeleteTransactionRequiredFixAction(PsiAnnotation psiAnnotation) {
        this.psiAnnotation = psiAnnotation;
    }

    @Nonnull
    @Override
    public String getName() {
        // FIX: action name previously read "... attribute to @DB", which is
        // grammatically wrong for a removal action.
        return "Delete transaction required attribute from @DB";
    }

    @Nonnull
    @Override
    public String getFamilyName() {
        return DB_FIX_ACTIONS_FAMILY;
    }

    @Override
    public void applyFix(Project project, PsiFile psiFile, @Nullable Editor editor) {
        // Capture the owner before deleting, then re-add a bare @DB.
        PsiAnnotationOwner modList = psiAnnotation.getOwner();
        psiAnnotation.delete();
        modList.addAnnotation("DB");
    }
}
/**
 * Quick fix that removes a {@code @DB} annotation that is not required.
 */
private static class DeleteAnnotationFixAction extends IntentionAndQuickFixAction {
    // Annotation removed when the fix is applied.
    private final PsiAnnotation psiAnnotation;

    DeleteAnnotationFixAction(PsiAnnotation psiAnnotation) {
        this.psiAnnotation = psiAnnotation;
    }

    @Nonnull
    @Override
    public String getName() {
        return "Delete not required annotation";
    }

    @Nonnull
    @Override
    public String getFamilyName() {
        return DB_FIX_ACTIONS_FAMILY;
    }

    @Override
    public void applyFix(Project project, PsiFile psiFile, @Nullable Editor editor) {
        psiAnnotation.delete();
    }
}
/**
 * Accepts only concrete class declarations: non-class elements, enums and
 * interfaces are all rejected.
 */
private static class PsiClassesFilter implements PsiElementFilter {
    @Override
    public boolean isAccepted(PsiElement psiElement) {
        if (psiElement instanceof PsiClass) {
            PsiClass candidate = (PsiClass) psiElement;
            return !candidate.isEnum() && !candidate.isInterface();
        }
        return false;
    }
}
/**
 * Accepts method-call expressions that involve the tracked EntityManager
 * field, either as the call's qualifier (em.persist(...)) or as one of the
 * call's arguments (helper(em)).
 */
private static class PsiRefToFilter implements PsiElementFilter {
    // Field whose usages are being collected.
    private final PsiField emField;

    public PsiRefToFilter(PsiField emField) {
        this.emField = emField;
    }

    @Override
    public boolean isAccepted(PsiElement element) {
        if (element instanceof PsiMethodCallExpression) {
            PsiMethodCallExpression methodCall = (PsiMethodCallExpression) element;
            // Case 1: the field is the qualifier of the call (em.xyz(...)).
            PsiExpression qualifierExpression = methodCall.getMethodExpression().getQualifierExpression();
            if (qualifierExpression != null) {
                PsiReference reference = qualifierExpression.getReference();
                if (reference != null && reference.isReferenceTo(emField)) {
                    return true;
                }
            }
            // Case 2: the field is passed as an argument to the call.
            PsiExpressionList argumentList = methodCall.getArgumentList();
            for (PsiExpression psiExpression : argumentList.getExpressions()) {
                PsiReference reference = psiExpression.getReference();
                if ((reference != null) && reference.isReferenceTo(emField)) {
                    return true;
                }
            }
        }
        return false;
    }
}
}
| |
/**
* Copyright 2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.openshift.ping.common;
import static org.openshift.ping.common.Utils.getSystemEnvInt;
import static org.openshift.ping.common.Utils.trimToNull;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.jgroups.Address;
import org.jgroups.Event;
import org.jgroups.Message;
import org.jgroups.annotations.Property;
import org.jgroups.protocols.PING;
import org.openshift.ping.common.compatibility.CompatibilityException;
import org.openshift.ping.common.compatibility.CompatibilityUtils;
import org.openshift.ping.common.server.Server;
import org.openshift.ping.common.server.ServerFactory;
import org.openshift.ping.common.server.Servers;
/**
 * Base JGroups discovery protocol for OpenShift: instead of multicast, it
 * forwards discovery requests over HTTP to the node addresses returned by
 * {@link #doReadAll(String)}. Timeouts and retry settings can be overridden
 * through environment variables (prefix + CONNECT_TIMEOUT, READ_TIMEOUT,
 * OPERATION_ATTEMPTS, OPERATION_SLEEP).
 */
public abstract class OpenshiftPing extends PING {

    private String clusterName;

    private final String _systemEnvPrefix;

    @Property
    private int connectTimeout = 5000;
    private int _connectTimeout;

    @Property
    private int readTimeout = 30000;
    private int _readTimeout;

    @Property
    private int operationAttempts = 3;
    private int _operationAttempts;

    @Property
    private long operationSleep = 1000;
    private long _operationSleep;

    private ServerFactory _serverFactory;
    private Server _server;
    private String _serverName;

    // Resolved reflectively because the 'up' and 'setSrc' signatures differ
    // between JGroups 3 and JGroups 4.
    private static Method sendMethod;
    private static Method setSrcMethod;

    public OpenshiftPing(String systemEnvPrefix) {
        super();
        _systemEnvPrefix = trimToNull(systemEnvPrefix);
        try {
            if (CompatibilityUtils.isJGroups4()) {
                sendMethod = this.getClass().getMethod("up", Message.class);
            } else {
                sendMethod = this.getClass().getMethod("up", Event.class);
            }
        } catch (Exception e) {
            throw new CompatibilityException("Could not find suitable 'up' method.", e);
        }
        try {
            // the return type of setSrc changed between JGroups 3 and 4
            setSrcMethod = Message.class.getMethod("setSrc", Address.class);
        } catch (Exception e) {
            throw new CompatibilityException("Could not find suitable 'setSrc' method.", e);
        }
    }

    /**
     * Builds the full environment variable name by prepending the configured
     * prefix (if any) to the given suffix.
     *
     * @return the composed name, or null when the suffix is blank
     */
    protected final String getSystemEnvName(String systemEnvSuffix) {
        StringBuilder sb = new StringBuilder();
        String suffix = trimToNull(systemEnvSuffix);
        if (suffix != null) {
            if (_systemEnvPrefix != null) {
                sb.append(_systemEnvPrefix);
            }
            sb.append(suffix);
        }
        return sb.length() > 0 ? sb.toString() : null;
    }

    protected final int getConnectTimeout() {
        return _connectTimeout;
    }

    protected final int getReadTimeout() {
        return _readTimeout;
    }

    protected final int getOperationAttempts() {
        return _operationAttempts;
    }

    protected final long getOperationSleep() {
        return _operationSleep;
    }

    protected abstract boolean isClusteringEnabled();

    protected abstract int getServerPort();

    public final void setServerFactory(ServerFactory serverFactory) {
        _serverFactory = serverFactory;
    }

    /**
     * Computes the effective settings, letting environment variables override
     * the {@code @Property} defaults.
     */
    @Override
    public void init() throws Exception {
        super.init();
        _connectTimeout = getSystemEnvInt(getSystemEnvName("CONNECT_TIMEOUT"), connectTimeout);
        _readTimeout = getSystemEnvInt(getSystemEnvName("READ_TIMEOUT"), readTimeout);
        _operationAttempts = getSystemEnvInt(getSystemEnvName("OPERATION_ATTEMPTS"), operationAttempts);
        _operationSleep = (long) getSystemEnvInt(getSystemEnvName("OPERATION_SLEEP"), (int) operationSleep);
    }

    @Override
    public void destroy() {
        _connectTimeout = 0;
        _readTimeout = 0;
        _operationAttempts = 0;
        _operationSleep = 0L; // uppercase suffix; lowercase 'l' reads like '1'
        super.destroy();
    }

    /**
     * Starts (or reuses) the embedded HTTP server that receives forwarded
     * discovery requests, but only when clustering is enabled.
     */
    @Override
    public void start() throws Exception {
        if (isClusteringEnabled()) {
            int serverPort = getServerPort();
            if (_serverFactory != null) {
                _server = _serverFactory.getServer(serverPort);
            } else {
                _server = Servers.getServer(serverPort);
            }
            _serverName = _server.getClass().getSimpleName();
            if (log.isInfoEnabled()) {
                log.info(String.format("Starting %s on port %s for channel address: %s", _serverName, serverPort, stack
                        .getChannel().getAddress()));
            }
            boolean started = _server.start(stack.getChannel());
            if (log.isInfoEnabled()) {
                log.info(String.format("%s %s.", _serverName, started ? "started" : "reused (pre-existing)"));
            }
        }
        super.start();
    }

    @Override
    public void stop() {
        try {
            if (_server != null) {
                if (log.isInfoEnabled()) {
                    log.info(String.format("Stopping server: %s", _serverName));
                }
                boolean stopped = _server.stop(stack.getChannel());
                if (log.isInfoEnabled()) {
                    log.info(String.format("%s %s.", _serverName, stopped ? "stopped" : "not stopped (still in use)"));
                }
            }
        } finally {
            super.stop();
        }
    }

    /**
     * Captures the cluster name from connect events before delegating down
     * the stack. No @Override: the signature differs across JGroups versions.
     */
    public Object down(Event evt) {
        switch (evt.getType()) {
            case Event.CONNECT:
            case Event.CONNECT_WITH_STATE_TRANSFER:
            case Event.CONNECT_USE_FLUSH:
            case Event.CONNECT_WITH_STATE_TRANSFER_USE_FLUSH:
                clusterName = (String) evt.getArg();
                break;
        }
        return super.down(evt);
    }

    /**
     * Replaces multicast discovery: forwards the request over HTTP to every
     * node returned by {@link #doReadAll(String)}.
     */
    @Override
    protected void sendMcastDiscoveryRequest(Message msg) {
        List<InetSocketAddress> nodes = readAll();
        if (nodes == null) {
            return;
        }
        if (msg.getSrc() == null) {
            setSrc(msg);
        }
        for (InetSocketAddress node : nodes) {
            // forward the request to each node
            timer.execute(new SendDiscoveryRequest(node, msg));
        }
    }

    /**
     * Deserializes a discovery request received by the embedded HTTP server
     * and feeds it up the protocol stack.
     */
    public void handlePingRequest(InputStream stream) throws Exception {
        DataInputStream dataInput = new DataInputStream(stream);
        Message msg = new Message();
        msg.readFrom(dataInput);
        try {
            sendUp(msg);
        } catch (Exception e) {
            log.error("Error processing GET_MBRS_REQ.", e);
        }
    }

    private void setSrc(Message msg) {
        try {
            setSrcMethod.invoke(msg, local_addr);
        } catch (IllegalAccessException e) {
            // FIX: was e.printStackTrace(); log through the protocol logger
            // instead (still best-effort, behavior otherwise unchanged).
            log.error("Could not invoke 'setSrc' method.", e);
        } catch (Exception e) {
            throw new CompatibilityException("Could not invoke 'setSrc' method.", e);
        }
    }

    private void sendUp(Message msg) {
        try {
            if (CompatibilityUtils.isJGroups4()) {
                sendMethod.invoke(this, msg);
            } else {
                // 1 == Event.MSG in JGroups 3
                sendMethod.invoke(this, new Event(1, msg));
            }
        } catch (Exception e) {
            throw new CompatibilityException("Could not invoke 'up' method.", e);
        }
    }

    private List<InetSocketAddress> readAll() {
        if (isClusteringEnabled()) {
            return doReadAll(clusterName);
        } else {
            return Collections.emptyList();
        }
    }

    /** Returns the discovered member addresses for the given cluster. */
    protected abstract List<InetSocketAddress> doReadAll(String clusterName);

    /**
     * One HTTP POST of the serialized discovery request to a single node,
     * rescheduled up to {@code _operationAttempts} times on failure.
     */
    private final class SendDiscoveryRequest implements Runnable {
        private final InetSocketAddress node;
        private final Message msg;
        private int attempts;

        private SendDiscoveryRequest(InetSocketAddress node, Message msg) {
            this.node = node;
            this.msg = msg;
        }

        @Override
        public void run() {
            ++attempts;
            final String url = String.format("http://%s:%s", node.getHostString(), node.getPort());
            if (log.isTraceEnabled()) {
                log.trace(String.format(
                        "%s opening connection: url [%s], clusterName [%s], connectTimeout [%s], readTimeout [%s]",
                        getClass().getSimpleName(), url, clusterName, _connectTimeout, _readTimeout));
            }
            HttpURLConnection connection = null;
            try {
                connection = (HttpURLConnection) new URL(url).openConnection(Proxy.NO_PROXY);
                connection.addRequestProperty(Server.CLUSTER_NAME, clusterName);
                if (_connectTimeout < 0 || _readTimeout < 0) {
                    throw new IllegalArgumentException(String.format(
                            "Neither connectTimeout [%s] nor readTimeout [%s] can be less than 0 for URLConnection.",
                            _connectTimeout, _readTimeout));
                }
                connection.setConnectTimeout(_connectTimeout);
                connection.setReadTimeout(_readTimeout);
                connection.setDoOutput(true);
                connection.setRequestMethod("POST");
                DataOutputStream out = new DataOutputStream(connection.getOutputStream());
                msg.writeTo(out);
                out.flush();
                String responseMessage = connection.getResponseMessage();
                if (log.isTraceEnabled()) {
                    log.trace(String.format(
                            "%s received response from server: url [%s], clusterName [%s], response [%s]", getClass()
                                    .getSimpleName(), url, clusterName, responseMessage));
                }
            } catch (Exception e) {
                log.warn(String.format("Error sending ping request: url [%s], clusterName [%s], attempts[%d]: %s", url,
                        clusterName, attempts, e.getLocalizedMessage()));
                if (attempts < _operationAttempts) {
                    timer.schedule(this, _operationSleep, TimeUnit.MILLISECONDS);
                }
            } finally {
                // BUG FIX: 'connection' is null when openConnection() threw; the
                // old code always called disconnect() and swallowed the NPE in an
                // empty catch block.
                if (connection != null) {
                    try {
                        connection.disconnect();
                    } catch (Exception ignored) {
                        // best-effort cleanup; nothing useful to do here
                    }
                }
            }
        }
    }
}
| |
/*
* Copyright 2017 Phoboslabs.me
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.phoboslabs.illuminati.processor;
import com.google.auto.service.AutoService;
import me.phoboslabs.illuminati.annotation.Illuminati;
import me.phoboslabs.illuminati.common.properties.IlluminatiPropertiesHelper;
import me.phoboslabs.illuminati.common.util.StringObjectUtils;
import javax.annotation.processing.*;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.tools.Diagnostic;
import javax.tools.Diagnostic.Kind;
import javax.tools.JavaFileObject;
import java.io.IOException;
import java.io.InputStream;
import java.io.Writer;
import java.lang.reflect.Method;
import java.util.*;
import static me.phoboslabs.illuminati.common.constant.IlluminatiConstant.*;
/**
 * Annotation processor that emits a single AspectJ pointcut class
 * ("IlluminatiPointcutGenerated") into the base package of the first element
 * annotated with {@code @Illuminati}, then stops processing.
 *
 * Created by leekyoungil (leekyoungil@gmail.com) on 10/07/2017.
 */
@AutoService(Processor.class)
public class IlluminatiProcessor extends AbstractProcessor {

    private Filer filer;
    private Messager messager;
    private String generatedIlluminatiTemplate;

    @Override public synchronized void init(ProcessingEnvironment processingEnv) {
        super.init(processingEnv);
        this.filer = processingEnv.getFiler();
        this.messager = processingEnv.getMessager();
    }

    @Override public Set<String> getSupportedAnnotationTypes() {
        // FIX: renamed the misspelled local 'annotataions'.
        Set<String> annotations = new LinkedHashSet<>();
        annotations.add(Illuminati.class.getCanonicalName());
        return annotations;
    }

    @Override public SourceVersion getSupportedSourceVersion() {
        return SourceVersion.RELEASE_8;// SourceVersion.latestSupported();
    }

    // @Illuminati is only meaningful on classes and methods.
    private static final List<ElementKind> ANNOTATION_ELEMENT_KIND = Collections.unmodifiableList(Arrays.asList(ElementKind.CLASS, ElementKind.METHOD));

    /**
     * Generates 'IlluminatiPointcutGenerated.java' at most once per
     * compilation: the labeled break leaves both loops as soon as the file is
     * written (the class must exist only once on the classloader).
     */
    @Override public boolean process(Set<? extends TypeElement> typeElements, RoundEnvironment env) {
        this.messager.printMessage(Kind.WARNING, "start illuminati compile");
        outerloop:
        for (TypeElement typeElement : typeElements) {
            for (Element element : env.getElementsAnnotatedWith(typeElement)) {
                Illuminati illuminati = element.getAnnotation(Illuminati.class);
                if (illuminati == null) {
                    continue;
                }
                if (!ANNOTATION_ELEMENT_KIND.contains(element.getKind())) {
                    // BUG FIX: the original concatenated the element name after an
                    // unused '%s' placeholder; substitute it properly instead.
                    this.messager.printMessage(Kind.ERROR, String.format("The class %s is not class or method.", element.getSimpleName()));
                    break outerloop;
                }
                final PackageElement pkg = processingEnv.getElementUtils().getPackageOf(element);
                if (pkg == null) {
                    this.messager.printMessage(Kind.ERROR, "Sorry, basePackage is wrong in properties read process.");
                    break outerloop;
                }
                if (!this.setGeneratedIlluminatiTemplate(pkg.toString())) {
                    continue;
                }
                try {
                    final JavaFileObject javaFile = this.filer.createSourceFile("IlluminatiPointcutGenerated");
                    // try-with-resources closes the writer; the explicit close()
                    // and the null check of the original were redundant
                    // (openWriter never returns null per the javax.tools contract).
                    try (final Writer writer = javaFile.openWriter()) {
                        writer.write(this.generatedIlluminatiTemplate);
                        this.messager.printMessage(Kind.NOTE, "generate source code!!");
                    }
                    // IlluminatiPointcutGenerated must exist only once on the classloader.
                    break outerloop;
                } catch (IOException ioe) {
                    this.messager.printMessage(Kind.ERROR, "Sorry, something is wrong in generated 'IlluminatiPointcutGenerated.java' process.");
                    break outerloop;
                }
            }
        }
        return true;
    }

    /**
     * Prints an error message.
     *
     * @param e The element which has caused the error. Can be null
     * @param msg The error message
     */
    public void error(Element e, String msg) {
        this.messager.printMessage(Diagnostic.Kind.ERROR, msg, e);
    }

    /**
     * Generates the Illuminati client class body into
     * {@link #generatedIlluminatiTemplate}.
     *
     * @param basePackageName package the generated class is placed in. Can not be null
     * @return false if generation failed, true otherwise
     */
    private boolean setGeneratedIlluminatiTemplate (final String basePackageName) {
        // step 1. set basicImport
        this.generatedIlluminatiTemplate = "package {basePackageName};\r\n".concat(this.getImport());
        // step 2. base package name
        this.generatedIlluminatiTemplate = this.generatedIlluminatiTemplate.replace("{basePackageName}", basePackageName);
        final String staticConfigurationTemplate = " private final IlluminatiClientInit illuminatiClientInit;\r\n \r\n";
        final String illuminatiAnnotationName = "me.phoboslabs.illuminati.annotation.Illuminati";
        // step 3. check chaosBomber is activated.
        PropertiesHelper propertiesHelper = new PropertiesHelper(this.messager);
        final String checkChaosBomber = propertiesHelper.getPropertiesValueByKey("chaosBomber", "false");
        String illuminatiExecuteMethod = "";
        if (StringObjectUtils.isValid(checkChaosBomber) && "true".equalsIgnoreCase(checkChaosBomber)) {
            illuminatiExecuteMethod = "ByChaosBomber";
        }
        // step 4. set the method body
        this.generatedIlluminatiTemplate += ""
            + "@Component\r\n"
            + "@Aspect\r\n"
            + "public class IlluminatiPointcutGenerated {\r\n\r\n"
            + staticConfigurationTemplate
            + " public IlluminatiPointcutGenerated() {\r\n"
            + " this.illuminatiClientInit = IlluminatiClientInit.getInstance();\r\n"
            + " }\r\n\r\n"
            + " @Pointcut(\"@within("+illuminatiAnnotationName+") || @annotation("+illuminatiAnnotationName+")\")\r\n"
            + " public void illuminatiPointcutMethod () { }\r\n\r\n"
            + " @Around(\"illuminatiPointcutMethod()\")\r\n"
            + " public Object profile (ProceedingJoinPoint pjp) throws Throwable {\r\n"
            + " if (this.illuminatiClientInit.illuminatiIsInitialized() == false) {\n"
            + " return pjp.proceed();\n"
            + " }\n"
            + " if (illuminatiClientInit.checkIlluminatiIsIgnore(pjp)) {\r\n"
            + " return pjp.proceed();\r\n"
            + " }\r\n"
            + " HttpServletRequest request = null;\r\n"
            + " try {\r\n"
            + " request = ((ServletRequestAttributes) RequestContextHolder.getRequestAttributes()).getRequest();\r\n"
            + " } catch (Exception ignore) {}\r\n"
            + " return illuminatiClientInit.executeIlluminati"+illuminatiExecuteMethod+"(pjp, request);\r\n"
            + " }\r\n"
            + "}\r\n"
            ;
        return true;
    }

    /**
     * Builds the import section of the generated class, grouping library
     * prefixes with their member suffixes.
     */
    private String getImport () {
        final String[] illuminatis = {
            "init.IlluminatiClientInit"
        };
        final String[] aspectjs = {
            "annotation.Aspect",
            "ProceedingJoinPoint",
            "annotation.Around",
            "annotation.Pointcut"
        };
        final String[] springs = {
            "stereotype.Component",
            "web.context.request.RequestContextHolder",
            "web.context.request.ServletRequestAttributes"
        };
        final String[] blanks = {
            "javax.servlet.http.HttpServletRequest"
        };
        final Map<String, String[]> imports = new HashMap<>();
        imports.put("me.phoboslabs.illuminati.processor", illuminatis);
        imports.put("org.aspectj.lang", aspectjs);
        imports.put("org.springframework", springs);
        imports.put("", blanks); // already fully qualified; no prefix needed
        final StringBuilder importString = new StringBuilder();
        imports.forEach((key, value) ->
            Arrays.stream(value).forEach(importLib -> {
                importString.append("import ");
                importString.append(key);
                if (!"".equals(key)) {
                    importString.append(".");
                }
                importString.append(importLib);
                importString.append(";\r\n");
            })
        );
        return importString.toString();
    }

    /**
     * Resolves configuration values from the Illuminati properties/YAML
     * files, falling back to a default when no file or key is found.
     * Non-static on purpose: it calls the enclosing processor's file readers.
     */
    private class PropertiesHelper {
        private final static String DEFAULT_CONFIG_PROPERTIES_FILE_NAME = "illuminati";
        private final Messager messager;

        PropertiesHelper (Messager messager) {
            this.messager = messager;
        }

        /**
         * Reads a property via its bean getter (key 'foo' -> getFoo()).
         *
         * @return the property value, or defaultValue when missing/unreadable
         */
        public String getPropertiesValueByKey (final String key, final String defaultValue) {
            final IlluminatiProcessorPropertiesImpl illuminatiProperties = this.getIlluminatiProperties();
            if (illuminatiProperties == null) {
                return defaultValue;
            }
            String propertiesValue = null;
            if (StringObjectUtils.isValid(key)) {
                try {
                    final String methodName = "get".concat(key.substring(0, 1).toUpperCase()).concat(key.substring(1));
                    final Method getNameMethod = IlluminatiProcessorPropertiesImpl.class.getMethod(methodName);
                    propertiesValue = (String) getNameMethod.invoke(illuminatiProperties);
                }
                catch (Exception ex) {
                    this.messager.printMessage(Diagnostic.Kind.WARNING, "Sorry, unable to find method. (" + ex.toString() + ")");
                }
            }
            return (StringObjectUtils.isValid(propertiesValue)) ? propertiesValue : defaultValue;
        }

        /**
         * Tries 'illuminati[-phase].<ext>' for every known extension, then the
         * basic fallback files; warns when nothing is found.
         */
        private IlluminatiProcessorPropertiesImpl getIlluminatiProperties () {
            IlluminatiProcessorPropertiesImpl illuminatiProperties = null;
            for (String extension : CONFIG_FILE_EXTENSTIONS) {
                StringBuilder dotBeforeExtension = new StringBuilder(".");
                if (StringObjectUtils.isValid(PROFILES_PHASE)) {
                    dotBeforeExtension.append("-");
                    dotBeforeExtension.append(PROFILES_PHASE);
                    dotBeforeExtension.append(".");
                }
                StringBuilder fullFileName = new StringBuilder(DEFAULT_CONFIG_PROPERTIES_FILE_NAME);
                fullFileName.append(dotBeforeExtension.toString());
                fullFileName.append(extension);
                illuminatiProperties = getIlluminatiPropertiesByFile(fullFileName.toString());
                if (illuminatiProperties != null) {
                    break;
                }
            }
            if (illuminatiProperties == null) {
                illuminatiProperties = getIlluminatiPropertiesFromBasicFiles();
            }
            if (illuminatiProperties == null) {
                this.messager.printMessage(Diagnostic.Kind.WARNING, "Sorry, unable to find config file");
            }
            return illuminatiProperties;
        }
    }

    /**
     * Loads one config file from the classpath; YAML is parsed with the shared
     * mapper, everything else as java.util.Properties.
     *
     * @return the parsed properties, or null when the file is absent/unreadable
     */
    private IlluminatiProcessorPropertiesImpl getIlluminatiPropertiesByFile(final String configPropertiesFileName) {
        IlluminatiProcessorPropertiesImpl illuminatiProperties = null;
        try (InputStream input = IlluminatiPropertiesHelper.class.getClassLoader().getResourceAsStream(configPropertiesFileName)) {
            if (input == null) {
                return null;
            }
            if (configPropertiesFileName.contains(".yml") || configPropertiesFileName.contains(".yaml")) {
                illuminatiProperties = YAML_MAPPER.readValue(input, IlluminatiProcessorPropertiesImpl.class);
            } else {
                final Properties prop = new Properties();
                prop.load(input);
                illuminatiProperties = new IlluminatiProcessorPropertiesImpl(prop);
            }
        } catch (IOException ex) {
            this.messager.printMessage(Diagnostic.Kind.WARNING, "Sorry, something is wrong in read process. (" + ex.toString() + ")");
        }
        return illuminatiProperties;
    }

    /** Falls back to the well-known basic config file names, first hit wins. */
    private IlluminatiProcessorPropertiesImpl getIlluminatiPropertiesFromBasicFiles() {
        IlluminatiProcessorPropertiesImpl illuminatiProperties;
        for (String fileName : BASIC_CONFIG_FILES) {
            illuminatiProperties = getIlluminatiPropertiesByFile(fileName);
            if (illuminatiProperties != null) {
                return illuminatiProperties;
            }
        }
        return null;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.jms.cf;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map.Entry;
import javax.jms.ConnectionFactory;
import javax.net.ssl.SSLContext;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.SeeAlso;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnDisabled;
import org.apache.nifi.annotation.lifecycle.OnEnabled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.components.Validator;
import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.ssl.SSLContextService;
import org.apache.nifi.ssl.SSLContextService.ClientAuth;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Provides a factory service that creates and initializes
* {@link ConnectionFactory} specific to the third party JMS system.
* <p>
* It accomplishes it by adjusting current classpath by adding to it the
* additional resources (i.e., JMS client libraries) provided by the user via
* {@link JMSConnectionFactoryProviderDefinition#CLIENT_LIB_DIR_PATH}, allowing
* it then to create an instance of the target {@link ConnectionFactory} based
* on the provided
* {@link JMSConnectionFactoryProviderDefinition#CONNECTION_FACTORY_IMPL} which
 * can then be accessed via the {@link #getConnectionFactory()} method.
* </p>
*/
@Tags({"jms", "messaging", "integration", "queue", "topic", "publish", "subscribe"})
@CapabilityDescription("Provides a generic service to create vendor specific javax.jms.ConnectionFactory implementations. "
+ "ConnectionFactory can be served once this service is configured successfully")
@DynamicProperty(name = "The name of a Connection Factory configuration property.", value = "The value of a given Connection Factory configuration property.",
description = "The properties that are set following Java Beans convention where a property name is derived from the 'set*' method of the vendor "
+ "specific ConnectionFactory's implementation. For example, 'com.ibm.mq.jms.MQConnectionFactory.setChannel(String)' would imply 'channel' "
+ "property and 'com.ibm.mq.jms.MQConnectionFactory.setTransportType(int)' would imply 'transportType' property.")
@SeeAlso(classNames = {"org.apache.nifi.jms.processors.ConsumeJMS", "org.apache.nifi.jms.processors.PublishJMS"})
public class JMSConnectionFactoryProvider extends AbstractControllerService implements JMSConnectionFactoryProviderDefinition {
private final Logger logger = LoggerFactory.getLogger(JMSConnectionFactoryProvider.class);
private static final List<PropertyDescriptor> PROPERTY_DESCRIPTORS;
private volatile boolean configured;
private volatile ConnectionFactory connectionFactory;
private static final String BROKER = "broker";
private static final String CF_IMPL = "cf";
private static final String CF_LIB = "cflib";
public static final PropertyDescriptor CONNECTION_FACTORY_IMPL = new PropertyDescriptor.Builder()
.name(CF_IMPL)
.displayName("MQ ConnectionFactory Implementation")
.description("A fully qualified name of the JMS ConnectionFactory implementation "
+ "class (i.e., org.apache.activemq.ActiveMQConnectionFactory)")
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.required(true)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
public static final PropertyDescriptor CLIENT_LIB_DIR_PATH = new PropertyDescriptor.Builder()
.name(CF_LIB)
.displayName("MQ Client Libraries path (i.e., /usr/jms/lib)")
.description("Path to the directory with additional resources (i.e., JARs, configuration files etc.) to be added "
+ "to the classpath. Such resources typically represent target MQ client libraries for the "
+ "ConnectionFactory implementation.")
.addValidator(StandardValidators.createListValidator(true, true, StandardValidators.createURLorFileValidator()))
.required(true)
.dynamicallyModifiesClasspath(true)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
// ConnectionFactory specific properties
public static final PropertyDescriptor BROKER_URI = new PropertyDescriptor.Builder()
.name(BROKER)
.displayName("Broker URI")
.description("URI pointing to the network location of the JMS Message broker. For example, "
+ "'tcp://myhost:61616' for ActiveMQ or 'myhost:1414' for IBM MQ")
.addValidator(new NonEmptyBrokerURIValidator())
.required(true)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
public static final PropertyDescriptor SSL_CONTEXT_SERVICE = new PropertyDescriptor.Builder()
.name("SSL Context Service")
.description("The SSL Context Service used to provide client certificate information for TLS/SSL connections.")
.required(false)
.identifiesControllerService(SSLContextService.class)
.build();
static {
PROPERTY_DESCRIPTORS = Collections.unmodifiableList(Arrays.asList(CONNECTION_FACTORY_IMPL, CLIENT_LIB_DIR_PATH, BROKER_URI, SSL_CONTEXT_SERVICE));
}
/** Returns the fixed, immutable list of supported configuration properties. */
@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    return PROPERTY_DESCRIPTORS;
}
/**
 * Builds a descriptor for a user-supplied dynamic property; its value is
 * later applied to the ConnectionFactory through the matching bean setter.
 */
@Override
protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    final String description = "Specifies the value for '" + propertyDescriptorName
            + "' property to be set on the provided ConnectionFactory implementation.";
    return new PropertyDescriptor.Builder()
            .name(propertyDescriptorName)
            .description(description)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .dynamic(true)
            .build();
}
/**
 * Clears the cached ConnectionFactory, but only when the caller hands back
 * the exact instance this service currently holds.
 */
@Override
public void resetConnectionFactory(ConnectionFactory cachedFactory) {
    if (cachedFactory != connectionFactory) {
        return;
    }
    getLogger().debug("Resetting connection factory");
    connectionFactory = null;
}
/**
 * @return the configured {@link ConnectionFactory} instance
 * @throws IllegalStateException when the service has not been configured yet
 */
@Override
public ConnectionFactory getConnectionFactory() {
    if (!this.configured) {
        throw new IllegalStateException("ConnectionFactory can not be obtained unless "
                + "this ControllerService is configured. See onConfigure(ConfigurationContext) method.");
    }
    return this.connectionFactory;
}
/**
 * Lazily creates and configures the vendor ConnectionFactory when the
 * service is enabled. Any failure flips the service back to 'unconfigured'
 * and is rethrown so the framework marks the enable attempt as failed.
 */
@OnEnabled
public void enable(ConfigurationContext context) throws InitializationException {
    try {
        if (!this.configured) {
            // Guard kept so property expressions are only evaluated when INFO
            // logging is active; parameterized form replaces string concatenation.
            if (logger.isInfoEnabled()) {
                logger.info("Configuring {} for '{}' to be connected to '{}'",
                        this.getClass().getSimpleName(),
                        context.getProperty(CONNECTION_FACTORY_IMPL).evaluateAttributeExpressions().getValue(),
                        context.getProperty(BROKER_URI).evaluateAttributeExpressions().getValue());
            }
            this.createConnectionFactoryInstance(context);
            this.setConnectionFactoryProperties(context);
        }
        this.configured = true;
    } catch (Exception e) {
        // Parameterized SLF4J call; the throwable stays the last argument so
        // the stack trace is still logged.
        logger.error("Failed to configure {}", this.getClass().getSimpleName(), e);
        this.configured = false;
        throw new IllegalStateException(e);
    }
}
/**
 * Drops the cached factory and marks the service unconfigured so the next
 * enable() builds a fresh ConnectionFactory.
 */
@OnDisabled
public void disable() {
    this.connectionFactory = null;
    this.configured = false;
}
/**
 * This operation follows standard bean convention by matching property name
 * to its corresponding 'setter' method. Once the method is located it is
 * invoked to set the corresponding property to the value provided during
 * service configuration. For example, 'channel' property will correspond to
 * 'setChannel(..)' method and 'queueManager' property will correspond to
 * 'setQueueManager(..)' method with a single argument.
 * <p>
 * There are also a few adjustments to accommodate well known brokers. For
 * example ActiveMQ ConnectionFactory accepts the address of the Message Broker
 * in the form of a URL while IBM's accepts a host/port pair. So this method
 * uses the 'BROKER_URI' value 'as is' when the ConnectionFactory implementation
 * comes from ActiveMQ; for all others the value is split on ':' and, when it
 * yields a host/port pair, setHostName(..) and setPort(..) are invoked;
 * otherwise the raw value is passed to setServerUrl(..) (e.g., TIBCO). All
 * other properties are set from dynamic properties where the user provides
 * both property name and value; the bean convention is explained in the user
 * manual for this component.
 *
 * @see #setProperty(String, Object)
 */
private void setConnectionFactoryProperties(ConfigurationContext context) {
    for (final Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        final PropertyDescriptor descriptor = entry.getKey();
        final String propertyName = descriptor.getName();
        if (descriptor.isDynamic()) {
            // Dynamic properties map 1:1 onto ConnectionFactory bean setters.
            this.setProperty(propertyName, entry.getValue());
        } else if (propertyName.equals(BROKER)) {
            final String brokerValue = context.getProperty(descriptor).evaluateAttributeExpressions().getValue();
            final String cfImpl = context.getProperty(CONNECTION_FACTORY_IMPL).evaluateAttributeExpressions().getValue();
            if (cfImpl.startsWith("org.apache.activemq")) {
                // ActiveMQ accepts the broker URI verbatim.
                this.setProperty("brokerURL", brokerValue);
            } else {
                final String[] hostPort = brokerValue.split(":");
                if (hostPort.length == 2) {
                    this.setProperty("hostName", hostPort[0]);
                    this.setProperty("port", hostPort[1]);
                } else {
                    // Not a simple host:port pair; hand the raw value through
                    // (e.g., TIBCO expects a full URL via setServerUrl(..)).
                    // The original code also carried an unreachable
                    // 'throw new IllegalArgumentException' branch here (dead code
                    // behind 'else if (hostPort.length != 2)'); it has been removed.
                    this.setProperty("serverUrl", brokerValue);
                }
            }
            final SSLContextService sc = context.getProperty(SSL_CONTEXT_SERVICE).asControllerService(SSLContextService.class);
            if (sc != null) {
                final SSLContext ssl = sc.createSSLContext(ClientAuth.NONE);
                this.setProperty("sSLSocketFactory", ssl.getSocketFactory());
            }
        }
        // Other non-dynamic properties are not relevant to CF configuration and are ignored.
    }
}
/**
 * Sets the corresponding {@link ConnectionFactory} property to 'propertyValue'
 * by invoking the 'setter' method that matches 'propertyName' (bean
 * convention, e.g. 'channel' -&gt; setChannel(..)).
 * <p>
 * NOTE: There is a limited type conversion to accommodate property value
 * types, since all NiFi configuration properties come in as String: the
 * candidate setter's argument type is inspected and the String value is parsed
 * to the matching primitive (int/long/boolean). When no String/primitive
 * overload matches, the first candidate is invoked with the raw value (used,
 * for example, for setSSLSocketFactory(SSLSocketFactory)).
 */
private void setProperty(String propertyName, Object propertyValue) {
    final String setterName = this.toMethodName(propertyName);
    final Method[] candidates = Utils.findMethods(setterName, this.connectionFactory.getClass());
    if (candidates == null || candidates.length == 0) {
        // Some factories expose setHost(..) rather than setHostName(..); retry
        // under the alternative common convention.
        if (propertyName.equals("hostName")) {
            this.setProperty("host", propertyValue);
        }
        return;
    }
    try {
        for (final Method candidate : candidates) {
            final Class<?> argType = candidate.getParameterTypes()[0];
            if (String.class.isAssignableFrom(argType)) {
                candidate.invoke(this.connectionFactory, propertyValue);
                return;
            }
            if (int.class.isAssignableFrom(argType)) {
                candidate.invoke(this.connectionFactory, Integer.parseInt((String) propertyValue));
                return;
            }
            if (long.class.isAssignableFrom(argType)) {
                candidate.invoke(this.connectionFactory, Long.parseLong((String) propertyValue));
                return;
            }
            if (boolean.class.isAssignableFrom(argType)) {
                candidate.invoke(this.connectionFactory, Boolean.parseBoolean((String) propertyValue));
                return;
            }
        }
        // No String/primitive overload matched: pass the value through untouched.
        candidates[0].invoke(this.connectionFactory, propertyValue);
    } catch (Exception e) {
        throw new IllegalStateException("Failed to set property " + propertyName, e);
    }
}
/**
 * Instantiates the {@link ConnectionFactory} whose class name is given by the
 * 'CONNECTION_FACTORY_IMPL' property and caches it on this service.
 */
private void createConnectionFactoryInstance(ConfigurationContext context) {
    this.connectionFactory = Utils.newDefaultInstance(
            context.getProperty(CONNECTION_FACTORY_IMPL).evaluateAttributeExpressions().getValue());
}
/**
 * Converts a bean property name to its setter name, e.g. 'channel' becomes
 * 'setChannel' and 'queueManager' becomes 'setQueueManager'.
 */
private String toMethodName(String propertyName) {
    final char[] chars = propertyName.toCharArray();
    chars[0] = Character.toUpperCase(chars[0]);
    return "set" + String.valueOf(chars);
}
/**
 * {@link Validator} that accepts any value containing Expression Language
 * (deferred until after EL evaluation) and otherwise requires a non-empty
 * broker URI.
 */
static class NonEmptyBrokerURIValidator implements Validator {
    @Override
    public ValidationResult validate(String subject, String input, ValidationContext context) {
        final boolean deferToExpressionLanguage =
                context.isExpressionLanguageSupported(subject) && context.isExpressionLanguagePresent(input);
        if (deferToExpressionLanguage) {
            return new ValidationResult.Builder()
                    .subject(subject)
                    .input(input)
                    .explanation("Expression Language Present")
                    .valid(true)
                    .build();
        }
        return StandardValidators.NON_EMPTY_VALIDATOR.validate(subject, input, context);
    }
}
}
| |
package org.tlc.mtg.sim.player;
import org.tlc.mtg.nouns.Card;
import org.tlc.mtg.nouns.CardSorts;
import org.tlc.mtg.nouns.Mana;
import org.tlc.mtg.nouns.Phases;
import org.tlc.mtg.nouns.ResolvedType;
import org.tlc.mtg.nouns.Phase;
import org.tlc.mtg.sim.Stats;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
*/
/**
 * Simulated player: owns the deck, hand, board and removed pile, tracks life
 * and turn number, and points {@link #getCur()} at a per-turn {@link Stats}
 * bucket supplied at construction.
 */
public class Player {
    private int turn;
    private int life;
    private Cards deck = new Cards();
    private Hand hand = new Hand();
    private Board board = new Board();
    private Cards out = new Cards();
    private List<Phase<Player>> phases = new ArrayList<>();
    private Stats[] stats; // one bucket per turn; may be null or empty
    private Stats cur;     // bucket for the current turn
    private boolean constrained;

    public Player(Stats[] stats) {
        this.stats = stats;
        // Call the private helper so the constructor never invokes an
        // overridable public method (resetPlayer) on a half-built instance.
        resetState();
    }

    /**
     * Restores the fresh-game state: turn 0, 20 life, all zones reset.
     *
     * @param toDepth unused -- NOTE(review): kept for interface compatibility;
     *                confirm whether depth-limited resets were ever intended.
     */
    public void resetPlayer(int toDepth) {
        resetState();
    }

    // Shared reset logic; safe to invoke from the constructor.
    private void resetState() {
        constrained = false;
        turn = 0;
        life = 20;
        deck.reset();
        hand.reset();
        board.reset();
        out.reset();
        if (stats != null && stats.length > 0) {
            cur = stats[turn];
        }
    }

    /**
     * Given a hand and a board with lands, find the maximal thing to cast in
     * hand. This returns a card, but makes no changes to the hand or board;
     * it is a simple match of resources to cards.
     *
     * @return the highest-damage castable card, or null when nothing is castable.
     */
    public Card findTopCast() {
        Cards.CardCollector cc = new Cards.CardCollector();
        getHand().visit(cc);
        // Consider the most damaging cards first.
        Collections.sort(cc.getCollected(), new CardSorts.HiToLowDamageSort());
        for (Card c : cc.getCollected()) {
            if (canCast(c)) {
                return c;
            }
        }
        return null;
    }

    /** Puts a land onto the board, firing its comes-into-play phase. */
    public void placeLand(Card c) {
        c.applyPhase(Phases.COMES_INTO_PLAY);
        getBoard().getLand().add(c);
    }

    /**
     * Given the card in your hand, place this card as a spell. Does nothing
     * when no mana plan exists or the card is not actually in hand; otherwise
     * fires CAST and COMES_INTO_PLAY phases, resolves the card onto the board,
     * and taps the mana sources in the plan.
     *
     * @param c a given card in your hand
     */
    public void castSpell(Card c) {
        Set<Card> plan = castPlan(c);
        if (plan == null) {
            return;
        }
        if (!hand.takeSpecific(c)) {
            return;
        }
        c.applyPhase(Phases.CAST);
        c.applyPhase(Phases.COMES_INTO_PLAY);
        c.resType.play(getBoard(), c);
        for (Card land : plan) {
            land.tapped = true;
        }
    }

    /** Draws the opening seven cards from the deck into the hand. */
    public void initialDraw() {
        List<Card> draw = deck.takeFromTop(7);
        for (Card c : draw) {
            c.applyPhase(Phases.DRAW);
        }
        hand.resetAndAddAllCards(draw);
    }

    public void addToLife(int n) {
        life += n;
    }

    public void removeFromLife(int n) {
        life -= n;
    }

    /**
     * @return true when a complete mana plan exists covering every cost slot.
     * NOTE(review): zero-cost cards (hasCost == false) are reported as
     * uncastable here, matching the original logic -- confirm that is intended.
     */
    protected boolean canCast(Card c) {
        Set<Card> plan = castPlan(c);
        if (plan == null) {
            return false;
        }
        return c.cost.hasCost && plan.size() == c.cost.slots.length;
    }

    /**
     * Matches untapped mana sources on the board against the card's cost slots.
     *
     * @return the set of sources to tap, an empty set for a costless card, or
     *         null when the cost cannot be paid.
     */
    protected Set<Card> castPlan(Card c) {
        if (!c.cost.hasCost) {
            return new HashSet<>();
        }
        Set<Card> toBeUsed = new HashSet<>();
        List<Card> src = collectManaSources(toBeUsed);
        for (Mana m : c.cost.slots) {
            Card land = findAndRemove(src, m);
            if (land == null) {
                return null;
            }
            toBeUsed.add(land);
        }
        return toBeUsed;
    }

    /**
     * Gathers every potential mana source on the board: lands, critters and
     * artifacts.
     *
     * @param manaPotential unused -- NOTE(review): kept for interface
     *                      compatibility; confirm before removing.
     */
    protected List<Card> collectManaSources(Set<Card> manaPotential) {
        Cards.ManaSourceCollector cc = new Cards.ManaSourceCollector();
        getBoard().getLand().visit(cc);
        getBoard().getCritters().visit(cc);
        getBoard().getArtifacts().visit(cc);
        return cc.getCollected();
    }

    /**
     * Finds the first untapped mana source in src that slot m accepts, removes
     * it from src, and returns it (null when none matches).
     */
    protected Card findAndRemove(List<Card> src, Mana m) {
        Card ret = null;
        for (Card c : src) {
            // Only lands or cards that themselves provide mana qualify.
            if (!(c.resType.equals(ResolvedType.LAND) || c.manaSrc != null)) {
                continue;
            }
            if (c.tapped) {
                continue;
            }
            if (m.accepts(c)) {
                ret = c;
                break;
            }
        }
        if (ret != null) {
            src.remove(ret);
        }
        return ret;
    }

    public int getLife() {
        return life;
    }

    public Cards getDeck() {
        return deck;
    }

    public Hand getHand() {
        return hand;
    }

    public Board getBoard() {
        return board;
    }

    public Cards getOut() {
        return out;
    }

    /**
     * Points {@link #getCur()} at the stats bucket for the current turn, then
     * advances the turn counter.
     * NOTE(review): assumes stats is non-null and turn &lt; stats.length;
     * otherwise this throws -- confirm stats is sized to the simulation's
     * maximum turn count.
     */
    public void incTurnAndCur() {
        cur = stats[turn];
        turn++;
    }

    public int getTurn() {
        return turn;
    }

    public boolean isConstrained() {
        return constrained;
    }

    public void setConstrained(boolean constrained) {
        this.constrained = constrained;
    }

    public List<Phase<Player>> getPhases() {
        return phases;
    }

    public Stats[] getStats() {
        return stats;
    }

    public Stats getCur() {
        return cur;
    }

    @Override
    public String toString() {
        return "L: " + life + " H: " + hand.depth() + " D: " + deck.depth() + " @ T " + turn;
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.java.codeInsight.completion;
import com.intellij.JavaTestUtil;
import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.codeInsight.completion.CompletionType;
import com.intellij.codeInsight.completion.LightFixtureCompletionTestCase;
import com.intellij.codeInsight.completion.StaticallyImportable;
import com.intellij.codeInsight.lookup.Lookup;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementPresentation;
import com.intellij.codeInsight.template.SmartCompletionContextType;
import com.intellij.codeInsight.template.Template;
import com.intellij.codeInsight.template.TemplateContextType;
import com.intellij.codeInsight.template.TemplateManager;
import com.intellij.codeInsight.template.impl.TemplateImpl;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.psi.PsiClass;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.psi.codeStyle.JavaCodeStyleSettings;
import com.intellij.testFramework.fixtures.CodeInsightTestUtil;
import com.intellij.util.containers.ContainerUtil;
public class SmartTypeCompletionTest extends LightFixtureCompletionTestCase {
@Override
protected String getBasePath() {
// All before/after test data for this class lives under .../completion/smartType/.
return JavaTestUtil.getRelativeJavaTestDataPath() + "/codeInsight/completion/smartType/";
}
// Data-driven before/after cases from the /parenAfterCast test-data directory.
public void testParenAfterCast1() {
    configureByFile("/parenAfterCast/before1.java");
    checkResultByFile("/parenAfterCast/after1.java");
}

public void testParenAfterCast2() {
    configureByFile("/parenAfterCast/before2.java");
    checkResultByFile("/parenAfterCast/after2.java");
}

public void testParenAfterCast3() {
    configureByFile("/parenAfterCast/before3.java");
    checkResultByFile("/parenAfterCast/after3.java");
}
// Data-driven before/after cases from the /parenAfterCall test-data directory.
public void testParenAfterCall1() {
    configureByFile("/parenAfterCall/before1.java");
    checkResultByFile("/parenAfterCall/after1.java");
}

public void testParenAfterCall2() {
    configureByFile("/parenAfterCall/before2.java");
    checkResultByFile("/parenAfterCall/after2.java");
}

public void testParenAfterCall3() {
    configureByFile("/parenAfterCall/before3.java");
    checkResultByFile("/parenAfterCall/after3.java");
}

public void testParenAfterCall4() {
    configureByFile("/parenAfterCall/before4.java");
    checkResultByFile("/parenAfterCall/after4.java");
}

public void testParenAfterCall5() {
    configureByFile("/parenAfterCall/before5.java");
    checkResultByFile("/parenAfterCall/after5.java");
}

public void testParenAfterCall6() {
    configureByFile("/parenAfterCall/before6.java");
    checkResultByFile("/parenAfterCall/after6.java");
}
// Same input as testParenAfterCall1, but with SPACE_WITHIN_METHOD_CALL_PARENTHESES
// enabled; compared against a dedicated "after1_space" expectation file.
public void testParenAfterCall1_SpaceWithinMethodCallParens() {
    myFixture.configureByFile("/parenAfterCall/before1.java");
    getCodeStyleSettings().SPACE_WITHIN_METHOD_CALL_PARENTHESES = true;
    complete();
    checkResultByFile("/parenAfterCall/after1_space.java");
}
// Data-driven before/after cases from /parenAfterIf and /afterNew.
public void testParenAfterIf1() {
    configureByFile("/parenAfterIf/before1.java");
    checkResultByFile("/parenAfterIf/after1.java");
}

public void testParenAfterIf2() {
    configureByFile("/parenAfterIf/before2.java");
    checkResultByFile("/parenAfterIf/after2.java");
}

public void testForceLookupForAbstractClasses() {
    configureByFile("/afterNew/before9.java");
    checkResultByFile("/afterNew/after9.java");
}
// /afterNew cases 1-8 complete at the caret, pick the current lookup item via
// select(), and compare with the matching "after" file. testAfterNew9 uses the
// before10/after10 data pair.
public void testAfterNew1() {
    configureByFile("/afterNew/before1.java");
    select();
    checkResultByFile("/afterNew/after1.java");
}

public void testAfterNew2() {
    configureByFile("/afterNew/before2.java");
    select();
    checkResultByFile("/afterNew/after2.java");
}

public void testAfterNew3() {
    configureByFile("/afterNew/before3.java");
    select();
    checkResultByFile("/afterNew/after3.java");
}

public void testAfterNew4() {
    configureByFile("/afterNew/before4.java");
    select();
    checkResultByFile("/afterNew/after4.java");
}

public void testAfterNew5() {
    configureByFile("/afterNew/before5.java");
    select();
    checkResultByFile("/afterNew/after5.java");
}

public void testAfterNew6() {
    configureByFile("/afterNew/before6.java");
    select();
    checkResultByFile("/afterNew/after6.java");
}

public void testAfterNew7() {
    configureByFile("/afterNew/before7.java");
    select();
    checkResultByFile("/afterNew/after7.java");
}

public void testAfterNew8() {
    configureByFile("/afterNew/before8.java");
    select();
    checkResultByFile("/afterNew/after8.java");
}

public void testAfterNew9() {
    configureByFile("/afterNew/before10.java");
    select();
    checkResultByFile("/afterNew/after10.java");
}
// Later /afterNew cases: some intentionally skip the explicit select() call
// (the completion result is applied without picking a lookup item).
public void testAfterNew10() {
    configureByFile("/afterNew/before12.java");
    // no explicit select() for this case
    checkResultByFile("/afterNew/after12.java");
}

public void testAfterNew11() {
    configureByFile("/afterNew/before13.java");
    // no explicit select() for this case
    checkResultByFile("/afterNew/after13.java");
}

public void testAfterThrowNew1() {
    configureByFile("/afterNew/before14.java");
    // no explicit select() for this case
    checkResultByFile("/afterNew/after14.java");
}

public void testAfterThrowNew2() {
    configureByFile("/afterNew/before15.java");
    select();
    checkResultByFile("/afterNew/after15.java");
}

public void testAfterThrowNew3() {
    configureByFile("/afterNew/before16.java");
    // no explicit select() for this case
    checkResultByFile("/afterNew/after16.java");
}
public void testCastInThrow() { doTest(); }

public void testNonExistentGenericAfterNew() { doTest('\n'); }

// Several argument positions inside nested 'new' expressions; the second data
// pair (Before2/After2) is intentionally disabled in the original test.
public void testParenAfterNewWithinInnerExpr() {
    configureByFile("/afterNew/LastArgInInnerNewBefore.java");
    checkResultByFile("/afterNew/LastArgInInnerNewAfter.java");
    // LastArgInInnerNewBefore2/After2 case disabled.
    configureByFile("/afterNew/LastArgInInnerNewBefore3.java");
    checkResultByFile("/afterNew/LastArgInInnerNewAfter3.java");
    configureByFile("/afterNew/LastArgInInnerNewBefore4.java");
    checkResultByFile("/afterNew/LastArgInInnerNewAfter4.java");
}
// Data-driven before/after cases from /return and /generics.
public void testReturn1() {
    configureByFile("/return/before1.java");
    checkResultByFile("/return/after1.java");
}

public void testReturn2() {
    configureByFile("/return/before2.java");
    checkResultByFile("/return/after2.java");
}

public void testReturn3() {
    configureByFile("/return/before3.java");
    checkResultByFile("/return/after3.java");
}

public void testGenerics1() {
    configureByFile("/generics/before1.java");
    checkResultByFile("/generics/after1.java");
}

public void testGenerics2() {
    configureByFile("/generics/before2.java");
    checkResultByFile("/generics/after2.java");
}

public void testGenerics3() {
    configureByFile("/generics/before3.java");
    checkResultByFile("/generics/after3.java");
}

public void testGenerics4() {
    configureByFile("/generics/before4.java");
    checkResultByFile("/generics/after4.java");
}

public void testGenerics5() {
    configureByFile("/generics/before5.java");
    checkResultByFile("/generics/after5.java");
}
// /afterInstanceOf, catch-related one-liners, and further /generics cases.
public void testAfterInstanceOf1() {
    configureByFile("/afterInstanceOf/before1.java");
    checkResultByFile("/afterInstanceOf/after1.java");
}

public void testAfterInstanceOf2() {
    configureByFile("/afterInstanceOf/before2.java");
    checkResultByFile("/afterInstanceOf/after2.java");
}

public void testInsideCatch() { doTest(); }

public void testInsideCatchFinal() { doTest(); }

public void testInsideCatchWithoutThrow() { doTest(); }

public void testGenerics6() {
    configureByFile("/generics/before6.java");
    checkResultByFile("/generics/after6.java");
}

public void testWildcardNew1() {
    configureByFile("/generics/before7.java");
    checkResultByFile("/generics/after7.java");
}

public void testWildcardNew2() {
    configureByFile("/generics/before8.java");
    checkResultByFile("/generics/after8.java");
}

// Explicitly picks the second lookup item before comparing.
public void testWildcardEliminated() {
    configureByFile("/generics/before9.java");
    selectItem(myItems[1]);
    checkResultByFile("/generics/after9.java");
}
// Miscellaneous regressions; doTest() runs the data-driven before/after
// comparison for the test's name.
public void testBug1() { doTest(); }
public void testQualifiedThis() { doTest(); }
public void testBug2() {
configureByFile("/Bug2.java");
}
// Completion over Silly1.java must leave the file unchanged (compared to itself).
public void testSillyAssignment1() {
configureByFile("/Silly1.java");
checkResultByFile("/Silly1.java");
}
public void testVarargs1() { doTest('\n'); }
public void testEnumConstInSwitch() { doTest(); }
public void testEnumConstInSwitchOutside() { doTest(); }
public void testIntConstInSwitch() { doTest(); }
// The lookup must contain exactly two items.
public void testDoubleEmptyArray() {
configureByTestName();
checkResultByFile("/"+getTestName(false) + ".java");
assertEquals(2, myItems.length);
}
// One-line data-driven cases: each doTest() compares <TestName>.java before/after;
// doTest(char) finishes completion with the given character.
public void testCollectionsEmptySetInMethodCall() { doTest(); }
public void testCollectionsEmptySetInTernary() { doTest(); }
public void testStringConstantInAnno() { doTest(); }
public void testCollectionsEmptySetInTernary2() { doTest(); }
public void testConstructorOnSeparateLineInMethodCall() { doTest(); }
public void testConstructorWithExistingParens() { doTest(); }
public void testMethodAnnotationNamedParameter() { doTest(); }
public void testInheritedClass() { doTest(); }
public void testClassLiteralInAnno1() { doTest(); }
public void testMeaninglessExplicitWildcardParam() { doTest(); }
public void testExplicitWildcardArrayParam() { doTest(); }
public void testCatchInAnonymous() { doTest(); }
public void testThrowRuntimeException() { doTest(); }
public void testParameterizedConstructor() { doTest(); }
public void testNewInnerClassNameShortPrefix() { doTest('\n'); }
public void testNewInnerOfParameterizedClass() { doTest(); }
public void testQualifiedThisInAnonymousConstructor() { doTest(); }
public void testExceptionTwice() { doTest(); }
public void testExceptionTwice2() { doTest(); }
public void testNewInnerRunnable() { doTest(); }
public void testArrayAccessIndex() { doTest(); }
public void testThrowExceptionConstructor() { doTest('\n'); }
public void testJavadocThrows() { doTest(); }
public void testMethodThrows() { doTest(); }
public void testDoNotExcludeAssignedVariable() { doTest(); }
public void testArrayIndexTailType() { doTest(); }
public void testPrivateOverloads() { doTest(); }
public void testInaccessibleMethodArgument() { doTest(); }
public void testPolyadicExpression() { doTest(); }
public void testCastAutoboxing() {
doItemTest();
}
public void testCastAutoboxing2() {
doItemTest();
}
public void testCastAutoboxing3() {
doItemTest();
}
public void testCastWildcards() { doTest(); }
// REPLACE_SELECT_CHAR finishes completion in "replace" mode rather than "insert".
public void testNoSecondMethodTypeArguments() { doTest(Lookup.REPLACE_SELECT_CHAR); }
public void testNoFieldsInSuperConstructorCall() { doTest(); }
public void testChainMethodsInSuperConstructorCall() { doTest(); }
// Verifies the exact set of lookup strings offered inside a constructor.
public void testNoUninitializedFieldsInConstructor() {
configureByTestName();
assertStringItems("aac", "aab", "hashCode");
}
public void testNoUninitializedSuperFieldsInConstructor() {
configureByTestName();
assertStringItems("input", "baseConstant");
}
public void testFieldsSetInAnotherConstructor() { doTest(); }
public void testFieldsSetAbove() { doTest(); }
public void testHonorSelection() {
configureByTestName();
select();
checkResultByTestName();
}
public void testTypeParametersInheritors() {
configureByTestName();
assertStringItems("Foo", "Bar", "Goo");
select();
checkResultByTestName();
}
// Checks the full initial lookup, then narrows by typing "eq" down to one item.
public void testVoidExpectedType() {
configureByTestName();
assertStringItems("notify", "notifyAll", "wait", "wait", "wait", "equals", "hashCode", "toString", "getClass");
type("eq");
assertEquals("equals", assertOneElement(getLookup().getItems()).getLookupString());
select();
checkResultByTestName();
}
public void testDoubleSemicolonPenetration() { doTest(); }
public void testTypeParametersInheritorsComma() { doTest(); }
public void testTypeParametersInheritorsInExpression() { doTest(); }
//do we need to see all Object inheritors at all?
public void _testTypeParametersObjectInheritors() { doTest(); }
// No lookup should appear at all for a double 'this' expression.
public void testDoubleThis() {
doTest();
assertNull(myItems);
}
public void testSmartFinish() { doTest(Lookup.COMPLETE_STATEMENT_SELECT_CHAR); }
public void testSillyAssignmentInTernary() { doTest(); }
public void testSameFieldInAnotherObject() { doTest(); }
public void testUnqualifiedConstantInSwitch() { doTest(); }
public void testAmbiguousConstant() { doTest(); }
public void testSameNamedFieldAndLocal() { doTest(); }
public void testNoTailWhenNoPairBracket() { doTestNoPairBracket(Lookup.NORMAL_SELECT_CHAR); }
public void testNoTailWhenNoPairBracket2() { doTestNoPairBracket(Lookup.NORMAL_SELECT_CHAR); }
public void testAnonymousNoPairBracket() { doTestNoPairBracket(Lookup.NORMAL_SELECT_CHAR); }
// Runs doTest(c) with pair-bracket auto-insertion off, restoring the global
// setting afterwards even when the test fails.
private void doTestNoPairBracket(final char c) {
boolean old = CodeInsightSettings.getInstance().AUTOINSERT_PAIR_BRACKET;
CodeInsightSettings.getInstance().AUTOINSERT_PAIR_BRACKET = false;
try {
doTest(c);
}
finally {
CodeInsightSettings.getInstance().AUTOINSERT_PAIR_BRACKET = old;
}
}
public void testNoConstructorTailWhenNoPairBracket() { doTestNoPairBracket(Lookup.NORMAL_SELECT_CHAR); }
public void testConstructorNoPairBracketSemicolon() { doTestNoPairBracket(';'); }
public void testMethodNoPairBracketComma() { doTestNoPairBracket(','); }
// Exactly one lookup element expected despite the class appearing twice.
public void testAbstractClassTwice() {
configureByTestName();
assertOneElement(myItems);
}
public void testConstantTwice() { doTest(); }
public void testConstantTwice2() {
configureByTestName();
assertEquals(2, myItems.length);
}
public void testNoKeyConstant() {
configureByTestName();
assertStringItems("A_KEY", "create");
}
public void testUserDataListAddAll() {
doTest();
}
public void testStaticSubclass() {
doTest();
}
// doTest(char) finishes completion with the given character ('\n', '!', '\t', ...).
public void testMethodCallDot() { doTest('\n'); }
public void testNegateVariable() { doTest(); }
public void testExclamationMethodFinish() { doTest('!'); }
public void testExclamationVariableFinish() { doTest('!'); }
public void testExclamationStaticFieldFinish() { doTest('!'); }
public void testExclamationFinishNonBoolean() { doTest('!'); }
public void testExcludeDeclaredConstant() { doTest(); }
public void testTabMethodInBinaryExpression() { doTest('\t'); }
public void testIfConditionBinaryExpression() { doTest(); }
public void testDelegationToParent() { doTest('\t'); }
public void testBeforeBinaryExpressionInMethodCall() { doTest(); }
public void testAssignableToAfterCast() { doTest(); }
public void testInstanceMethodParametersFromStaticContext() { doTest(); }
public void testInstanceMethodParametersFromStaticContext2() { doTest(); }
public void testBeforeCastToArray() { doTest(); }
public void testHidingFields() { doTest(); }
// doAntiTest() asserts that no (unwanted) completion happens.
public void testVoidCast() { doAntiTest(); }
public void testIntPlusLongNotDouble() { doTest(); }
public void testNestedAssignments() { doTest(); }
public void testAfterNewInTernary() { doTest(); }
public void testSuggestAnythingWhenWildcardExpected() {
configureByTestName();
assertStringItems("X", "Y", "Z");
}
// Three vararg-related variants of the same class, distinguished by tail text.
public void testNewVararg() {
configureByTestName();
assertStringItems("Foo", "Foo", "Foo");
assertEquals("{...} (default package)", LookupElementPresentation.renderElement(myItems[0]).getTailText());
assertEquals("[] (default package)", LookupElementPresentation.renderElement(myItems[1]).getTailText());
assertEquals("[]{...} (default package)", LookupElementPresentation.renderElement(myItems[2]).getTailText());
}
public void testNewVararg2() {
configureByTestName();
assertStringItems("String", "String", "String");
assertEquals(" (java.lang)", LookupElementPresentation.renderElement(myItems[0]).getTailText());
assertEquals("[] (java.lang)", LookupElementPresentation.renderElement(myItems[1]).getTailText());
assertEquals("[]{...} (java.lang)", LookupElementPresentation.renderElement(myItems[2]).getTailText());
}
public void testNewByteArray() {
configureByTestName();
assertStringItems("byte");
assertEquals("[]", LookupElementPresentation.renderElement(myItems[0]).getTailText());
}
public void testNewByteArray2() {
configureByTestName();
assertStringItems("byte", "byte");
assertEquals("[]", LookupElementPresentation.renderElement(myItems[0]).getTailText());
assertEquals("[]{...}", LookupElementPresentation.renderElement(myItems[1]).getTailText());
}
public void testInsideStringLiteral() { doAntiTest(); }
public void testDefaultAnnoParam() { doTest(); }
public void testNewWithTypeParameterErasure() { doTest(); }
public void testEverythingDoubles() {
configureByTestName();
assertStringItems("hashCode", "indexOf", "lastIndexOf", "size");
}
// Expects an empty lookup and an unchanged file.
public void testNonStaticInnerClass() {
configureByTestName();
assertEmpty(myItems);
checkResultByFile("/" + getTestName(false) + ".java");
}
//todo 2nd completion
public void _testDefaultAnnoParam2() { doTest(); }
public void testAnnotationValue() {doTest(); }
// Registers a temporary live template enabled in the smart-completion context,
// then runs the standard data-driven comparison.
public void testLiveTemplate() {
final Template template = TemplateManager.getInstance(getProject()).createTemplate("foo", "zzz");
template.addTextSegment("FooFactory.createFoo()");
final SmartCompletionContextType completionContextType =
ContainerUtil.findInstance(TemplateContextType.EP_NAME.getExtensions(), SmartCompletionContextType.class);
((TemplateImpl)template).getTemplateContext().setEnabled(completionContextType, true);
CodeInsightTestUtil.addTemplate(template, myFixture.getTestRootDisposable());
doTest();
}
public void testInThisExpression() { doTest(); }
public void testSuggestNull() { doTest(); }
public void testNoNullAfterDot() {
configureByTestName();
assertEmpty(myItems);
checkResultByFile("/" + getTestName(false) + ".java");
}
public void testDefaultAnnoMethodValue() { doTest(); }
public void testNewAnonymousFunction() { doTest(); }
// Runs with CALL_PARAMETERS_LPAREN_ON_NEXT_LINE enabled, restoring the code
// style setting afterwards even when the test fails.
public void testNewRunnableInsideMethod() {
CommonCodeStyleSettings settings = getCodeStyleSettings();
boolean lParenOnNextLine = settings.CALL_PARAMETERS_LPAREN_ON_NEXT_LINE;
try {
settings.CALL_PARAMETERS_LPAREN_ON_NEXT_LINE = true;
doTest();
} finally {
settings.CALL_PARAMETERS_LPAREN_ON_NEXT_LINE = lParenOnNextLine;
}
}
// Same as above but with both LPAREN and RPAREN forced onto new lines.
public void testNewRunnableInsideMethodMultiParams() {
CommonCodeStyleSettings settings = getCodeStyleSettings();
boolean lParenOnNextLine = settings.CALL_PARAMETERS_LPAREN_ON_NEXT_LINE;
boolean rParenOnNextLine = settings.CALL_PARAMETERS_RPAREN_ON_NEXT_LINE;
try {
settings.CALL_PARAMETERS_LPAREN_ON_NEXT_LINE = true;
settings.CALL_PARAMETERS_RPAREN_ON_NEXT_LINE = true;
doTest();
} finally {
settings.CALL_PARAMETERS_LPAREN_ON_NEXT_LINE = lParenOnNextLine;
settings.CALL_PARAMETERS_RPAREN_ON_NEXT_LINE = rParenOnNextLine;
}
}
public void testUseIntConstantsFromTargetClass() { doTest(); }
public void testUseObjectConstantsFromTargetClass() { doTest(); }
public void testUseIntConstantsFromTargetClassReturnValue() { doTest(); }
public void testUseIntConstantsFromConstructedClass() { doTest(); }
public void testUseIntConstantsInPlus() { doTest(); }
public void testUseIntConstantsInOr() { doTest(); }
// NOTE(review): unlike the NewRunnable tests above, this one does not restore
// SPACE_WITHIN_METHOD_CALL_PARENTHESES -- presumably the fixture resets settings
// between tests; confirm.
public void testExtraSemicolonAfterMethodParam() {
getCodeStyleSettings().SPACE_WITHIN_METHOD_CALL_PARENTHESES = true;
doTest();
}
public void testNoSemicolonInsideParentheses() { doTest(); }
public void testAssignFromTheSameFieldOfAnotherObject() {
doTest();
}
public void testTailAfterInstanceOf() {
doTest();
}
public void testReplaceWholeReferenceChain() { doTest(Lookup.REPLACE_SELECT_CHAR); }
public void testDoubleTrueInOverloadedMethodCall() { doTest(Lookup.REPLACE_SELECT_CHAR); }
// The next three finish completion of the first item with ':'.
public void testMethodColon() { doFirstItemTest(':'); }
public void testVariableColon() { doFirstItemTest(':'); }
public void testConditionalColonOnNextLine() { doFirstItemTest(':'); }
// Completes at the caret, finishes with character c, and compares the result.
private void doFirstItemTest(char c) {
configureByTestName();
select(c);
checkResultByTestName();
}
public void testOneElementArray() { doTest(); }
public void testCastToArray() { doTest(); }
public void testCommaDoublePenetration() {
doFirstItemTest(',');
}
/**
 * Completes at the caret, picks the lookup item at {@code index}, finishes the
 * completion, and compares the file with the expected result for the current
 * test name. Extracted because six tests below duplicated this exact sequence.
 */
private void selectItemAtIndexAndCheck(int index) {
    configureByTestName();
    getLookup().setCurrentItem(getLookup().getItems().get(index));
    select();
    checkResultByTestName();
}

public void testSuperMethodArguments() {
    selectItemAtIndexAndCheck(1);
}

public void testDelegateMethodArguments() {
    selectItemAtIndexAndCheck(1);
}

public void testSameMethodArgumentsInIf() {
    selectItemAtIndexAndCheck(1);
}

public void testSuperConstructorArguments() {
    selectItemAtIndexAndCheck(2);
}

public void testSameNamedArguments() {
    selectItemAtIndexAndCheck(4);
}

public void testSameNamedArgumentsDelegation() {
    selectItemAtIndexAndCheck(1);
}
public void testSameSignatureWithGenerics() {
configureByTestName();
myFixture.assertPreferredCompletionItems(0, "i", "z", "zz", "i, z, zz");
}
public void testSameSignatureWithoutClosingParen() {
configureByTestName();
myFixture.assertPreferredCompletionItems(0, "someString", "someString, number");
getLookup().setCurrentItem(getLookup().getItems().get(1));
select();
checkResultByTestName();
}
public void testSuggestTypeParametersInTypeArgumentList() {
configureByTestName();
myFixture.assertPreferredCompletionItems(0, "T", "String");
}
public void testWrongAnonymous() {
configureByTestName();
select();
checkResultByTestName();
}
// doActionTest() drives completion via the editor action rather than complete().
public void testAfterNewWithGenerics() {
doActionTest();
}
// Verifies the full presentation of the class-literal lookup element: item
// text, tail text, absent type text, and the backing PSI element.
public void testClassLiteral() {
doActionTest();
assertStringItems("String.class");
LookupElement item = myFixture.getLookupElements()[0];
LookupElementPresentation p = LookupElementPresentation.renderElement(item);
assertEquals("String.class", p.getItemText());
assertEquals(" (java.lang)", p.getTailText());
assertNull(p.getTypeText());
assertInstanceOf(item.getPsiElement(), PsiClass.class);
}
public void testNoClassLiteral() {
doActionTest();
assertStringItems("Object.class", "getClass", "forName", "forName");
}
public void testClassLiteralInAnno2() {
doItemTest();
}
public void testClassLiteralInheritors() {
doItemTest();
}
public void testInsertOverride() {
JavaCodeStyleSettings styleSettings = JavaCodeStyleSettings.getInstance(getProject());
styleSettings.INSERT_OVERRIDE_ANNOTATION = true;
doItemTest();
}
public void testForeach() {
doActionTest();
}
public void testIDEADEV2626() {
doItemTest();
}
public void testDontSuggestWildcardGenerics() { doItemTest(); }
public void testCastWith2TypeParameters() { doTest(); }
public void testClassLiteralInArrayAnnoInitializer() { doTest(); }
public void testClassLiteralInArrayAnnoInitializer2() { doTest(); }
public void testAnnotation() {
configureByTestName();
assertStringItems("ElementType.ANNOTATION_TYPE", "ElementType.CONSTRUCTOR",
"ElementType.FIELD", "ElementType.LOCAL_VARIABLE",
"ElementType.METHOD", "ElementType.PACKAGE", "ElementType.PARAMETER",
"ElementType.TYPE" /*, "ElementType.TYPE_PARAMETER", "ElementType.TYPE_USE"*/);
}
public void testAnnotation2() {
configureByTestName();
assertStringItems("RetentionPolicy.CLASS", "RetentionPolicy.RUNTIME", "RetentionPolicy.SOURCE");
}
public void testAnnotation2_2() {
configureByTestName();
assertSameElements(myFixture.getLookupElementStrings(), "RetentionPolicy.CLASS", "RetentionPolicy.SOURCE", "RetentionPolicy.RUNTIME");
}
public void testAnnotation3() {
doTest();
}
public void testAnnotation3_2() {
doTest();
}
public void testAnnotation4() {
configureByTestName();
checkResultByTestName();
assertStringItems("false", "true");
}
public void testAnnotation5() {
configureByTestName();
checkResultByTestName();
assertStringItems("CONNECTION", "NO_CONNECTION");
}
public void testAnnotation6() {
configureByTestName();
assertStringItems("ElementType.ANNOTATION_TYPE", "ElementType.CONSTRUCTOR",
"ElementType.FIELD", "ElementType.LOCAL_VARIABLE",
"ElementType.METHOD", "ElementType.PACKAGE", "ElementType.PARAMETER",
"ElementType.TYPE"/*, "ElementType.TYPE_PARAMETER", "ElementType.TYPE_USE"*/);
}
public void testArrayClone() {
doTest();
}
public void testIDEADEV5150() {
doTest('\n');
}
public void testIDEADEV7835() {
doTest();
}
public void testTypeArgs1() {
doTest();
}
public void testTypeArgs2() {
doTest();
}
public void testTypeArgsOverwrite() { doTest(); }
public void testIfConditionExpectedType() { doTest(); }
public void testUnboundTypeArgs() { doTest(); }
public void testUnboundTypeArgs2() { doTest(); }
public void testSameTypeArg() { doTest(); }
public void testIDEADEV2668() {
doTest();
}
public void testExcessiveTail() { doTest(); }
public void testSeveralTypeArguments() { doTest(); }
public void testSeveralTypeArgumentsSomeUnknown() { doTest(); }
public void testExtendsInTypeCast() {
doTest();
}
public void testTabMethodCall() {
doFirstItemTest(Lookup.REPLACE_SELECT_CHAR);
}
public void testConstructorArgsSmartEnter() { doTest(Lookup.COMPLETE_STATEMENT_SELECT_CHAR); }
public void testIDEADEV13148() {
configureByFile("/IDEADEV13148.java");
assertStringItems("false", "true"); //todo don't suggest boolean literals in synchronized
}
public void testSuggestNames() {
configureByTestName();
assertStringItems("arrayList", "list");
}
public void testOverloadedMethods() {
doTest();
}
public void testNoCommaBeforeVarargs() { doTest(); }
public void testEnumField() {
doItemTest();
}
public void testEnumField1() {
configureByTestName();
checkResultByTestName();
assertEquals(4, myItems.length);
}
public void testInsertTypeParametersOnImporting() { doTest('\n'); }
public void testEmptyListInReturn() { doItemTest(); }
public void testEmptyListInReturn2() { doTest(); }
public void testEmptyListInReturnTernary() { doItemTest(); }
public void testEmptyListBeforeSemicolon() { doItemTest(); }
public void testEmptyListWithCollectionsPrefix() { doItemTest(); }
public void testForeachLoopVariableInIterableExpression() { doAntiTest(); }
public void testStaticallyImportedMagicMethod() {
configureByTestName();
assertStringItems("foo");
selectItem(myItems[0], '\t');
checkResultByTestName();
}
public void _testCallVarargArgument() { doTest(); }
public void testTabToReplaceClassKeyword() {
configureByTestName();
selectItem(myItems[0], Lookup.REPLACE_SELECT_CHAR);
checkResultByTestName();
}
public void testNoTypeParametersForToArray() {
doTest();
}
public void testStaticallyImportedField() { doTest('\n'); }
public void testSiblingOfAStaticallyImportedField() { doTest(); }
public void testPrimitiveArrayClassInMethod() { doTest(); }
public void testPrimitiveClassInAnno() { doTest(); }
public void testNewInnerClassOfSuper() { doTest(); }
public void testAssertThatMatcher() { doTest(); }
public void testInferFromCall() {
doTest();
}
public void testInferFromCall1() {
doTest();
}
public void testCastToParameterizedType() { doActionTest(); }
public void testInnerEnumInMethod() {
doItemTest();
}
public void testEnumAsDefaultAnnotationParam() { doTest(); }
/** After an unlabeled {@code break}, completion suggests and inserts the enclosing loop label. */
public void testBreakLabel() {
  String before = "class a{{\n" +
                  " foo: while (true) break <caret>\n" +
                  "}}";
  String after = "class a{{\n" +
                 " foo: while (true) break foo;<caret>\n" +
                 "}}";
  myFixture.configureByText("a.java", before);
  complete();
  myFixture.checkResult(after);
}
/** After an unlabeled {@code continue}, completion suggests and inserts the enclosing loop label. */
public void testContinueLabel() {
  String before = "class a{{\n" +
                  " foo: while (true) continue <caret>\n" +
                  "}}";
  String after = "class a{{\n" +
                 " foo: while (true) continue foo;<caret>\n" +
                 "}}";
  myFixture.configureByText("a.java", before);
  complete();
  myFixture.checkResult(after);
}
public void testNewAbstractInsideAnonymous() { doTest(); }
public void testFilterPrivateConstructors() { doAntiTest(); }
public void testExplicitMethodTypeParametersQualify() { doTest(); }
public void testExplicitMethodTypeParametersOverZealous() { doTest(); }
public void testExplicitMethodTypeParametersFromSuperClass() { doTest(); }
public void testWildcardedInstanceof() { doTest(); }
public void testWildcardedInstanceof2() { doTest(); }
public void testWildcardedInstanceof3() { doTest(); }
public void testCheckStaticImportsType() { doAntiTest(); }
public void testThisFieldAssignedToItself() { doAntiTest(); }
public void testCaseMissingEnumValue() { doTest(); }
public void testCaseMissingEnumValue2() { doTest(); }
public void testNoHiddenParameter() { doTest(); }
public void testTypeVariableInstanceOf() {
configureByTestName();
performAction();
assertStringItems("Bar", "Goo");
}
// Flips the import-on-the-fly setting for the duration of the test and restores it in finally.
// (sic: "UNAMBIGIOUS" is the platform's own constant name.)
public void testAutoImportExpectedType() {
boolean old = CodeInsightSettings.getInstance().ADD_UNAMBIGIOUS_IMPORTS_ON_THE_FLY;
CodeInsightSettings.getInstance().ADD_UNAMBIGIOUS_IMPORTS_ON_THE_FLY = true;
try {
configureByTestName();
performAction();
myFixture.assertPreferredCompletionItems(1, "List", "ArrayList", "AbstractList");
}
finally {
CodeInsightSettings.getInstance().ADD_UNAMBIGIOUS_IMPORTS_ON_THE_FLY = old;
}
}
// Runs completion twice; saving the document in between updates completion statistics.
public void testNoWrongSubstitutorFromStats() {
doTest();
FileDocumentManager.getInstance().saveDocument(myFixture.getEditor().getDocument());
doTest(); // stats are changed now
}
public void testCommonPrefixWithSelection() {
doItemTest();
}
public void testNewAbstractClassWithConstructorArgs() {
doItemTest();
}
public void testArrayInitializerBeforeVarargs() { doTest(); }
public void testDuplicateMembersFromSuperClass() { doTest(); }
public void testInnerAfterNew() { doTest(); }
public void testOuterAfterNew() { doTest(); }
public void testEverythingInStringConcatenation() { doTest(); }
public void testGetClassWhenClassExpected() { doTest(); }
/**
 * Both suggested members must be statically importable; selecting the first one
 * with "should be imported" set produces the expected statically-imported call.
 */
public void testMemberImportStatically() {
  configureByTestName();
  StaticallyImportable item = myItems[0].as(StaticallyImportable.CLASS_CONDITION_KEY);
  assertNotNull(item);
  assertTrue(item.canBeImported());
  // Guard the second item like the first: a null here previously surfaced as an
  // opaque NPE instead of an assertion failure.
  StaticallyImportable second = myItems[1].as(StaticallyImportable.CLASS_CONDITION_KEY);
  assertNotNull(second);
  assertTrue(second.canBeImported());
  item.setShouldBeImported(true);
  type('\n');
  checkResultByTestName();
}
// "new" before an enum type must not suggest instantiating the enum itself.
public void testNoNewEnum() {
configureByTestName();
assertStringItems("Foo");
}
public void testDuplicateMembersFromSuperClassInAnotherFile() {
myFixture.addClass("class Super { public static final Super FOO = null; }");
doTest();
}
public void testInsideGenericClassLiteral() {
configureByTestName();
assertStringItems("String.class", "StringBuffer.class", "StringBuilder.class");
}
public void testArrayAnnoParameter() {
doActionTest();
}
public void testInnerClassImports() {
JavaCodeStyleSettings settings = JavaCodeStyleSettings.getInstance(getProject());
settings.INSERT_INNER_CLASS_IMPORTS = true;
myFixture.addClass("package java.awt.geom; public class Point2D { public static class Double {} }");
doActionTest();
}
public void testCastWithGenerics() {
doActionTest();
}
// Picks the deeply-nested enum constant by its qualified lookup string.
public void testInnerEnum() {
configureByTestName();
getLookup().setCurrentItem(ContainerUtil.find(myItems, lookupItem -> "Bar.Fubar.Bar".equals(lookupItem.getLookupString())));
select('\n');
checkResultByTestName();
}
public void testQualifiedAfterNew() {
myFixture.addClass("package foo; public interface Foo<T> {}");
myFixture.addClass("package bar; public class Bar implements foo.Foo {}");
doTest();
}
public void testAfterQualifiedNew() {
myFixture.addClass("class Aa { public class B { } }");
doTest();
}
public void testTabAfterNew() {
doFirstItemTest('\t');
}
public void testSuggestMethodReturnType() {
configureByTestName();
myFixture.assertPreferredCompletionItems(0, "Serializable", "CharSequence", "Object");
}
public void testSuggestMethodReturnTypeAnonymous() {
configureByTestName();
assertOrderedEquals(myFixture.getLookupElementStrings(), "Object");
}
public void testSuggestCastReturnTypeByCalledMethod() { doTest(); }
public void testOnlyInterfacesInImplements() { doTest(); }
public void testNonStaticField() { doAntiTest(); }
public void testLocalClassInExpectedTypeArguments() { doTest(); }
// Completes on the test-name file and diffs the result, with no item selection step.
private void doActionTest() {
configureByTestName();
checkResultByTestName();
}
// Commits the first lookup item with Enter.
private void doItemTest() {
doFirstItemTest('\n');
}
// Thin alias for complete(); kept for readability at call sites.
private void performAction() {
complete();
}
// Default variant: normal selection character.
private void doTest() {
doTest(Lookup.NORMAL_SELECT_CHAR);
}
/**
 * Invokes smart completion for the current test name and, if a lookup appeared,
 * commits the current item with {@code selectChar}; the editor contents are then
 * diffed against the "-out" file. For any character other than the normal
 * selection char, auto-insertion of a single matching item is temporarily
 * disabled and restored afterwards so the selection character is actually typed.
 */
private void doTest(final char selectChar) {
  final CodeInsightSettings settings = CodeInsightSettings.getInstance();
  final boolean previous = settings.AUTOCOMPLETE_ON_SMART_TYPE_COMPLETION;
  final boolean toggled = selectChar != Lookup.NORMAL_SELECT_CHAR;
  if (toggled) {
    settings.AUTOCOMPLETE_ON_SMART_TYPE_COMPLETION = false;
  }
  try {
    configureByTestName();
    if (myItems != null) {
      select(selectChar);
    }
    checkResultByTestName();
  }
  finally {
    if (toggled) {
      settings.AUTOCOMPLETE_ON_SMART_TYPE_COMPLETION = previous;
    }
  }
}
// Expected files live next to the test data as "<TestName>-out.java".
private void checkResultByTestName() {
checkResultByFile("/" + getTestName(false) + "-out.java");
}
// All completion in this class is CompletionType.SMART.
@Override
protected void complete() {
myItems = myFixture.complete(CompletionType.SMART);
}
private void select() {
select(Lookup.NORMAL_SELECT_CHAR);
}
// Commits the lookup's current item with the given character; no-op when no lookup is shown.
private void select(final char c) {
final Lookup lookup = getLookup();
if (lookup != null) {
selectItem(lookup.getCurrentItem(), c);
}
}
public void testSpaceAfterCommaInMethodCall() {
getCodeStyleSettings().SPACE_AFTER_COMMA = false;
doTest(',');
}
private CommonCodeStyleSettings getCodeStyleSettings() {
return CodeStyleSettingsManager.getSettings(getProject()).getCommonSettings(JavaLanguage.INSTANCE);
}
public void testOnlyCompatibleTypes() {
configureByTestName();
assertOrderedEquals(myFixture.getLookupElementStrings(), "get2");
}
public void testQualifyOuterClassCall() { doActionTest(); }
public void testExpressionSubtypesInCast() {
configureByTestName();
myFixture.assertPreferredCompletionItems(0, "String", "StringBuffer", "StringBuilder");
}
public void testStaticBuilder() { doTest(); }
public void testStaticBuilderWithArguments() { doTest(); }
public void testStaticBuilderWithInterfaceAndGenerics() { doTest(); }
// Verifies the rendered presentation of a chained-builder item before committing it with Tab.
public void testStaticBuilderWithGenerics() {
configureByTestName();
assertEquals("Map.builder().get(...)", LookupElementPresentation.renderElement(myItems[0]).getItemText());
myFixture.type('\t');
checkResultByTestName();
}
public void testFreeGenericsAfterClassLiteral() {
configureByTestName();
myFixture.assertPreferredCompletionItems(0, "String.class", "tryCast");
}
public void testNewHashMapTypeArguments() { doTest(); }
public void testNewMapTypeArguments() { doTest(); }
public void testNewMapObjectTypeArguments() { doTest(); }
public void testNoUnrelatedMethodSuggestion() {
configureByTestName();
assertOrderedEquals(myFixture.getLookupElementStrings(), "this");
}
// Log4j stub: Priority.FATAL is the deprecated constant, Level.FATAL the preferred one;
// the test presumably checks that the non-deprecated Level.FATAL wins — verify against test data.
public void testLog4jLevel() {
myFixture.addClass("package org.apache.log4j; " +
"public class Category { " +
" public void log(Priority priority, Object message); " +
"}" +
"public class Priority { " +
" final static public Priority FATAL;" + //deprecated
"}" +
"public class Level extends Priority { " +
" final static public Level FATAL;" +
"}");
doTest('\n');
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/dataset_service.proto
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Runtime operation information for [DatasetService.ImportData][google.cloud.aiplatform.v1.DatasetService.ImportData].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ImportDataOperationMetadata}
*/
public final class ImportDataOperationMetadata extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ImportDataOperationMetadata)
ImportDataOperationMetadataOrBuilder {
// NOTE(review): this class is protoc-generated ("DO NOT EDIT") — prefer regenerating
// from google/cloud/aiplatform/v1/dataset_service.proto over hand edits.
private static final long serialVersionUID = 0L;
// Use ImportDataOperationMetadata.newBuilder() to construct.
private ImportDataOperationMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ImportDataOperationMetadata() {}
// Invoked reflectively by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ImportDataOperationMetadata();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
// Wire-parsing constructor used by the generated PARSER: reads tag/value pairs
// until end of input, accumulating unrecognized fields into unknownFields.
// Generated code — statement order here is significant; do not hand-edit.
private ImportDataOperationMetadata(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 signals end of the stream.
done = true;
break;
case 10:
// Field 1 (generic_metadata), length-delimited: merge with any value read earlier.
{
com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder subBuilder = null;
if (genericMetadata_ != null) {
subBuilder = genericMetadata_.toBuilder();
}
genericMetadata_ =
input.readMessage(
com.google.cloud.aiplatform.v1.GenericOperationMetadata.parser(),
extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(genericMetadata_);
genericMetadata_ = subBuilder.buildPartial();
}
break;
}
default:
// Unknown field number: preserve it round-trip via unknownFields.
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
// Whatever was parsed so far is attached even when parsing failed mid-stream.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ImportDataOperationMetadata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ImportDataOperationMetadata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ImportDataOperationMetadata.class,
com.google.cloud.aiplatform.v1.ImportDataOperationMetadata.Builder.class);
}
public static final int GENERIC_METADATA_FIELD_NUMBER = 1;
private com.google.cloud.aiplatform.v1.GenericOperationMetadata genericMetadata_;
/**
*
*
* <pre>
* The common part of the operation metadata.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
*
* @return Whether the genericMetadata field is set.
*/
@java.lang.Override
public boolean hasGenericMetadata() {
return genericMetadata_ != null;
}
/**
*
*
* <pre>
* The common part of the operation metadata.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
*
* @return The genericMetadata.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.GenericOperationMetadata getGenericMetadata() {
return genericMetadata_ == null
? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
: genericMetadata_;
}
/**
*
*
* <pre>
* The common part of the operation metadata.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder
getGenericMetadataOrBuilder() {
return getGenericMetadata();
}
// Tri-state cache: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (genericMetadata_ != null) {
output.writeMessage(1, getGenericMetadata());
}
unknownFields.writeTo(output);
}
// Serialized size is memoized in memoizedSize (-1 means not yet computed).
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (genericMetadata_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getGenericMetadata());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Structural equality: field presence, field value, and unknown fields must all match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.ImportDataOperationMetadata)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.ImportDataOperationMetadata other =
(com.google.cloud.aiplatform.v1.ImportDataOperationMetadata) obj;
if (hasGenericMetadata() != other.hasGenericMetadata()) return false;
if (hasGenericMetadata()) {
if (!getGenericMetadata().equals(other.getGenericMetadata())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Hash is memoized in memoizedHashCode (0 means not yet computed) and is
// consistent with equals: descriptor, set fields and unknown fields contribute.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasGenericMetadata()) {
hash = (37 * hash) + GENERIC_METADATA_FIELD_NUMBER;
hash = (53 * hash) + getGenericMetadata().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1.ImportDataOperationMetadata parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ImportDataOperationMetadata parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ImportDataOperationMetadata parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ImportDataOperationMetadata parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ImportDataOperationMetadata parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ImportDataOperationMetadata parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ImportDataOperationMetadata parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ImportDataOperationMetadata parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ImportDataOperationMetadata parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ImportDataOperationMetadata parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ImportDataOperationMetadata parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ImportDataOperationMetadata parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1.ImportDataOperationMetadata prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Runtime operation information for [DatasetService.ImportData][google.cloud.aiplatform.v1.DatasetService.ImportData].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ImportDataOperationMetadata}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ImportDataOperationMetadata)
com.google.cloud.aiplatform.v1.ImportDataOperationMetadataOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ImportDataOperationMetadata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ImportDataOperationMetadata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ImportDataOperationMetadata.class,
com.google.cloud.aiplatform.v1.ImportDataOperationMetadata.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.ImportDataOperationMetadata.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
if (genericMetadataBuilder_ == null) {
genericMetadata_ = null;
} else {
genericMetadata_ = null;
genericMetadataBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ImportDataOperationMetadata_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ImportDataOperationMetadata getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.ImportDataOperationMetadata.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ImportDataOperationMetadata build() {
com.google.cloud.aiplatform.v1.ImportDataOperationMetadata result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ImportDataOperationMetadata buildPartial() {
com.google.cloud.aiplatform.v1.ImportDataOperationMetadata result =
new com.google.cloud.aiplatform.v1.ImportDataOperationMetadata(this);
if (genericMetadataBuilder_ == null) {
result.genericMetadata_ = genericMetadata_;
} else {
result.genericMetadata_ = genericMetadataBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.ImportDataOperationMetadata) {
return mergeFrom((com.google.cloud.aiplatform.v1.ImportDataOperationMetadata) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge from another message of the same type; default instance is a no-op.
public Builder mergeFrom(com.google.cloud.aiplatform.v1.ImportDataOperationMetadata other) {
if (other == com.google.cloud.aiplatform.v1.ImportDataOperationMetadata.getDefaultInstance())
return this;
if (other.hasGenericMetadata()) {
mergeGenericMetadata(other.getGenericMetadata());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// This message has no required fields, so a builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.aiplatform.v1.ImportDataOperationMetadata parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.aiplatform.v1.ImportDataOperationMetadata) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private com.google.cloud.aiplatform.v1.GenericOperationMetadata genericMetadata_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.GenericOperationMetadata,
com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>
genericMetadataBuilder_;
/**
*
*
* <pre>
* The common part of the operation metadata.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
*
* @return Whether the genericMetadata field is set.
*/
public boolean hasGenericMetadata() {
return genericMetadataBuilder_ != null || genericMetadata_ != null;
}
/**
*
*
* <pre>
* The common part of the operation metadata.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
*
* @return The genericMetadata.
*/
public com.google.cloud.aiplatform.v1.GenericOperationMetadata getGenericMetadata() {
if (genericMetadataBuilder_ == null) {
return genericMetadata_ == null
? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
: genericMetadata_;
} else {
return genericMetadataBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The common part of the operation metadata.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
*/
public Builder setGenericMetadata(
com.google.cloud.aiplatform.v1.GenericOperationMetadata value) {
if (genericMetadataBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
genericMetadata_ = value;
onChanged();
} else {
genericMetadataBuilder_.setMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The common part of the operation metadata.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
*/
public Builder setGenericMetadata(
com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder builderForValue) {
if (genericMetadataBuilder_ == null) {
genericMetadata_ = builderForValue.build();
onChanged();
} else {
genericMetadataBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The common part of the operation metadata.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
*/
public Builder mergeGenericMetadata(
com.google.cloud.aiplatform.v1.GenericOperationMetadata value) {
if (genericMetadataBuilder_ == null) {
if (genericMetadata_ != null) {
genericMetadata_ =
com.google.cloud.aiplatform.v1.GenericOperationMetadata.newBuilder(genericMetadata_)
.mergeFrom(value)
.buildPartial();
} else {
genericMetadata_ = value;
}
onChanged();
} else {
genericMetadataBuilder_.mergeFrom(value);
}
return this;
}
/**
 *
 *
 * <pre>
 * The common part of the operation metadata.
 * </pre>
 *
 * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
 */
public Builder clearGenericMetadata() {
  if (genericMetadataBuilder_ == null) {
    genericMetadata_ = null;
    onChanged();
  } else {
    // Dropping the builder as well resets the field to its unset state.
    genericMetadata_ = null;
    genericMetadataBuilder_ = null;
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The common part of the operation metadata.
 * </pre>
 *
 * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
 */
public com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder
    getGenericMetadataBuilder() {
  // Mark as modified up-front: the caller may mutate via the returned builder.
  onChanged();
  return getGenericMetadataFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * The common part of the operation metadata.
 * </pre>
 *
 * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
 */
public com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder
    getGenericMetadataOrBuilder() {
  if (genericMetadataBuilder_ != null) {
    return genericMetadataBuilder_.getMessageOrBuilder();
  } else {
    // Read-only accessor: never forces creation of the field builder.
    return genericMetadata_ == null
        ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
        : genericMetadata_;
  }
}
/**
 *
 *
 * <pre>
 * The common part of the operation metadata.
 * </pre>
 *
 * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.aiplatform.v1.GenericOperationMetadata,
        com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
        com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>
    getGenericMetadataFieldBuilder() {
  if (genericMetadataBuilder_ == null) {
    // Lazily create the builder, seeded with the current message. From this
    // point on the builder owns the field state, so the plain field is cleared.
    genericMetadataBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.GenericOperationMetadata,
            com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
            com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>(
            getGenericMetadata(), getParentForChildren(), isClean());
    genericMetadata_ = null;
  }
  return genericMetadataBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Generated pass-through; unknown-field handling lives in the base builder.
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Generated pass-through; unknown-field handling lives in the base builder.
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ImportDataOperationMetadata)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ImportDataOperationMetadata)
// Singleton default instance, created eagerly at class-load time.
private static final com.google.cloud.aiplatform.v1.ImportDataOperationMetadata DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ImportDataOperationMetadata();
}
// Static accessor for the shared immutable default instance.
public static com.google.cloud.aiplatform.v1.ImportDataOperationMetadata getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser shared by all callers; delegates to the parsing constructor.
private static final com.google.protobuf.Parser<ImportDataOperationMetadata> PARSER =
    new com.google.protobuf.AbstractParser<ImportDataOperationMetadata>() {
      @java.lang.Override
      public ImportDataOperationMetadata parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ImportDataOperationMetadata(input, extensionRegistry);
      }
    };
// Static parser accessor used by the generated runtime.
public static com.google.protobuf.Parser<ImportDataOperationMetadata> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ImportDataOperationMetadata> getParserForType() {
  // Instance-level hook required by the Message interface; same shared parser.
  return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ImportDataOperationMetadata getDefaultInstanceForType() {
  // Instance-level hook required by the Message interface; same shared default.
  return DEFAULT_INSTANCE;
}
}
| |
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.docker;
import java.io.File;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Represents a {@code docker run} invocation for the container runtime: the
 * constructor records the mandatory name/user/image arguments and each fluent
 * setter records one more option. Rendering the recorded options into an
 * actual command line is inherited from {@link DockerCommand}.
 */
public class DockerRunCommand extends DockerCommand {
  private static final String RUN_COMMAND = "run";

  /** User-defined environment variables, preserved in insertion order. */
  private final Map<String, String> userEnv;

  /** The following are mandatory: */
  public DockerRunCommand(String containerId, String user, String image) {
    super(RUN_COMMAND);
    super.addCommandArguments("name", containerId);
    super.addCommandArguments("user", user);
    super.addCommandArguments("image", image);
    this.userEnv = new LinkedHashMap<>();
  }

  /** Remove the container automatically when it exits ({@code --rm}). */
  public DockerRunCommand removeContainerOnExit() {
    super.addCommandArguments("rm", "true");
    return this;
  }

  /** Run the container in the background ({@code --detach}). */
  public DockerRunCommand detachOnRun() {
    super.addCommandArguments("detach", "true");
    return this;
  }

  public DockerRunCommand setContainerWorkDir(String workdir) {
    super.addCommandArguments("workdir", workdir);
    return this;
  }

  public DockerRunCommand setNetworkType(String type) {
    super.addCommandArguments("net", type);
    return this;
  }

  public DockerRunCommand setPidNamespace(String type) {
    super.addCommandArguments("pid", type);
    return this;
  }

  /**
   * Bind-mount {@code sourcePath} at {@code destinationPath} with the given
   * mode (e.g. "rw" or "ro").
   */
  public DockerRunCommand addMountLocation(String sourcePath,
      String destinationPath, String mode) {
    super.addCommandArguments("mounts",
        sourcePath + ":" + destinationPath + ":" + mode);
    return this;
  }

  public DockerRunCommand addReadWriteMountLocation(String sourcePath,
      String destinationPath) {
    return addMountLocation(sourcePath, destinationPath, "rw");
  }

  /** Mount every path in {@code paths} read-write at the same location. */
  public DockerRunCommand addAllReadWriteMountLocations(List<String> paths) {
    if (paths == null) {
      // Nothing to mount; tolerate a null list instead of throwing NPE.
      return this;
    }
    for (String dir : paths) {
      this.addReadWriteMountLocation(dir, dir);
    }
    return this;
  }

  /**
   * Mount {@code sourcePath} read-only. When the source does not exist and
   * {@code createSource} is false, the mount is silently skipped.
   */
  public DockerRunCommand addReadOnlyMountLocation(String sourcePath,
      String destinationPath, boolean createSource) {
    boolean sourceExists = new File(sourcePath).exists();
    if (!sourceExists && !createSource) {
      return this;
    }
    return addReadOnlyMountLocation(sourcePath, destinationPath);
  }

  public DockerRunCommand addReadOnlyMountLocation(String sourcePath,
      String destinationPath) {
    return addMountLocation(sourcePath, destinationPath, "ro");
  }

  /** Mount every path in {@code paths} read-only at the same location. */
  public DockerRunCommand addAllReadOnlyMountLocations(List<String> paths) {
    if (paths == null) {
      // Nothing to mount; tolerate a null list instead of throwing NPE.
      return this;
    }
    for (String dir : paths) {
      this.addReadOnlyMountLocation(dir, dir);
    }
    return this;
  }

  public DockerRunCommand addTmpfsMount(String mount) {
    super.addCommandArguments("tmpfs", mount);
    return this;
  }

  public DockerRunCommand setVolumeDriver(String volumeDriver) {
    super.addCommandArguments("volume-driver", volumeDriver);
    return this;
  }

  public DockerRunCommand setCGroupParent(String parentPath) {
    super.addCommandArguments("cgroup-parent", parentPath);
    return this;
  }

  /* Run a privileged container. Use with extreme care */
  public DockerRunCommand setPrivileged() {
    super.addCommandArguments("privileged", "true");
    return this;
  }

  /**
   * Restrict the container to exactly the supplied Linux capabilities:
   * everything is dropped first, then each requested capability is re-added.
   */
  public DockerRunCommand setCapabilities(Set<String> capabilties) {
    //first, drop all capabilities
    super.addCommandArguments("cap-drop", "ALL");

    //now, add the capabilities supplied
    for (String capability : capabilties) {
      super.addCommandArguments("cap-add", capability);
    }

    return this;
  }

  public DockerRunCommand setHostname(String hostname) {
    super.addCommandArguments("hostname", hostname);
    return this;
  }

  /** Expose a host device inside the container ({@code --device src:dst}). */
  public DockerRunCommand addDevice(String sourceDevice,
      String destinationDevice) {
    super.addCommandArguments("devices",
        sourceDevice + ":" + destinationDevice);
    return this;
  }

  // NOTE(review): enableDetach()/disableDetach() overlap with detachOnRun();
  // all are kept because each is part of the public API.
  public DockerRunCommand enableDetach() {
    super.addCommandArguments("detach", "true");
    return this;
  }

  public DockerRunCommand disableDetach() {
    super.addCommandArguments("detach", "false");
    return this;
  }

  public DockerRunCommand groupAdd(String[] groups) {
    super.addCommandArguments("group-add", String.join(",", groups));
    return this;
  }

  /** Override the image entry point with an explicit launch command. */
  public DockerRunCommand setOverrideCommandWithArgs(
      List<String> overrideCommandWithArgs) {
    for (String override : overrideCommandWithArgs) {
      super.addCommandArguments("launch-command", override);
    }
    return this;
  }

  // Pure pass-through; retained for binary/source compatibility.
  @Override
  public Map<String, List<String>> getDockerCommandWithArguments() {
    return super.getDockerCommandWithArguments();
  }

  public DockerRunCommand setOverrideDisabled(boolean toggle) {
    String value = Boolean.toString(toggle);
    super.addCommandArguments("use-entry-point", value);
    return this;
  }

  public DockerRunCommand setLogDir(String logDir) {
    super.addCommandArguments("log-dir", logDir);
    return this;
  }

  /**
   * Check if user defined environment variables are empty.
   *
   * @return true if user defined environment variables are not empty.
   */
  public boolean containsEnv() {
    return !userEnv.isEmpty();
  }

  /**
   * Get user defined environment variables.
   *
   * @return a map of user defined environment variables
   */
  public Map<String, String> getEnv() {
    return userEnv;
  }

  /**
   * Add user defined environment variables.
   *
   * @param environment A map of user defined environment variables
   */
  public final void addEnv(Map<String, String> environment) {
    userEnv.putAll(environment);
  }

  public DockerRunCommand setYarnSysFS(boolean toggle) {
    String value = Boolean.toString(toggle);
    super.addCommandArguments("use-yarn-sysfs", value);
    return this;
  }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.griffon;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.configurations.JavaParameters;
import com.intellij.execution.configurations.ParametersList;
import com.intellij.lang.properties.IProperty;
import com.intellij.lang.properties.psi.PropertiesFile;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.JavaSdkType;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryKind;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.changes.ChangeListManager;
import com.intellij.openapi.vcs.changes.IgnoredBeanFactory;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.util.containers.MultiMap;
import gnu.trove.TIntArrayList;
import icons.JetgroovyIcons;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.model.java.JavaSourceRootType;
import org.jetbrains.jps.model.module.JpsModuleSourceRootType;
import org.jetbrains.plugins.groovy.GroovyFileType;
import org.jetbrains.plugins.groovy.mvc.*;
import javax.swing.*;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * {@link MvcFramework} implementation for the Griffon desktop framework:
 * detects Griffon application modules, keeps their project structure in sync,
 * and builds the command line used to run Griffon CLI scripts.
 *
 * @author peter
 */
public class GriffonFramework extends MvcFramework {
  @NonNls private static final String GRIFFON_COMMON_PLUGINS = "-griffonPlugins";
  private static final String GLOBAL_PLUGINS_MODULE_NAME = "GriffonGlobalPlugins";
  public static final String GRIFFON_USER_LIBRARY = "Griffon:lib";

  // Used to read the "name"/"version" values out of a plugin's plugin.json.
  private static final Pattern PLUGIN_NAME_JSON_PATTERN = Pattern.compile("\"name\"\\s*:\\s*\"([^\"]+)\"");
  private static final Pattern PLUGIN_VERSION_JSON_PATTERN = Pattern.compile("\"version\"\\s*:\\s*\"([^\"]+)\"");

  private GriffonFramework() {
  }

  /** A module is supported when it has an app root, is not auxiliary, and a Griffon SDK is found. */
  @Override
  public boolean hasSupport(@NotNull Module module) {
    return findAppRoot(module) != null && !isAuxModule(module) && getSdkRoot(module) != null;
  }

  @NotNull
  @Override
  public String getApplicationDirectoryName() {
    return "griffon-app";
  }

  @Override
  public boolean isToReformatOnCreation(VirtualFile file) {
    return file.getFileType() == GroovyFileType.GROOVY_FILE_TYPE;
  }

  /** Griffon has no automatic framework upgrade; intentionally a no-op. */
  @Override
  public void upgradeFramework(@NotNull Module module) {
  }

  @Nullable
  @Override
  protected GeneralCommandLine getCreationCommandLine(Module module) {
    GriffonCreateProjectDialog dialog = new GriffonCreateProjectDialog(module);
    if (!dialog.showAndGet()) {
      return null; // user cancelled the dialog
    }
    return createCommandAndShowErrors(module, true, dialog.getCommand());
  }

  @Override
  public boolean updatesWholeProject() {
    return false;
  }

  @Override
  public void updateProjectStructure(@NotNull final Module module) {
    if (!MvcModuleStructureUtil.isEnabledStructureUpdate()) return;

    final VirtualFile root = findAppRoot(module);
    if (root == null) return;

    // Module-structure changes must run inside a write action.
    AccessToken token = WriteAction.start();
    try {
      MvcModuleStructureUtil.updateModuleStructure(module, createProjectStructure(module, false), root);

      if (hasSupport(module)) {
        MvcModuleStructureUtil.updateAuxiliaryPluginsModuleRoots(module, this);
        MvcModuleStructureUtil.updateGlobalPluginModule(module.getProject(), this);
      }
    }
    finally {
      token.finish();
    }

    // Keep the Griffon SDK work directory out of the VCS change lists.
    final Project project = module.getProject();
    ChangeListManager.getInstance(project).addFilesToIgnore(IgnoredBeanFactory.ignoreUnderDirectory(getUserHomeGriffon(), project));
  }

  @Override
  public void ensureRunConfigurationExists(@NotNull Module module) {
    final VirtualFile root = findAppRoot(module);
    if (root != null) {
      ensureRunConfigurationExists(module, GriffonRunConfigurationType.getInstance(), "Griffon:" + root.getName());
    }
  }

  /**
   * Determines the name of an installed plugin directory. Falls back to
   * parsing plugin.json when the standard lookup yields nothing. The directory
   * name is assumed to be of the form pluginName-version, where either part
   * may itself contain dashes, so every dash is tried as a split point.
   */
  @Override
  public String getInstalledPluginNameByPath(Project project, @NotNull VirtualFile pluginPath) {
    String nameFromPluginXml = super.getInstalledPluginNameByPath(project, pluginPath);
    if (nameFromPluginXml != null) {
      return nameFromPluginXml;
    }

    VirtualFile pluginJson = pluginPath.findChild("plugin.json");
    if (pluginJson != null) {
      String pluginAndVersion = pluginPath.getName(); // pluginName-version

      // Collect every '-' position: each is a candidate name/version boundary.
      TIntArrayList separatorIndexes = new TIntArrayList();
      int start = -1;
      while (true) {
        start = pluginAndVersion.indexOf('-', start + 1);
        if (start == -1) break;
        separatorIndexes.add(start);
      }

      if (separatorIndexes.size() == 1) {
        // Unambiguous: a single dash splits name from version.
        return pluginAndVersion.substring(0, separatorIndexes.get(0));
      }

      if (!separatorIndexes.isEmpty()) {
        String json;
        try {
          json = VfsUtil.loadText(pluginJson);
        }
        catch (IOException e) {
          return null;
        }

        // Try each split; accept the one whose name AND version both appear in plugin.json.
        for (int i = 0; i < separatorIndexes.size(); i++) {
          int idx = separatorIndexes.get(i);
          String name = pluginAndVersion.substring(0, idx);
          String version = pluginAndVersion.substring(idx + 1);
          if (hasValue(PLUGIN_NAME_JSON_PATTERN, json, name) && hasValue(PLUGIN_VERSION_JSON_PATTERN, json, version)) {
            return name;
          }
        }
      }
    }

    return null;
  }

  /** True when some match of the pattern's first capture group in {@code text} equals {@code value}. */
  private static boolean hasValue(Pattern pattern, String text, String value) {
    Matcher matcher = pattern.matcher(text);
    while (matcher.find()) {
      if (matcher.group(1).equals(value)) {
        return true;
      }
    }
    return false;
  }

  @Override
  protected boolean isCoreJar(@NotNull VirtualFile localFile) {
    return GriffonLibraryPresentationProvider.isGriffonCoreJar(localFile);
  }

  /**
   * Locates the Griffon SDK home by finding the griffon-core jar among the
   * module's libraries and walking two directories up (jar -> lib -> home).
   */
  @Override
  public VirtualFile getSdkRoot(@Nullable Module module) {
    if (module == null) return null;
    final VirtualFile[] classRoots = ModuleRootManager.getInstance(module).orderEntries().librariesOnly().getClassesRoots();
    for (VirtualFile file : classRoots) {
      if (GriffonLibraryPresentationProvider.isGriffonCoreJar(file)) {
        final VirtualFile localFile = JarFileSystem.getInstance().getVirtualFileForJar(file);
        if (localFile != null) {
          final VirtualFile parent = localFile.getParent();
          if (parent != null) {
            return parent.getParent();
          }
        }
        return null;
      }
    }
    return null;
  }

  @Override
  public String getUserLibraryName() {
    return GRIFFON_USER_LIBRARY;
  }

  /**
   * Builds the JVM invocation for running a Griffon CLI command through
   * GriffonStarter: adds groovy-all and the griffon-cli/rt/resources jars to
   * the classpath, sets GRIFFON_HOME and the related system properties, and
   * forwards the MVC command plus its arguments.
   * Note: the {@code forTests} flag is accepted for interface compatibility
   * but not consulted here.
   */
  @Override
  public JavaParameters createJavaParameters(@NotNull Module module, boolean forCreation, boolean forTests,
                                             boolean classpathFromDependencies,
                                             @NotNull MvcCommand command) throws ExecutionException {
    JavaParameters params = new JavaParameters();

    Sdk sdk = ModuleRootManager.getInstance(module).getSdk();

    params.setJdk(sdk);
    final VirtualFile sdkRoot = getSdkRoot(module);
    if (sdkRoot == null) {
      // No Griffon SDK found: return bare parameters rather than failing here.
      return params;
    }

    params.addEnv(getSdkHomePropertyName(), FileUtil.toSystemDependentName(sdkRoot.getPath()));

    // Pick up the bundled groovy-all jar from <sdk>/lib.
    final VirtualFile lib = sdkRoot.findChild("lib");
    if (lib != null) {
      for (final VirtualFile child : lib.getChildren()) {
        final String name = child.getName();
        if (name.startsWith("groovy-all-") && name.endsWith(".jar")) {
          params.getClassPath().add(child);
        }
      }
    }

    // Pick up the griffon-cli/rt/resources jars from <sdk>/dist.
    final VirtualFile dist = sdkRoot.findChild("dist");
    if (dist != null) {
      for (final VirtualFile child : dist.getChildren()) {
        final String name = child.getName();
        if (name.endsWith(".jar")) {
          if (name.startsWith("griffon-cli-") || name.startsWith("griffon-rt-") || name.startsWith("griffon-resources-")) {
            params.getClassPath().add(child);
          }
        }
      }
    }

    /////////////////////////////////////////////////////////////

    params.setMainClass("org.codehaus.griffon.cli.support.GriffonStarter");

    final VirtualFile rootFile;

    if (forCreation) {
      // For project creation the app directory does not exist yet: run from the
      // content root's parent and pass the new directory name as first argument.
      VirtualFile[] roots = ModuleRootManager.getInstance(module).getContentRoots();
      if (roots.length != 1) {
        throw new ExecutionException("Failed to initialize griffon module: module " + module.getName() + " contains more than one root");
      }

      command.getArgs().add(0, roots[0].getName());

      rootFile = roots[0].getParent();
    }
    else {
      rootFile = findAppRoot(module);
      if (rootFile == null) {
        throw new ExecutionException("Failed to run griffon command: module " + module.getName() + " is not a Griffon module");
      }
    }

    String workDir = VfsUtilCore.virtualToIoFile(rootFile).getAbsolutePath();

    params.getVMParametersList().addParametersString(command.getVmOptions());

    // Apply a default heap size unless the user already set -Xmx.
    if (!params.getVMParametersList().getParametersString().contains(XMX_JVM_PARAMETER)) {
      params.getVMParametersList().add("-Xmx256M");
    }

    final String griffonHomePath = FileUtil.toSystemDependentName(sdkRoot.getPath());
    params.getVMParametersList().add("-Dgriffon.home=" + griffonHomePath);
    params.getVMParametersList().add("-Dbase.dir=" + workDir);

    assert sdk != null; // getSdkRoot() succeeded, so a JDK must be configured
    params.getVMParametersList().add("-Dtools.jar=" + ((JavaSdkType)sdk.getSdkType()).getToolsPath(sdk));

    final String confpath = griffonHomePath + GROOVY_STARTER_CONF;
    params.getVMParametersList().add("-Dgroovy.starter.conf=" + confpath);

    params.getVMParametersList().add(
      "-Dgroovy.sanitized.stacktraces=\"groovy., org.codehaus.groovy., java., javax., sun., gjdk.groovy., gant., org.codehaus.gant.\"");

    params.getProgramParametersList().add("--main");
    params.getProgramParametersList().add("org.codehaus.griffon.cli.GriffonScriptRunner");
    params.getProgramParametersList().add("--conf");
    params.getProgramParametersList().add(confpath);

    if (!forCreation && classpathFromDependencies) {
      final String path = getApplicationClassPath(module).getPathsString();
      if (StringUtil.isNotEmpty(path)) {
        params.getProgramParametersList().add("--classpath");
        params.getProgramParametersList().add(path);
      }
    }

    params.setWorkingDirectory(workDir);

    // Append the actual Griffon command (e.g. "run-app") and its arguments.
    ParametersList paramList = new ParametersList();
    command.addToParametersList(paramList);
    params.getProgramParametersList().add(paramList.getParametersString());

    params.setDefaultCharset(module.getProject());

    return params;
  }

  @NotNull
  @Override
  public String getFrameworkName() {
    return "Griffon";
  }

  @Override
  public Icon getIcon() {
    return JetgroovyIcons.Griffon.Griffon;
  }

  @Override
  public Icon getToolWindowIcon() {
    return JetgroovyIcons.Griffon.GriffonToolWindow;
  }

  @Override
  public String getSdkHomePropertyName() {
    return "GRIFFON_HOME";
  }

  @Override
  protected String getCommonPluginSuffix() {
    return GRIFFON_COMMON_PLUGINS;
  }

  @Override
  public String getGlobalPluginsModuleName() {
    return GLOBAL_PLUGINS_MODULE_NAME;
  }

  /** Work directory defaults to ~/.griffon/&lt;version&gt;; null when the version is unknown. */
  @Override
  @Nullable
  public File getDefaultSdkWorkDir(@NotNull Module module) {
    final String version = GriffonLibraryPresentationProvider.getGriffonVersion(module);
    if (version == null) return null;

    return new File(getUserHomeGriffon(), version);
  }

  @Override
  public boolean isSDKLibrary(Library library) {
    return GriffonLibraryPresentationProvider.isGriffonSdk(library.getFiles(OrderRootType.CLASSES));
  }

  @Override
  public MvcProjectStructure createProjectStructure(@NotNull Module module, boolean auxModule) {
    return new GriffonProjectStructure(module, auxModule);
  }

  @Override
  public LibraryKind getLibraryKind() {
    return GriffonLibraryPresentationProvider.GRIFFON_KIND;
  }

  @Override
  public String getSomeFrameworkClass() {
    return "griffon.core.GriffonApplication";
  }

  public static String getUserHomeGriffon() {
    return MvcPathMacros.getSdkWorkDirParent("griffon");
  }

  public static GriffonFramework getInstance() {
    return EP_NAME.findExtension(GriffonFramework.class);
  }

  /** Returns the module's application.properties file, or null when there is no app root. */
  public VirtualFile getApplicationPropertiesFile(Module module) {
    final VirtualFile appRoot = findAppRoot(module);
    return appRoot != null ? appRoot.findChild("application.properties") : null;
  }

  /** Prefers the "application.name" property from application.properties over the default name. */
  @Override
  public String getApplicationName(Module module) {
    final VirtualFile appProperties = getApplicationPropertiesFile(module);
    if (appProperties != null) {
      final PsiFile file = PsiManager.getInstance(module.getProject()).findFile(appProperties);
      if (file instanceof PropertiesFile) {
        final IProperty property = ((PropertiesFile)file).findPropertyByKey("application.name");
        return property != null ? property.getValue() : super.getApplicationName(module);
      }
    }
    return super.getApplicationName(module);
  }

  @Override
  public boolean isRunTargetActionSupported(Module module) {
    return true;
  }

  /** Describes the source/test/excluded folder layout of a Griffon module. */
  private static class GriffonProjectStructure extends MvcProjectStructure {
    public static final String[] TEST_DIRS = new String[]{"unit", "integration", "shared"};
    public static final String[] SRC_DIR_SUBFOLDER = new String[]{"main", "cli"};
    public static final String[] GRIFFON_APP_SOURCE_ROOTS = new String[]{"models", "views", "controllers", "services", "conf", "lifecycle"};

    public GriffonProjectStructure(Module module, final boolean auxModule) {
      super(module, auxModule, getUserHomeGriffon(), GriffonFramework.getInstance().getSdkWorkDir(module));
    }

    @Override
    @NotNull
    public String getUserLibraryName() {
      return GRIFFON_USER_LIBRARY;
    }

    @Override
    public MultiMap<JpsModuleSourceRootType<?>, String> getSourceFolders() {
      MultiMap<JpsModuleSourceRootType<?>, String> res = new MultiMap<JpsModuleSourceRootType<?>, String>();
      for (VirtualFile root : ModuleRootManager.getInstance(myModule).getContentRoots()) {
        // src/main and src/cli become plain source roots when present.
        VirtualFile srcDir = root.findChild("src");
        if (srcDir != null) {
          for (String child : SRC_DIR_SUBFOLDER) {
            if (srcDir.findChild(child) != null) {
              res.putValue(JavaSourceRootType.SOURCE, "src/" + child);
            }
          }
        }

        VirtualFile griffonApp = root.findChild("griffon-app");
        if (griffonApp != null) {
          for (String child : GRIFFON_APP_SOURCE_ROOTS) {
            if (griffonApp.findChild(child) != null) {
              res.putValue(JavaSourceRootType.SOURCE, "griffon-app/" + child);
            }
          }
          // A *GriffonAddon file at the content root makes the root itself a source folder.
          for (VirtualFile child : root.getChildren()) {
            if (child.getNameWithoutExtension().endsWith("GriffonAddon")) {
              res.putValue(JavaSourceRootType.SOURCE, "");
              break;
            }
          }
        }

        // Additional source roots declared in module metadata.
        List<GriffonSourceInspector.GriffonSource> sources = GriffonSourceInspector.processModuleMetadata(myModule);
        for (GriffonSourceInspector.GriffonSource source : sources) {
          res.putValue(JavaSourceRootType.SOURCE, source.getPath());
        }

        VirtualFile testDir = root.findChild("test");
        if (testDir != null) {
          for (String child : TEST_DIRS) {
            if (testDir.findChild(child) != null) {
              res.putValue(JavaSourceRootType.TEST_SOURCE, "test/" + child);
            }
          }
        }
      }
      return res;
    }

    @Override
    public String[] getInvalidSourceFolders() {
      return new String[]{"src"};
    }

    @Override
    public String[] getExcludedFolders() {
      return new String[]{"target/classes", "target/test-classes"};
    }
  }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.plugins.groovy.lang.psi.impl;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.DummyHolder;
import com.intellij.psi.impl.source.codeStyle.CodeEditUtil;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.GroovyFileType;
import org.jetbrains.plugins.groovy.config.GroovyConfigUtils;
import org.jetbrains.plugins.groovy.lang.groovydoc.psi.api.GrDocComment;
import org.jetbrains.plugins.groovy.lang.groovydoc.psi.api.GrDocMemberReference;
import org.jetbrains.plugins.groovy.lang.groovydoc.psi.api.GrDocReferenceElement;
import org.jetbrains.plugins.groovy.lang.groovydoc.psi.api.GrDocTag;
import org.jetbrains.plugins.groovy.lang.psi.*;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.modifiers.GrModifier;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.modifiers.GrModifierList;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.modifiers.annotation.GrAnnotation;
import org.jetbrains.plugins.groovy.lang.psi.api.signatures.GrSignature;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.*;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.arguments.GrArgumentList;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.arguments.GrNamedArgument;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrClosableBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrCodeBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.clauses.GrCaseSection;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.*;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.literals.GrLiteral;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.path.GrMethodCallExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.params.GrParameter;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.*;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrEnumConstant;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod;
import org.jetbrains.plugins.groovy.lang.psi.api.toplevel.GrTopStatement;
import org.jetbrains.plugins.groovy.lang.psi.api.toplevel.imports.GrImportStatement;
import org.jetbrains.plugins.groovy.lang.psi.api.types.GrClosureParameter;
import org.jetbrains.plugins.groovy.lang.psi.api.types.GrCodeReferenceElement;
import org.jetbrains.plugins.groovy.lang.psi.api.types.GrTypeElement;
import org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.TypesUtil;
import org.jetbrains.plugins.groovy.lang.psi.util.ErrorUtil;
import org.jetbrains.plugins.groovy.lang.psi.util.GrStringUtil;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.jetbrains.plugins.groovy.lang.psi.GroovyElementTypes.CODE_REFERENCE;
/**
* @author ven
*/
@SuppressWarnings("ConstantConditions")
public class GroovyPsiElementFactoryImpl extends GroovyPsiElementFactory {
private static final Logger LOG = Logger.getInstance(GroovyPsiElementFactoryImpl.class);
private final Project myProject;
private final PsiManager myManager;
public GroovyPsiElementFactoryImpl(Project project, PsiManager manager) {
  // Both services are project-level components injected by the container.
  myProject = project;
  myManager = manager;
}
@Override
@NotNull
public PsiElement createReferenceNameFromText(@NotNull String refName) {
  // Parse a dummy qualified expression "a.<refName>" and extract the name part.
  final GroovyFileBase dummyFile = createGroovyFileChecked("a." + refName);
  final GrTopStatement first = dummyFile.getTopStatements()[0];
  if (first instanceof GrReferenceExpression) {
    final PsiElement nameElement = ((GrReferenceExpression)first).getReferenceNameElement();
    if (nameElement != null) {
      return nameElement;
    }
  }
  throw new IncorrectOperationException("Incorrect reference name: " + refName);
}
@NotNull
@Override
public PsiElement createDocMemberReferenceNameFromText(@NotNull String idText) {
  // Build a "@see Foo#idText" reference and return its name element.
  final GrDocMemberReference memberRef = createDocMemberReferenceFromText("Foo", idText);
  LOG.assertTrue(memberRef != null, idText);
  return memberRef.getReferenceNameElement();
}
@NotNull
@Override
public GrDocMemberReference createDocMemberReferenceFromText(@NotNull String className, @NotNull String text) {
  // Wrap the reference in a groovydoc comment: /** @see Class#member */
  final PsiFile file = createGroovyFileChecked("/** @see " + className + "#" + text + " */");
  final PsiElement comment = file.getFirstChild();
  assert comment instanceof GrDocComment;
  final GrDocTag seeTag = PsiTreeUtil.getChildOfType(comment, GrDocTag.class);
  assert seeTag != null : "Doc tag points to null";
  return PsiTreeUtil.getChildOfType(seeTag, GrDocMemberReference.class);
}
@NotNull
@Override
public GrDocReferenceElement createDocReferenceElementFromFQN(@NotNull String qName) {
  // Wrap the fully-qualified name in a groovydoc comment: /** @see qName */
  final PsiFile file = createGroovyFileChecked("/** @see " + qName + " */");
  final PsiElement comment = file.getFirstChild();
  assert comment instanceof GrDocComment;
  final GrDocTag seeTag = PsiTreeUtil.getChildOfType(comment, GrDocTag.class);
  assert seeTag != null : "Doc tag points to null";
  return PsiTreeUtil.getChildOfType(seeTag, GrDocReferenceElement.class);
}
@NotNull
@Override
public GrCodeReferenceElement createReferenceElementFromText(@NotNull String refName, final PsiElement context) {
  // Parse the text as a bare code-reference node inside a dummy holder.
  final DummyHolder holder =
    new DummyHolder(myManager, new GroovyDummyElement(CODE_REFERENCE, refName), context);
  final PsiElement child = holder.getFirstChild();
  if (child instanceof GrCodeReferenceElement) {
    return (GrCodeReferenceElement)child;
  }
  throw new IncorrectOperationException("Incorrect code reference '" + refName + "'");
}
@NotNull
@Override
public GrReferenceExpression createReferenceExpressionFromText(@NotNull String idText) {
  final GrTopStatement[] statements = createGroovyFileChecked(idText).getTopStatements();
  // Exactly one statement is expected, and it must be a reference expression.
  if (statements.length != 1 || !(statements[0] instanceof GrReferenceExpression)) {
    throw new IncorrectOperationException(idText);
  }
  return (GrReferenceExpression)statements[0];
}
@NotNull
@Override
public GrReferenceExpression createReferenceExpressionFromText(@NotNull String idText, PsiElement context) {
  final GroovyFile file = createGroovyFileChecked(idText, false, context);
  final GrTopStatement[] topStatements = file.getTopStatements();
  // Exactly one statement is expected, and it must be a reference expression;
  // both failure modes carry the same message, so one check suffices.
  if (topStatements.length != 1 || !(topStatements[0] instanceof GrReferenceExpression)) {
    throw new IncorrectOperationException("refText: " + idText);
  }
  return (GrReferenceExpression)topStatements[0];
}
@NotNull
@Override
public GrReferenceExpression createReferenceElementForClass(@NotNull PsiClass aClass) {
  // Anonymous classes have no name of their own; use their base class type instead.
  final String text = aClass instanceof PsiAnonymousClass
                      ? ((PsiAnonymousClass)aClass).getBaseClassType().getPresentableText()
                      : aClass.getName();
  return createReferenceExpressionFromText(text);
}
@Override
@NotNull
public GrExpression createExpressionFromText(@NotNull String text, PsiElement context) {
  // Parse the text in context and accept it only if the first top-level
  // statement is an expression.
  final GrTopStatement[] statements = createGroovyFile(text, false, context).getTopStatements();
  if (statements.length > 0 && statements[0] instanceof GrExpression) {
    return (GrExpression)statements[0];
  }
  throw new IncorrectOperationException("incorrect expression = '" + text + "'");
}
@NotNull
@Override
public GrCodeReferenceElement createReferenceElementByType(PsiClassType type) {
  // Reuses the existing reference when the type already wraps a Groovy code reference;
  // otherwise synthesizes one from the type's canonical text.
  if (type instanceof GrClassReferenceType) {
    GrReferenceElement reference = ((GrClassReferenceType)type).getReference();
    if (reference instanceof GrCodeReferenceElement) {
      return (GrCodeReferenceElement)reference;
    }
  }
  // The resolve result is only used to assert the type is resolvable before parsing its text.
  final PsiClassType.ClassResolveResult resolveResult = type.resolveGenerics();
  final PsiClass refClass = resolveResult.getElement();
  assert refClass != null : type;
  return createCodeReferenceElementFromText(type.getCanonicalText());
}
@NotNull
@Override
public PsiTypeParameterList createTypeParameterList() {
  // "def <> void foo(){}" parses to a method with an empty type-parameter list.
  return createMethodFromText("def <> void foo(){}").getTypeParameterList();
}
@NotNull
@Override
public PsiTypeParameter createTypeParameter(String name, PsiClassType[] superTypes) {
  // Builds "def <name [extends B1,B2,...]> void foo(){}" and extracts the type parameter.
  StringBuilder builder = new StringBuilder();
  builder.append("def <").append(name);
  // Collect non-Object bounds first. The previous code appended " extends " before
  // filtering, so when superTypes.length > 1 and every bound was java.lang.Object it
  // deleted the trailing 's' of "extends" and produced unparseable text.
  List<String> bounds = new ArrayList<>();
  for (PsiClassType type : superTypes) {
    if (type.equalsToText(CommonClassNames.JAVA_LANG_OBJECT)) continue;
    bounds.add(type.getCanonicalText());
  }
  if (!bounds.isEmpty()) {
    builder.append(" extends ").append(StringUtil.join(bounds, ","));
  }
  builder.append("> void foo(){}");
  try {
    return createMethodFromText(builder).getTypeParameters()[0];
  }
  catch (RuntimeException e) {
    // Any parse failure surfaces as an IncorrectOperationException carrying the generated text.
    throw new IncorrectOperationException("type parameter text: " + builder);
  }
}
@NotNull
@Override
public GrVariableDeclaration createVariableDeclaration(@Nullable String[] modifiers,
                                                       @Nullable GrExpression initializer,
                                                       @Nullable PsiType type,
                                                       String... identifiers) {
  // Converts the initializer expression to text and delegates to the String-based overload.
  String initializerText;
  if (initializer != null) {
    // Pre-1.8 Groovy cannot parse command ("application") calls as initializers,
    // so rewrite them as regular method calls first.
    if (initializer instanceof GrApplicationStatement &&
        !GroovyConfigUtils.getInstance().isVersionAtLeast(initializer, GroovyConfigUtils.GROOVY1_8, false)) {
      initializer = createMethodCallByAppCall((GrApplicationStatement)initializer);
    }
    assert initializer != null;
    initializerText = initializer.getText();
  }
  else {
    initializerText = null;
  }
  return createVariableDeclaration(modifiers, initializerText, type, identifiers);
}
@NotNull
@Override
public GrVariableDeclaration createVariableDeclaration(@Nullable String[] modifiers,
                                                       @Nullable String initializer,
                                                       @Nullable PsiType type,
                                                       String... identifiers) {
  // Assembles "[modifiers] [type|def] id1, id2 [= initializer]" (with parentheses around
  // the identifiers for a multiple-assignment) and parses it as a variable declaration.
  StringBuilder text = writeModifiers(modifiers);
  if (type != null && type != PsiType.NULL) {
    // Primitive wrappers are unboxed so e.g. Integer becomes int in the declaration.
    final PsiType unboxed = TypesUtil.unboxPrimitiveTypeWrapper(type);
    final String typeText = getTypeText(unboxed);
    text.append(typeText).append(" ");
  } else if (text.length() == 0) {
    // Without modifiers and type, "def" is needed for the text to parse as a declaration.
    text.insert(0, "def ");
  }
  // Multi-assign form: "def (a, b) = expr".
  if (identifiers.length > 1 && initializer != null) {
    text.append('(');
  }
  for (int i = 0; i < identifiers.length; i++) {
    if (i > 0) text.append(", ");
    String identifier = identifiers[i];
    text.append(identifier);
  }
  if (identifiers.length > 1 && initializer != null) {
    text.append(')');
  }
  if (!StringUtil.isEmptyOrSpaces(initializer)) {
    text.append(" = ").append(initializer);
  }
  GrTopStatement[] topStatements = createGroovyFileChecked(text).getTopStatements();
  // Fallback: retry with a leading "def" in case the bare form parsed as something else.
  if (topStatements.length == 0 || !(topStatements[0] instanceof GrVariableDeclaration)) {
    topStatements = createGroovyFileChecked("def " + text).getTopStatements();
  }
  if (topStatements.length == 0 || !(topStatements[0] instanceof GrVariableDeclaration)) {
    throw new RuntimeException("Invalid arguments, text = " + text);
  }
  final GrVariableDeclaration statement = (GrVariableDeclaration)topStatements[0];
  //todo switch-case formatting should work without this hack
  CodeEditUtil.markToReformatBefore(statement.getNode().findLeafElementAt(0), true);
  return statement;
}
@NotNull
@Override
public GrEnumConstant createEnumConstantFromText(@NotNull String text) {
  // Wraps the constant text in a throwaway enum and extracts its single constant.
  GroovyFile file = createGroovyFileChecked("enum E{" + text + "}");
  final GrEnumTypeDefinition enumClass = (GrEnumTypeDefinition)file.getClasses()[0];
  return enumClass.getEnumConstants()[0];
}
@NotNull
@Override
public GrVariableDeclaration createFieldDeclaration(@NotNull String[] modifiers,
                                                    @NotNull String identifier,
                                                    @Nullable GrExpression initializer,
                                                    @Nullable PsiType type) {
  // Builds the declaration as a local variable, then re-parses it inside a dummy class
  // so the result is a field member rather than a local.
  final String varDeclaration = createVariableDeclaration(modifiers, initializer, type, identifier).getText();
  final GroovyFileBase file = createGroovyFileChecked("class A { " + varDeclaration + "}");
  final GrTypeDefinitionBody body = file.getTypeDefinitions()[0].getBody();
  LOG.assertTrue(body.getMemberDeclarations().length == 1 && body.getMemberDeclarations()[0] instanceof GrVariableDeclaration,
                 "ident = <" + identifier + "> initializer = " + (initializer == null ? "_null_" : ("<" + initializer.getText()) + ">"));
  return (GrVariableDeclaration) body.getMemberDeclarations()[0];
}
@NotNull
@Override
public GrVariableDeclaration createFieldDeclarationFromText(@NotNull String text) {
  // Parses raw field text inside a dummy class; the declaration is the field's parent node.
  final GroovyFile file = createGroovyFileChecked("class X{\n" + text + "\n}");
  final PsiClass psiClass = file.getClasses()[0];
  return (GrVariableDeclaration)psiClass.getFields()[0].getParent();
}
/**
 * Joins the given modifiers into a StringBuilder, each followed by a single space.
 * A null or empty array yields an empty builder.
 */
private static StringBuilder writeModifiers(String[] modifiers) {
  final StringBuilder result = new StringBuilder();
  if (modifiers != null) {
    for (String modifier : modifiers) {
      result.append(modifier).append(' ');
    }
  }
  return result;
}
/**
 * Renders {@code type} as Groovy source text: arrays recursively as "component[]",
 * everything else by canonical text (falling back to presentable text).
 * The literal text "null" maps to an empty string.
 */
private static String getTypeText(PsiType type) {
  if (type instanceof PsiArrayType) {
    return getTypeText(((PsiArrayType)type).getComponentType()) + "[]";
  }
  final String canonical = type.getCanonicalText();
  final String text = canonical != null ? canonical : type.getPresentableText();
  return "null".equals(text) ? "" : text;
}
@NotNull
@Override
public GrTopStatement createTopElementFromText(@NotNull String text) {
  // The text must parse to exactly one top-level statement.
  GroovyFile dummyFile = createGroovyFileChecked(text);
  final GrTopStatement[] topStatements = dummyFile.getTopStatements();
  if (topStatements.length != 1) throw new IncorrectOperationException("text = '" + text + "'");
  return topStatements[0];
}
@NotNull
@Override
public GrClosableBlock createClosureFromText(@NotNull String closureText, PsiElement context) throws IncorrectOperationException {
  // Parses the closure as the initializer of a dummy variable (the unlikely name avoids
  // clashes with identifiers resolvable through `context`).
  GroovyFile psiFile = createGroovyFileChecked("def __hdsjfghk_sdhjfshglk_foo = " + closureText, false, context);
  final GrStatement st = psiFile.getStatements()[0];
  LOG.assertTrue(st instanceof GrVariableDeclaration, closureText);
  final GrExpression initializer = ((GrVariableDeclaration)st).getVariables()[0].getInitializerGroovy();
  LOG.assertTrue(initializer instanceof GrClosableBlock, closureText);
  return ((GrClosableBlock)initializer);
}
// Creates an in-memory Groovy file holding `text`; the shared backing of every
// parse-from-text factory in this class. A fresh modification stamp is used each time.
private GroovyFileImpl createDummyFile(@NotNull CharSequence text, boolean physical) {
  final String fileName = DUMMY_FILE_NAME + '.' + GroovyFileType.GROOVY_FILE_TYPE.getDefaultExtension();
  final long stamp = System.currentTimeMillis();
  final PsiFileFactory factory = PsiFileFactory.getInstance(myProject);
  return (GroovyFileImpl) factory.createFileFromText(fileName, GroovyFileType.GROOVY_FILE_TYPE, text, stamp, physical);
}
@NotNull
@Override
public GrParameter createParameter(@NotNull String name,
                                   @Nullable String typeText,
                                   @Nullable String initializer,
                                   @Nullable GroovyPsiElement context,
                                   String... modifiers) throws IncorrectOperationException {
  // Assembles "def <dummy>([modifiers] [type] name [= initializer]){}" and returns the
  // single parameter of the parsed method.
  try {
    StringBuilder fileText = new StringBuilder();
    fileText.append("def dsfsadfnbhfjks_weyripouh_huihnrecuio(");
    for (String modifier : modifiers) {
      fileText.append(modifier).append(' ');
    }
    if (StringUtil.isNotEmpty(typeText)) {
      fileText.append(typeText).append(' ');
    }
    fileText.append(name);
    if (initializer != null && !initializer.isEmpty()) {
      fileText.append(" = ").append(initializer);
    }
    fileText.append("){}");
    GroovyFile groovyFile = createGroovyFileChecked(fileText, false, context);
    ASTNode node = groovyFile.getFirstChild().getNode();
    return ((GrMethod)node.getPsi()).getParameters()[0];
  }
  catch (RuntimeException e) {
    // Any parse/cast failure is reported with the offending arguments.
    throw new IncorrectOperationException("name = " + name + ", type = " + typeText + ", initializer = " + initializer);
  }
}
@NotNull
@Override
public GrCodeReferenceElement createTypeOrPackageReference(@NotNull String qName) {
  // Parses "def i = new qName()" and returns the reference element of the new-expression,
  // which may resolve to either a type or a package prefix.
  try {
    final GroovyFileBase file = createGroovyFileChecked("def i = new " + qName + "()");
    final GrStatement[] statements = file.getStatements();
    final GrVariableDeclaration variableDeclaration = (GrVariableDeclaration)statements[0];
    final GrVariable var = variableDeclaration.getVariables()[0];
    final GrExpression initializer = var.getInitializerGroovy();
    return ((GrNewExpression)initializer).getReferenceElement();
  }
  catch (RuntimeException e) {
    throw new IncorrectOperationException("reference text=" + qName, (Throwable)e);
  }
}
@NotNull
@Override
public GrTypeDefinition createTypeDefinition(@NotNull String text) throws IncorrectOperationException {
  // The text must parse to exactly one class/interface/enum/trait/annotation definition.
  final GroovyFileBase file = createGroovyFileChecked(text);
  final GrTypeDefinition[] classes = file.getTypeDefinitions();
  if (classes.length != 1) throw new IncorrectOperationException("Incorrect type definition text");
  return classes[0];
}
@Override
@NotNull
public GrTypeElement createTypeElement(@NotNull String typeText, @Nullable final PsiElement context) throws IncorrectOperationException {
  // Parses "def <type> someVar" and extracts the declaration's type element.
  final GroovyFile file = createGroovyFileChecked("def " + typeText + " someVar", false, context);
  GrTopStatement[] topStatements = file.getTopStatements();
  if (topStatements == null || topStatements.length == 0) throw new IncorrectOperationException("can't create type element from:" + typeText);
  GrTopStatement statement = topStatements[0];
  if (!(statement instanceof GrVariableDeclaration)) throw new IncorrectOperationException("can't create type element from:" + typeText);
  GrVariableDeclaration decl = (GrVariableDeclaration) statement;
  final GrTypeElement element = decl.getTypeElementGroovy();
  if (element == null) throw new IncorrectOperationException("can't create type element from:" + typeText);
  return element;
}
@NotNull
@Override
public GrTypeElement createTypeElement(@NotNull PsiType type) throws IncorrectOperationException {
  // Delegates via the type's rendered source text.
  // NOTE(review): getTypeText never returns null in the visible code, so this check
  // appears to be dead — kept for safety.
  final String typeText = getTypeText(type);
  if (typeText == null)
    throw new IncorrectOperationException("Cannot create type element: cannot obtain text for type");
  return createTypeElement(typeText);
}
@NotNull
@Override
public PsiClassType createType(@NotNull PsiClass aClass) {
  // Class types are language-agnostic; delegate to the Java element factory.
  return JavaPsiFacade.getElementFactory(myProject).createType(aClass);
}
@NotNull
@Override
public GrParenthesizedExpression createParenthesizedExpr(@NotNull GrExpression expression) {
  // Wraps the expression's text in parentheses and re-parses.
  return ((GrParenthesizedExpression) createExpressionFromText("(" + expression.getText() + ")"));
}
@NotNull
@Override
public PsiElement createStringLiteralForReference(@NotNull String text) {
  // The first child of a string literal is the leaf token usable as a reference host.
  return createLiteralFromValue(text).getFirstChild();
}
@NotNull
@Override
public PsiElement createModifierFromText(@NotNull String name) {
  // Parses "<modifier> foo() {}" and digs out the modifier's leaf element.
  final GroovyFileBase file = createGroovyFileChecked(name + " foo() {}");
  final GrTopStatement[] definitions = file.getTopStatements();
  if (definitions.length != 1) throw new IncorrectOperationException(name);
  return definitions[0].getFirstChild().getFirstChild();
}
@NotNull
@Override
public GrCodeBlock createMethodBodyFromText(@NotNull String text) {
  // Wraps the body text in a dummy method and returns its block.
  final GroovyFileBase file = createGroovyFileChecked("def foo () {" + text + "}");
  final GrMethod method = (GrMethod) file.getTopStatements()[0];
  return method.getBlock();
}
@NotNull
@Override
public GrVariableDeclaration createSimpleVariableDeclaration(@NotNull String name, @NotNull String typeText) {
  // A lowercase type name needs "def" so the parser treats it as a declaration,
  // not a command expression.
  String classText;
  if (Character.isLowerCase(typeText.charAt(0))) {
    classText = "class A { def " + typeText + " " + name + "}";
  } else {
    classText = "class A { " + typeText + " " + name + "}";
  }
  GroovyFileBase file = createGroovyFileChecked(classText);
  final GrTypeDefinitionBody body = file.getTypeDefinitions()[0].getBody();
  return (GrVariableDeclaration) body.getMemberDeclarations()[0];
}
@NotNull
@Override
public PsiElement createDotToken(@NotNull String newDot) {
  // Parses "a<dot>b" (e.g. ".", "?.", "*.") and returns the dot token leaf.
  return createReferenceExpressionFromText("a" + newDot + "b").getDotToken();
}
@NotNull
@Override
public GrMethod createConstructorFromText(@NotNull String constructorName,
                                          @Nullable String[] paramTypes,
                                          @NotNull String[] paramNames,
                                          @Nullable String body,
                                          @Nullable PsiElement context) {
  // Generates the constructor's source text, then delegates to the text-based overload.
  final CharSequence text = generateMethodText(null, constructorName, null, paramTypes, paramNames, body, true);
  return createConstructorFromText(constructorName, text, context);
}
@NotNull
@Override
public GrMethod createConstructorFromText(String constructorName, CharSequence constructorText, @Nullable PsiElement context) {
  // The constructor must live in a class of the same name to parse as a constructor.
  GroovyFile file = createGroovyFileChecked("class " + constructorName + "{" + constructorText + "}", false, context);
  GrTypeDefinition definition = file.getTypeDefinitions()[0];
  if (definition == null) {
    throw new IncorrectOperationException("constructorName: " + constructorName + ", text: " + constructorText);
  }
  GrMethod[] methods = definition.getCodeMethods();
  if (methods.length != 1) {
    throw new IncorrectOperationException("constructorName: " + constructorName + ", text: " + constructorText);
  }
  return methods[0];
}
@Override
@NotNull
public GrMethod createMethodFromText(String methodText, @Nullable PsiElement context) {
  if (methodText == null) throw new IncorrectOperationException("Method text not provided");
  // NOTE(review): uses the unchecked createGroovyFile, so files with syntax errors are
  // accepted as long as one GrMethod comes out — confirm this leniency is intended.
  GroovyFile file = createGroovyFile(methodText, false, context);
  GrTopStatement[] definitions = file.getTopStatements();
  if (definitions.length != 1) {
    throw new IncorrectOperationException("Can't create method from text: '" + file.getText() + "'");
  }
  GrTopStatement definition = definitions[0];
  if (!(definition instanceof GrMethod)) {
    throw new IncorrectOperationException("Can't create method from text: '" + file.getText() + "'");
  }
  return ((GrMethod)definition);
}
@NotNull
@Override
public GrAnnotation createAnnotationFromText(@NotNull @NonNls String annotationText, @Nullable PsiElement context) throws IncorrectOperationException {
  // Attaches the annotation to a dummy method and returns it from the modifier list.
  return createMethodFromText(annotationText + " void ___shdjklf_pqweirupncp_foo() {}", context).getModifierList().getRawAnnotations()[0];
}
@NotNull
@Override
public GrMethod createMethodFromSignature(@NotNull String name, @NotNull GrSignature signature) {
  // Renders "public [returnType] name(T1 p1 [= init], ...){}" from a closure signature.
  StringBuilder builder = new StringBuilder("public");
  final PsiType returnType = signature.getReturnType();
  if (returnType != null && returnType != PsiType.NULL) {
    builder.append(' ');
    builder.append(returnType.getCanonicalText());
  }
  builder.append(' ').append(name).append('(');
  int i = 0;
  for (GrClosureParameter parameter : signature.getParameters()) {
    final PsiType type = parameter.getType();
    if (type != null) {
      builder.append(type.getCanonicalText());
      builder.append(' ');
    }
    // Parameters are named p1, p2, ... since closure parameters may be anonymous.
    builder.append('p').append(++i);
    final GrExpression initializer = parameter.getDefaultInitializer();
    if (initializer != null) {
      builder.append(" = ").append(initializer.getText());
      builder.append(", ");
    }
  }
  // Strip the trailing ", " appended after the last parameter.
  if (signature.getParameterCount() > 0) {
    builder.delete(builder.length() - 2, builder.length());
  }
  builder.append("){}");
  return createMethodFromText(builder);
}
@NotNull
@Override
public GrAnnotation createAnnotationFromText(@NotNull String annoText) {
  // Convenience overload without a resolve context.
  return createAnnotationFromText(annoText, null);
}
// Checked parse without context: throws if the dummy file contains syntax errors.
private GroovyFile createGroovyFileChecked(@NotNull CharSequence idText) {
  return createGroovyFileChecked(idText, false, null);
}
// Checked parse: like createGroovyFile, but rejects text that produces parse errors,
// so factory methods fail fast on malformed input instead of returning broken PSI.
private GroovyFile createGroovyFileChecked(@NotNull CharSequence idText, boolean isPhysical, @Nullable PsiElement context) {
  final GroovyFileImpl file = createDummyFile(idText, isPhysical);
  if (ErrorUtil.containsError(file)) {
    throw new IncorrectOperationException("cannot create file from text: " + idText);
  }
  file.setContext(context);
  return file;
}
/**
 * Unchecked parse: returns the dummy file even when it contains syntax errors.
 * Prefer createGroovyFileChecked() inside GroovyPsiElementFactoryImpl over this method.
 */
@NotNull
@Override
public GroovyFile createGroovyFile(@NotNull CharSequence idText, boolean isPhysical, @Nullable PsiElement context) {
  GroovyFileImpl file = createDummyFile(idText, isPhysical);
  file.setContext(context);
  return file;
}
@NotNull
@Override
public PsiElement createWhiteSpace() {
  // A file containing a single space parses to one whitespace element.
  PsiFile dummyFile = createDummyFile(" ", false);
  return dummyFile.getFirstChild();
}
@Override
@NotNull
public PsiElement createLineTerminator(int length) {
  // Any request for fewer than two line breaks yields a single '\n';
  // otherwise exactly `length` of them.
  final String text = length > 1 ? StringUtil.repeatSymbol('\n', length) : "\n";
  return createLineTerminator(text);
}
@Override
@NotNull
public PsiElement createLineTerminator(@NotNull String text) {
  // A file of pure newlines parses to a single whitespace element.
  PsiFile dummyFile = createGroovyFileChecked(text);
  PsiElement child = dummyFile.getFirstChild();
  assert child != null;
  return child;
}
@NotNull
@Override
public GrArgumentList createExpressionArgumentList(GrExpression... expressions) {
  // Renders a dummy call "ven (e1, e2, ...)" and returns its argument list.
  StringBuilder text = new StringBuilder();
  text.append("ven (");
  for (GrExpression expression : expressions) {
    text.append(expression.getText()).append(", ");
  }
  // Strip the trailing ", " appended after the last argument.
  if (expressions.length > 0) {
    text.delete(text.length() - 2, text.length());
  }
  text.append(')');
  PsiFile file = createGroovyFileChecked(text);
  assert file.getChildren()[0] != null && (file.getChildren()[0] instanceof GrMethodCallExpression);
  return (((GrMethodCallExpression) file.getChildren()[0])).getArgumentList();
}
@NotNull
@Override
public GrNamedArgument createNamedArgument(@NotNull final String name, @NotNull final GrExpression expression) {
  // Renders "foo (name:expr)" and returns the single named argument.
  PsiFile file = createGroovyFileChecked("foo (" + name + ":" + expression.getText() + ")");
  assert file.getChildren()[0] != null;
  GrCall call = (GrCall)file.getChildren()[0];
  return call.getArgumentList().getNamedArguments()[0];
}
@NotNull
@Override
public GrStatement createStatementFromText(@NotNull CharSequence text) {
  // Convenience overload without a resolve context.
  return createStatementFromText(text, null);
}
@NotNull
@Override
public GrStatement createStatementFromText(@NotNull CharSequence text, @Nullable PsiElement context) {
  // The text must parse to exactly one statement.
  GroovyFile file = createGroovyFileChecked(text, false, context);
  GrTopStatement[] statements = file.getTopStatements();
  if (statements.length != 1) {
    throw new IncorrectOperationException("count = " + statements.length + ", " + text);
  }
  if (!(statements[0] instanceof GrStatement)) {
    throw new IncorrectOperationException("type = " + statements[0].getClass().getName() + ", " + text);
  }
  return (GrStatement)statements[0];
}
@NotNull
@Override
public GrBlockStatement createBlockStatement(@NonNls GrStatement... statements) {
  // Wraps the statements in "while (true) { ... }" — a block statement cannot be parsed
  // standalone, but a while-body can.
  StringBuilder text = new StringBuilder();
  text.append("while (true) { \n");
  for (GrStatement statement : statements) {
    text.append(statement.getText()).append("\n");
  }
  text.append("}");
  PsiFile file = createGroovyFileChecked(text);
  LOG.assertTrue(file.getChildren()[0] != null && (file.getChildren()[0] instanceof GrWhileStatement), text);
  return (GrBlockStatement) ((GrWhileStatement) file.getChildren()[0]).getBody();
}
@NotNull
@Override
public GrMethodCallExpression createMethodCallByAppCall(@NotNull GrApplicationStatement callExpr) {
  // Rewrites a command-style call ("foo a, b") into a parenthesized call ("foo(a, b)").
  StringBuilder text = new StringBuilder();
  text.append(callExpr.getInvokedExpression().getText());
  text.append("(");
  final GrCommandArgumentList argumentList = callExpr.getArgumentList();
  if (argumentList != null) text.append(argumentList.getText());
  text.append(")");
  PsiFile file = createGroovyFileChecked(text);
  assert file.getChildren()[0] != null && (file.getChildren()[0] instanceof GrMethodCallExpression);
  return ((GrMethodCallExpression)file.getChildren()[0]);
}
@NotNull
@Override
public GrCodeReferenceElement createCodeReferenceElementFromClass(@NotNull PsiClass aClass) {
  // A code reference needs a qualified name; anonymous classes never have one.
  if (aClass instanceof PsiAnonymousClass) {
    throw new IncorrectOperationException("cannot create code reference element for anonymous class " + aClass.getText());
  }
  final String qName = aClass.getQualifiedName();
  if (qName == null) {
    // Local classes (and other unqualifiable classes) also return null here; previously
    // null flowed into createCodeReferenceElementFromText(@NotNull String), violating
    // its contract. Fail with the same exception type callers already expect.
    throw new IncorrectOperationException("cannot create code reference element for class without qualified name: " + aClass.getText());
  }
  return createCodeReferenceElementFromText(qName);
}
@NotNull
@Override
public GrCodeReferenceElement createCodeReferenceElementFromText(@NotNull String text) {
  // Parses "class X extends <text>{}" and returns the extends-clause reference.
  GroovyFile file = createGroovyFileChecked("class X extends " + text + "{}");
  PsiClass[] classes = file.getClasses();
  if (classes.length != 1) throw new IncorrectOperationException("cannot create code reference element for class" + text);
  GrExtendsClause extendsClause = ((GrTypeDefinition)classes[0]).getExtendsClause();
  if (extendsClause == null) throw new IncorrectOperationException("cannot create code reference element for class" + text);
  GrCodeReferenceElement[] refElements = extendsClause.getReferenceElementsGroovy();
  if (refElements.length != 1) throw new IncorrectOperationException("cannot create code reference element for class" + text);
  return refElements[0];
}
@NotNull
@Override
public GrReferenceExpression createThisExpression(@Nullable PsiClass psiClass) {
  // Builds either a plain "this" or, when a qualified class is given, "pkg.Outer.this";
  // the class also serves as the resolve context of the resulting expression.
  String text = "this";
  if (psiClass != null) {
    final String qname = psiClass.getQualifiedName();
    if (!StringUtil.isEmpty(qname)) {
      text = qname + ".this";
    }
  }
  return createReferenceExpressionFromText(text, psiClass);
}
@NotNull
@Override
public GrBlockStatement createBlockStatementFromText(@NotNull String text, @Nullable PsiElement context) {
  // Wraps the braced text as the then-branch of "if(true)..." to force a block statement.
  GroovyFile file = createGroovyFileChecked("if(true)" + text, false, context);
  GrStatement[] statements = file.getStatements();
  LOG.assertTrue(statements.length == 1 && statements[0] instanceof GrIfStatement, text);
  GrStatement branch = ((GrIfStatement)statements[0]).getThenBranch();
  LOG.assertTrue(branch instanceof GrBlockStatement);
  return (GrBlockStatement)branch;
}
@NotNull
@Override
public GrModifierList createModifierList(@NotNull CharSequence text) {
  // Attaches the modifiers to a dummy method and returns its modifier list.
  final GrMethod method = createMethodFromText(text + " void foo()");
  return method.getModifierList();
}
@NotNull
@Override
public GrCaseSection createSwitchSection(@NotNull String text) {
  // The section must parse as exactly one case inside a dummy switch statement.
  final GrStatement statement = createStatementFromText("switch (a) {\n" + text + "\n}");
  if (!(statement instanceof GrSwitchStatement)) {
    throw new IncorrectOperationException("Cannot create switch section from text: " + text);
  }
  final GrCaseSection[] sections = ((GrSwitchStatement)statement).getCaseSections();
  if (sections.length != 1) throw new IncorrectOperationException("Cannot create switch section from text: " + text);
  return sections[0];
}
@NotNull
@Override
public GrImportStatement createImportStatementFromText(@NotNull String qName, boolean isStatic, boolean isOnDemand, String alias) {
  // Convenience overload without a resolve context.
  return createImportStatement(qName, isStatic, isOnDemand, alias, null);
}
@NotNull
@Override
public GrImportStatement createImportStatementFromText(@NotNull String text) {
  // The raw text must already be a complete import statement.
  PsiFile dummyFile = createGroovyFileChecked(text);
  return ((GrImportStatement) dummyFile.getFirstChild());
}
@NotNull
@Override
public GrImportStatement createImportStatement(@NotNull String qname,
                                               boolean isStatic,
                                               boolean isOnDemand,
                                               String alias,
                                               PsiElement context) {
  // Assembles "import [static] qname[.*] [as alias]" and parses it.
  StringBuilder builder = new StringBuilder();
  builder.append("import ");
  if (isStatic) {
    builder.append("static ");
  }
  builder.append(qname);
  if (isOnDemand) {
    builder.append(".*");
  }
  if (StringUtil.isNotEmpty(alias)) {
    builder.append(" as ").append(alias);
  }
  PsiFile dummyFile = createGroovyFileChecked(builder, false, context);
  return ((GrImportStatement)dummyFile.getFirstChild());
}
/**
 * Renders a method or constructor declaration as source text:
 * "[modifier ][def ][type ]name(p1, p2, ...){body}".
 *
 * @param modifier      optional leading modifier ("public", "private", ...)
 * @param name          method or constructor name
 * @param type          optional return type text (used for constructor creation too)
 * @param paramTypes    optional parameter type texts, parallel to paramNames;
 *                      null means all parameters are untyped. Previously annotated
 *                      {@code @NotNull} while the body null-checked it and the 5-arg
 *                      createConstructorFromText passed a nullable array — corrected.
 * @param paramNames    parameter names
 * @param body          optional method body including braces; "{}" is used when null
 * @param isConstructor when true, no "def" keyword is emitted
 */
private static CharSequence generateMethodText(@Nullable String modifier,
                                               @NotNull String name,
                                               @Nullable String type,
                                               @Nullable String[] paramTypes,
                                               @NotNull String[] paramNames,
                                               @Nullable String body,
                                               boolean isConstructor) {
  StringBuilder builder = new StringBuilder();
  if (modifier != null) {
    builder.append(modifier);
    builder.append(" ");
  }
  if (!isConstructor) {
    builder.append("def ");
  }
  //This is for constructor creation
  if (type != null) {
    builder.append(type);
    builder.append(" ");
  }
  builder.append(name);
  builder.append("(");
  for (int i = 0; i < paramNames.length; i++) {
    String paramType = paramTypes == null ? null : paramTypes[i];
    if (i > 0) builder.append(", ");
    if (paramType != null) {
      builder.append(paramType);
      builder.append(" ");
    }
    builder.append(paramNames[i]);
  }
  builder.append(")");
  if (body != null) {
    builder.append(body);
  }
  else {
    builder.append("{");
    builder.append("}");
  }
  return builder;
}
@NotNull
@Override
public GrMethod createMethodFromText(@NotNull String modifier, @NotNull String name, @Nullable String type, @NotNull String[] paramTypes, PsiElement context) {
  // Resolves each parameter type text to a PsiType (falling back to java.lang.Object on
  // parse failure) only to derive readable parameter names, then generates the method.
  PsiType psiType;
  List<PsiType> res = new ArrayList<>();
  final GroovyPsiElementFactory factory = GroovyPsiElementFactory.getInstance(myProject);
  for (String paramType : paramTypes) {
    try {
      psiType = factory.createTypeElement(paramType).getType();
    }
    catch (IncorrectOperationException e) {
      psiType = TypesUtil.getJavaLangObject(context);
    }
    res.add(psiType);
  }
  String[] paramNames = GroovyNamesUtil.getMethodArgumentsNames(myProject, res.toArray(PsiType.createArray(res.size())));
  final CharSequence text = generateMethodText(modifier, name, type, paramTypes, paramNames, null, false);
  return createMethodFromText(text.toString(), context);
}
@Override
@NotNull
public GrDocComment createDocCommentFromText(@NotNull String text) {
  // A file beginning with a GroovyDoc comment exposes it as the first child.
  return (GrDocComment)createGroovyFileChecked(text).getFirstChild();
}
@NotNull
@Override
public GrConstructorInvocation createConstructorInvocation(@NotNull String text) {
  // Convenience overload without a resolve context.
  return createConstructorInvocation(text, null);
}
@NotNull
@Override
public GrConstructorInvocation createConstructorInvocation(@NotNull String text, @Nullable PsiElement context) {
  // this()/super() calls only parse inside a constructor body, hence the dummy class Foo.
  GroovyFile file = createGroovyFileChecked("class Foo{ def Foo(){" + text + "}}", false, context);
  return PsiImplUtil.getChainingConstructorInvocation((GrMethod)file.getClasses()[0].getConstructors()[0]);
}
@NotNull
@Override
public PsiReferenceList createThrownList(@NotNull PsiClassType[] exceptionTypes) {
  // No exceptions: the throws-list of a plain dummy method is already empty.
  if (exceptionTypes.length == 0) {
    return createMethodFromText("def foo(){}", null).getThrowsList();
  }
  // Join the canonical type names with commas and parse a throwing dummy method.
  final StringBuilder joined = new StringBuilder();
  for (PsiClassType exceptionType : exceptionTypes) {
    if (joined.length() > 0) joined.append(",");
    joined.append(exceptionType.getCanonicalText());
  }
  return createMethodFromText("def foo() throws " + joined + "{}", null).getThrowsList();
}
@NotNull
@Override
public GrCatchClause createCatchClause(@NotNull PsiClassType type, @NotNull String parameterName) {
  // Builds "try{} catch(<type> <name>){\n}" and returns its single catch clause.
  StringBuilder buffer = new StringBuilder("try{} catch(");
  // NOTE(review): this null check contradicts the @NotNull annotation on `type`;
  // it appears to be a defensive leftover — confirm whether callers may pass null.
  if (type == null) {
    buffer.append("Throwable ");
  }
  else {
    buffer.append(type.getCanonicalText()).append(" ");
  }
  buffer.append(parameterName).append("){\n}");
  final GrTryCatchStatement statement = (GrTryCatchStatement)createStatementFromText(buffer);
  return statement.getCatchClauses()[0];
}
@NotNull
@Override
public GrArgumentList createArgumentList() {
  // An empty call "foo()" yields an empty argument list.
  return ((GrCall)createExpressionFromText("foo()")).getArgumentList();
}
@NotNull
@Override
public GrArgumentList createArgumentListFromText(@NotNull String argListText) {
  // Parses "foo <argListText>" as a command call and returns its argument list;
  // the failing text is logged at debug level before rethrowing.
  try {
    return ((GrCall)createExpressionFromText("foo " + argListText)).getArgumentList();
  }
  catch (IncorrectOperationException e) {
    LOG.debug(argListText);
    throw e;
  }
}
@NotNull
@Override
public GrExtendsClause createExtendsClause() {
  // Parses "class A extends B {}" and deletes the B reference, leaving an empty clause.
  final GrTypeDefinition typeDefinition = createTypeDefinition("class A extends B {}");
  final GrExtendsClause clause = typeDefinition.getExtendsClause();
  clause.getReferenceElementsGroovy()[0].delete();
  return clause;
}
@NotNull
@Override
public GrImplementsClause createImplementsClause() {
  // Same trick as createExtendsClause, for the implements clause.
  final GrTypeDefinition typeDefinition = createTypeDefinition("class A implements B {}");
  final GrImplementsClause clause = typeDefinition.getImplementsClause();
  clause.getReferenceElementsGroovy()[0].delete();
  return clause;
}
@NotNull
@Override
public GrLiteral createLiteralFromValue(@Nullable Object value) {
  // Only null, String and Boolean values have a literal rendering here;
  // anything else is rejected.
  if (value == null) {
    return (GrLiteral)createExpressionFromText("null");
  }
  if (value instanceof String) {
    final StringBuilder literalText = GrStringUtil.getLiteralTextByValue((String)value);
    final GrExpression expression = createExpressionFromText(literalText);
    LOG.assertTrue(expression instanceof GrLiteral, "value = " + value);
    return (GrLiteral)expression;
  }
  if (value instanceof Boolean) {
    return (GrLiteral)createExpressionFromText(value.toString());
  }
  throw new IncorrectOperationException("Can not create literal from type: " + value.getClass().getName());
}
@NotNull
@Override
public PsiClass createClass(@NonNls @NotNull String name) throws IncorrectOperationException {
  // Empty class shell: "class <name>{}".
  return createTypeDefinition("class " + name + "{}");
}
@NotNull
@Override
public PsiClass createInterface(@NonNls @NotNull String name) throws IncorrectOperationException {
  // Empty interface shell: "interface <name>{}".
  return createTypeDefinition("interface " + name + "{}");
}
@NotNull
@Override
public PsiClass createEnum(@NotNull @NonNls String name) throws IncorrectOperationException {
  // Empty enum shell: "enum <name>{}".
  return createTypeDefinition("enum " + name + "{}");
}
@NotNull
@Override
public GrField createField(@NotNull @NonNls String name, @NotNull PsiType type) throws IncorrectOperationException {
  // Builds a field via the declaration factory (no modifiers, no initializer).
  final GrVariableDeclaration fieldDeclaration = createFieldDeclaration(ArrayUtil.EMPTY_STRING_ARRAY, name, null, type);
  return (GrField)fieldDeclaration.getVariables()[0];
}
@NotNull
@Override
public GrTraitTypeDefinition createTrait(@NotNull String name) {
  // Empty trait shell: "trait <name>{}".
  return (GrTraitTypeDefinition)createTypeDefinition("trait " + name + "{}");
}
@NotNull
@Override
public GrMethod createMethod(@NotNull @NonNls String name, @Nullable PsiType returnType) throws IncorrectOperationException {
  // Convenience overload without a resolve context.
  return createMethod(name, returnType, null);
}
@NotNull
@Override
public GrMethod createMethod(@NotNull @NonNls String name, PsiType returnType, PsiElement context) throws IncorrectOperationException {
  // Parses "def <T>[returnType] name(){}"; the dummy <T> type-parameter list keeps the
  // text parseable for arbitrary return types and is removed from the PSI afterwards.
  final StringBuilder builder = new StringBuilder();
  builder.append("def <T>");
  if (returnType != null) {
    builder.append(returnType.getCanonicalText());
  }
  builder.append(' ');
  if (GroovyNamesUtil.isIdentifier(name)) {
    builder.append(name);
  }
  else {
    // Non-identifier names must be quoted and escaped as a GString-safe literal.
    builder.append('"');
    builder.append(GrStringUtil.escapeSymbolsForGString(name, true, false));
    builder.append('"');
  }
  builder.append("(){}");
  GrMethod method = createMethodFromText(builder.toString(), context);
  PsiTypeParameterList typeParameterList = method.getTypeParameterList();
  assert typeParameterList != null;
  // Delete '<', 'T' and '>' — three leaf children — to leave an empty type-parameter list.
  typeParameterList.getFirstChild().delete();
  typeParameterList.getFirstChild().delete();
  typeParameterList.getFirstChild().delete();
  if (returnType != null) {
    // An explicit return type makes the "def" keyword redundant.
    method.getModifierList().setModifierProperty(GrModifier.DEF, false);
  }
  return method;
}
@NotNull
@Override
public GrMethod createConstructor() {
  // Default empty constructor in a dummy class Foo.
  return createConstructorFromText("Foo", "Foo(){}", null);
}
@NotNull
@Override
public GrClassInitializer createClassInitializer() throws IncorrectOperationException {
  // "class X {{}}" parses to a class with one empty instance initializer.
  final GrTypeDefinition typeDefinition = createTypeDefinition("class X {{}}");
  return typeDefinition.getInitializers()[0];
}
@NotNull
@Override
public GrParameter createParameter(@NotNull @NonNls String name, @Nullable PsiType type) throws IncorrectOperationException {
  // Untyped/typed parameter without initializer or context.
  return createParameter(name, type == null ? null : type.getCanonicalText(), null, null);
}
@NotNull
@Override
public GrParameter createParameter(@NotNull @NonNls String name, @Nullable PsiType type, PsiElement context) throws IncorrectOperationException {
  // Context is only forwarded when it is a Groovy element; other contexts are dropped.
  return createParameter(name, type == null ? null : type.getCanonicalText(), null, context instanceof GroovyPsiElement ? (GroovyPsiElement)context : null);
}
@NotNull
@Override
public PsiParameterList createParameterList(@NotNull @NonNls String[] names, @NotNull PsiType[] types) throws IncorrectOperationException {
  // Renders "def foo(T1 n1,T2 n2,...){}" and returns the parsed method's parameter list;
  // a null entry in `types` yields an untyped parameter.
  final StringBuilder text = new StringBuilder("def foo(");
  for (int i = 0; i < names.length; i++) {
    if (i > 0) text.append(',');
    final PsiType type = types[i];
    if (type != null) {
      text.append(type.getCanonicalText()).append(' ');
    }
    text.append(names[i]);
  }
  text.append("){}");
  return createMethodFromText(text).getParameterList();
}
@NotNull
@Override
public PsiClass createAnnotationType(@NotNull @NonNls String name) throws IncorrectOperationException {
  // Annotation types are parsed from the standard "@interface Name{}" form.
  final String text = "@interface " + name + "{}";
  return createTypeDefinition(text);
}
@NotNull
@Override
public PsiMethod createConstructor(@NotNull @NonNls String name) {
  // Empty-bodied constructor "<name>(){}" parsed without a context element.
  return createConstructorFromText(name, name + "(){}", null);
}
// Consistency fix: the sibling single-argument overload declares @NotNull on the same
// delegate call, so this overload's return is annotated identically.
@NotNull
@Override
public PsiMethod createConstructor(@NotNull @NonNls String name, PsiElement context) {
  // Empty-bodied constructor "<name>(){}" parsed in the given context (may be null).
  return createConstructorFromText(name, name + "(){}", context);
}
@NotNull
@Override
public PsiClassType createType(@NotNull PsiClass resolve, @NotNull PsiSubstitutor substitutor) {
  // Groovy reuses the Java element factory for class-type construction.
  return JavaPsiFacade.getInstance(myProject).getElementFactory()
    .createType(resolve, substitutor);
}
@NotNull
@Override
public PsiClassType createType(@NotNull PsiClass resolve, @NotNull PsiSubstitutor substitutor, @NotNull LanguageLevel languageLevel) {
  // Delegates to the Java element factory, propagating the requested language level.
  return JavaPsiFacade.getInstance(myProject).getElementFactory()
    .createType(resolve, substitutor, languageLevel);
}
@SuppressWarnings("deprecation")
@NotNull
@Override
public PsiClassType createType(@NotNull PsiClass resolve,
                               @NotNull PsiSubstitutor substitutor,
                               @NotNull LanguageLevel languageLevel,
                               @NotNull PsiAnnotation[] annotations) {
  // Deprecated factory overload, kept to satisfy the interface contract.
  return JavaPsiFacade.getInstance(myProject).getElementFactory()
    .createType(resolve, substitutor, languageLevel, annotations);
}
@NotNull
@Override
public PsiClassType createType(@NotNull PsiClass aClass, PsiType parameters) {
  // Single-type-argument variant; delegates to the Java element factory.
  return JavaPsiFacade.getInstance(myProject).getElementFactory()
    .createType(aClass, parameters);
}
@NotNull
@Override
public PsiClassType createType(@NotNull PsiClass aClass, PsiType... parameters) {
  // Varargs variant; delegates to the Java element factory.
  return JavaPsiFacade.getInstance(myProject).getElementFactory()
    .createType(aClass, parameters);
}
@NotNull
@Override
public PsiSubstitutor createRawSubstitutor(@NotNull PsiTypeParameterListOwner owner) {
  // Raw substitution has no Groovy-specific behavior; reuse the Java factory.
  return JavaPsiFacade.getInstance(myProject).getElementFactory()
    .createRawSubstitutor(owner);
}
@NotNull
@Override
public PsiSubstitutor createSubstitutor(@NotNull Map<PsiTypeParameter, PsiType> map) {
  // Substitutors are shared with Java; delegate to the Java element factory.
  return JavaPsiFacade.getInstance(myProject).getElementFactory()
    .createSubstitutor(map);
}
@Override
public PsiPrimitiveType createPrimitiveType(@NotNull String text) {
  // Primitive types are identical in Groovy and Java; delegate to the Java factory.
  return JavaPsiFacade.getInstance(myProject).getElementFactory()
    .createPrimitiveType(text);
}
@NotNull
@Override
public PsiClassType createTypeByFQClassName(@NotNull @NonNls String qName) {
  // Resolution scope defaults are handled by the Java element factory.
  return JavaPsiFacade.getInstance(myProject).getElementFactory()
    .createTypeByFQClassName(qName);
}
@NotNull
@Override
public PsiClassType createTypeByFQClassName(@NotNull @NonNls String qName, @NotNull GlobalSearchScope resolveScope) {
  // Scoped variant; delegates to the Java element factory.
  return JavaPsiFacade.getInstance(myProject).getElementFactory()
    .createTypeByFQClassName(qName, resolveScope);
}
@Override
public boolean isValidClassName(@NotNull String name) {
  // A class name is valid iff it is a plain Groovy identifier.
  return GroovyNamesUtil.isIdentifier(name);
}
@Override
public boolean isValidMethodName(@NotNull String name) {
  // Any name is accepted here; presumably because Groovy permits arbitrary
  // (e.g. string-literal) method names -- TODO confirm intent.
  return true;
}
@Override
public boolean isValidParameterName(@NotNull String name) {
  // Parameter names must be plain Groovy identifiers.
  return GroovyNamesUtil.isIdentifier(name);
}
@Override
public boolean isValidFieldName(@NotNull String name) {
  // Field names must be plain Groovy identifiers.
  return GroovyNamesUtil.isIdentifier(name);
}
@Override
public boolean isValidLocalVariableName(@NotNull String name) {
  // Local variable names must be plain Groovy identifiers.
  return GroovyNamesUtil.isIdentifier(name);
}
}
| |
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.apksig.internal.apk.v1;
import com.android.apksig.ApkVerifier.Issue;
import com.android.apksig.ApkVerifier.IssueWithParams;
import com.android.apksig.apk.ApkFormatException;
import com.android.apksig.apk.ApkUtils;
import com.android.apksig.internal.jar.ManifestParser;
import com.android.apksig.internal.util.AndroidSdkVersion;
import com.android.apksig.internal.util.InclusiveIntRange;
import com.android.apksig.internal.util.MessageDigestSink;
import com.android.apksig.internal.zip.CentralDirectoryRecord;
import com.android.apksig.internal.zip.LocalFileRecord;
import com.android.apksig.util.DataSource;
import com.android.apksig.zip.ZipFormatException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SignatureException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Base64.Decoder;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.jar.Attributes;
import sun.security.pkcs.PKCS7;
import sun.security.pkcs.SignerInfo;
/**
* APK verifier which uses JAR signing (aka v1 signing scheme).
*
* @see <a href="https://docs.oracle.com/javase/8/docs/technotes/guides/jar/jar.html#Signed_JAR_File">Signed JAR File</a>
*/
public abstract class V1SchemeVerifier {
// Name of the JAR manifest ZIP entry, shared with the signer implementation.
private static final String MANIFEST_ENTRY_NAME = V1SchemeSigner.MANIFEST_ENTRY_NAME;
// Static-utility class: private constructor prevents instantiation.
private V1SchemeVerifier() {}
/**
* Verifies the provided APK's JAR signatures and returns the result of verification. APK is
* considered verified only if {@link Result#verified} is {@code true}. If verification fails,
* the result will contain errors -- see {@link Result#getErrors()}.
*
* @throws ApkFormatException if the APK is malformed
* @throws IOException if an I/O error occurs when reading the APK
* @throws NoSuchAlgorithmException if the APK's JAR signatures cannot be verified because a
* required cryptographic algorithm implementation is missing
*/
public static Result verify(
        DataSource apk,
        ApkUtils.ZipSections apkSections,
        Map<Integer, String> supportedApkSigSchemeNames,
        Set<Integer> foundApkSigSchemeIds,
        int minSdkVersion,
        int maxSdkVersion) throws IOException, ApkFormatException, NoSuchAlgorithmException {
    if (minSdkVersion > maxSdkVersion) {
        throw new IllegalArgumentException(
                "minSdkVersion (" + minSdkVersion + ") > maxSdkVersion (" + maxSdkVersion
                        + ")");
    }
    Result result = new Result();
    // Parse the ZIP Central Directory; duplicate entry names are reported as errors and
    // abort verification before any signature work is attempted.
    List<CentralDirectoryRecord> cdRecords = parseZipCentralDirectory(apk, apkSections);
    Set<String> cdEntryNames = checkForDuplicateEntries(cdRecords, result);
    if (!result.containsErrors()) {
        // Entry names are unique -- proceed with JAR (v1) signature verification.
        Signers.verify(
                apk,
                apkSections.getZipCentralDirectoryOffset(),
                cdRecords,
                cdEntryNames,
                supportedApkSigSchemeNames,
                foundApkSigSchemeIds,
                minSdkVersion,
                maxSdkVersion,
                result);
    }
    return result;
}
/**
* Returns the set of entry names and reports any duplicate entry names in the {@code result}
* as errors.
*/
private static Set<String> checkForDuplicateEntries(
        List<CentralDirectoryRecord> cdRecords, Result result) {
    Set<String> seenNames = new HashSet<>(cdRecords.size());
    // Lazily allocated: most APKs have no duplicates at all.
    Set<String> reportedDuplicates = null;
    for (CentralDirectoryRecord record : cdRecords) {
        String name = record.getName();
        if (seenNames.add(name)) {
            continue;
        }
        // Duplicate name: report it as an error, but only once per distinct name.
        if (reportedDuplicates == null) {
            reportedDuplicates = new HashSet<>();
        }
        if (reportedDuplicates.add(name)) {
            result.addError(Issue.JAR_SIG_DUPLICATE_ZIP_ENTRY, name);
        }
    }
    return seenNames;
}
/**
* All JAR signers of an APK.
*/
private static class Signers {
    /**
     * Verifies JAR signatures of the provided APK and populates the provided result container
     * with errors, warnings, and information about signers. The APK is considered verified if
     * the {@link Result#verified} is {@code true}.
     */
    private static void verify(
            DataSource apk,
            long cdStartOffset,
            List<CentralDirectoryRecord> cdRecords,
            Set<String> cdEntryNames,
            Map<Integer, String> supportedApkSigSchemeNames,
            Set<Integer> foundApkSigSchemeIds,
            int minSdkVersion,
            int maxSdkVersion,
            Result result) throws ApkFormatException, IOException, NoSuchAlgorithmException {
        // Find JAR manifest and signature block files.
        CentralDirectoryRecord manifestEntry = null;
        Map<String, CentralDirectoryRecord> sigFileEntries = new HashMap<>(1);
        List<CentralDirectoryRecord> sigBlockEntries = new ArrayList<>(1);
        for (CentralDirectoryRecord cdRecord : cdRecords) {
            String entryName = cdRecord.getName();
            // Only META-INF/ entries carry JAR signing metadata.
            if (!entryName.startsWith("META-INF/")) {
                continue;
            }
            if ((manifestEntry == null) && (MANIFEST_ENTRY_NAME.equals(entryName))) {
                manifestEntry = cdRecord;
                continue;
            }
            if (entryName.endsWith(".SF")) {
                sigFileEntries.put(entryName, cdRecord);
                continue;
            }
            if ((entryName.endsWith(".RSA"))
                    || (entryName.endsWith(".DSA"))
                    || (entryName.endsWith(".EC"))) {
                sigBlockEntries.add(cdRecord);
                continue;
            }
        }
        if (manifestEntry == null) {
            result.addError(Issue.JAR_SIG_NO_MANIFEST);
            return;
        }
        // Parse the JAR manifest and check that all JAR entries it references exist in the APK.
        byte[] manifestBytes;
        try {
            manifestBytes =
                    LocalFileRecord.getUncompressedData(apk, manifestEntry, cdStartOffset);
        } catch (ZipFormatException e) {
            throw new ApkFormatException("Malformed ZIP entry: " + manifestEntry.getName(), e);
        }
        Map<String, ManifestParser.Section> entryNameToManifestSection = null;
        ManifestParser manifest = new ManifestParser(manifestBytes);
        ManifestParser.Section manifestMainSection = manifest.readSection();
        List<ManifestParser.Section> manifestIndividualSections = manifest.readAllSections();
        entryNameToManifestSection = new HashMap<>(manifestIndividualSections.size());
        int manifestSectionNumber = 0;
        for (ManifestParser.Section manifestSection : manifestIndividualSections) {
            manifestSectionNumber++;
            String entryName = manifestSection.getName();
            if (entryName == null) {
                result.addError(Issue.JAR_SIG_UNNNAMED_MANIFEST_SECTION, manifestSectionNumber);
                continue;
            }
            if (entryNameToManifestSection.put(entryName, manifestSection) != null) {
                result.addError(Issue.JAR_SIG_DUPLICATE_MANIFEST_SECTION, entryName);
                continue;
            }
            if (!cdEntryNames.contains(entryName)) {
                result.addError(
                        Issue.JAR_SIG_MISSING_ZIP_ENTRY_REFERENCED_IN_MANIFEST, entryName);
                continue;
            }
        }
        if (result.containsErrors()) {
            return;
        }
        // STATE OF AFFAIRS:
        // * All JAR entries listed in JAR manifest are present in the APK.
        // Identify signers: each signature block file (.RSA/.DSA/.EC) pairs with a .SF file
        // sharing the same base name.
        List<Signer> signers = new ArrayList<>(sigBlockEntries.size());
        for (CentralDirectoryRecord sigBlockEntry : sigBlockEntries) {
            String sigBlockEntryName = sigBlockEntry.getName();
            int extensionDelimiterIndex = sigBlockEntryName.lastIndexOf('.');
            if (extensionDelimiterIndex == -1) {
                throw new RuntimeException(
                        "Signature block file name does not contain extension: "
                                + sigBlockEntryName);
            }
            String sigFileEntryName =
                    sigBlockEntryName.substring(0, extensionDelimiterIndex) + ".SF";
            CentralDirectoryRecord sigFileEntry = sigFileEntries.get(sigFileEntryName);
            if (sigFileEntry == null) {
                // Missing .SF is a warning, not an error: this signer is simply skipped.
                result.addWarning(
                        Issue.JAR_SIG_MISSING_FILE, sigBlockEntryName, sigFileEntryName);
                continue;
            }
            String signerName = sigBlockEntryName.substring("META-INF/".length());
            Result.SignerInfo signerInfo =
                    new Result.SignerInfo(
                            signerName, sigBlockEntryName, sigFileEntry.getName());
            Signer signer = new Signer(signerName, sigBlockEntry, sigFileEntry, signerInfo);
            signers.add(signer);
        }
        if (signers.isEmpty()) {
            result.addError(Issue.JAR_SIG_NO_SIGNATURES);
            return;
        }
        // Verify each signer's signature block file .(RSA|DSA|EC) against the corresponding
        // signature file .SF. Any error encountered for any signer terminates verification, to
        // mimic Android's behavior.
        for (Signer signer : signers) {
            signer.verifySigBlockAgainstSigFile(
                    apk, cdStartOffset, minSdkVersion, maxSdkVersion);
            if (signer.getResult().containsErrors()) {
                result.signers.add(signer.getResult());
            }
        }
        if (result.containsErrors()) {
            return;
        }
        // STATE OF AFFAIRS:
        // * All JAR entries listed in JAR manifest are present in the APK.
        // * All signature files (.SF) verify against corresponding block files (.RSA|.DSA|.EC).
        // Verify each signer's signature file (.SF) against the JAR manifest.
        List<Signer> remainingSigners = new ArrayList<>(signers.size());
        for (Signer signer : signers) {
            signer.verifySigFileAgainstManifest(
                    manifestBytes,
                    manifestMainSection,
                    entryNameToManifestSection,
                    supportedApkSigSchemeNames,
                    foundApkSigSchemeIds,
                    minSdkVersion,
                    maxSdkVersion);
            if (signer.isIgnored()) {
                // Ignored signers are tracked separately and do not fail verification.
                result.ignoredSigners.add(signer.getResult());
            } else {
                if (signer.getResult().containsErrors()) {
                    result.signers.add(signer.getResult());
                } else {
                    remainingSigners.add(signer);
                }
            }
        }
        if (result.containsErrors()) {
            return;
        }
        signers = remainingSigners;
        if (signers.isEmpty()) {
            result.addError(Issue.JAR_SIG_NO_SIGNATURES);
            return;
        }
        // STATE OF AFFAIRS:
        // * All signature files (.SF) verify against corresponding block files (.RSA|.DSA|.EC).
        // * Contents of all JAR manifest sections listed in .SF files verify against .SF files.
        // * All JAR entries listed in JAR manifest are present in the APK.
        // Verify data of JAR entries against JAR manifest and .SF files. On Android, an APK's
        // JAR entry is considered signed by signers associated with an .SF file iff the entry
        // is mentioned in the .SF file and the entry's digest(s) mentioned in the JAR manifest
        // match the entry's uncompressed data. Android requires that all such JAR entries are
        // signed by the same set of signers. This set may be smaller than the set of signers
        // we've identified so far.
        Set<Signer> apkSigners =
                verifyJarEntriesAgainstManifestAndSigners(
                        apk,
                        cdStartOffset,
                        cdRecords,
                        entryNameToManifestSection,
                        signers,
                        minSdkVersion,
                        maxSdkVersion,
                        result);
        if (result.containsErrors()) {
            return;
        }
        // STATE OF AFFAIRS:
        // * All signature files (.SF) verify against corresponding block files (.RSA|.DSA|.EC).
        // * Contents of all JAR manifest sections listed in .SF files verify against .SF files.
        // * All JAR entries listed in JAR manifest are present in the APK.
        // * All JAR entries present in the APK and supposed to be covered by JAR signature
        //   (i.e., reside outside of META-INF/) are covered by signatures from the same set
        //   of signers.
        // Report any JAR entries which aren't covered by signature.
        Set<String> signatureEntryNames = new HashSet<>(1 + result.signers.size() * 2);
        signatureEntryNames.add(manifestEntry.getName());
        for (Signer signer : apkSigners) {
            signatureEntryNames.add(signer.getSignatureBlockEntryName());
            signatureEntryNames.add(signer.getSignatureFileEntryName());
        }
        for (CentralDirectoryRecord cdRecord : cdRecords) {
            String entryName = cdRecord.getName();
            if ((entryName.startsWith("META-INF/"))
                    && (!entryName.endsWith("/"))
                    && (!signatureEntryNames.contains(entryName))) {
                result.addWarning(Issue.JAR_SIG_UNPROTECTED_ZIP_ENTRY, entryName);
            }
        }
        // Reflect the sets of used signers and ignored signers in the result.
        for (Signer signer : signers) {
            if (apkSigners.contains(signer)) {
                result.signers.add(signer.getResult());
            } else {
                result.ignoredSigners.add(signer.getResult());
            }
        }
        result.verified = true;
    }
}
private static class Signer {
// Signer name: the signature block file name without its "META-INF/" prefix.
private final String mName;
// Per-signer result container into which errors/warnings/certificates are reported.
private final Result.SignerInfo mResult;
// ZIP Central Directory records of this signer's .SF and .(RSA|DSA|EC) entries.
private final CentralDirectoryRecord mSignatureFileEntry;
private final CentralDirectoryRecord mSignatureBlockEntry;
// Set when this signer should be ignored rather than treated as a failure.
private boolean mIgnored;
// Uncompressed contents of the .SF file; populated by verifySigBlockAgainstSigFile.
private byte[] mSigFileBytes;
// Entry names listed in the .SF file; presumably populated during .SF-vs-manifest
// verification (not visible in this chunk) -- TODO confirm against full class.
private Set<String> mSigFileEntryNames;
private Signer(
        String name,
        CentralDirectoryRecord sigBlockEntry,
        CentralDirectoryRecord sigFileEntry,
        Result.SignerInfo result) {
    // Keep handles on both halves of the signature (block file + .SF file) and on the
    // per-signer result container into which findings will be reported.
    mName = name;
    mSignatureFileEntry = sigFileEntry;
    mSignatureBlockEntry = sigBlockEntry;
    mResult = result;
}
// Signer name (signature block file name without the "META-INF/" prefix).
public String getName() {
    return mName;
}
// ZIP entry name of this signer's signature file (.SF).
public String getSignatureFileEntryName() {
    return mSignatureFileEntry.getName();
}
// ZIP entry name of this signer's signature block (.RSA/.DSA/.EC).
public String getSignatureBlockEntryName() {
    return mSignatureBlockEntry.getName();
}
// Marks this signer as ignored: tracked separately and not treated as a failure.
void setIgnored() {
    mIgnored = true;
}
public boolean isIgnored() {
    return mIgnored;
}
// Entry names listed in this signer's .SF file; null until the .SF file is processed.
public Set<String> getSigFileEntryNames() {
    return mSigFileEntryNames;
}
// Per-signer verification result container.
public Result.SignerInfo getResult() {
    return mResult;
}
@SuppressWarnings("restriction")
public void verifySigBlockAgainstSigFile(
        DataSource apk, long cdStartOffset, int minSdkVersion, int maxSdkVersion)
        throws IOException, ApkFormatException, NoSuchAlgorithmException {
    // Read the uncompressed signature block (.RSA/.DSA/.EC) entry.
    byte[] sigBlockBytes;
    try {
        sigBlockBytes =
                LocalFileRecord.getUncompressedData(
                        apk, mSignatureBlockEntry, cdStartOffset);
    } catch (ZipFormatException e) {
        throw new ApkFormatException(
                "Malformed ZIP entry: " + mSignatureBlockEntry.getName(), e);
    }
    // Read the uncompressed .SF entry; retained for later .SF-vs-manifest verification.
    try {
        mSigFileBytes =
                LocalFileRecord.getUncompressedData(
                        apk, mSignatureFileEntry, cdStartOffset);
    } catch (ZipFormatException e) {
        throw new ApkFormatException(
                "Malformed ZIP entry: " + mSignatureFileEntry.getName(), e);
    }
    // Parse the signature block as a PKCS #7 SignedData structure.
    PKCS7 sigBlock;
    try {
        sigBlock = new PKCS7(sigBlockBytes);
    } catch (IOException e) {
        // The PKCS7 parser wraps certificate parsing failures in IOException;
        // distinguish those from other parse errors for reporting purposes.
        if (e.getCause() instanceof CertificateException) {
            mResult.addError(
                    Issue.JAR_SIG_MALFORMED_CERTIFICATE, mSignatureBlockEntry.getName(), e);
        } else {
            mResult.addError(
                    Issue.JAR_SIG_PARSE_EXCEPTION, mSignatureBlockEntry.getName(), e);
        }
        return;
    }
    SignerInfo[] unverifiedSignerInfos = sigBlock.getSignerInfos();
    if ((unverifiedSignerInfos == null) || (unverifiedSignerInfos.length == 0)) {
        mResult.addError(Issue.JAR_SIG_NO_SIGNERS, mSignatureBlockEntry.getName());
        return;
    }
    // Try each SignerInfo in turn until one verifies over the .SF file's bytes.
    SignerInfo verifiedSignerInfo = null;
    // NOTE(review): this guard is redundant -- the null/empty case already returned above.
    if ((unverifiedSignerInfos != null) && (unverifiedSignerInfos.length > 0)) {
        for (int i = 0; i < unverifiedSignerInfos.length; i++) {
            SignerInfo unverifiedSignerInfo = unverifiedSignerInfos[i];
            String digestAlgorithmOid =
                    unverifiedSignerInfo.getDigestAlgorithmId().getOID().toString();
            String signatureAlgorithmOid =
                    unverifiedSignerInfo
                            .getDigestEncryptionAlgorithmId().getOID().toString();
            // The digest+signature algorithm pair must be supported on every API level in
            // the requested [minSdkVersion, maxSdkVersion] range.
            InclusiveIntRange desiredApiLevels =
                    InclusiveIntRange.fromTo(minSdkVersion, maxSdkVersion);
            List<InclusiveIntRange> apiLevelsWhereDigestAndSigAlgorithmSupported =
                    getSigAlgSupportedApiLevels(digestAlgorithmOid, signatureAlgorithmOid);
            List<InclusiveIntRange> apiLevelsWhereDigestAlgorithmNotSupported =
                    desiredApiLevels.getValuesNotIn(apiLevelsWhereDigestAndSigAlgorithmSupported);
            if (!apiLevelsWhereDigestAlgorithmNotSupported.isEmpty()) {
                mResult.addError(
                        Issue.JAR_SIG_UNSUPPORTED_SIG_ALG,
                        mSignatureBlockEntry.getName(),
                        digestAlgorithmOid,
                        signatureAlgorithmOid,
                        String.valueOf(apiLevelsWhereDigestAlgorithmNotSupported));
                return;
            }
            try {
                verifiedSignerInfo = sigBlock.verify(unverifiedSignerInfo, mSigFileBytes);
            } catch (SignatureException e) {
                mResult.addError(
                        Issue.JAR_SIG_VERIFY_EXCEPTION,
                        mSignatureBlockEntry.getName(),
                        mSignatureFileEntry.getName(),
                        e);
                return;
            }
            if (verifiedSignerInfo != null) {
                // Verified
                break;
            }
            // Did not verify
            if (minSdkVersion < AndroidSdkVersion.N) {
                // Prior to N, Android attempted to verify only the first SignerInfo.
                mResult.addError(
                        Issue.JAR_SIG_DID_NOT_VERIFY,
                        mSignatureBlockEntry.getName(),
                        mSignatureFileEntry.getName());
                return;
            }
        }
    }
    if (verifiedSignerInfo == null) {
        mResult.addError(Issue.JAR_SIG_NO_SIGNERS, mSignatureBlockEntry.getName());
        return;
    }
    // TODO: PKCS7 class doesn't guarantee that returned certificates' getEncoded returns
    // the original encoded form of certificates rather than the DER re-encoded form. We
    // need to replace the PKCS7 parser/verifier.
    List<X509Certificate> certChain;
    try {
        certChain = verifiedSignerInfo.getCertificateChain(sigBlock);
    } catch (IOException e) {
        throw new RuntimeException(
                "Failed to obtain cert chain from " + mSignatureBlockEntry.getName(), e);
    }
    if ((certChain == null) || (certChain.isEmpty())) {
        throw new RuntimeException("Verified SignerInfo does not have a certificate chain");
    }
    // Record the verified signer's certificate chain in the result.
    mResult.certChain.clear();
    mResult.certChain.addAll(certChain);
}
// ASN.1 OIDs of digest algorithms that may appear in PKCS #7 SignerInfos.
private static final String OID_DIGEST_MD5 = "1.2.840.113549.2.5";
private static final String OID_DIGEST_SHA1 = "1.3.14.3.2.26";
private static final String OID_DIGEST_SHA224 = "2.16.840.1.101.3.4.2.4";
private static final String OID_DIGEST_SHA256 = "2.16.840.1.101.3.4.2.1";
private static final String OID_DIGEST_SHA384 = "2.16.840.1.101.3.4.2.2";
private static final String OID_DIGEST_SHA512 = "2.16.840.1.101.3.4.2.3";
// ASN.1 OIDs of signature algorithms: RSA family.
private static final String OID_SIG_RSA = "1.2.840.113549.1.1.1";
private static final String OID_SIG_MD5_WITH_RSA = "1.2.840.113549.1.1.4";
private static final String OID_SIG_SHA1_WITH_RSA = "1.2.840.113549.1.1.5";
private static final String OID_SIG_SHA224_WITH_RSA = "1.2.840.113549.1.1.14";
private static final String OID_SIG_SHA256_WITH_RSA = "1.2.840.113549.1.1.11";
private static final String OID_SIG_SHA384_WITH_RSA = "1.2.840.113549.1.1.12";
private static final String OID_SIG_SHA512_WITH_RSA = "1.2.840.113549.1.1.13";
// DSA family.
private static final String OID_SIG_DSA = "1.2.840.10040.4.1";
private static final String OID_SIG_SHA1_WITH_DSA = "1.2.840.10040.4.3";
private static final String OID_SIG_SHA224_WITH_DSA = "2.16.840.1.101.3.4.3.1";
private static final String OID_SIG_SHA256_WITH_DSA = "2.16.840.1.101.3.4.3.2";
// ECDSA family.
private static final String OID_SIG_EC_PUBLIC_KEY = "1.2.840.10045.2.1";
private static final String OID_SIG_SHA1_WITH_ECDSA = "1.2.840.10045.4.1";
private static final String OID_SIG_SHA224_WITH_ECDSA = "1.2.840.10045.4.3.1";
private static final String OID_SIG_SHA256_WITH_ECDSA = "1.2.840.10045.4.3.2";
private static final String OID_SIG_SHA384_WITH_ECDSA = "1.2.840.10045.4.3.3";
private static final String OID_SIG_SHA512_WITH_ECDSA = "1.2.840.10045.4.3.4";
// Maps "<digest OID>with<signature OID>" to the Android API level ranges on which that
// combination is accepted; see addSupportedSigAlg / getSigAlgSupportedApiLevels.
private static final Map<String, List<InclusiveIntRange>> SUPPORTED_SIG_ALG_OIDS =
        new HashMap<>();
// Populates SUPPORTED_SIG_ALG_OIDS once at class-initialization time. This was previously
// an *instance* initializer, which re-ran all of these (idempotent) put() calls into the
// static map every time a Signer was constructed; a static initializer is the correct
// scope for static data.
static {
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_RSA,
            InclusiveIntRange.from(0));
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_MD5_WITH_RSA,
            InclusiveIntRange.fromTo(0, 8), InclusiveIntRange.from(21));
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_SHA1_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_SHA224_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_SHA256_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_SHA384_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_SHA512_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_RSA,
            InclusiveIntRange.from(0));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_MD5_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_SHA1_WITH_RSA,
            InclusiveIntRange.from(0));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_SHA224_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_SHA256_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_SHA384_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_SHA512_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_RSA,
            InclusiveIntRange.fromTo(0, 8), InclusiveIntRange.from(21));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_MD5_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_SHA1_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_SHA224_WITH_RSA,
            InclusiveIntRange.fromTo(0, 8), InclusiveIntRange.from(21));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_SHA256_WITH_RSA,
            InclusiveIntRange.fromTo(21, 21));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_SHA384_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_SHA512_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_RSA,
            InclusiveIntRange.fromTo(0, 8), InclusiveIntRange.from(18));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_MD5_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_SHA1_WITH_RSA,
            InclusiveIntRange.fromTo(21, 21));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_SHA224_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_SHA256_WITH_RSA,
            InclusiveIntRange.fromTo(0, 8), InclusiveIntRange.from(18));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_SHA384_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_SHA512_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_RSA,
            InclusiveIntRange.from(18));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_MD5_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_SHA1_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_SHA224_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_SHA256_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_SHA384_WITH_RSA,
            InclusiveIntRange.from(21));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_SHA512_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_RSA,
            InclusiveIntRange.from(18));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_MD5_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_SHA1_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_SHA224_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_SHA256_WITH_RSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_SHA384_WITH_RSA,
            InclusiveIntRange.fromTo(21, 21));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_SHA512_WITH_RSA,
            InclusiveIntRange.from(21));
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_SHA1_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_SHA224_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_SHA256_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_DSA,
            InclusiveIntRange.from(0));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_SHA1_WITH_DSA,
            InclusiveIntRange.from(9));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_SHA224_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_SHA256_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_DSA,
            InclusiveIntRange.from(22));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_SHA1_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_SHA224_WITH_DSA,
            InclusiveIntRange.from(21));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_SHA256_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_DSA,
            InclusiveIntRange.from(22));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_SHA1_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_SHA224_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_SHA256_WITH_DSA,
            InclusiveIntRange.from(21));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_SHA1_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_SHA224_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_SHA256_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_SHA1_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_SHA224_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_SHA256_WITH_DSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_EC_PUBLIC_KEY,
            InclusiveIntRange.from(18));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_EC_PUBLIC_KEY,
            InclusiveIntRange.from(21));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_EC_PUBLIC_KEY,
            InclusiveIntRange.from(18));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_EC_PUBLIC_KEY,
            InclusiveIntRange.from(18));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_EC_PUBLIC_KEY,
            InclusiveIntRange.from(18));
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_SHA1_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_SHA224_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_SHA256_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_SHA384_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_MD5, OID_SIG_SHA512_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_SHA1_WITH_ECDSA,
            InclusiveIntRange.from(18));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_SHA224_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_SHA256_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_SHA384_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA1, OID_SIG_SHA512_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_SHA1_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_SHA224_WITH_ECDSA,
            InclusiveIntRange.from(21));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_SHA256_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_SHA384_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA224, OID_SIG_SHA512_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_SHA1_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_SHA224_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_SHA256_WITH_ECDSA,
            InclusiveIntRange.from(21));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_SHA384_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA256, OID_SIG_SHA512_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_SHA1_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_SHA224_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_SHA256_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_SHA384_WITH_ECDSA,
            InclusiveIntRange.from(21));
    addSupportedSigAlg(
            OID_DIGEST_SHA384, OID_SIG_SHA512_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_SHA1_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_SHA224_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_SHA256_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_SHA384_WITH_ECDSA,
            InclusiveIntRange.fromTo(21, 23));
    addSupportedSigAlg(
            OID_DIGEST_SHA512, OID_SIG_SHA512_WITH_ECDSA,
            InclusiveIntRange.from(21));
}
private static void addSupportedSigAlg(
        String digestAlgorithmOid,
        String signatureAlgorithmOid,
        InclusiveIntRange... supportedApiLevels) {
    // Key format must match the lookup performed in getSigAlgSupportedApiLevels.
    String key = digestAlgorithmOid + "with" + signatureAlgorithmOid;
    SUPPORTED_SIG_ALG_OIDS.put(key, Arrays.asList(supportedApiLevels));
}
/**
 * Returns the Android API level ranges on which the provided digest + signature algorithm
 * pair is supported, or an empty list if the pair is unknown.
 *
 * <p>Made {@code static}: the method reads only static state, matching
 * {@code addSupportedSigAlg}; callers inside this class are unaffected.
 */
private static List<InclusiveIntRange> getSigAlgSupportedApiLevels(
        String digestAlgorithmOid,
        String signatureAlgorithmOid) {
    List<InclusiveIntRange> result =
            SUPPORTED_SIG_ALG_OIDS.get(digestAlgorithmOid + "with" + signatureAlgorithmOid);
    return (result != null) ? result : Collections.emptyList();
}
    /**
     * Verifies this signer's .SF file against the JAR manifest, recording any issues on
     * {@code mResult}. On success, the set of entry names covered by the .SF file is stored
     * in {@code mSigFileEntryNames}.
     *
     * @param manifestBytes raw bytes of META-INF/MANIFEST.MF
     * @param manifestMainSection parsed main section of the manifest
     * @param entryNameToManifestSection per-entry manifest sections, keyed by entry name
     * @param supportedApkSigSchemeNames APK signature scheme ID -> name, for error messages
     * @param foundApkSigSchemeIds IDs of APK signature schemes actually found in the APK
     * @param minSdkVersion lowest Android API level on which the APK must verify
     * @param maxSdkVersion highest Android API level on which the APK must verify
     * @throws NoSuchAlgorithmException if a required digest algorithm is not available
     */
    public void verifySigFileAgainstManifest(
            byte[] manifestBytes,
            ManifestParser.Section manifestMainSection,
            Map<String, ManifestParser.Section> entryNameToManifestSection,
            Map<Integer, String> supportedApkSigSchemeNames,
            Set<Integer> foundApkSigSchemeIds,
            int minSdkVersion,
            int maxSdkVersion) throws NoSuchAlgorithmException {
        // Inspect the main section of the .SF file.
        ManifestParser sf = new ManifestParser(mSigFileBytes);
        ManifestParser.Section sfMainSection = sf.readSection();
        if (sfMainSection.getAttributeValue(Attributes.Name.SIGNATURE_VERSION) == null) {
            mResult.addError(
                    Issue.JAR_SIG_MISSING_VERSION_ATTR_IN_SIG_FILE,
                    mSignatureFileEntry.getName());
            setIgnored();
            return;
        }
        if (maxSdkVersion >= AndroidSdkVersion.N) {
            // Android N and newer rejects APKs whose .SF file says they were supposed to be
            // signed with APK Signature Scheme v2 (or newer) and yet no such signature was
            // found.
            checkForStrippedApkSignatures(
                    sfMainSection, supportedApkSigSchemeNames, foundApkSigSchemeIds);
            if (mResult.containsErrors()) {
                return;
            }
        }
        // Detect signtool-produced .SF files from the Created-By attribute; signtool uses
        // different digest attribute naming (see verifyManifestDigest).
        boolean createdBySigntool = false;
        String createdBy = sfMainSection.getAttributeValue("Created-By");
        if (createdBy != null) {
            createdBySigntool = createdBy.indexOf("signtool") != -1;
        }
        boolean manifestDigestVerified =
                verifyManifestDigest(
                        sfMainSection,
                        createdBySigntool,
                        manifestBytes,
                        minSdkVersion,
                        maxSdkVersion);
        if (!createdBySigntool) {
            verifyManifestMainSectionDigest(
                    sfMainSection,
                    manifestMainSection,
                    manifestBytes,
                    minSdkVersion,
                    maxSdkVersion);
        }
        if (mResult.containsErrors()) {
            return;
        }
        // Inspect per-entry sections of .SF file. Technically, if the digest of JAR manifest
        // verifies, per-entry sections should be ignored. However, most Android platform
        // implementations require that such sections exist.
        List<ManifestParser.Section> sfSections = sf.readAllSections();
        Set<String> sfEntryNames = new HashSet<>(sfSections.size());
        int sfSectionNumber = 0;
        for (ManifestParser.Section sfSection : sfSections) {
            sfSectionNumber++;
            String entryName = sfSection.getName();
            if (entryName == null) {
                mResult.addError(
                        Issue.JAR_SIG_UNNNAMED_SIG_FILE_SECTION,
                        mSignatureFileEntry.getName(),
                        sfSectionNumber);
                setIgnored();
                return;
            }
            if (!sfEntryNames.add(entryName)) {
                mResult.addError(
                        Issue.JAR_SIG_DUPLICATE_SIG_FILE_SECTION,
                        mSignatureFileEntry.getName(),
                        entryName);
                setIgnored();
                return;
            }
            if (manifestDigestVerified) {
                // No need to verify this entry's corresponding JAR manifest entry because the
                // JAR manifest verifies in full.
                continue;
            }
            // Whole-file digest of JAR manifest hasn't been verified. Thus, we need to verify
            // the digest of the JAR manifest section corresponding to this .SF section.
            ManifestParser.Section manifestSection = entryNameToManifestSection.get(entryName);
            if (manifestSection == null) {
                mResult.addError(
                        Issue.JAR_SIG_NO_ZIP_ENTRY_DIGEST_IN_SIG_FILE,
                        entryName,
                        mSignatureFileEntry.getName());
                setIgnored();
                continue;
            }
            verifyManifestIndividualSectionDigest(
                    sfSection,
                    createdBySigntool,
                    manifestSection,
                    manifestBytes,
                    minSdkVersion,
                    maxSdkVersion);
        }
        mSigFileEntryNames = sfEntryNames;
    }
    /**
     * Verifies the whole-file digest of the manifest declared in the main section of the .SF
     * file.
     *
     * @param sfMainSection main section of the .SF file
     * @param createdBySigntool whether the .SF file was produced by signtool, which uses the
     *        "-Digest" attribute suffix instead of "-Digest-Manifest"
     * @param manifestBytes raw bytes of META-INF/MANIFEST.MF
     * @param minSdkVersion lowest Android API level on which the APK must verify
     * @param maxSdkVersion highest Android API level on which the APK must verify
     * @return {@code true} if a manifest digest was declared and every declared digest
     *         verified; {@code false} otherwise (a warning is recorded on {@code mResult})
     * @throws NoSuchAlgorithmException if a required digest algorithm is not available
     */
    private boolean verifyManifestDigest(
            ManifestParser.Section sfMainSection,
            boolean createdBySigntool,
            byte[] manifestBytes,
            int minSdkVersion,
            int maxSdkVersion) throws NoSuchAlgorithmException {
        Collection<NamedDigest> expectedDigests =
                getDigestsToVerify(
                        sfMainSection,
                        ((createdBySigntool) ? "-Digest" : "-Digest-Manifest"),
                        minSdkVersion,
                        maxSdkVersion);
        boolean digestFound = !expectedDigests.isEmpty();
        if (!digestFound) {
            // Only a warning, not an error: per-entry digests may still verify the contents.
            mResult.addWarning(
                    Issue.JAR_SIG_NO_MANIFEST_DIGEST_IN_SIG_FILE,
                    mSignatureFileEntry.getName());
            return false;
        }
        boolean verified = true;
        for (NamedDigest expectedDigest : expectedDigests) {
            String jcaDigestAlgorithm = expectedDigest.jcaDigestAlgorithm;
            byte[] actual = digest(jcaDigestAlgorithm, manifestBytes);
            byte[] expected = expectedDigest.digest;
            if (!Arrays.equals(expected, actual)) {
                mResult.addWarning(
                        Issue.JAR_SIG_ZIP_ENTRY_DIGEST_DID_NOT_VERIFY,
                        V1SchemeSigner.MANIFEST_ENTRY_NAME,
                        jcaDigestAlgorithm,
                        mSignatureFileEntry.getName(),
                        Base64.getEncoder().encodeToString(actual),
                        Base64.getEncoder().encodeToString(expected));
                verified = false;
            }
        }
        return verified;
    }
/**
* Verifies the digest of the manifest's main section against the main section of the .SF
* file.
*/
private void verifyManifestMainSectionDigest(
ManifestParser.Section sfMainSection,
ManifestParser.Section manifestMainSection,
byte[] manifestBytes,
int minSdkVersion,
int maxSdkVersion) throws NoSuchAlgorithmException {
Collection<NamedDigest> expectedDigests =
getDigestsToVerify(
sfMainSection,
"-Digest-Manifest-Main-Attributes",
minSdkVersion,
maxSdkVersion);
if (expectedDigests.isEmpty()) {
return;
}
for (NamedDigest expectedDigest : expectedDigests) {
String jcaDigestAlgorithm = expectedDigest.jcaDigestAlgorithm;
byte[] actual =
digest(
jcaDigestAlgorithm,
manifestBytes,
manifestMainSection.getStartOffset(),
manifestMainSection.getSizeBytes());
byte[] expected = expectedDigest.digest;
if (!Arrays.equals(expected, actual)) {
mResult.addError(
Issue.JAR_SIG_MANIFEST_MAIN_SECTION_DIGEST_DID_NOT_VERIFY,
jcaDigestAlgorithm,
mSignatureFileEntry.getName(),
Base64.getEncoder().encodeToString(actual),
Base64.getEncoder().encodeToString(expected));
}
}
}
    /**
     * Verifies the digest of the manifest's individual (per-entry) section against the
     * corresponding individual section of the .SF file. A missing or mismatching digest is
     * reported as an error on {@code mResult}.
     */
    private void verifyManifestIndividualSectionDigest(
            ManifestParser.Section sfIndividualSection,
            boolean createdBySigntool,
            ManifestParser.Section manifestIndividualSection,
            byte[] manifestBytes,
            int minSdkVersion,
            int maxSdkVersion) throws NoSuchAlgorithmException {
        String entryName = sfIndividualSection.getName();
        Collection<NamedDigest> expectedDigests =
                getDigestsToVerify(
                        sfIndividualSection, "-Digest", minSdkVersion, maxSdkVersion);
        if (expectedDigests.isEmpty()) {
            mResult.addError(
                    Issue.JAR_SIG_NO_ZIP_ENTRY_DIGEST_IN_SIG_FILE,
                    entryName,
                    mSignatureFileEntry.getName());
            return;
        }
        int sectionStartIndex = manifestIndividualSection.getStartOffset();
        int sectionSizeBytes = manifestIndividualSection.getSizeBytes();
        if (createdBySigntool) {
            // NOTE(review): signtool apparently digests the section without its trailing blank
            // line -- one byte is dropped when the section ends in two consecutive '\n'.
            // TODO confirm against signtool output.
            int sectionEndIndex = sectionStartIndex + sectionSizeBytes;
            if ((manifestBytes[sectionEndIndex - 1] == '\n')
                    && (manifestBytes[sectionEndIndex - 2] == '\n')) {
                sectionSizeBytes--;
            }
        }
        for (NamedDigest expectedDigest : expectedDigests) {
            String jcaDigestAlgorithm = expectedDigest.jcaDigestAlgorithm;
            // Digest only this entry's section byte range within the manifest.
            byte[] actual =
                    digest(
                            jcaDigestAlgorithm,
                            manifestBytes,
                            sectionStartIndex,
                            sectionSizeBytes);
            byte[] expected = expectedDigest.digest;
            if (!Arrays.equals(expected, actual)) {
                mResult.addError(
                        Issue.JAR_SIG_MANIFEST_SECTION_DIGEST_DID_NOT_VERIFY,
                        entryName,
                        jcaDigestAlgorithm,
                        mSignatureFileEntry.getName(),
                        Base64.getEncoder().encodeToString(actual),
                        Base64.getEncoder().encodeToString(expected));
            }
        }
    }
private void checkForStrippedApkSignatures(
ManifestParser.Section sfMainSection,
Map<Integer, String> supportedApkSigSchemeNames,
Set<Integer> foundApkSigSchemeIds) {
String signedWithApkSchemes =
sfMainSection.getAttributeValue(
V1SchemeSigner.SF_ATTRIBUTE_NAME_ANDROID_APK_SIGNED_NAME_STR);
// This field contains a comma-separated list of APK signature scheme IDs which were
// used to sign this APK. Android rejects APKs where an ID is known to the platform but
// the APK didn't verify using that scheme.
if (signedWithApkSchemes == null) {
// APK signature (e.g., v2 scheme) stripping protections not enabled.
if (!foundApkSigSchemeIds.isEmpty()) {
// APK is signed with an APK signature scheme such as v2 scheme.
mResult.addWarning(
Issue.JAR_SIG_NO_APK_SIG_STRIP_PROTECTION,
mSignatureFileEntry.getName());
}
return;
}
if (supportedApkSigSchemeNames.isEmpty()) {
return;
}
Set<Integer> supportedApkSigSchemeIds = supportedApkSigSchemeNames.keySet();
Set<Integer> supportedExpectedApkSigSchemeIds = new HashSet<>(1);
StringTokenizer tokenizer = new StringTokenizer(signedWithApkSchemes, ",");
while (tokenizer.hasMoreTokens()) {
String idText = tokenizer.nextToken().trim();
if (idText.isEmpty()) {
continue;
}
int id;
try {
id = Integer.parseInt(idText);
} catch (Exception ignored) {
continue;
}
// This APK was supposed to be signed with the APK signature scheme having
// this ID.
if (supportedApkSigSchemeIds.contains(id)) {
supportedExpectedApkSigSchemeIds.add(id);
} else {
mResult.addWarning(
Issue.JAR_SIG_UNKNOWN_APK_SIG_SCHEME_ID,
mSignatureFileEntry.getName(),
id);
}
}
for (int id : supportedExpectedApkSigSchemeIds) {
if (!foundApkSigSchemeIds.contains(id)) {
String apkSigSchemeName = supportedApkSigSchemeNames.get(id);
mResult.addError(
Issue.JAR_SIG_MISSING_APK_SIG_REFERENCED,
mSignatureFileEntry.getName(),
id,
apkSigSchemeName);
}
}
}
}
    /**
     * Returns the digests declared in the provided manifest/.SF section which need to verify
     * for the APK to be accepted on Android API levels [minSdkVersion, maxSdkVersion].
     *
     * @param section manifest or .SF section from which to read digest attributes
     * @param digestAttrSuffix suffix of the digest attribute names (e.g., "-Digest")
     */
    private static Collection<NamedDigest> getDigestsToVerify(
            ManifestParser.Section section,
            String digestAttrSuffix,
            int minSdkVersion,
            int maxSdkVersion) {
        Decoder base64Decoder = Base64.getDecoder();
        List<NamedDigest> result = new ArrayList<>(1);
        if (minSdkVersion < AndroidSdkVersion.JELLY_BEAN_MR2) {
            // Prior to JB MR2, Android platform's logic for picking a digest algorithm to verify is
            // to rely on the ancient Digest-Algorithms attribute which contains
            // whitespace-separated list of digest algorithms (defaulting to SHA-1) to try. The
            // first digest attribute (with supported digest algorithm) found using the list is
            // used.
            String algs = section.getAttributeValue("Digest-Algorithms");
            if (algs == null) {
                algs = "SHA SHA1";
            }
            StringTokenizer tokens = new StringTokenizer(algs);
            while (tokens.hasMoreTokens()) {
                String alg = tokens.nextToken();
                String attrName = alg + digestAttrSuffix;
                String digestBase64 = section.getAttributeValue(attrName);
                if (digestBase64 == null) {
                    // Attribute not found
                    continue;
                }
                alg = getCanonicalJcaMessageDigestAlgorithm(alg);
                if ((alg == null)
                        || (getMinSdkVersionFromWhichSupportedInManifestOrSignatureFile(alg)
                                > minSdkVersion)) {
                    // Unsupported digest algorithm
                    continue;
                }
                // Supported digest algorithm
                result.add(new NamedDigest(alg, base64Decoder.decode(digestBase64)));
                break;
            }
            // No supported digests found -- this will fail to verify on pre-JB MR2 Androids.
            if (result.isEmpty()) {
                return result;
            }
        }
        if (maxSdkVersion >= AndroidSdkVersion.JELLY_BEAN_MR2) {
            // On JB MR2 and newer, Android platform picks the strongest algorithm out of:
            // SHA-512, SHA-384, SHA-256, SHA-1.
            for (String alg : JB_MR2_AND_NEWER_DIGEST_ALGS) {
                String attrName = getJarDigestAttributeName(alg, digestAttrSuffix);
                String digestBase64 = section.getAttributeValue(attrName);
                if (digestBase64 == null) {
                    // Attribute not found
                    continue;
                }
                byte[] digest = base64Decoder.decode(digestBase64);
                byte[] digestInResult = getDigest(result, alg);
                // Also record the digest when it contradicts one already collected for the
                // same algorithm by the pre-JB MR2 branch, so that verification surfaces the
                // mismatch instead of silently passing.
                if ((digestInResult == null) || (!Arrays.equals(digestInResult, digest))) {
                    result.add(new NamedDigest(alg, digest));
                }
                break;
            }
        }
        return result;
    }
    // Digest algorithms considered by JB MR2 and newer Android, strongest first
    // (see getDigestsToVerify).
    private static final String[] JB_MR2_AND_NEWER_DIGEST_ALGS = {
            "SHA-512",
            "SHA-384",
            "SHA-256",
            "SHA-1",
    };
private static String getCanonicalJcaMessageDigestAlgorithm(String algorithm) {
return UPPER_CASE_JCA_DIGEST_ALG_TO_CANONICAL.get(algorithm.toUpperCase(Locale.US));
}
public static int getMinSdkVersionFromWhichSupportedInManifestOrSignatureFile(
String jcaAlgorithmName) {
Integer result =
MIN_SDK_VESION_FROM_WHICH_DIGEST_SUPPORTED_IN_MANIFEST.get(
jcaAlgorithmName.toUpperCase(Locale.US));
return (result != null) ? result : Integer.MAX_VALUE;
}
private static String getJarDigestAttributeName(
String jcaDigestAlgorithm, String attrNameSuffix) {
if ("SHA-1".equalsIgnoreCase(jcaDigestAlgorithm)) {
return "SHA1" + attrNameSuffix;
} else {
return jcaDigestAlgorithm + attrNameSuffix;
}
}
    /** Maps upper-cased digest algorithm aliases to their canonical JCA names. */
    private static final Map<String, String> UPPER_CASE_JCA_DIGEST_ALG_TO_CANONICAL;
    static {
        UPPER_CASE_JCA_DIGEST_ALG_TO_CANONICAL = new HashMap<>(8);
        UPPER_CASE_JCA_DIGEST_ALG_TO_CANONICAL.put("MD5", "MD5");
        // "SHA" and "SHA1" are legacy aliases of SHA-1.
        UPPER_CASE_JCA_DIGEST_ALG_TO_CANONICAL.put("SHA", "SHA-1");
        UPPER_CASE_JCA_DIGEST_ALG_TO_CANONICAL.put("SHA1", "SHA-1");
        UPPER_CASE_JCA_DIGEST_ALG_TO_CANONICAL.put("SHA-1", "SHA-1");
        UPPER_CASE_JCA_DIGEST_ALG_TO_CANONICAL.put("SHA-256", "SHA-256");
        UPPER_CASE_JCA_DIGEST_ALG_TO_CANONICAL.put("SHA-384", "SHA-384");
        UPPER_CASE_JCA_DIGEST_ALG_TO_CANONICAL.put("SHA-512", "SHA-512");
    }
    /**
     * Minimum Android API level from which each digest algorithm is supported in the JAR
     * manifest / signature file. NOTE: the "VESION" typo in the name is preserved because
     * the constant is referenced elsewhere in this file.
     */
    private static final Map<String, Integer>
            MIN_SDK_VESION_FROM_WHICH_DIGEST_SUPPORTED_IN_MANIFEST;
    static {
        MIN_SDK_VESION_FROM_WHICH_DIGEST_SUPPORTED_IN_MANIFEST = new HashMap<>(5);
        MIN_SDK_VESION_FROM_WHICH_DIGEST_SUPPORTED_IN_MANIFEST.put("MD5", 0);
        MIN_SDK_VESION_FROM_WHICH_DIGEST_SUPPORTED_IN_MANIFEST.put("SHA-1", 0);
        MIN_SDK_VESION_FROM_WHICH_DIGEST_SUPPORTED_IN_MANIFEST.put("SHA-256", 0);
        MIN_SDK_VESION_FROM_WHICH_DIGEST_SUPPORTED_IN_MANIFEST.put(
                "SHA-384", AndroidSdkVersion.GINGERBREAD);
        MIN_SDK_VESION_FROM_WHICH_DIGEST_SUPPORTED_IN_MANIFEST.put(
                "SHA-512", AndroidSdkVersion.GINGERBREAD);
    }
private static byte[] getDigest(Collection<NamedDigest> digests, String jcaDigestAlgorithm) {
for (NamedDigest digest : digests) {
if (digest.jcaDigestAlgorithm.equalsIgnoreCase(jcaDigestAlgorithm)) {
return digest.digest;
}
}
return null;
}
public static List<CentralDirectoryRecord> parseZipCentralDirectory(
DataSource apk,
ApkUtils.ZipSections apkSections)
throws IOException, ApkFormatException {
// Read the ZIP Central Directory
long cdSizeBytes = apkSections.getZipCentralDirectorySizeBytes();
if (cdSizeBytes > Integer.MAX_VALUE) {
throw new ApkFormatException("ZIP Central Directory too large: " + cdSizeBytes);
}
long cdOffset = apkSections.getZipCentralDirectoryOffset();
ByteBuffer cd = apk.getByteBuffer(cdOffset, (int) cdSizeBytes);
cd.order(ByteOrder.LITTLE_ENDIAN);
// Parse the ZIP Central Directory
int expectedCdRecordCount = apkSections.getZipCentralDirectoryRecordCount();
List<CentralDirectoryRecord> cdRecords = new ArrayList<>(expectedCdRecordCount);
for (int i = 0; i < expectedCdRecordCount; i++) {
CentralDirectoryRecord cdRecord;
int offsetInsideCd = cd.position();
try {
cdRecord = CentralDirectoryRecord.getRecord(cd);
} catch (ZipFormatException e) {
throw new ApkFormatException(
"Malformed ZIP Central Directory record #" + (i + 1)
+ " at file offset " + (cdOffset + offsetInsideCd),
e);
}
String entryName = cdRecord.getName();
if (entryName.endsWith("/")) {
// Ignore directory entries
continue;
}
cdRecords.add(cdRecord);
}
// There may be more data in Central Directory, but we don't warn or throw because Android
// ignores unused CD data.
return cdRecords;
}
/**
* Returns {@code true} if the provided JAR entry must be mentioned in signed JAR archive's
* manifest for the APK to verify on Android.
*/
private static boolean isJarEntryDigestNeededInManifest(String entryName) {
// NOTE: This logic is different from what's required by the JAR signing scheme. This is
// because Android's APK verification logic differs from that spec. In particular, JAR
// signing spec includes into JAR manifest all files in subdirectories of META-INF and
// any files inside META-INF not related to signatures.
if (entryName.startsWith("META-INF/")) {
return false;
}
return !entryName.endsWith("/");
}
private static Set<Signer> verifyJarEntriesAgainstManifestAndSigners(
DataSource apk,
long cdOffsetInApk,
Collection<CentralDirectoryRecord> cdRecords,
Map<String, ManifestParser.Section> entryNameToManifestSection,
List<Signer> signers,
int minSdkVersion,
int maxSdkVersion,
Result result) throws ApkFormatException, IOException, NoSuchAlgorithmException {
// Iterate over APK contents as sequentially as possible to improve performance.
List<CentralDirectoryRecord> cdRecordsSortedByLocalFileHeaderOffset =
new ArrayList<>(cdRecords);
Collections.sort(
cdRecordsSortedByLocalFileHeaderOffset,
CentralDirectoryRecord.BY_LOCAL_FILE_HEADER_OFFSET_COMPARATOR);
Set<String> manifestEntryNamesMissingFromApk =
new HashSet<>(entryNameToManifestSection.keySet());
List<Signer> firstSignedEntrySigners = null;
String firstSignedEntryName = null;
for (CentralDirectoryRecord cdRecord : cdRecordsSortedByLocalFileHeaderOffset) {
String entryName = cdRecord.getName();
manifestEntryNamesMissingFromApk.remove(entryName);
if (!isJarEntryDigestNeededInManifest(entryName)) {
continue;
}
ManifestParser.Section manifestSection = entryNameToManifestSection.get(entryName);
if (manifestSection == null) {
result.addError(Issue.JAR_SIG_NO_ZIP_ENTRY_DIGEST_IN_MANIFEST, entryName);
continue;
}
List<Signer> entrySigners = new ArrayList<>(signers.size());
for (Signer signer : signers) {
if (signer.getSigFileEntryNames().contains(entryName)) {
entrySigners.add(signer);
}
}
if (entrySigners.isEmpty()) {
result.addError(Issue.JAR_SIG_ZIP_ENTRY_NOT_SIGNED, entryName);
continue;
}
if (firstSignedEntrySigners == null) {
firstSignedEntrySigners = entrySigners;
firstSignedEntryName = entryName;
} else if (!entrySigners.equals(firstSignedEntrySigners)) {
result.addError(
Issue.JAR_SIG_ZIP_ENTRY_SIGNERS_MISMATCH,
firstSignedEntryName,
getSignerNames(firstSignedEntrySigners),
entryName,
getSignerNames(entrySigners));
continue;
}
Collection<NamedDigest> expectedDigests =
getDigestsToVerify(manifestSection, "-Digest", minSdkVersion, maxSdkVersion);
if (expectedDigests.isEmpty()) {
result.addError(Issue.JAR_SIG_NO_ZIP_ENTRY_DIGEST_IN_MANIFEST, entryName);
continue;
}
MessageDigest[] mds = new MessageDigest[expectedDigests.size()];
int mdIndex = 0;
for (NamedDigest expectedDigest : expectedDigests) {
mds[mdIndex] = getMessageDigest(expectedDigest.jcaDigestAlgorithm);
mdIndex++;
}
try {
LocalFileRecord.outputUncompressedData(
apk,
cdRecord,
cdOffsetInApk,
new MessageDigestSink(mds));
} catch (ZipFormatException e) {
throw new ApkFormatException("Malformed ZIP entry: " + entryName, e);
} catch (IOException e) {
throw new IOException("Failed to read entry: " + entryName, e);
}
mdIndex = 0;
for (NamedDigest expectedDigest : expectedDigests) {
byte[] actualDigest = mds[mdIndex].digest();
if (!Arrays.equals(expectedDigest.digest, actualDigest)) {
result.addError(
Issue.JAR_SIG_ZIP_ENTRY_DIGEST_DID_NOT_VERIFY,
entryName,
expectedDigest.jcaDigestAlgorithm,
V1SchemeSigner.MANIFEST_ENTRY_NAME,
Base64.getEncoder().encodeToString(actualDigest),
Base64.getEncoder().encodeToString(expectedDigest.digest));
}
}
}
if (firstSignedEntrySigners == null) {
result.addError(Issue.JAR_SIG_NO_SIGNED_ZIP_ENTRIES);
return Collections.emptySet();
} else {
return new HashSet<>(firstSignedEntrySigners);
}
}
private static List<String> getSignerNames(List<Signer> signers) {
if (signers.isEmpty()) {
return Collections.emptyList();
}
List<String> result = new ArrayList<>(signers.size());
for (Signer signer : signers) {
result.add(signer.getName());
}
return result;
}
private static MessageDigest getMessageDigest(String algorithm)
throws NoSuchAlgorithmException {
return MessageDigest.getInstance(algorithm);
}
private static byte[] digest(String algorithm, byte[] data, int offset, int length)
throws NoSuchAlgorithmException {
MessageDigest md = getMessageDigest(algorithm);
md.update(data, offset, length);
return md.digest();
}
private static byte[] digest(String algorithm, byte[] data) throws NoSuchAlgorithmException {
return getMessageDigest(algorithm).digest(data);
}
    /** Immutable pairing of a JCA digest algorithm name and a digest value. */
    private static class NamedDigest {
        // Canonical JCA algorithm name, e.g. "SHA-256".
        private final String jcaDigestAlgorithm;
        // Raw digest bytes (expected or computed with the above algorithm).
        private final byte[] digest;
        private NamedDigest(String jcaDigestAlgorithm, byte[] digest) {
            this.jcaDigestAlgorithm = jcaDigestAlgorithm;
            this.digest = digest;
        }
    }
    /** Result of verifying an APK's JAR (v1) signature. */
    public static class Result {
        /** Whether the APK's JAR signature verifies. */
        public boolean verified;
        /** List of APK's signers. These signers are used by Android. */
        public final List<SignerInfo> signers = new ArrayList<>();
        /**
         * Signers encountered in the APK but not included in the set of the APK's signers. These
         * signers are ignored by Android.
         */
        public final List<SignerInfo> ignoredSigners = new ArrayList<>();
        private final List<IssueWithParams> mWarnings = new ArrayList<>();
        private final List<IssueWithParams> mErrors = new ArrayList<>();
        /** Returns {@code true} if this result or any of its signers recorded an error. */
        private boolean containsErrors() {
            if (!mErrors.isEmpty()) {
                return true;
            }
            for (SignerInfo signer : signers) {
                if (signer.containsErrors()) {
                    return true;
                }
            }
            return false;
        }
        /** Records an error with the provided message parameters. */
        private void addError(Issue msg, Object... parameters) {
            mErrors.add(new IssueWithParams(msg, parameters));
        }
        /** Records a warning with the provided message parameters. */
        private void addWarning(Issue msg, Object... parameters) {
            mWarnings.add(new IssueWithParams(msg, parameters));
        }
        public List<IssueWithParams> getErrors() {
            return mErrors;
        }
        public List<IssueWithParams> getWarnings() {
            return mWarnings;
        }
        /** Information about a single JAR signer and any issues attributed to it. */
        public static class SignerInfo {
            public final String name;
            public final String signatureFileName;
            public final String signatureBlockFileName;
            public final List<X509Certificate> certChain = new ArrayList<>();
            private final List<IssueWithParams> mWarnings = new ArrayList<>();
            private final List<IssueWithParams> mErrors = new ArrayList<>();
            private SignerInfo(
                    String name, String signatureBlockFileName, String signatureFileName) {
                this.name = name;
                this.signatureBlockFileName = signatureBlockFileName;
                this.signatureFileName = signatureFileName;
            }
            private boolean containsErrors() {
                return !mErrors.isEmpty();
            }
            private void addError(Issue msg, Object... parameters) {
                mErrors.add(new IssueWithParams(msg, parameters));
            }
            private void addWarning(Issue msg, Object... parameters) {
                mWarnings.add(new IssueWithParams(msg, parameters));
            }
            public List<IssueWithParams> getErrors() {
                return mErrors;
            }
            public List<IssueWithParams> getWarnings() {
                return mWarnings;
            }
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.servicecatalog.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Information about a parameter used to provision a product.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/servicecatalog-2015-12-10/ProvisioningArtifactParameter"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ProvisioningArtifactParameter implements Serializable, Cloneable, StructuredPojo {
    // NOTE(review): generated by the AWS Java SDK code generator -- do not edit by hand;
    // manual changes will be lost on the next regeneration.
    /**
     * <p>
     * The parameter key.
     * </p>
     */
    private String parameterKey;
    /**
     * <p>
     * The default value.
     * </p>
     */
    private String defaultValue;
    /**
     * <p>
     * The parameter type.
     * </p>
     */
    private String parameterType;
    /**
     * <p>
     * If this value is true, the value for this parameter is obfuscated from view when the parameter is retrieved. This
     * parameter is used to hide sensitive information.
     * </p>
     */
    private Boolean isNoEcho;
    /**
     * <p>
     * The description of the parameter.
     * </p>
     */
    private String description;
    /**
     * <p>
     * Constraints that the administrator has put on a parameter.
     * </p>
     */
    private ParameterConstraints parameterConstraints;
    /**
     * <p>
     * The parameter key.
     * </p>
     *
     * @param parameterKey
     *        The parameter key.
     */
    public void setParameterKey(String parameterKey) {
        this.parameterKey = parameterKey;
    }
    /**
     * <p>
     * The parameter key.
     * </p>
     *
     * @return The parameter key.
     */
    public String getParameterKey() {
        return this.parameterKey;
    }
    /**
     * <p>
     * The parameter key.
     * </p>
     *
     * @param parameterKey
     *        The parameter key.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ProvisioningArtifactParameter withParameterKey(String parameterKey) {
        setParameterKey(parameterKey);
        return this;
    }
    /**
     * <p>
     * The default value.
     * </p>
     *
     * @param defaultValue
     *        The default value.
     */
    public void setDefaultValue(String defaultValue) {
        this.defaultValue = defaultValue;
    }
    /**
     * <p>
     * The default value.
     * </p>
     *
     * @return The default value.
     */
    public String getDefaultValue() {
        return this.defaultValue;
    }
    /**
     * <p>
     * The default value.
     * </p>
     *
     * @param defaultValue
     *        The default value.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ProvisioningArtifactParameter withDefaultValue(String defaultValue) {
        setDefaultValue(defaultValue);
        return this;
    }
    /**
     * <p>
     * The parameter type.
     * </p>
     *
     * @param parameterType
     *        The parameter type.
     */
    public void setParameterType(String parameterType) {
        this.parameterType = parameterType;
    }
    /**
     * <p>
     * The parameter type.
     * </p>
     *
     * @return The parameter type.
     */
    public String getParameterType() {
        return this.parameterType;
    }
    /**
     * <p>
     * The parameter type.
     * </p>
     *
     * @param parameterType
     *        The parameter type.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ProvisioningArtifactParameter withParameterType(String parameterType) {
        setParameterType(parameterType);
        return this;
    }
    /**
     * <p>
     * If this value is true, the value for this parameter is obfuscated from view when the parameter is retrieved. This
     * parameter is used to hide sensitive information.
     * </p>
     *
     * @param isNoEcho
     *        If this value is true, the value for this parameter is obfuscated from view when the parameter is
     *        retrieved. This parameter is used to hide sensitive information.
     */
    public void setIsNoEcho(Boolean isNoEcho) {
        this.isNoEcho = isNoEcho;
    }
    /**
     * <p>
     * If this value is true, the value for this parameter is obfuscated from view when the parameter is retrieved. This
     * parameter is used to hide sensitive information.
     * </p>
     *
     * @return If this value is true, the value for this parameter is obfuscated from view when the parameter is
     *         retrieved. This parameter is used to hide sensitive information.
     */
    public Boolean getIsNoEcho() {
        return this.isNoEcho;
    }
    /**
     * <p>
     * If this value is true, the value for this parameter is obfuscated from view when the parameter is retrieved. This
     * parameter is used to hide sensitive information.
     * </p>
     *
     * @param isNoEcho
     *        If this value is true, the value for this parameter is obfuscated from view when the parameter is
     *        retrieved. This parameter is used to hide sensitive information.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ProvisioningArtifactParameter withIsNoEcho(Boolean isNoEcho) {
        setIsNoEcho(isNoEcho);
        return this;
    }
    /**
     * <p>
     * If this value is true, the value for this parameter is obfuscated from view when the parameter is retrieved. This
     * parameter is used to hide sensitive information.
     * </p>
     *
     * @return If this value is true, the value for this parameter is obfuscated from view when the parameter is
     *         retrieved. This parameter is used to hide sensitive information.
     */
    public Boolean isNoEcho() {
        return this.isNoEcho;
    }
    /**
     * <p>
     * The description of the parameter.
     * </p>
     *
     * @param description
     *        The description of the parameter.
     */
    public void setDescription(String description) {
        this.description = description;
    }
    /**
     * <p>
     * The description of the parameter.
     * </p>
     *
     * @return The description of the parameter.
     */
    public String getDescription() {
        return this.description;
    }
    /**
     * <p>
     * The description of the parameter.
     * </p>
     *
     * @param description
     *        The description of the parameter.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ProvisioningArtifactParameter withDescription(String description) {
        setDescription(description);
        return this;
    }
    /**
     * <p>
     * Constraints that the administrator has put on a parameter.
     * </p>
     *
     * @param parameterConstraints
     *        Constraints that the administrator has put on a parameter.
     */
    public void setParameterConstraints(ParameterConstraints parameterConstraints) {
        this.parameterConstraints = parameterConstraints;
    }
    /**
     * <p>
     * Constraints that the administrator has put on a parameter.
     * </p>
     *
     * @return Constraints that the administrator has put on a parameter.
     */
    public ParameterConstraints getParameterConstraints() {
        return this.parameterConstraints;
    }
    /**
     * <p>
     * Constraints that the administrator has put on a parameter.
     * </p>
     *
     * @param parameterConstraints
     *        Constraints that the administrator has put on a parameter.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ProvisioningArtifactParameter withParameterConstraints(ParameterConstraints parameterConstraints) {
        setParameterConstraints(parameterConstraints);
        return this;
    }
    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getParameterKey() != null)
            sb.append("ParameterKey: ").append(getParameterKey()).append(",");
        if (getDefaultValue() != null)
            sb.append("DefaultValue: ").append(getDefaultValue()).append(",");
        if (getParameterType() != null)
            sb.append("ParameterType: ").append(getParameterType()).append(",");
        if (getIsNoEcho() != null)
            sb.append("IsNoEcho: ").append(getIsNoEcho()).append(",");
        if (getDescription() != null)
            sb.append("Description: ").append(getDescription()).append(",");
        if (getParameterConstraints() != null)
            sb.append("ParameterConstraints: ").append(getParameterConstraints());
        sb.append("}");
        return sb.toString();
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof ProvisioningArtifactParameter == false)
            return false;
        ProvisioningArtifactParameter other = (ProvisioningArtifactParameter) obj;
        // For each field, "a == null ^ b == null" is true iff exactly one side is null.
        if (other.getParameterKey() == null ^ this.getParameterKey() == null)
            return false;
        if (other.getParameterKey() != null && other.getParameterKey().equals(this.getParameterKey()) == false)
            return false;
        if (other.getDefaultValue() == null ^ this.getDefaultValue() == null)
            return false;
        if (other.getDefaultValue() != null && other.getDefaultValue().equals(this.getDefaultValue()) == false)
            return false;
        if (other.getParameterType() == null ^ this.getParameterType() == null)
            return false;
        if (other.getParameterType() != null && other.getParameterType().equals(this.getParameterType()) == false)
            return false;
        if (other.getIsNoEcho() == null ^ this.getIsNoEcho() == null)
            return false;
        if (other.getIsNoEcho() != null && other.getIsNoEcho().equals(this.getIsNoEcho()) == false)
            return false;
        if (other.getDescription() == null ^ this.getDescription() == null)
            return false;
        if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
            return false;
        if (other.getParameterConstraints() == null ^ this.getParameterConstraints() == null)
            return false;
        if (other.getParameterConstraints() != null && other.getParameterConstraints().equals(this.getParameterConstraints()) == false)
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        // Standard 31-based hash over the same fields compared in equals.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getParameterKey() == null) ? 0 : getParameterKey().hashCode());
        hashCode = prime * hashCode + ((getDefaultValue() == null) ? 0 : getDefaultValue().hashCode());
        hashCode = prime * hashCode + ((getParameterType() == null) ? 0 : getParameterType().hashCode());
        hashCode = prime * hashCode + ((getIsNoEcho() == null) ? 0 : getIsNoEcho().hashCode());
        hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
        hashCode = prime * hashCode + ((getParameterConstraints() == null) ? 0 : getParameterConstraints().hashCode());
        return hashCode;
    }
    @Override
    public ProvisioningArtifactParameter clone() {
        try {
            return (ProvisioningArtifactParameter) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.servicecatalog.model.transform.ProvisioningArtifactParameterMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package restaurant_7;
import menu.Menus;
import admin.admin;
import order.order;
import order.list_order;
/**
*
* @author nurhuda ganteng
*/
public class Main_Program extends javax.swing.JFrame {
    /**
     * Creates new form Main_Program and builds its Swing components.
     */
    public Main_Program() {
        initComponents();
    }
    // One reusable child frame per feature. Navigation works by hiding all of
    // them (see cls()) and then showing only the requested one on the
    // desktop pane.
    Menus menu = new Menus();
    admin frm_admin = new admin();
    order frm_order = new order();
    list_order frm_list_order = new list_order();
    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        jFrame1 = new javax.swing.JFrame();
        Dekstop = new javax.swing.JDesktopPane();
        jButton1 = new javax.swing.JButton();
        jButton2 = new javax.swing.JButton();
        jButton3 = new javax.swing.JButton();
        jButton4 = new javax.swing.JButton();
        jButton5 = new javax.swing.JButton();
        javax.swing.GroupLayout jFrame1Layout = new javax.swing.GroupLayout(jFrame1.getContentPane());
        jFrame1.getContentPane().setLayout(jFrame1Layout);
        jFrame1Layout.setHorizontalGroup(
            jFrame1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 400, Short.MAX_VALUE)
        );
        jFrame1Layout.setVerticalGroup(
            jFrame1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 300, Short.MAX_VALUE)
        );
        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        setAlwaysOnTop(true);
        javax.swing.GroupLayout DekstopLayout = new javax.swing.GroupLayout(Dekstop);
        Dekstop.setLayout(DekstopLayout);
        DekstopLayout.setHorizontalGroup(
            DekstopLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 907, Short.MAX_VALUE)
        );
        DekstopLayout.setVerticalGroup(
            DekstopLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 0, Short.MAX_VALUE)
        );
        jButton1.setText("Order");
        jButton1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton1ActionPerformed(evt);
            }
        });
        jButton2.setText("Menu");
        jButton2.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton2ActionPerformed(evt);
            }
        });
        jButton3.setText("List Order");
        jButton3.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton3ActionPerformed(evt);
            }
        });
        jButton4.setText("Admin");
        jButton4.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton4ActionPerformed(evt);
            }
        });
        jButton5.setText("Tutup");
        jButton5.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton5ActionPerformed(evt);
            }
        });
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                        .addComponent(jButton2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(jButton3, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(jButton1, javax.swing.GroupLayout.PREFERRED_SIZE, 70, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addComponent(jButton4, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                    .addComponent(jButton5))
                .addGap(18, 18, 18)
                .addComponent(Dekstop)
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGap(37, 37, 37)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(Dekstop, javax.swing.GroupLayout.Alignment.TRAILING)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(jButton1)
                        .addGap(18, 18, 18)
                        .addComponent(jButton2)
                        .addGap(18, 18, 18)
                        .addComponent(jButton3)
                        .addGap(29, 29, 29)
                        .addComponent(jButton4)
                        .addGap(43, 43, 43)
                        .addComponent(jButton5)
                        .addGap(0, 753, Short.MAX_VALUE)))
                .addContainerGap())
        );
        pack();
    }// </editor-fold>//GEN-END:initComponents
    private void jButton3ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton3ActionPerformed
        // Hide all pages, then attach and show the "List Order" frame.
        // NOTE(review): the frame is re-add()ed on every click; JDesktopPane
        // re-parents the same component, so this appears harmless — confirm.
        cls();
        Dekstop.add(frm_list_order);
        frm_list_order.setVisible(true);
    }//GEN-LAST:event_jButton3ActionPerformed
    private void jButton2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton2ActionPerformed
        // Hide all pages, then attach and show the "Menu" frame.
        cls();
        Dekstop.add(menu);
        menu.setVisible(true);
    }//GEN-LAST:event_jButton2ActionPerformed
    private void jButton4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton4ActionPerformed
        // Hide all pages, then attach and show the "Admin" frame.
        cls();
        Dekstop.add(frm_admin);
        frm_admin.setVisible(true);
    }//GEN-LAST:event_jButton4ActionPerformed
    private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
        // Hide all pages, then attach and show the "Order" frame.
        cls();
        Dekstop.add(frm_order);
        frm_order.setVisible(true);
    }//GEN-LAST:event_jButton1ActionPerformed
    private void jButton5ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton5ActionPerformed
        // "Tutup" (Close) button: terminate the application.
        System.exit(0);
    }//GEN-LAST:event_jButton5ActionPerformed
    /**
     * Application entry point: installs the Nimbus look and feel when
     * available, then shows the main window on the event dispatch thread.
     *
     * @param args the command line arguments (unused)
     */
    public static void main(String args[]) {
        /* Set the Nimbus look and feel */
        //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
        /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
         * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
         */
        try {
            for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(info.getName())) {
                    javax.swing.UIManager.setLookAndFeel(info.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException ex) {
            java.util.logging.Logger.getLogger(Main_Program.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (InstantiationException ex) {
            java.util.logging.Logger.getLogger(Main_Program.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (IllegalAccessException ex) {
            java.util.logging.Logger.getLogger(Main_Program.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(Main_Program.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        //</editor-fold>
        /* Create and display the form */
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                new Main_Program().setVisible(true);
            }
        });
    }
    /**
     * Hides every child frame ("clear screen") so a button handler can then
     * show exactly one of them.
     */
    public void cls(){
        menu.setVisible(false);
        frm_admin.setVisible(false);
        frm_order.setVisible(false);
        frm_list_order.setVisible(false);
    }
    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JDesktopPane Dekstop;
    private javax.swing.JButton jButton1;
    private javax.swing.JButton jButton2;
    private javax.swing.JButton jButton3;
    private javax.swing.JButton jButton4;
    private javax.swing.JButton jButton5;
    private javax.swing.JFrame jFrame1;
    // End of variables declaration//GEN-END:variables
}
| |
/**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/sam/tags/sakai-10.1/samigo-app/src/java/org/sakaiproject/tool/assessment/ui/bean/evaluation/SubmissionStatusBean.java $
* $Id: SubmissionStatusBean.java 305964 2014-02-14 01:05:35Z ktsao@stanford.edu $
***********************************************************************************
*
* Copyright (c) 2004, 2005, 2006, 2007, 2008, 2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.tool.assessment.ui.bean.evaluation;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import javax.faces.event.ActionEvent;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.jsf.model.PhaseAware;
import org.sakaiproject.tool.assessment.business.entity.RecordingData;
import org.sakaiproject.tool.assessment.services.assessment.PublishedAssessmentService;
import org.sakaiproject.tool.assessment.ui.bean.util.Validator;
import org.sakaiproject.tool.assessment.ui.listener.evaluation.SubmissionStatusListener;
import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil;
/**
* <p>Description: class form for evaluating submission status</p>
*
*
*/
public class SubmissionStatusBean
  implements Serializable, PhaseAware
{
  private String assessmentId;
  private String publishedId;
  /** Use serialVersionUID for interoperability. */
  private final static long serialVersionUID = 5517587781720762296L;
  private String assessmentName;
  private String anonymous;
  private String groupName;
  private String maxScore;
  // Current page of agent results shown in the UI.
  private Collection agents;
  private Collection sortedAgents;
  private String totalScore;
  private String adjustmentTotalScore;
  private String totalScoreComments;
  private String sortProperty;
  private String lateHandling; // read-only property set for UI late handling
  private String dueDate;
  private String sortType;
  private boolean sortAscending = true;
  private String roleSelection;
  private String allSubmissions;
  private RecordingData recordingData;
  private String totalPeople;
  private String firstItem;
  private HashMap answeredItems;
  private static Log log = LogFactory.getLog(SubmissionStatusBean.class);
  //private String selectedSectionFilterValue = TotalScoresBean.ALL_SECTIONS_SELECT_VALUE;
  private String selectedSectionFilterValue = null;
  // Full, unfiltered list of agents; loaded lazily in init().
  private ArrayList allAgents;
  // Paging.
  private int firstScoreRow;
  private int maxDisplayedScoreRows;
  private int scoreDataRows;
  // Searching
  private String searchString;
  private String defaultSearchString;
  private Boolean releasedToGroups = null;
  /**
   * Creates a new SubmissionStatusBean object.
   */
  public SubmissionStatusBean()
  {
    log.debug("Creating a new SubmissionStatusBean");
    resetFields();
  }
  /**
   * Refreshes the visible page of agents: loads all agents on first use,
   * applies the current search filter, then slices out the current page.
   * Called from startRenderResponse() on every render.
   */
  protected void init() {
    // Localized placeholder for the search box; a search string equal to this
    // placeholder means "no filter" (see isFilteredSearch()).
    defaultSearchString = ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.EvaluationMessages", "search_default_student_search_string");
    if (searchString == null) {
      searchString = defaultSearchString;
    }
    // Get allAgents only at the first time
    if (allAgents == null) {
      allAgents = getAllAgents();
    }
    ArrayList matchingAgents;
    if (isFilteredSearch()) {
      matchingAgents = findMatchingAgents(searchString);
    }
    else {
      matchingAgents = allAgents;
    }
    scoreDataRows = matchingAgents.size();
    ArrayList newAgents = new ArrayList();
    // maxDisplayedScoreRows == 0 means "no paging": show everything.
    if (maxDisplayedScoreRows == 0) {
      newAgents = matchingAgents;
    } else {
      int nextPageRow = Math.min(firstScoreRow + maxDisplayedScoreRows, scoreDataRows);
      newAgents = new ArrayList(matchingAgents.subList(firstScoreRow, nextPageRow));
      log.debug("init(): subList " + firstScoreRow + ", " + nextPageRow);
    }
    agents = newAgents;
  }
  // Following three methods are for interface PhaseAware
  public void endProcessValidators() {
    log.debug("endProcessValidators");
  }
  public void endProcessUpdates() {
    log.debug("endProcessUpdates");
  }
  public void startRenderResponse() {
    log.debug("startRenderResponse");
    init();
  }
  /**
   * get assessment name
   *
   * @return the name
   */
  public String getAssessmentName()
  {
    return Validator.check(assessmentName, "N/A");
  }
  /**
   * set assessment name
   *
   * @param passessmentName the name
   */
  public void setAssessmentName(String passessmentName)
  {
    assessmentName = passessmentName;
  }
  /**
   * get assessment id
   *
   * @return the assessment id
   */
  public String getAssessmentId()
  {
    return Validator.check(assessmentId, "0");
  }
  /**
   * set assessment id
   *
   * @param passessmentId the id
   */
  public void setAssessmentId(String passessmentId)
  {
    assessmentId = passessmentId;
  }
  /**
   * get published id
   *
   * @return the published id
   */
  public String getPublishedId()
  {
    return Validator.check(publishedId, "0");
  }
  /**
   * set published id
   *
   * @param ppublishedId the id
   */
  public void setPublishedId(String ppublishedId)
  {
    publishedId = ppublishedId;
  }
  /**
   * Is this anonymous grading?
   *
   * @return anonymous grading? true or false
   */
  public String getAnonymous()
  {
    return Validator.check(anonymous, "false");
  }
  /**
   * Set switch if this is anonymous grading.
   *
   * @param panonymous anonymous grading? true or false
   */
  public void setAnonymous(String panonymous)
  {
    anonymous = panonymous;
  }
  /**
   * Get the group name
   * @return group name
   */
  public String getGroupName()
  {
    return Validator.check(groupName, "N/A");
  }
  /**
   * set the group name
   *
   * @param pgroupName the name
   */
  public void setGroupName(String pgroupName)
  {
    groupName = pgroupName;
  }
  /**
   * get the max score
   *
   * @return the max score
   */
  public String getMaxScore()
  {
    return Validator.check(maxScore, "N/A");
  }
  /**
   * set max score
   *
   * @param pmaxScore set the max score
   */
  public void setMaxScore(String pmaxScore)
  {
    maxScore = pmaxScore;
  }
  /**
   * get an agent result collection
   *
   * @return the collection (never null; empty when no agents are loaded)
   */
  public Collection getAgents()
  {
    if (agents == null)
      return new ArrayList();
    return agents;
  }
  /**
   * set the agent collection
   *
   * @param pagents the collection
   */
  public void setAgents(Collection pagents)
  {
    agents = pagents;
  }
  /** This is a read-only calculated property.
   * @return list of uppercase student initials
   */
  public String getAgentInitials()
  {
    Collection c = getAgents();
    StringBuilder initialsbuf = new StringBuilder();
    if (c.isEmpty())
    {
      return "";
    }
    Iterator it = c.iterator();
    while (it.hasNext())
    {
      try
      {
        AgentResults ar = (AgentResults) it.next();
        String initial = ar.getLastInitial();
        initialsbuf.append(initial);
      }
      catch (Exception ex)
      {
        // if there is any problem, we skip, and go on
        log.warn(ex.getMessage());
      }
    }
    String initials = initialsbuf.toString();
    return initials.toUpperCase();
  }
  /**
   * get agent results as an array
   *
   * @return the array (empty when no agents are loaded)
   */
  public Object[] getAgentArray()
  {
    if (agents == null)
      return new Object[0];
    return agents.toArray();
  }
  /**
   * get the total number of students for this assessment
   *
   * @return the number
   */
  public String getTotalPeople()
  {
    return Validator.check(totalPeople, "N/A");
  }
  /**
   * set the total number of people
   *
   * @param ptotalPeople the total
   */
  public void setTotalPeople(String ptotalPeople)
  {
    totalPeople = ptotalPeople;
  }
  /**
   *
   * @return the total score
   */
  public String getTotalScore()
  {
    return Validator.check(totalScore, "N/A");
  }
  /**
   * set the total score
   *
   * @param pTotalScore the total score
   */
  public void setTotalScore(String pTotalScore)
  {
    totalScore = pTotalScore;
  }
  /**
   * get the adjustment to the total score
   *
   * @return the total score
   */
  public String getAdjustmentTotalScore()
  {
    return Validator.check(adjustmentTotalScore, "N/A");
  }
  /**
   * set the adjustment to total score
   *
   * @param pAdjustmentTotalScore the adjustment
   */
  public void setAdjustmentTotalScore(String pAdjustmentTotalScore)
  {
    adjustmentTotalScore = pAdjustmentTotalScore;
  }
  /**
   * get total score comments
   *
   * @return the comments (empty string when unset)
   */
  public String getTotalScoreComments()
  {
    return Validator.check(totalScoreComments, "");
  }
  /**
   * set comments for totals score
   *
   * @param pTotalScoreComments the comments
   */
  public void setTotalScoreComments(String pTotalScoreComments)
  {
    log.debug("setting total score comments to " + pTotalScoreComments);
    totalScoreComments = pTotalScoreComments;
  }
  /**
   * get late handling
   *
   * @return late handling
   */
  public String getLateHandling()
  {
    return Validator.check(lateHandling, "1");
  }
  /**
   * set late handling
   *
   * @param plateHandling the late handling
   */
  public void setLateHandling(String plateHandling)
  {
    lateHandling = plateHandling;
  }
  /**
   * get the due date
   *
   * @return the due date as a String
   */
  public String getDueDate()
  {
    return Validator.check(dueDate, "N/A");
  }
  /**
   * set due date string
   *
   * @param dateString the date string
   */
  public void setDueDate(String dateString)
  {
    dueDate = dateString;
  }
  /**
   * get sort type
   * @return sort type (defaults to "lastName")
   */
  public String getSortType()
  {
    return Validator.check(sortType, "lastName");
  }
  /**
   * set sort type, trigger property sorts
   * @param psortType the type
   */
  public void setSortType(String psortType)
  {
    sortType = psortType;
  }
  /**
   * is scores table sorted in ascending order
   * @return true if it is
   */
  public boolean isSortAscending()
  {
    return sortAscending;
  }
  /**
   *
   * @param sortAscending is scores table sorted in ascending order
   */
  public void setSortAscending(boolean sortAscending)
  {
    this.sortAscending = sortAscending;
  }
  /**
   * Is this an all submissions or, just the largest
   * @return true if is is, else false
   */
  public String getAllSubmissions()
  {
    return Validator.check(allSubmissions, "false");
  }
  /**
   * set whether all submissions are to be exposed
   * @param pallSubmissions true if it is
   */
  public void setAllSubmissions(String pallSubmissions)
  {
    allSubmissions = pallSubmissions;
  }
  /**
   * get the role selection
   *
   * @return the role selection ("N/A" when unset)
   */
  public String getRoleSelection()
  {
    return Validator.check(roleSelection, "N/A");
  }
  /**
   * set the role selection
   *
   * @param proleSelection the role selection
   */
  public void setRoleSelection(String proleSelection)
  {
    roleSelection = proleSelection;
  }
  /**
   * get the first item
   *
   * @return the first item (empty string when unset)
   */
  public String getFirstItem()
  {
    return Validator.check(firstItem, "");
  }
  /**
   * set the first item
   *
   * @param pfirstItem the first item
   */
  public void setFirstItem(String pfirstItem)
  {
    firstItem = pfirstItem;
  }
  /**
   * reset the fields
   */
  public void resetFields()
  {
    agents = new ArrayList();
    setAgents(agents);
  }
  /**
   * encapsulates audio recording info
   * @return recording data
   */
  public RecordingData getRecordingData()
  {
    return this.recordingData;
  }
  /**
   * encapsulates audio recording info
   * @param rd
   */
  public void setRecordingData(RecordingData rd)
  {
    this.recordingData = rd;
  }
  /**
   * This returns a map of which items actually have answers.
   * Used by QuestionScores.
   */
  public HashMap getAnsweredItems()
  {
    return answeredItems;
  }
  /**
   * This stores a map of which items actually have answers.
   * Used by QuestionScores.
   */
  public void setAnsweredItems(HashMap newItems)
  {
    answeredItems = newItems;
  }
  public String getSelectedSectionFilterValue() {
    // lazy initialization
    if (selectedSectionFilterValue == null) {
      if (isReleasedToGroups()) {
        setSelectedSectionFilterValue(TotalScoresBean.RELEASED_SECTIONS_GROUPS_SELECT_VALUE);
      }
      else {
        setSelectedSectionFilterValue(TotalScoresBean.ALL_SECTIONS_SELECT_VALUE);
      }
    }
    return selectedSectionFilterValue;
  }
  // NOTE(review): throws NullPointerException for a null param — callers
  // appear to always pass a non-null constant; confirm before hardening.
  public void setSelectedSectionFilterValue(String param) {
    if (!param.equals(this.selectedSectionFilterValue)) {
      this.selectedSectionFilterValue = param;
      setFirstRow(0); // clear the paging when we update the search
    }
  }
  /** @return index of the first row on the current page (0-based). */
  public int getFirstRow() {
    return firstScoreRow;
  }
  public void setFirstRow(int firstRow) {
    firstScoreRow = firstRow;
  }
  /** @return page size; 0 means paging is disabled (see init()). */
  public int getMaxDisplayedRows() {
    return maxDisplayedScoreRows;
  }
  public void setMaxDisplayedRows(int maxDisplayedRows) {
    maxDisplayedScoreRows = maxDisplayedRows;
  }
  /** @return number of rows matching the current search filter. */
  public int getDataRows() {
    return scoreDataRows;
  }
  public void setAllAgents(ArrayList allAgents) {
    this.allAgents = allAgents;
  }
  /**
   * Loads the full agent list by delegating to SubmissionStatusListener,
   * which populates this bean as a side effect.
   *
   * @return the full agent list
   * @throws RuntimeException if the listener reports failure
   */
  public ArrayList getAllAgents()
  {
    log.debug("getAllAgents()");
    TotalScoresBean totalScoresBean = (TotalScoresBean) ContextUtil.lookupBean("totalScores");
    String publishedId = ContextUtil.lookupParam("publishedId");
    SubmissionStatusListener submissionStatusListener = new SubmissionStatusListener();
    if (!submissionStatusListener.submissionStatus(publishedId, this, totalScoresBean, false)) {
      throw new RuntimeException("failed to call questionScores.");
    }
    return allAgents;
  }
  public String getSearchString() {
    return searchString;
  }
  /**
   * Sets the search filter; null/blank input falls back to the localized
   * placeholder (i.e. "no filter"), and any change resets paging to row 0.
   */
  public void setSearchString(String searchString) {
    if (StringUtils.trimToNull(searchString) == null) {
      searchString = defaultSearchString;
    }
    if (!StringUtils.equals(searchString, this.searchString)) {
      log.debug("setSearchString " + searchString);
      this.searchString = searchString;
      setFirstRow(0); // clear the paging when we update the search
    }
  }
  public void search(ActionEvent event) {
    // We don't need to do anything special here, since init will handle the search
    log.debug("search");
  }
  public void clear(ActionEvent event) {
    log.debug("clear");
    setSearchString(null);
  }
  // A search is "filtered" only when the user typed something other than the
  // localized placeholder text.
  private boolean isFilteredSearch() {
    return !StringUtils.equals(searchString, defaultSearchString);
  }
  /**
   * Case-insensitive prefix match of the pattern against each agent's first
   * name, last name, EID, "First Last" and "Last, First" forms.
   *
   * @param pattern the search text
   * @return agents whose name or EID starts with the pattern
   */
  public ArrayList findMatchingAgents(final String pattern) {
    ArrayList filteredList = new ArrayList();
    // name1 example: John Doe
    StringBuilder name1;
    // name2 example: Doe, John
    StringBuilder name2;
    for(Iterator iter = allAgents.iterator(); iter.hasNext();) {
      AgentResults result = (AgentResults)iter.next();
      // name1 example: John Doe
      name1 = new StringBuilder(result.getFirstName());
      name1.append(" ");
      name1.append(result.getLastName());
      // name2 example: Doe, John
      name2 = new StringBuilder(result.getLastName());
      name2.append(", ");
      name2.append(result.getFirstName());
      if (result.getFirstName().toLowerCase().startsWith(pattern.toLowerCase()) ||
          result.getLastName().toLowerCase().startsWith(pattern.toLowerCase()) ||
          result.getAgentEid().toLowerCase().startsWith(pattern.toLowerCase()) ||
          name1.toString().toLowerCase().startsWith(pattern.toLowerCase()) ||
          name2.toString().toLowerCase().startsWith(pattern.toLowerCase())) {
        filteredList.add(result);
      }
    }
    return filteredList;
  }
  /**
   * Lazily determines (and caches) whether the published assessment was
   * released to groups.
   *
   * @return true if released to groups
   */
  public boolean isReleasedToGroups() {
    if (releasedToGroups == null) {
      PublishedAssessmentService publishedAssessmentService = new PublishedAssessmentService();
      releasedToGroups = publishedAssessmentService.isReleasedToGroups(publishedId);
    }
    return releasedToGroups;
  }
}
| |
package com.github.bachelorpraktikum.visualisierbar.model;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Set;
import javafx.collections.ObservableList;
import org.junit.Test;
/**
 * Unit tests for {@code Messages}: the per-{@code Context} registry of timed
 * message events and its {@code fireEventsBetween} behaviour. Tests observe
 * fired events through the static {@code MessageEvent.testFire} hook.
 */
public class MessagesTest {
    @Test
    public void testInNotNull() {
        assertNotNull(Messages.in(new Context()));
    }
    @Test(expected = NullPointerException.class)
    public void testInNullContext() {
        Messages.in(null);
    }
    @Test
    public void testInSameForSameContext() {
        Context context = new Context();
        Messages messages = Messages.in(context);
        // System.gc() checks the instance survives garbage collection while
        // the context is still strongly reachable.
        System.gc();
        Messages same = Messages.in(context);
        assertSame(messages, same);
        assertEquals(messages, same);
    }
    @Test
    public void testInDifferentForDifferentContext() {
        Context context1 = new Context();
        Context context2 = new Context();
        Messages messages1 = Messages.in(context1);
        Messages messages2 = Messages.in(context2);
        assertNotSame(messages1, messages2);
        assertNotEquals(messages1, messages2);
    }
    @Test
    public void testGetEventsEmptyNotNull() {
        Messages messages = Messages.in(new Context());
        ObservableList<? extends Event> events = messages.getEvents();
        assertNotNull(events);
        assertTrue(events.isEmpty());
    }
    // Helper: creates a node with fixed name/coordinates in the given context.
    private Node createRandomNode(Context context) {
        return Node.in(context).create("n", new Coordinates(42, 420));
    }
    @Test
    public void testAdd() {
        Context context = new Context();
        Messages messages = Messages.in(context);
        ObservableList<? extends Event> events = messages.getEvents();
        int time = 100;
        String text = "testasdf";
        Node node = createRandomNode(context);
        messages.add(time, text, node);
        assertEquals(1, events.size());
        Event event = events.get(0);
        assertEquals(time, event.getTime());
        assertTrue(event.getDescription().contains(String.valueOf(time)));
        assertTrue(event.getDescription().contains(text));
        assertTrue(event.getWarnings().isEmpty());
    }
    @Test(expected = IllegalArgumentException.class)
    public void testAddNegativeTime() {
        int time = -1;
        String text = "test";
        Context context = new Context();
        Node node = createRandomNode(context);
        Messages.in(context).add(time, text, node);
    }
    @Test
    public void testAddInvalidTime() {
        // An event added with a time earlier than the previous event is
        // clamped to the previous event's time and flagged with a warning.
        Context context = new Context();
        Node node = createRandomNode(context);
        Messages messages = Messages.in(context);
        int firstTime = 10;
        messages.add(firstTime, "test", node);
        Event firstEvent = messages.getEvents().get(0);
        assertEquals(firstTime, firstEvent.getTime());
        assertTrue(firstEvent.getWarnings().isEmpty());
        String text = "test2";
        messages.add(5, text, node);
        Event secondEvent = messages.getEvents().get(1);
        assertEquals(firstTime, secondEvent.getTime());
        assertTrue(secondEvent.getDescription().contains(text));
        assertEquals(1, secondEvent.getWarnings().size());
    }
    @Test(expected = NullPointerException.class)
    public void testAddNullText() {
        Context context = new Context();
        Node node = createRandomNode(context);
        Messages.in(context).add(10, null, node);
    }
    @Test(expected = NullPointerException.class)
    public void testAddNullNode() {
        Messages.in(new Context()).add(10, "test", null);
    }
    @Test
    public void testFireEventsInBetween() {
        Context context = new Context();
        Node node = createRandomNode(context);
        Messages messages = Messages.in(context);
        messages.add(5, "test", node);
        messages.add(10, "test2", node);
        messages.add(15, "test3", node);
        // Capture fired events via the static test hook.
        Set<MessageEvent> fired = new HashSet<>();
        MessageEvent.testFire = fired::add;
        assertTrue(messages.fireEventsBetween(n -> null, 0, 10));
        assertEquals(2, fired.size());
    }
    @Test
    public void testFireEventsInBetweenStepping() {
        Context context = new Context();
        Node node = createRandomNode(context);
        Messages messages = Messages.in(context);
        messages.add(5, "test", node);
        messages.add(10, "test2", node);
        messages.add(15, "test3", node);
        messages.add(20, "test4", node);
        messages.add(25, "test5", node);
        Set<MessageEvent> fired = new HashSet<>();
        MessageEvent.testFire = fired::add;
        // first step, fires two events
        assertTrue(messages.fireEventsBetween(n -> null, 0, 14));
        assertEquals(2, fired.size());
        // second step, fires one event
        fired.clear();
        assertTrue(messages.fireEventsBetween(n -> null, 14, 19));
        assertEquals(1, fired.size());
    }
    @Test
    public void testFireEventsInBetweenStartAtEventTime() {
        Context context = new Context();
        Node node = createRandomNode(context);
        Messages messages = Messages.in(context);
        messages.add(5, "test", node);
        messages.add(10, "test2", node);
        Set<MessageEvent> fired = new HashSet<>();
        MessageEvent.testFire = fired::add;
        // startTime is excluded (see JavaDoc)
        assertFalse(messages.fireEventsBetween(n -> null, 5, 8));
        assertTrue(fired.isEmpty());
    }
    @Test
    public void testFireInBetweenNegativeStart() {
        Context context = new Context();
        Node node = createRandomNode(context);
        Messages messages = Messages.in(context);
        messages.add(0, "test", node);
        messages.add(5, "test2", node);
        Set<MessageEvent> fired = new HashSet<>();
        MessageEvent.testFire = fired::add;
        // A negative start still fires the time-0 event.
        assertTrue(messages.fireEventsBetween(n -> null, -1, 4));
        assertEquals(1, fired.size());
    }
    @Test
    public void testFireInBetweenReversedOrder() {
        // Firing a later window first, then an earlier one, works.
        Context context = new Context();
        Node node = createRandomNode(context);
        Messages messages = Messages.in(context);
        messages.add(5, "test", node);
        messages.add(10, "test2", node);
        Set<MessageEvent> fired = new HashSet<>();
        MessageEvent.testFire = fired::add;
        assertTrue(messages.fireEventsBetween(n -> null, 8, 12));
        assertEquals(1, fired.size());
        fired.clear();
        assertTrue(messages.fireEventsBetween(n -> null, 2, 6));
        assertEquals(1, fired.size());
    }
    @Test
    public void testFireEventsInBetweenRefire() {
        // A wider window re-fires events that were already fired before.
        Context context = new Context();
        Node node = createRandomNode(context);
        Messages messages = Messages.in(context);
        messages.add(5, "test", node);
        messages.add(10, "test2", node);
        Set<MessageEvent> fired = new HashSet<>();
        MessageEvent.testFire = fired::add;
        assertTrue(messages.fireEventsBetween(n -> null, 8, 12));
        assertEquals(1, fired.size());
        fired.clear();
        assertTrue(messages.fireEventsBetween(n -> null, 4, 13));
        assertEquals(2, fired.size());
    }
    @Test
    public void testFireEventsInBetweenEmpty() {
        Context context = new Context();
        Messages messages = Messages.in(context);
        Set<MessageEvent> fired = new HashSet<>();
        MessageEvent.testFire = fired::add;
        assertFalse(messages.fireEventsBetween(n -> null, 0, 1000));
        assertTrue(fired.isEmpty());
    }
    @Test(expected = NullPointerException.class)
    public void testFireEventsInBetweenNullResolver() {
        Messages messages = Messages.in(new Context());
        messages.fireEventsBetween(null, 0, 1000);
    }
    @Test(expected = IllegalArgumentException.class)
    public void testFireEventsInBetweenInvalidTimes() {
        // start > end is rejected.
        Messages messages = Messages.in(new Context());
        messages.fireEventsBetween(n -> null, 1000, 0);
    }
    @Test
    public void testToString() {
        Context context = new Context();
        Messages messages = Messages.in(context);
        Node node = createRandomNode(context);
        int time = 10;
        String text = "test1";
        messages.add(time, text, node);
        Event event = messages.getEvents().get(0);
        assertNotNull(event.toString());
        assertTrue(event.toString().contains(String.valueOf(time)));
        assertTrue(event.toString().contains(text));
        assertTrue(event.toString().contains(node.getName()));
    }
    @Test
    public void testIntPairIsPrivate() {
        // getClasses() only returns public members, so IntPair must not appear.
        for (Class<?> type : Messages.class.getClasses()) {
            if (type.getSimpleName().equals("IntPair")) {
                fail("IntPair should not be public");
            }
        }
    }
    @Test
    public void testIntPair() throws ReflectiveOperationException {
        // needs to be done with reflection, because IntPair is - and should be - private
        Class<?> type = Messages.class.getDeclaredClasses()[0];
        Method getKey = type.getDeclaredMethod("getKey");
        Method getValue = type.getDeclaredMethod("getValue");
        Constructor<?> constructor = type.getDeclaredConstructor(int.class, int.class);
        Object intPair = constructor.newInstance(42, 420);
        assertEquals(42, getKey.invoke(intPair));
        assertEquals(420, getValue.invoke(intPair));
    }
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.maven.importing;
import com.intellij.compiler.impl.javaCompiler.javac.JavacConfiguration;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.ModifiableModuleModel;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleType;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.LibraryOrderEntry;
import com.intellij.openapi.roots.ModifiableRootModel;
import com.intellij.openapi.roots.ModuleRootModel;
import com.intellij.openapi.roots.OrderEntry;
import com.intellij.openapi.roots.impl.libraries.LibraryImpl;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Function;
import com.intellij.util.containers.Stack;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.maven.importing.configurers.MavenModuleConfigurer;
import org.jetbrains.idea.maven.model.MavenArtifact;
import org.jetbrains.idea.maven.project.*;
import org.jetbrains.idea.maven.utils.MavenLog;
import org.jetbrains.idea.maven.utils.MavenProcessCanceledException;
import org.jetbrains.idea.maven.utils.MavenProgressIndicator;
import org.jetbrains.idea.maven.utils.MavenUtil;
import org.jetbrains.jps.model.java.compiler.JpsJavaCompilerOptions;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
 * Imports a set of Maven projects into the IntelliJ project model: creates or updates
 * modules, deletes modules that became incompatible or obsolete, configures module
 * groups and removes project libraries that are no longer referenced by any module.
 *
 * <p>All model modifications are accumulated through the {@link MavenModifiableModelsProvider}
 * and either committed or disposed in a single write action at the end of {@link #importProject()}.
 */
public class MavenProjectImporter {
  private static final Logger LOG = Logger.getInstance(MavenProjectImporter.class);

  private final Project myProject;
  private final MavenProjectsTree myProjectsTree;
  private final Map<VirtualFile, Module> myFileToModuleMapping;
  // NOTE(review): volatile - presumably read from runnables executed on other threads; confirm.
  private volatile Map<MavenProject, MavenProjectChanges> myProjectsToImportWithChanges;
  private volatile Set<MavenProject> myAllProjects;

  private final boolean myImportModuleGroupsRequired;
  private final MavenModifiableModelsProvider myModelsProvider;
  private final MavenImportingSettings myImportingSettings;

  private final ModifiableModuleModel myModuleModel;
  private final List<Module> myCreatedModules = new ArrayList<Module>();

  private final Map<MavenProject, Module> myMavenProjectToModule = new THashMap<MavenProject, Module>();
  private final Map<MavenProject, String> myMavenProjectToModuleName = new THashMap<MavenProject, String>();
  private final Map<MavenProject, String> myMavenProjectToModulePath = new THashMap<MavenProject, String>();

  public MavenProjectImporter(Project p,
                              MavenProjectsTree projectsTree,
                              Map<VirtualFile, Module> fileToModuleMapping,
                              Map<MavenProject, MavenProjectChanges> projectsToImportWithChanges,
                              boolean importModuleGroupsRequired,
                              MavenModifiableModelsProvider modelsProvider,
                              MavenImportingSettings importingSettings) {
    myProject = p;
    myProjectsTree = projectsTree;
    myFileToModuleMapping = fileToModuleMapping;
    myProjectsToImportWithChanges = projectsToImportWithChanges;
    myImportModuleGroupsRequired = importModuleGroupsRequired;
    myModelsProvider = modelsProvider;
    myImportingSettings = importingSettings;

    myModuleModel = modelsProvider.getModuleModel();
  }

  /**
   * Performs the import.
   *
   * @return follow-up tasks to be executed by the caller (e.g. refreshing resolved
   *         artifacts), or {@code null} if the project was disposed mid-way or an
   *         unexpected error occurred.
   */
  @Nullable
  public List<MavenProjectsProcessorTask> importProject() {
    List<MavenProjectsProcessorTask> postTasks = new ArrayList<MavenProjectsProcessorTask>();

    boolean hasChanges;

    // In case the set of projects changes during importing we must remember the original
    // ones here; some projects may already have been removed from the tree.
    myAllProjects = new LinkedHashSet<MavenProject>(myProjectsTree.getProjects());
    myAllProjects.addAll(myProjectsToImportWithChanges.keySet());

    hasChanges = deleteIncompatibleModules();
    myProjectsToImportWithChanges = collectProjectsToImport(myProjectsToImportWithChanges);

    mapMavenProjectsToModulesAndNames();

    if (myProject.isDisposed()) return null;

    final boolean projectsHaveChanges = projectsToImportHaveChanges();
    if (projectsHaveChanges) {
      hasChanges = true;
      importModules(postTasks);
      scheduleRefreshResolvedArtifacts(postTasks);
    }

    if (projectsHaveChanges || myImportModuleGroupsRequired) {
      hasChanges = true;
      configModuleGroups();
    }

    if (myProject.isDisposed()) return null;

    try {
      boolean modulesDeleted = deleteObsoleteModules();
      hasChanges |= modulesDeleted;
      if (hasChanges) {
        removeUnusedProjectLibraries();
      }
    }
    catch (ProcessCanceledException e) {
      throw e;
    }
    catch (Exception e) {
      // Commit nothing on unexpected failures; dispose the accumulated models instead.
      disposeModifiableModels();
      LOG.error(e);
      return null;
    }

    if (hasChanges) {
      MavenUtil.invokeAndWaitWriteAction(myProject, new Runnable() {
        public void run() {
          myModelsProvider.commit();

          if (projectsHaveChanges) {
            removeOutdatedCompilerConfigSettings();

            for (MavenProject mavenProject : myAllProjects) {
              Module module = myMavenProjectToModule.get(mavenProject);
              if (module != null && module.isDisposed()) {
                module = null; // the module was deleted during import; configurers receive null
              }
              for (MavenModuleConfigurer configurer : MavenModuleConfigurer.getConfigurers()) {
                configurer.configure(mavenProject, myProject, module);
              }
            }
          }
        }
      });
    }
    else {
      disposeModifiableModels();
    }

    return postTasks;
  }

  private void disposeModifiableModels() {
    MavenUtil.invokeAndWaitWriteAction(myProject, new Runnable() {
      public void run() {
        myModelsProvider.dispose();
      }
    });
  }

  /** Returns true if at least one of the projects selected for import reports changes. */
  private boolean projectsToImportHaveChanges() {
    for (MavenProjectChanges each : myProjectsToImportWithChanges.values()) {
      if (each.hasChanges()) return true;
    }
    return false;
  }

  private Map<MavenProject, MavenProjectChanges> collectProjectsToImport(Map<MavenProject, MavenProjectChanges> projectsToImport) {
    Map<MavenProject, MavenProjectChanges> result = new THashMap<MavenProject, MavenProjectChanges>(projectsToImport);
    result.putAll(collectNewlyCreatedProjects()); // e.g. when the 'create modules for aggregators' setting changes

    Set<MavenProject> allProjectsToImport = result.keySet();
    Set<MavenProject> selectedProjectsToImport = selectProjectsToImport(allProjectsToImport);

    // Drop the projects that should not get a module (ignored projects, plain aggregators).
    Iterator<MavenProject> it = allProjectsToImport.iterator();
    while (it.hasNext()) {
      if (!selectedProjectsToImport.contains(it.next())) it.remove();
    }

    return result;
  }

  /** Projects that have no module yet are imported with {@link MavenProjectChanges#ALL}. */
  private Map<MavenProject, MavenProjectChanges> collectNewlyCreatedProjects() {
    Map<MavenProject, MavenProjectChanges> result = new THashMap<MavenProject, MavenProjectChanges>();

    for (MavenProject each : myAllProjects) {
      Module module = myFileToModuleMapping.get(each.getFile());
      if (module == null) {
        result.put(each, MavenProjectChanges.ALL);
      }
    }

    return result;
  }

  private Set<MavenProject> selectProjectsToImport(Collection<MavenProject> originalProjects) {
    Set<MavenProject> result = new THashSet<MavenProject>();
    for (MavenProject each : originalProjects) {
      if (!shouldCreateModuleFor(each)) continue;
      result.add(each);
    }
    return result;
  }

  private boolean shouldCreateModuleFor(MavenProject project) {
    if (myProjectsTree.isIgnored(project)) return false;
    return !project.isAggregator() || myImportingSettings.isCreateModulesForAggregators();
  }

  /**
   * Deletes modules whose type no longer matches the corresponding Maven project.
   * Already-mavenized modules are recreated silently; for the rest the user is asked.
   *
   * @return true if any module was actually disposed
   */
  private boolean deleteIncompatibleModules() {
    final Pair<List<Pair<MavenProject, Module>>, List<Pair<MavenProject, Module>>> incompatible = collectIncompatibleModulesWithProjects();
    final List<Pair<MavenProject, Module>> incompatibleMavenized = incompatible.first;
    final List<Pair<MavenProject, Module>> incompatibleNotMavenized = incompatible.second;
    if (incompatibleMavenized.isEmpty() && incompatibleNotMavenized.isEmpty()) return false;

    boolean changed = false;

    // For already-mavenized modules the type may change because maven project plugins were
    // resolved and MavenImporter asked to create a module of a different type.
    // In such cases we must change module type silently.
    for (Pair<MavenProject, Module> each : incompatibleMavenized) {
      myFileToModuleMapping.remove(each.first.getFile());
      myModuleModel.disposeModule(each.second);
      changed = true;
    }

    if (incompatibleNotMavenized.isEmpty()) return changed;

    final int[] result = new int[1];
    MavenUtil.invokeAndWait(myProject, myModelsProvider.getModalityStateForQuestionDialogs(), new Runnable() {
      public void run() {
        String message = ProjectBundle.message("maven.import.incompatible.modules",
                                               incompatibleNotMavenized.size(),
                                               formatProjectsWithModules(incompatibleNotMavenized));
        String[] options = {
          ProjectBundle.message("maven.import.incompatible.modules.recreate"),
          ProjectBundle.message("maven.import.incompatible.modules.ignore")
        };

        result[0] = Messages.showOkCancelDialog(myProject, message,
                                                ProjectBundle.message("maven.project.import.title"),
                                                options[0], options[1], Messages.getQuestionIcon());
      }
    });

    if (result[0] == Messages.OK) {
      for (Pair<MavenProject, Module> each : incompatibleNotMavenized) {
        myFileToModuleMapping.remove(each.first.getFile());
        myModuleModel.disposeModule(each.second);
      }
      changed = true;
    }
    else {
      // The user chose to keep the existing modules; ignore the corresponding projects instead.
      myProjectsTree.setIgnoredState(MavenUtil.collectFirsts(incompatibleNotMavenized), true, true);
    }

    return changed;
  }

  /**
   * Collects modules that need to change module type
   * @return the first List in returned Pair contains already mavenized modules, the second List - not mavenized
   */
  private Pair<List<Pair<MavenProject, Module>>, List<Pair<MavenProject, Module>>> collectIncompatibleModulesWithProjects() {
    List<Pair<MavenProject, Module>> incompatibleMavenized = new ArrayList<Pair<MavenProject, Module>>();
    List<Pair<MavenProject, Module>> incompatibleNotMavenized = new ArrayList<Pair<MavenProject, Module>>();

    MavenProjectsManager manager = MavenProjectsManager.getInstance(myProject);
    for (MavenProject each : myAllProjects) {
      Module module = myFileToModuleMapping.get(each.getFile());
      if (module == null) continue;

      if (shouldCreateModuleFor(each) && !(ModuleType.get(module).equals(each.getModuleType()))) {
        (manager.isMavenizedModule(module) ? incompatibleMavenized : incompatibleNotMavenized).add(Pair.create(each, module));
      }
    }
    return Pair.create(incompatibleMavenized, incompatibleNotMavenized);
  }

  private static String formatProjectsWithModules(List<Pair<MavenProject, Module>> projectsWithModules) {
    return StringUtil.join(projectsWithModules, new Function<Pair<MavenProject, Module>, String>() {
      public String fun(Pair<MavenProject, Module> each) {
        MavenProject project = each.first;
        Module module = each.second;
        return ModuleType.get(module).getName() +
               " '" +
               module.getName() +
               "' for Maven project " +
               project.getMavenId().getDisplayString();
      }
    }, "<br>");
  }

  /**
   * Asks the user whether mavenized modules that no longer correspond to any Maven
   * project should be deleted, and deletes them on confirmation.
   *
   * @return true if any module was deleted
   */
  private boolean deleteObsoleteModules() {
    final List<Module> obsoleteModules = collectObsoleteModules();
    if (obsoleteModules.isEmpty()) return false;

    setMavenizedModules(obsoleteModules, false);

    final int[] result = new int[1];
    MavenUtil.invokeAndWait(myProject, myModelsProvider.getModalityStateForQuestionDialogs(), new Runnable() {
      public void run() {
        result[0] = Messages.showYesNoDialog(myProject,
                                             ProjectBundle.message("maven.import.message.delete.obsolete", formatModules(obsoleteModules)),
                                             ProjectBundle.message("maven.project.import.title"),
                                             Messages.getQuestionIcon());
      }
    });

    if (result[0] == Messages.NO) return false;

    for (Module each : obsoleteModules) {
      if (!each.isDisposed()) {
        myModuleModel.disposeModule(each);
      }
    }

    return true;
  }

  /** Mavenized modules that are not mapped to any project selected for import. */
  private List<Module> collectObsoleteModules() {
    List<Module> remainingModules = new ArrayList<Module>();
    Collections.addAll(remainingModules, myModuleModel.getModules());

    for (MavenProject each : selectProjectsToImport(myAllProjects)) {
      remainingModules.remove(myMavenProjectToModule.get(each));
    }

    List<Module> obsolete = new ArrayList<Module>();
    final MavenProjectsManager manager = MavenProjectsManager.getInstance(myProject);
    for (Module each : remainingModules) {
      if (manager.isMavenizedModule(each)) {
        obsolete.add(each);
      }
    }
    return obsolete;
  }

  /** Lists at most 20 module names; summarizes how many more there are beyond that. */
  private static String formatModules(final Collection<Module> modules) {
    final int maxToShow = 20;
    StringBuilder res = new StringBuilder();

    int shown = 0;
    for (Module module : modules) {
      res.append('\'').append(module.getName()).append("'\n");
      if (++shown >= maxToShow) break;
    }

    // Original code let a 21st name slip through and then under-counted the
    // remainder by one; cap the listing at exactly maxToShow instead.
    if (modules.size() > maxToShow) {
      res.append("\n ... and other ").append(modules.size() - maxToShow).append(" modules");
    }

    return res.toString();
  }

  private static void doRefreshFiles(Set<File> files) {
    LocalFileSystem.getInstance().refreshIoFiles(files);
  }

  private void scheduleRefreshResolvedArtifacts(List<MavenProjectsProcessorTask> postTasks) {
    // We have to refresh all the resolved artifacts manually in order to
    // update all the VirtualFilePointers. It is not enough to call
    // VirtualFileManager.refresh() since the newly created files will be only
    // picked by FS when FileWatcher finishes its work. And in the case of import
    // it doesn't finish in time.
    // I couldn't manage to write a test for this since behaviour of VirtualFileManager
    // and FileWatcher differs from real-life execution.
    List<MavenArtifact> artifacts = new ArrayList<MavenArtifact>();
    for (MavenProject each : myProjectsToImportWithChanges.keySet()) {
      artifacts.addAll(each.getDependencies());
    }

    final Set<File> files = new THashSet<File>();
    for (MavenArtifact each : artifacts) {
      if (each.isResolved()) files.add(each.getFile());
    }

    if (ApplicationManager.getApplication().isUnitTestMode()) {
      doRefreshFiles(files);
    }
    else {
      postTasks.add(new MavenProjectsProcessorTask() {
        public void perform(Project project, MavenEmbeddersManager embeddersManager, MavenConsole console, MavenProgressIndicator indicator)
          throws MavenProcessCanceledException {
          indicator.setText("Refreshing files...");
          doRefreshFiles(files);
        }
      });
    }
  }

  private void mapMavenProjectsToModulesAndNames() {
    for (MavenProject each : myAllProjects) {
      Module module = myFileToModuleMapping.get(each.getFile());
      if (module != null) {
        myMavenProjectToModule.put(each, module);
      }
    }

    MavenModuleNameMapper.map(myAllProjects,
                              myMavenProjectToModule,
                              myMavenProjectToModuleName,
                              myMavenProjectToModulePath,
                              myImportingSettings.getDedicatedModuleDir());
  }

  private void removeOutdatedCompilerConfigSettings() {
    ApplicationManager.getApplication().assertWriteAccessAllowed();

    final JpsJavaCompilerOptions javacOptions = JavacConfiguration.getOptions(myProject, JavacConfiguration.class);
    String options = javacOptions.ADDITIONAL_OPTIONS_STRING;
    options = options.replaceFirst("(-target (\\S+))", ""); // old IDEA versions saved '-target' here
    javacOptions.ADDITIONAL_OPTIONS_STRING = options;
  }

  private void importModules(final List<MavenProjectsProcessorTask> postTasks) {
    Map<MavenProject, MavenProjectChanges> projectsWithChanges = myProjectsToImportWithChanges;

    Set<MavenProject> projectsWithNewlyCreatedModules = new THashSet<MavenProject>();
    for (MavenProject each : projectsWithChanges.keySet()) {
      if (ensureModuleCreated(each)) {
        projectsWithNewlyCreatedModules.add(each);
      }
    }

    List<Module> modulesToMavenize = new ArrayList<Module>();
    List<MavenModuleImporter> importers = new ArrayList<MavenModuleImporter>();

    for (Map.Entry<MavenProject, MavenProjectChanges> each : projectsWithChanges.entrySet()) {
      MavenProject project = each.getKey();
      Module module = myMavenProjectToModule.get(project);
      boolean isNewModule = projectsWithNewlyCreatedModules.contains(project);

      MavenModuleImporter moduleImporter = createModuleImporter(module, project, each.getValue());
      modulesToMavenize.add(module);
      importers.add(moduleImporter);

      moduleImporter.config(isNewModule);
    }

    // Unchanged projects still need importers so that facets can be (pre)configured consistently.
    for (MavenProject project : myAllProjects) {
      if (!projectsWithChanges.containsKey(project)) {
        Module module = myMavenProjectToModule.get(project);
        if (module == null) continue;

        importers.add(createModuleImporter(module, project, null));
      }
    }

    for (MavenModuleImporter importer : importers) {
      importer.preConfigFacets();
    }

    for (MavenModuleImporter importer : importers) {
      importer.configFacets(postTasks);
    }

    setMavenizedModules(modulesToMavenize, true);
  }

  private void setMavenizedModules(final Collection<Module> modules, final boolean mavenized) {
    MavenUtil.invokeAndWaitWriteAction(myProject, new Runnable() {
      public void run() {
        MavenProjectsManager.getInstance(myProject).setMavenizedModules(modules, mavenized);
      }
    });
  }

  /** @return true if a new module had to be created for the project */
  private boolean ensureModuleCreated(MavenProject project) {
    if (myMavenProjectToModule.get(project) != null) return false;

    final String path = myMavenProjectToModulePath.get(project);

    // for some reason newModule opens the existing iml file, so we
    // have to remove it beforehand.
    deleteExistingImlFile(path);

    final Module module = myModuleModel.newModule(path, project.getModuleType().getId());
    myMavenProjectToModule.put(project, module);
    myCreatedModules.add(module);
    return true;
  }

  private void deleteExistingImlFile(final String path) {
    MavenUtil.invokeAndWaitWriteAction(myProject, new Runnable() {
      public void run() {
        try {
          VirtualFile file = LocalFileSystem.getInstance().refreshAndFindFileByPath(path);
          if (file != null) file.delete(this);
        }
        catch (IOException e) {
          MavenLog.LOG.warn("Cannot delete existing iml file: " + path, e);
        }
      }
    });
  }

  private MavenModuleImporter createModuleImporter(Module module, MavenProject mavenProject, @Nullable MavenProjectChanges changes) {
    return new MavenModuleImporter(module,
                                   myProjectsTree,
                                   mavenProject,
                                   changes,
                                   myMavenProjectToModuleName,
                                   myImportingSettings,
                                   myModelsProvider);
  }

  /** Arranges modules into groups that mirror the Maven aggregator hierarchy. */
  private void configModuleGroups() {
    if (!myImportingSettings.isCreateModuleGroups()) return;

    final Stack<String> groups = new Stack<String>();
    final boolean createTopLevelGroup = myProjectsTree.getRootProjects().size() > 1;

    myProjectsTree.visit(new MavenProjectsTree.SimpleVisitor() {
      int depth = 0;

      @Override
      public boolean shouldVisit(MavenProject project) {
        // in case some project has been added while we were importing
        return myMavenProjectToModuleName.containsKey(project);
      }

      public void visit(MavenProject each) {
        depth++;

        String name = myMavenProjectToModuleName.get(each);

        if (shouldCreateGroup(each)) {
          groups.push(ProjectBundle.message("module.group.name", name));
        }

        if (!shouldCreateModuleFor(each)) {
          return;
        }

        Module module = myModuleModel.findModuleByName(name);
        if (module == null) return;

        myModuleModel.setModuleGroupPath(module, groups.isEmpty() ? null : ArrayUtil.toStringArray(groups));
      }

      public void leave(MavenProject each) {
        if (shouldCreateGroup(each)) {
          groups.pop();
        }
        depth--;
      }

      private boolean shouldCreateGroup(MavenProject project) {
        return !myProjectsTree.getModules(project).isEmpty()
               && (createTopLevelGroup || depth > 1);
      }
    });
  }

  /**
   * Removes Maven-created project libraries that are no longer referenced by any
   * module and were not modified by the user.
   *
   * @return true if at least one library was removed
   */
  private boolean removeUnusedProjectLibraries() {
    Set<Library> unusedLibraries = new HashSet<Library>();
    Collections.addAll(unusedLibraries, myModelsProvider.getAllLibraries());

    for (ModuleRootModel eachModel : collectModuleModels()) {
      for (OrderEntry eachEntry : eachModel.getOrderEntries()) {
        if (eachEntry instanceof LibraryOrderEntry) {
          unusedLibraries.remove(((LibraryOrderEntry)eachEntry).getLibrary());
        }
      }
    }

    boolean removed = false;
    for (Library each : unusedLibraries) {
      if (!isDisposed(each) && MavenRootModelAdapter.isMavenLibrary(each) && !MavenRootModelAdapter.isChangedByUser(each)) {
        myModelsProvider.removeLibrary(each);
        removed = true;
      }
    }
    return removed;
  }

  private static boolean isDisposed(Library library) {
    return library instanceof LibraryImpl && ((LibraryImpl)library).isDisposed();
  }

  private Collection<ModuleRootModel> collectModuleModels() {
    Map<Module, ModuleRootModel> rootModels = new THashMap<Module, ModuleRootModel>();

    for (MavenProject each : myProjectsToImportWithChanges.keySet()) {
      Module module = myMavenProjectToModule.get(each);
      ModifiableRootModel rootModel = myModelsProvider.getRootModel(module);
      rootModels.put(module, rootModel);
    }

    for (Module each : myModuleModel.getModules()) {
      if (rootModels.containsKey(each)) continue;
      rootModels.put(each, myModelsProvider.getRootModel(each));
    }

    return rootModels.values();
  }

  public List<Module> getCreatedModules() {
    return myCreatedModules;
  }
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.jps.uiDesigner.compiler;
import com.intellij.compiler.instrumentation.FailSafeClassReader;
import com.intellij.compiler.instrumentation.InstrumentationClassFinder;
import com.intellij.compiler.instrumentation.InstrumenterClassWriter;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.uiDesigner.compiler.*;
import com.intellij.uiDesigner.compiler.Utils;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.lw.CompiledClassPropertiesProvider;
import com.intellij.uiDesigner.lw.LwRootContainer;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.ModuleChunk;
import org.jetbrains.jps.ProjectPaths;
import org.jetbrains.jps.builders.DirtyFilesHolder;
import org.jetbrains.jps.builders.java.JavaBuilderUtil;
import org.jetbrains.jps.builders.java.JavaSourceRootDescriptor;
import org.jetbrains.jps.builders.logging.ProjectBuilderLogger;
import org.jetbrains.jps.incremental.*;
import org.jetbrains.jps.incremental.instrumentation.ClassProcessingBuilder;
import org.jetbrains.jps.incremental.messages.BuildMessage;
import org.jetbrains.jps.incremental.messages.CompilerMessage;
import org.jetbrains.jps.incremental.messages.ProgressMessage;
import org.jetbrains.jps.incremental.storage.OneToManyPathsMapping;
import org.jetbrains.jps.model.JpsProject;
import org.jetbrains.jps.uiDesigner.model.JpsUiDesignerConfiguration;
import org.jetbrains.jps.uiDesigner.model.JpsUiDesignerExtensionService;
import org.jetbrains.org.objectweb.asm.ClassReader;
import java.io.*;
import java.util.*;
/**
 * Instruments compiled classes that are bound to GUI Designer forms: patches the
 * bytecode produced by the java compiler so that the form layout code is generated
 * directly into the bound class.
 *
 * @author Eugene Zhuravlev
 *         Date: 11/20/12
 */
public class FormsInstrumenter extends FormsBuilder {
  public static final String BUILDER_NAME = "forms";

  public FormsInstrumenter() {
    super(BuilderCategory.CLASS_INSTRUMENTER, BUILDER_NAME);
  }

  @Override
  public ExitCode build(CompileContext context, ModuleChunk chunk, DirtyFilesHolder<JavaSourceRootDescriptor, ModuleBuildTarget> dirtyFilesHolder, OutputConsumer outputConsumer) throws ProjectBuildException, IOException {
    final JpsProject project = context.getProjectDescriptor().getProject();
    final JpsUiDesignerConfiguration config = JpsUiDesignerExtensionService.getInstance().getOrCreateUiDesignerConfiguration(project);

    if (!config.isInstrumentClasses()) {
      return ExitCode.NOTHING_DONE;
    }

    // Forms were collected by an earlier builder; consume and clear them.
    final Map<File, Collection<File>> srcToForms = FORMS_TO_COMPILE.get(context);
    FORMS_TO_COMPILE.set(context, null);

    if (srcToForms == null || srcToForms.isEmpty()) {
      return ExitCode.NOTHING_DONE;
    }

    final Set<File> formsToCompile = new THashSet<File>(FileUtil.FILE_HASHING_STRATEGY);
    for (Collection<File> files : srcToForms.values()) {
      formsToCompile.addAll(files);
    }

    if (JavaBuilderUtil.isCompileJavaIncrementally(context)) {
      final ProjectBuilderLogger logger = context.getLoggingManager().getProjectBuilderLogger();
      if (logger.isEnabled()) {
        logger.logCompiledFiles(formsToCompile, getPresentableName(), "Compiling forms:");
      }
    }

    try {
      final Collection<File> platformCp = ProjectPaths.getPlatformCompilationClasspath(chunk, false);

      final List<File> classpath = new ArrayList<File>();
      classpath.addAll(ProjectPaths.getCompilationClasspath(chunk, false));
      classpath.add(getResourcePath(GridConstraints.class)); // forms_rt.jar
      final Map<File, String> chunkSourcePath = ProjectPaths.getSourceRootsWithDependents(chunk);
      classpath.addAll(chunkSourcePath.keySet()); // sourcepath for loading forms resources

      final InstrumentationClassFinder finder = ClassProcessingBuilder.createInstrumentationClassFinder(platformCp, classpath, outputConsumer);

      try {
        final Map<File, Collection<File>> processed = instrumentForms(context, chunk, chunkSourcePath, finder, formsToCompile, outputConsumer);

        final OneToManyPathsMapping sourceToFormMap = context.getProjectDescriptor().dataManager.getSourceToFormMap();

        // Record the successfully processed source->forms bindings...
        for (Map.Entry<File, Collection<File>> entry : processed.entrySet()) {
          final File src = entry.getKey();
          final Collection<File> forms = entry.getValue();

          final Collection<String> formPaths = new ArrayList<String>(forms.size());
          for (File form : forms) {
            formPaths.add(form.getPath());
          }
          sourceToFormMap.update(src.getPath(), formPaths);
          srcToForms.remove(src);
        }

        // ...and clean the mapping for sources whose forms were not instrumented.
        for (File srcFile : srcToForms.keySet()) {
          sourceToFormMap.remove(srcFile.getPath());
        }
      }
      finally {
        finder.releaseResources();
      }
    }
    finally {
      context.processMessage(new ProgressMessage("Finished instrumenting forms [" + chunk.getPresentableShortName() + "]"));
    }

    return ExitCode.OK;
  }

  @Override
  public List<String> getCompilableFileExtensions() {
    return Collections.emptyList();
  }

  /**
   * Instruments each form's bound class in place.
   *
   * @return mapping from source file to the forms that were successfully bound to it
   */
  private Map<File, Collection<File>> instrumentForms(
    CompileContext context, ModuleChunk chunk, final Map<File, String> chunkSourcePath, final InstrumentationClassFinder finder, Collection<File> forms, OutputConsumer outConsumer
  ) throws ProjectBuildException {

    final Map<File, Collection<File>> instrumented = new THashMap<File, Collection<File>>(FileUtil.FILE_HASHING_STRATEGY);
    final Map<String, File> class2form = new HashMap<String, File>();

    final MyNestedFormLoader nestedFormsLoader =
      new MyNestedFormLoader(chunkSourcePath, ProjectPaths.getOutputPathsWithDependents(chunk));

    for (File formFile : forms) {
      final LwRootContainer rootContainer;

      try {
        rootContainer = Utils.getRootContainer(
          formFile.toURI().toURL(), new CompiledClassPropertiesProvider( finder.getLoader())
        );
      }
      catch (AlienFormFileException e) {
        // ignore non-IDEA forms
        continue;
      }
      catch (UnexpectedFormElementException e) {
        context.processMessage(new CompilerMessage(getPresentableName(), BuildMessage.Kind.ERROR, e.getMessage(), formFile.getPath()));
        LOG.info(e);
        continue;
      }
      catch (UIDesignerException e) {
        context.processMessage(new CompilerMessage(getPresentableName(), BuildMessage.Kind.ERROR, e.getMessage(), formFile.getPath()));
        LOG.info(e);
        continue;
      }
      catch (Exception e) {
        throw new ProjectBuildException("Cannot process form file " + formFile.getAbsolutePath(), e);
      }

      final String classToBind = rootContainer.getClassToBind();
      if (classToBind == null) {
        continue;
      }

      final CompiledClass compiled = findClassFile(outConsumer, classToBind);
      if (compiled == null) {
        context.processMessage(new CompilerMessage(
          getPresentableName(), BuildMessage.Kind.WARNING, "Class to bind does not exist: " + classToBind, formFile.getAbsolutePath())
        );
        continue;
      }

      // A class may be bound to at most one form.
      final File alreadyProcessedForm = class2form.get(classToBind);
      if (alreadyProcessedForm != null) {
        context.processMessage(
          new CompilerMessage(
            getPresentableName(), BuildMessage.Kind.WARNING,
            formFile.getAbsolutePath() + ": The form is bound to the class " + classToBind + ".\nAnother form " + alreadyProcessedForm.getAbsolutePath() + " is also bound to this class",
            formFile.getAbsolutePath())
        );
        continue;
      }

      class2form.put(classToBind, formFile);
      addBinding(compiled.getSourceFile(), formFile, instrumented);

      try {
        context.processMessage(new ProgressMessage("Instrumenting forms... [" + chunk.getPresentableShortName() + "]"));

        final BinaryContent originalContent = compiled.getContent();
        final ClassReader classReader =
          new FailSafeClassReader(originalContent.getBuffer(), originalContent.getOffset(), originalContent.getLength());

        final int version = ClassProcessingBuilder.getClassFileVersion(classReader);
        final InstrumenterClassWriter classWriter = new InstrumenterClassWriter(ClassProcessingBuilder.getAsmClassWriterFlags(version), finder);
        final AsmCodeGenerator codeGenerator = new AsmCodeGenerator(rootContainer, finder, nestedFormsLoader, false, classWriter);
        final byte[] patchedBytes = codeGenerator.patchClass(classReader);
        if (patchedBytes != null) {
          compiled.setContent(new BinaryContent(patchedBytes));
        }

        final FormErrorInfo[] warnings = codeGenerator.getWarnings();
        for (final FormErrorInfo warning : warnings) {
          context.processMessage(
            new CompilerMessage(getPresentableName(), BuildMessage.Kind.WARNING, warning.getErrorMessage(), formFile.getAbsolutePath())
          );
        }

        final FormErrorInfo[] errors = codeGenerator.getErrors();
        if (errors.length > 0) {
          StringBuilder message = new StringBuilder();
          for (final FormErrorInfo error : errors) {
            if (message.length() > 0) {
              message.append("\n");
            }
            message.append(formFile.getAbsolutePath()).append(": ").append(error.getErrorMessage());
          }
          context.processMessage(new CompilerMessage(getPresentableName(), BuildMessage.Kind.ERROR, message.toString()));
        }
      }
      catch (Exception e) {
        // Keep the full stack trace in the log (the build message carries only the text),
        // and separate the prefix from the exception text in the user-visible message.
        LOG.info(e);
        context.processMessage(new CompilerMessage(getPresentableName(), BuildMessage.Kind.ERROR, "Forms instrumentation failed: " + e.getMessage(), formFile.getAbsolutePath()));
      }
    }
    return instrumented;
  }

  /**
   * Looks up a compiled class by fully-qualified name, progressively converting
   * trailing '.' separators to '$' to handle nested classes.
   */
  private static CompiledClass findClassFile(OutputConsumer outputConsumer, String classToBind) {
    final Map<String, CompiledClass> compiled = outputConsumer.getCompiledClasses();
    while (true) {
      final CompiledClass fo = compiled.get(classToBind);
      if (fo != null) {
        return fo;
      }
      final int dotIndex = classToBind.lastIndexOf('.');
      if (dotIndex <= 0) {
        return null;
      }
      classToBind = classToBind.substring(0, dotIndex) + "$" + classToBind.substring(dotIndex + 1);
    }
  }

  private static File getResourcePath(Class<?> aClass) {
    return new File(PathManager.getResourceRoot(aClass, "/" + aClass.getName().replace('.', '/') + ".class"));
  }

  private static class MyNestedFormLoader implements NestedFormLoader {
    private final Map<File, String> mySourceRoots;
    private final Collection<File> myOutputRoots;
    private final HashMap<String, LwRootContainer> myCache = new HashMap<String, LwRootContainer>();

    /**
     * @param sourceRoots all source roots for current module chunk and all dependent recursively
     * @param outputRoots output roots for this module chunk and all dependent recursively
     */
    public MyNestedFormLoader(Map<File, String> sourceRoots, Collection<File> outputRoots) {
      mySourceRoots = sourceRoots;
      myOutputRoots = outputRoots;
    }

    public LwRootContainer loadForm(String formFileName) throws Exception {
      if (myCache.containsKey(formFileName)) {
        return myCache.get(formFileName);
      }

      final String relPath = FileUtil.toSystemIndependentName(formFileName);

      for (Map.Entry<File, String> entry : mySourceRoots.entrySet()) {
        final File sourceRoot = entry.getKey();
        final String prefix = entry.getValue();
        String path = relPath;
        if (prefix != null && FileUtil.startsWith(path, prefix)) {
          path = path.substring(prefix.length());
        }
        final File formFile = new File(sourceRoot, path);
        if (formFile.exists()) {
          final BufferedInputStream stream = new BufferedInputStream(new FileInputStream(formFile));
          try {
            return loadForm(formFileName, stream);
          }
          finally {
            stream.close();
          }
        }
      }

      throw new Exception("Cannot find nested form file " + formFileName);
    }

    private LwRootContainer loadForm(String formFileName, InputStream resourceStream) throws Exception {
      final LwRootContainer container = Utils.getRootContainer(resourceStream, null);
      myCache.put(formFileName, container);
      return container;
    }

    public String getClassToBindName(LwRootContainer container) {
      final String className = container.getClassToBind();
      for (File outputRoot : myOutputRoots) {
        final String result = getJVMClassName(outputRoot, className.replace('.', '/'));
        if (result != null) {
          return result.replace('/', '.');
        }
      }
      return className;
    }
  }

  /**
   * Resolves a source-style class name against an output root, converting '/' to '$'
   * from the right until a matching .class file is found; null if none exists.
   */
  @Nullable
  private static String getJVMClassName(File outputRoot, String className) {
    while (true) {
      final File candidateClass = new File(outputRoot, className + ".class");
      if (candidateClass.exists()) {
        return className;
      }
      final int position = className.lastIndexOf('/');
      if (position < 0) {
        return null;
      }
      className = className.substring(0, position) + '$' + className.substring(position + 1);
    }
  }
}
| |
/*
* Copyright 2001-2006 Stephen Colebourne
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.joda.time;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
* This class is a Junit unit test for Months.
*
* @author Stephen Colebourne
*/
public class TestMonths extends TestCase {
    // Test in 2002/03 as time zones are more well known
    // (before the late 90's they were all over the place)
    private static final DateTimeZone PARIS = DateTimeZone.forID("Europe/Paris");

    public static void main(String[] args) {
        junit.textui.TestRunner.run(suite());
    }

    public static TestSuite suite() {
        return new TestSuite(TestMonths.class);
    }

    public TestMonths(String name) {
        super(name);
    }

    protected void setUp() throws Exception {
    }

    protected void tearDown() throws Exception {
    }

    //-----------------------------------------------------------------------
    public void testConstants() {
        assertEquals(0, Months.ZERO.getMonths());
        assertEquals(1, Months.ONE.getMonths());
        assertEquals(2, Months.TWO.getMonths());
        assertEquals(3, Months.THREE.getMonths());
        assertEquals(4, Months.FOUR.getMonths());
        assertEquals(5, Months.FIVE.getMonths());
        assertEquals(6, Months.SIX.getMonths());
        assertEquals(7, Months.SEVEN.getMonths());
        assertEquals(8, Months.EIGHT.getMonths());
        assertEquals(9, Months.NINE.getMonths());
        assertEquals(10, Months.TEN.getMonths());
        assertEquals(11, Months.ELEVEN.getMonths());
        assertEquals(12, Months.TWELVE.getMonths());
        assertEquals(Integer.MAX_VALUE, Months.MAX_VALUE.getMonths());
        assertEquals(Integer.MIN_VALUE, Months.MIN_VALUE.getMonths());
    }

    //-----------------------------------------------------------------------
    public void testFactory_months_int() {
        // Values 0..12 and the extremes must return the cached singletons.
        assertSame(Months.ZERO, Months.months(0));
        assertSame(Months.ONE, Months.months(1));
        assertSame(Months.TWO, Months.months(2));
        assertSame(Months.THREE, Months.months(3));
        assertSame(Months.FOUR, Months.months(4));
        assertSame(Months.FIVE, Months.months(5));
        assertSame(Months.SIX, Months.months(6));
        assertSame(Months.SEVEN, Months.months(7));
        assertSame(Months.EIGHT, Months.months(8));
        assertSame(Months.NINE, Months.months(9));
        assertSame(Months.TEN, Months.months(10));
        assertSame(Months.ELEVEN, Months.months(11));
        assertSame(Months.TWELVE, Months.months(12));
        assertSame(Months.MAX_VALUE, Months.months(Integer.MAX_VALUE));
        assertSame(Months.MIN_VALUE, Months.months(Integer.MIN_VALUE));
        assertEquals(-1, Months.months(-1).getMonths());
        assertEquals(13, Months.months(13).getMonths());
    }

    //-----------------------------------------------------------------------
    public void testFactory_monthsBetween_RInstant() {
        DateTime start = new DateTime(2006, 6, 9, 12, 0, 0, 0, PARIS);
        DateTime end1 = new DateTime(2006, 9, 9, 12, 0, 0, 0, PARIS);
        DateTime end2 = new DateTime(2006, 12, 9, 12, 0, 0, 0, PARIS);
        assertEquals(3, Months.monthsBetween(start, end1).getMonths());
        assertEquals(0, Months.monthsBetween(start, start).getMonths());
        assertEquals(0, Months.monthsBetween(end1, end1).getMonths());
        assertEquals(-3, Months.monthsBetween(end1, start).getMonths());
        assertEquals(6, Months.monthsBetween(start, end2).getMonths());
    }

    public void testFactory_monthsBetween_RPartial() {
        LocalDate start = new LocalDate(2006, 6, 9);
        LocalDate end1 = new LocalDate(2006, 9, 9);
        YearMonthDay end2 = new YearMonthDay(2006, 12, 9);
        assertEquals(3, Months.monthsBetween(start, end1).getMonths());
        assertEquals(0, Months.monthsBetween(start, start).getMonths());
        assertEquals(0, Months.monthsBetween(end1, end1).getMonths());
        assertEquals(-3, Months.monthsBetween(end1, start).getMonths());
        assertEquals(6, Months.monthsBetween(start, end2).getMonths());
    }

    public void testFactory_monthsIn_RInterval() {
        DateTime start = new DateTime(2006, 6, 9, 12, 0, 0, 0, PARIS);
        DateTime end1 = new DateTime(2006, 9, 9, 12, 0, 0, 0, PARIS);
        DateTime end2 = new DateTime(2006, 12, 9, 12, 0, 0, 0, PARIS);
        assertEquals(0, Months.monthsIn((ReadableInterval) null).getMonths());
        assertEquals(3, Months.monthsIn(new Interval(start, end1)).getMonths());
        assertEquals(0, Months.monthsIn(new Interval(start, start)).getMonths());
        assertEquals(0, Months.monthsIn(new Interval(end1, end1)).getMonths());
        assertEquals(6, Months.monthsIn(new Interval(start, end2)).getMonths());
    }

    public void testFactory_parseMonths_String() {
        assertEquals(0, Months.parseMonths((String) null).getMonths());
        assertEquals(0, Months.parseMonths("P0M").getMonths());
        assertEquals(1, Months.parseMonths("P1M").getMonths());
        assertEquals(-3, Months.parseMonths("P-3M").getMonths());
        assertEquals(2, Months.parseMonths("P0Y2M").getMonths());
        assertEquals(2, Months.parseMonths("P2MT0H0M").getMonths());
        try {
            Months.parseMonths("P1Y1D");
            fail();
        } catch (IllegalArgumentException ex) {
            // expected
        }
        try {
            Months.parseMonths("P1MT1H");
            fail();
        } catch (IllegalArgumentException ex) {
            // expected
        }
    }

    //-----------------------------------------------------------------------
    public void testGetMethods() {
        Months test = Months.months(20);
        assertEquals(20, test.getMonths());
    }

    public void testGetFieldType() {
        Months test = Months.months(20);
        assertEquals(DurationFieldType.months(), test.getFieldType());
    }

    public void testGetPeriodType() {
        Months test = Months.months(20);
        assertEquals(PeriodType.months(), test.getPeriodType());
    }

    //-----------------------------------------------------------------------
    public void testIsGreaterThan() {
        assertEquals(true, Months.THREE.isGreaterThan(Months.TWO));
        assertEquals(false, Months.THREE.isGreaterThan(Months.THREE));
        assertEquals(false, Months.TWO.isGreaterThan(Months.THREE));
        // null is treated as zero months
        assertEquals(true, Months.ONE.isGreaterThan(null));
        assertEquals(false, Months.months(-1).isGreaterThan(null));
    }

    public void testIsLessThan() {
        assertEquals(false, Months.THREE.isLessThan(Months.TWO));
        assertEquals(false, Months.THREE.isLessThan(Months.THREE));
        assertEquals(true, Months.TWO.isLessThan(Months.THREE));
        // null is treated as zero months
        assertEquals(false, Months.ONE.isLessThan(null));
        assertEquals(true, Months.months(-1).isLessThan(null));
    }

    //-----------------------------------------------------------------------
    public void testToString() {
        Months test = Months.months(20);
        assertEquals("P20M", test.toString());
        test = Months.months(-20);
        assertEquals("P-20M", test.toString());
    }

    //-----------------------------------------------------------------------
    public void testSerialization() throws Exception {
        // Deserialization must resolve back to the cached singleton (readResolve).
        Months test = Months.THREE;
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(baos);
        oos.writeObject(test);
        // Close (and thereby flush) the object stream BEFORE reading the buffer;
        // reading baos before the close can miss buffered block data.
        oos.close();
        byte[] bytes = baos.toByteArray();
        ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
        ObjectInputStream ois = new ObjectInputStream(bais);
        Months result = (Months) ois.readObject();
        ois.close();
        assertSame(test, result);
    }

    //-----------------------------------------------------------------------
    public void testPlus_int() {
        Months test2 = Months.months(2);
        Months result = test2.plus(3);
        assertEquals(2, test2.getMonths());
        assertEquals(5, result.getMonths());
        assertEquals(1, Months.ONE.plus(0).getMonths());
        try {
            Months.MAX_VALUE.plus(1);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testPlus_Months() {
        Months test2 = Months.months(2);
        Months test3 = Months.months(3);
        Months result = test2.plus(test3);
        assertEquals(2, test2.getMonths());
        assertEquals(3, test3.getMonths());
        assertEquals(5, result.getMonths());
        assertEquals(1, Months.ONE.plus(Months.ZERO).getMonths());
        assertEquals(1, Months.ONE.plus((Months) null).getMonths());
        try {
            Months.MAX_VALUE.plus(Months.ONE);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testMinus_int() {
        Months test2 = Months.months(2);
        Months result = test2.minus(3);
        assertEquals(2, test2.getMonths());
        assertEquals(-1, result.getMonths());
        assertEquals(1, Months.ONE.minus(0).getMonths());
        try {
            Months.MIN_VALUE.minus(1);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testMinus_Months() {
        Months test2 = Months.months(2);
        Months test3 = Months.months(3);
        Months result = test2.minus(test3);
        assertEquals(2, test2.getMonths());
        assertEquals(3, test3.getMonths());
        assertEquals(-1, result.getMonths());
        assertEquals(1, Months.ONE.minus(Months.ZERO).getMonths());
        assertEquals(1, Months.ONE.minus((Months) null).getMonths());
        try {
            Months.MIN_VALUE.minus(Months.ONE);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testMultipliedBy_int() {
        Months test = Months.months(2);
        assertEquals(6, test.multipliedBy(3).getMonths());
        assertEquals(2, test.getMonths());
        assertEquals(-6, test.multipliedBy(-3).getMonths());
        assertSame(test, test.multipliedBy(1));
        Months halfMax = Months.months(Integer.MAX_VALUE / 2 + 1);
        try {
            halfMax.multipliedBy(2);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testDividedBy_int() {
        Months test = Months.months(12);
        assertEquals(6, test.dividedBy(2).getMonths());
        assertEquals(12, test.getMonths());
        assertEquals(4, test.dividedBy(3).getMonths());
        assertEquals(3, test.dividedBy(4).getMonths());
        assertEquals(2, test.dividedBy(5).getMonths());
        assertEquals(2, test.dividedBy(6).getMonths());
        assertSame(test, test.dividedBy(1));
        try {
            Months.ONE.dividedBy(0);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testNegated() {
        Months test = Months.months(12);
        assertEquals(-12, test.negated().getMonths());
        assertEquals(12, test.getMonths());
        try {
            Months.MIN_VALUE.negated();
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    //-----------------------------------------------------------------------
    public void testAddToLocalDate() {
        Months test = Months.months(3);
        LocalDate date = new LocalDate(2006, 6, 1);
        LocalDate expected = new LocalDate(2006, 9, 1);
        assertEquals(expected, date.plus(test));
    }

}
| |
package de.opitzconsulting.orcas.diff;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.math.BigDecimal;
import java.net.URL;
import java.nio.charset.Charset;
import java.sql.CallableStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.function.Supplier;
import de.opitzconsulting.orcas.diff.JdbcConnectionHandler.RunWithCallableStatementProvider;
import de.opitzconsulting.orcas.diff.ParametersCommandline.ParameterTypeMode;
import de.opitzconsulting.orcas.sql.CallableStatementProvider;
import de.opitzconsulting.orcas.sql.WrapperCallableStatement;
import de.opitzconsulting.orcas.sql.WrapperExecuteUpdate;
import de.opitzconsulting.orcas.sql.WrapperIteratorResultSet;
public class OrcasScriptRunner extends Orcas {
/** Name of the bookkeeping table that records already-executed one-time scripts. */
public static final String ORCAS_UPDATES_TABLE = "orcas_updates";

/** Command-line entry point; delegates to the shared Orcas main-run lifecycle. */
public static void main(String[] pArgs) {
    new OrcasScriptRunner().mainRun(pArgs);
}

/** True if the spool target is a virtual in-memory file rather than a file on disk. */
private boolean isInMemorySpoolFile(String pFileName) {
    return pFileName.startsWith("inmemory:");
}

/** This runner parses its command line in ORCAS_SCRIPT mode. */
@Override
protected ParameterTypeMode getParameterTypeMode() {
    return ParameterTypeMode.ORCAS_SCRIPT;
}
/**
 * Main execution: runs either a single script URL or all model files of the
 * configured folder. In one-time-script mode, already-executed scripts are
 * skipped based on the ORCAS_UPDATES_TABLE bookkeeping table.
 */
@Override
protected void run() throws Exception {
    if (getParameters().isOneTimeScriptMode()) {
        logInfo("execute one-time-script start" + getLogMessageFileDetail());
    } else {
        if (getParameters().getScriptUrl() == null) {
            logInfo("execute script start" + getLogMessageFileDetail());
        }
    }
    JdbcConnectionHandler.runWithCallableStatementProvider(getParameters(), new RunWithCallableStatementProvider() {
        public void run(final CallableStatementProvider pCallableStatementProvider) throws Exception {
            RunWithCallableStatementProvider lRunWithCallableStatementProvider = new RunWithCallableStatementProvider() {
                public void run(final CallableStatementProvider pOrcasCallableStatementProvider) throws Exception {
                    // Scripts already executed for this logname (one-time-script mode only).
                    final Map<String, Date> lExecutedFilesMap = new HashMap<String, Date>();
                    if (getParameters().isOneTimeScriptMode()) {
                        new WrapperIteratorResultSet(
                            "select scup_script_name, scup_date from " + ORCAS_UPDATES_TABLE + " where scup_logname = ?",
                            pOrcasCallableStatementProvider,
                            Collections.singletonList(getParameters().getLogname())) {
                            protected void useResultSetRow(ResultSet pResultSet) throws SQLException {
                                lExecutedFilesMap.put(
                                    pResultSet.getString("scup_script_name"),
                                    new Date(pResultSet.getTimestamp("scup_date").getTime()));
                            }
                            protected boolean handleSQLException(SQLException pSQLException) {
                                // Table missing (e.g. first run): create it and treat the select as empty.
                                getDatabaseHandler().createOrcasUpdatesTable(ORCAS_UPDATES_TABLE, pOrcasCallableStatementProvider);
                                return true;
                            }
                        }.execute();
                    }
                    try {
                        if (getParameters().getScriptUrl() != null) {
                            // Single script addressed by URL.
                            runURL(
                                getParameters().getScriptUrl(),
                                pCallableStatementProvider,
                                getParameters(),
                                getParameters().getScriptUrlCharset());
                            addSpoolfolderScriptIfNeededWithParameterReplace(
                                getParameters().getScriptUrl(),
                                getParameters().getScriptUrlFilename(),
                                getParameters().getScriptUrlCharset(),
                                getParameters());
                        } else {
                            // All model files of the configured script folder, in folder order.
                            for (File lFile : FolderHandler.getModelFiles(getParameters())) {
                                if (getParameters().isOneTimeScriptMode()) {
                                    // Bookkeeping key: path relative to the model folder, with forward slashes.
                                    String lScriptfolderAbsolutePath = new File(getParameters().getModelFile()).getAbsolutePath();
                                    String lFileAbsolutePath = lFile.getAbsolutePath();
                                    String lFilePart = lFileAbsolutePath.substring(lScriptfolderAbsolutePath.length());
                                    lFilePart = lFilePart.replace("\\", "/");
                                    lFilePart = lFilePart.substring(1);
                                    if (!lExecutedFilesMap.containsKey(lFilePart)) {
                                        if (!getParameters().isOneTimeScriptLogonlyMode()) {
                                            logInfo("execute one-time-script " + lFilePart);
                                            runFile(lFile, pCallableStatementProvider, getParameters(), null);
                                            addSpoolfolderScriptIfNeeded(lFile);
                                        }
                                        // Record the script as executed (also in log-only mode).
                                        getDatabaseHandler().insertIntoOrcasUpdatesTable(
                                            ORCAS_UPDATES_TABLE,
                                            pOrcasCallableStatementProvider,
                                            lFilePart,
                                            getParameters().getLogname());
                                    } else {
                                        _log.debug("Script already executed: " + lFilePart + " on: " + lExecutedFilesMap.get(lFilePart));
                                    }
                                } else {
                                    runFile(lFile, pCallableStatementProvider, getParameters(), null);
                                    addSpoolfolderScriptIfNeededWithParameterReplace(lFile, getParameters());
                                }
                            }
                        }
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                    _log.debug("execute script done");
                }
            };
            // In one-time-script mode the bookkeeping runs on a separate "orcas" connection.
            if (!getParameters().isOneTimeScriptMode()) {
                lRunWithCallableStatementProvider.run(null);
            } else {
                JdbcConnectionHandler.runWithCallableStatementProvider(
                    getParameters(),
                    getParameters().getOrcasJdbcConnectParameters(),
                    lRunWithCallableStatementProvider);
            }
        }
    });
}
/**
 * Returns ": <model file>" for a single-model-file run, or "" when a list of
 * model files is configured (individual files are logged elsewhere).
 */
private String getLogMessageFileDetail() {
    return getParameters().getModelFiles() == null ? (": " + getParameters().getModelFile()) : "";
}

/** Executes the script behind the given URL using the supplied charset. */
public void runURL(URL pURL, CallableStatementProvider pCallableStatementProvider, Parameters pParameters, Charset pCharset) throws Exception {
    runReader(new InputStreamReader(pURL.openStream(), pCharset), pCallableStatementProvider, pParameters, null, null);
}
/**
 * Executes the script behind the given URL with SQL*Plus-style substitution
 * parameters (&1, &2, ...). The parameters object is temporarily switched to
 * the supplied values and always restored afterwards, even on failure.
 */
public void runURL(
    URL pURL,
    CallableStatementProvider pCallableStatementProvider,
    Parameters pParameters,
    Charset pCharset,
    String... pAdditionalParameters) throws Exception {
    setParameters(pParameters);
    final List<String> lSubstitutionParameters = new ArrayList<String>();
    for (String lParameter : pAdditionalParameters) {
        lSubstitutionParameters.add(lParameter);
    }
    final List<String> lSavedParameters = pParameters._additionalParameters;
    pParameters._additionalParameters = lSubstitutionParameters;
    try {
        runURL(pURL, pCallableStatementProvider, pParameters, pCharset);
    } finally {
        // Restore the original parameter list even if script execution throws.
        pParameters._additionalParameters = lSavedParameters;
    }
}
/**
 * Substitutes SQL*Plus-style positional placeholders (&1, &1., &2, ...) in a
 * script line with the configured additional parameters.
 *
 * Placeholders are processed from the highest index downwards so that e.g.
 * "&10" is substituted before "&1" can consume its prefix - with ascending
 * order, ten or more parameters silently corrupted "&10".."&NN" references.
 *
 * @param pValue line of script text, possibly containing placeholders
 * @param pParameters source of the replacement values
 * @return the line with all placeholders replaced
 */
static String doReplace(String pValue, Parameters pParameters) {
    String lOrig = null;
    for (int i = pParameters.getAdditionalParameters().size() - 1; i >= 0; i--) {
        String lParameter = pParameters.getAdditionalParameters().get(i);
        String lKey = "&" + (i + 1);
        if (pValue.contains(lKey)) {
            lOrig = pValue;
            // "&n." is the explicit-terminator form; replace it first so the dot is consumed.
            pValue = pValue.replace(lKey + ".", lParameter);
            pValue = pValue.replace(lKey, lParameter);
        }
    }
    if (lOrig != null) {
        _log.debug("replaced: " + lOrig + " with: " + pValue);
    }
    return pValue;
}
/** Returns the first handler that claims the given (trimmed, lower-cased) line, or null. */
private CommandHandler findCommandHandler(String pTrimedLine, List<CommandHandler> pCommandHandlerList) {
    return pCommandHandlerList.stream()
        .filter(lHandler -> lHandler.isCommand(pTrimedLine))
        .findFirst()
        .orElse(null);
}
/**
 * Executes a script file using the configured encoding.
 *
 * @param pSpoolHandler active spool handler to reuse, or null to create a fresh one
 */
private void runFile(
    File pFile,
    final CallableStatementProvider pCallableStatementProvider,
    final Parameters pParameters,
    SpoolHandler pSpoolHandler) throws Exception {
    if (pParameters.getAdditionalParameters().isEmpty()) {
        _log.debug("execute script: " + pFile);
    } else {
        _log.debug("execute script: " + pFile + " " + pParameters.getAdditionalParameters());
    }
    runReader(
        new InputStreamReader(new FileInputStream(pFile), pParameters.getEncoding()),
        pCallableStatementProvider,
        pParameters,
        pFile,
        pSpoolHandler);
}
/**
 * Reads all lines from the reader (which is always closed, also on failure -
 * the previous version leaked it when readLine threw).
 *
 * @param pReader source to read; consumed and closed by this call
 * @return the lines in file order, without line terminators
 * @throws IOException if reading fails
 */
static List<String> parseReaderToLines(Reader pReader) throws IOException {
    final List<String> lLines = new ArrayList<String>();
    try (BufferedReader lBufferedReader = new BufferedReader(pReader)) {
        String lFileLine;
        while ((lFileLine = lBufferedReader.readLine()) != null) {
            lLines.add(lFileLine);
        }
    }
    return lLines;
}
// Whether "set serveroutput on" is active; dbms_output is drained after each statement.
private boolean _serveroutput = false;
// Contents spooled to virtual "inmemory:" targets, keyed by spool file name.
private Map<String, byte[]> inMemorySpoolFileMap = new HashMap<>();

/** Reads all lines from the reader and executes them as a script. */
protected void runReader(
    Reader pReader,
    final CallableStatementProvider pCallableStatementProvider,
    final Parameters pParameters,
    File pFile,
    SpoolHandler pSpoolHandler) throws Exception {
    runLines(parseReaderToLines(pReader), pCallableStatementProvider, pParameters, pFile, pSpoolHandler);
}

/** Convenience overload without an externally supplied spool handler. */
void runLines(List<String> pLines, final CallableStatementProvider pCallableStatementProvider, final Parameters pParameters, File pFile)
    throws Exception {
    runLines(pLines, pCallableStatementProvider, pParameters, pFile, null);
}
/**
 * Core script interpreter: classifies each line as a SQL*Plus-style command
 * (spool, prompt, set, @file, ...), part of a ";"-terminated SQL statement, or
 * part of a "/"-terminated PL/SQL block, and executes the collected statements
 * in order.
 *
 * Fix: the "not terminated correctly" error message previously read "statemmet".
 *
 * @param pLines        script lines, in order
 * @param pFile         script file, used only for error/line references (may be null)
 * @param pSpoolHandler spool handler to reuse, or null to create a fresh one
 */
void runLines(
    List<String> pLines,
    final CallableStatementProvider pCallableStatementProvider,
    final Parameters pParameters,
    File pFile,
    SpoolHandler pSpoolHandler) throws Exception {
    inMemorySpoolFileMap.clear();
    // First pass: does the script use a lone "/" PL/SQL terminator at all?
    // Only then are create/begin/declare lines collected in PL/SQL mode.
    boolean lHasPlSqlModeTerminator = false;
    CommentHandler lCommentHandler = new CommentHandler();
    for (String lFileLine : pLines) {
        String lTrimedLine = lFileLine.trim().toLowerCase();
        lCommentHandler.handleLine(lTrimedLine);
        if (lCommentHandler.isPlsqlTerminator(lFileLine)) {
            lHasPlSqlModeTerminator = true;
        }
    }
    boolean lPlSqlMode = false;
    boolean lNonPlSqlMultilineMode = false;
    final SpoolHandler lSpoolHandler = pSpoolHandler == null ? createSpoolHandler(pParameters) : pSpoolHandler;
    // Handlers are consulted in order; the serveroutput handler must precede
    // the generic "set ..." ignore-handler.
    List<CommandHandler> lCommandHandlerList = new ArrayList<OrcasScriptRunner.CommandHandler>();
    lCommandHandlerList.add(lSpoolHandler);
    lCommandHandlerList.add(new PromptHandler(lSpoolHandler));
    lCommandHandlerList.add(new CommandHandler() {
        // Handles "set serveroutput on|off".
        public boolean isCommand(String pTrimedLine) {
            if (pTrimedLine.startsWith("set")) {
                List<String> lCommands = new ArrayList<String>();
                StringTokenizer lStringTokenizer = new StringTokenizer(pTrimedLine, " \t");
                while (lStringTokenizer.hasMoreTokens()) {
                    lCommands.add(lStringTokenizer.nextToken());
                }
                if (lCommands.size() >= 3) {
                    if (lCommands.get(1).equalsIgnoreCase("serveroutput")) {
                        return true;
                    }
                }
            }
            return false;
        }
        public void handleCommand(String pLine, File pCurrentFile) throws Exception {
            List<String> lCommands = new ArrayList<String>();
            StringTokenizer lStringTokenizer = new StringTokenizer(pLine.trim(), " \t");
            while (lStringTokenizer.hasMoreTokens()) {
                lCommands.add(lStringTokenizer.nextToken());
            }
            _serveroutput = lCommands.get(2).equalsIgnoreCase("on");
        }
    });
    lCommandHandlerList.add(new CommandHandler() {
        // Silently ignores all other "set ..." commands and "quit".
        public boolean isCommand(String pTrimedLine) {
            return pTrimedLine.startsWith("set ") || pTrimedLine.startsWith("quit");
        }
        public void handleCommand(String pLine, File pCurrentFile) throws Exception {
            _log.debug("ignoring: " + pLine);
        }
    });
    lCommandHandlerList.add(createStartHandler(pCallableStatementProvider, pParameters, lSpoolHandler));
    StringBuffer lCurrent = null;
    lCommentHandler = new CommentHandler();
    // [start..current] line index of the statement being collected, for error messages.
    int[] lStartLineIndex = new int[] { 0 };
    int[] lCurrentLineIndex = new int[] { 0 };
    Supplier<String>
        lLineReferenceProvider =
        () -> pFile + "(" + (lStartLineIndex[0] + 1) + (lStartLineIndex[0] != lCurrentLineIndex[0] ? "-" + (lCurrentLineIndex[0] + 1) : "") + ")";
    for (String lLine : pLines) {
        boolean lCurrentEnd = false;
        String lAppend = null;
        String lTrimedLine = lLine.trim().toLowerCase();
        // check whether we are in a block comment
        lCommentHandler.handleLine(lTrimedLine);
        if (lPlSqlMode) {
            if (lCommentHandler.isPlsqlTerminator(lLine)) {
                lCurrentEnd = true;
                lPlSqlMode = false;
            } else {
                lAppend = lLine;
            }
        } else {
            CommandHandler lCommandHandler = findCommandHandler(lTrimedLine, lCommandHandlerList);
            if (!lNonPlSqlMultilineMode && lCommandHandler != null) {
                lCommandHandler.handleCommand(lLine, pFile);
                lStartLineIndex[0] = lCurrentLineIndex[0] + 1;
            } else {
                if (lTrimedLine.startsWith("--")) {
                    // Keep comment lines that fall inside a collected statement.
                    if (lCurrent != null) {
                        lAppend = lLine;
                    }
                } else {
                    if (lTrimedLine.endsWith(";")) {
                        // Plain SQL statement; strip the ";" terminator.
                        int lIndex = lLine.lastIndexOf(';');
                        lAppend = lLine.substring(0, lIndex);
                        lCurrentEnd = true;
                        lNonPlSqlMultilineMode = false;
                    } else {
                        if (lHasPlSqlModeTerminator && (lTrimedLine.startsWith("create ")
                            || lTrimedLine.startsWith("replace ")
                            || lTrimedLine.startsWith("begin")
                            || lTrimedLine.startsWith("declare"))) {
                            // PL/SQL block: collect until the lone "/" terminator line.
                            lPlSqlMode = true;
                            lAppend = lLine;
                        } else {
                            boolean lEmptyLine = lTrimedLine.equals("");
                            if (lNonPlSqlMultilineMode) {
                                lAppend = lLine;
                            } else {
                                if (!lEmptyLine) {
                                    lNonPlSqlMultilineMode = true;
                                    lAppend = lLine;
                                } else {
                                    lStartLineIndex[0] = lCurrentLineIndex[0] + 1;
                                }
                            }
                        }
                    }
                }
            }
        }
        if (lAppend != null) {
            if (lCurrent == null) {
                lCurrent = new StringBuffer();
            } else {
                lCurrent.append("\n");
            }
            lCurrent.append(doReplace(lAppend, pParameters));
        }
        if (lCurrentEnd) {
            if (isSelect(lCurrent.toString())) {
                executeSelect(lCurrent.toString(), lSpoolHandler, pCallableStatementProvider);
            } else {
                executeSql(lCurrent.toString(), pCallableStatementProvider, pParameters, lLineReferenceProvider);
            }
            if (_serveroutput) {
                // Drain dbms_output line by line until the status flag reports no more lines.
                final boolean[] keepRunning = new boolean[1];
                do {
                    new WrapperCallableStatement("begin dbms_output.get_line( ?, ?); end;", pCallableStatementProvider) {
                        @Override
                        protected void useCallableStatement(CallableStatement pCallableStatement) throws SQLException {
                            pCallableStatement.registerOutParameter(1, java.sql.Types.VARCHAR);
                            pCallableStatement.registerOutParameter(2, java.sql.Types.NUMERIC);
                            pCallableStatement.executeUpdate();
                            BigDecimal lStatus = pCallableStatement.getBigDecimal(2);
                            _log.debug("serveroutput status: " + lStatus);
                            keepRunning[0] = lStatus != null && lStatus.intValue() == 0;
                            String lLine = pCallableStatement.getString(1);
                            if (lLine != null) {
                                lSpoolHandler.spoolIfActive(lLine);
                                logInfo("serveroutput: " + lLine);
                            }
                        }
                    }.execute();
                } while (keepRunning[0]);
            }
            lCurrent = null;
            lStartLineIndex[0] = lCurrentLineIndex[0] + 1;
        }
        lCurrentLineIndex[0]++;
    }
    // Anything left over that is not just whitespace or a stray "/" is a script error.
    if (lCurrent != null) {
        String lTrim = lCurrent.toString().replace("\n", "").trim();
        if (!lTrim.equals("") && !lTrim.equals("/")) {
            _log.error(lLineReferenceProvider.get() + ": statement not terminated correctly: " + lCurrent.toString());
        }
    }
    lSpoolHandler.spoolHandleFileEnd();
}
/** Factory for the "@file" start handler; overridable by subclasses. */
protected StartHandler createStartHandler(
    final CallableStatementProvider pCallableStatementProvider,
    final Parameters pParameters,
    SpoolHandler pSpoolHandler) {
    return new StartHandler(pParameters, pCallableStatementProvider, pSpoolHandler);
}

/** Factory for the spool handler; overridable by subclasses. */
protected SpoolHandler createSpoolHandler(Parameters pParameters) {
    return new SpoolHandler(pParameters);
}
/**
 * Executes a non-query statement. On failure, the configured error handler
 * decides via the callback whether to rethrow, log, or ignore; every message
 * is prefixed with the statement's source-line reference.
 */
private void executeSql(
    String pSql,
    CallableStatementProvider pCallableStatementProvider,
    Parameters pParameters,
    Supplier<String> pLineReferenceProvider) {
    try {
        new WrapperExecuteUpdate(pSql, pCallableStatementProvider).execute();
    } catch (RuntimeException e) {
        pParameters
            .getExecuteSqlErrorHandler()
            .handleExecutionError(e, pSql, pCallableStatementProvider, pParameters, new ExecuteSqlErrorHandler.ExecuteSqlErrorHandlerCallback() {
                @Override
                public void rethrow() {
                    throw new RuntimeException(getLineReference() + e.getMessage(), e);
                }
                @Override
                public void logError() {
                    _log.warn(getLineReference() + e.getMessage(), e);
                }
                @Override
                public void logInfo(String pMessage) {
                    OrcasScriptRunner.this.logInfo(getLineReference() + pMessage);
                }
                @Override
                public String getLineReference() {
                    // e.g. "file(12-15): " - points at the statement's source lines.
                    return pLineReferenceProvider.get() + ": ";
                }
            });
    }
}
/**
 * Executes a select, spooling (if active) and logging each row as a
 * comma-separated list of column values; null columns render as "".
 */
private void executeSelect(String pSql, final SpoolHandler pSpoolHandler, CallableStatementProvider pCallableStatementProvider) {
    final int[] lRowIndex = new int[] { 0 };
    new WrapperIteratorResultSet(pSql, pCallableStatementProvider) {
        @Override
        protected void useResultSetRow(ResultSet pResultSet) throws SQLException {
            int lColumnCount = pResultSet.getMetaData().getColumnCount();
            StringBuilder lLine = new StringBuilder();
            for (int i = 0; i < lColumnCount; i++) {
                if (i > 0) {
                    lLine.append(", ");
                }
                Object lObject = pResultSet.getObject(i + 1);
                if (lObject == null) {
                    lLine.append("");
                } else {
                    lLine.append(lObject);
                }
            }
            pSpoolHandler.spoolIfActive(lLine.toString());
            lRowIndex[0]++;
            logInfo("select [" + lRowIndex[0] + "]: " + lLine);
        }
    }.execute();
    // Row count goes to info only if rows were returned, otherwise just debug.
    if (lRowIndex[0] != 0) {
        logInfo("select rowcount: " + lRowIndex[0]);
    } else {
        _log.debug("select rowcount: " + lRowIndex[0]);
    }
}
/** Case-insensitive check whether the statement starts with the "select " keyword. */
private boolean isSelect(String pSql) {
    final String lNormalized = pSql.toLowerCase().trim();
    return lNormalized.startsWith("select ");
}
/** A SQL*Plus-style script command (spool, prompt, set, @file, ...). */
private interface CommandHandler {
    /** @param pTrimedLine the trimmed, lower-cased script line */
    public boolean isCommand(String pTrimedLine);
    /** Executes the command; pLine is the raw line, pCurrentFile the running script (may be null). */
    public void handleCommand(String pLine, File pCurrentFile) throws Exception;
}
/**
 * Handles "spool <file>" / "spool off". Output lines are written to a file on
 * disk or, for "inmemory:" targets, to a byte buffer that is published to
 * inMemorySpoolFileMap when spooling stops.
 */
class SpoolHandler implements CommandHandler {
    private OutputStream spoolFile;
    protected Writer writer; // non-null while spooling is active
    private Parameters parameters;
    private String inMemorySpoolFileName; // non-null while spooling to an in-memory target
    public SpoolHandler(Parameters pParameters) {
        parameters = pParameters;
    }
    public boolean isCommand(String pTrimedLine) {
        return pTrimedLine.startsWith("spool ");
    }
    /** Appends a line (plus newline) to the spool target, if spooling is active. */
    public void spoolIfActive(String pLine) {
        if (isSpoolActive()) {
            try {
                writer.append(pLine);
                writer.append("\n");
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }
    /** Called at script end: forcibly closes a spool file the script forgot to close. */
    public void spoolHandleFileEnd() throws Exception {
        if (isSpoolActive()) {
            _log.error("spooling still active");
            closeSpoolFile();
        }
    }
    public void handleCommand(String pLine, File pCurrentFile) throws Exception {
        String lTrimedLine = pLine.trim();
        String lFileName = doReplace(lTrimedLine.substring("spool ".length()), parameters).trim();
        lFileName = lFileName.replace(";", "");
        if (lFileName.equals("off")) {
            if (!isSpoolActive()) {
                _log.error("spooling not active: " + lTrimedLine);
            } else {
                closeSpoolFile();
            }
        } else {
            // Starting a new spool implicitly closes a still-open one.
            if (isSpoolActive()) {
                _log.warn("spooling already active: " + lTrimedLine);
                closeSpoolFile();
            }
            openSpoolFile(lFileName);
        }
    }
    protected void openSpoolFile(String pFileName) throws FileNotFoundException {
        logInfo("start spooling: " + pFileName);
        if (isInMemorySpoolFile(pFileName)) {
            spoolFile = new ByteArrayOutputStream();
            writer = new OutputStreamWriter(spoolFile, getParameters().getEncodingForSqlLog());
            inMemorySpoolFileName = pFileName;
        } else {
            // Create parent directories on demand for disk targets.
            File lFile = new File(pFileName);
            if (lFile.getParentFile() != null) {
                lFile.getParentFile().mkdirs();
            }
            spoolFile = new FileOutputStream(lFile);
            writer = new OutputStreamWriter(spoolFile, getParameters().getEncodingForSqlLog());
        }
    }
    private boolean isSpoolActive() {
        return writer != null;
    }
    protected void closeSpoolFile() throws IOException {
        logInfo("stop spooling");
        writer.close();
        writer = null;
        spoolFile.close();
        if (inMemorySpoolFileName != null) {
            // Publish the buffered content so "@inmemory:..." can replay it later.
            inMemorySpoolFileMap.put(inMemorySpoolFileName, ((ByteArrayOutputStream) spoolFile).toByteArray());
            inMemorySpoolFileName = null;
        }
        spoolFile = null;
    }
}
/** Handles "prompt <text>": logs the text and spools it if spooling is active. */
private class PromptHandler implements CommandHandler {
    private static final String PROMPT = "prompt ";
    private SpoolHandler spoolHandler;
    public PromptHandler(SpoolHandler pSpoolHandler) {
        spoolHandler = pSpoolHandler;
    }
    public boolean isCommand(String pTrimedLine) {
        return pTrimedLine.startsWith(PROMPT);
    }
    public void handleCommand(String pLine, File pCurrentFile) throws Exception {
        String lTrimedLine = pLine.trim();
        lTrimedLine = lTrimedLine.substring(PROMPT.length(), lTrimedLine.length());
        // Strip a trailing ";" and the optional configured prefix before output.
        if (lTrimedLine.endsWith(";")) {
            lTrimedLine = lTrimedLine.substring(0, lTrimedLine.length() - 1);
        }
        if (getParameters().getRemovePromptPrefix() != null && lTrimedLine.startsWith(getParameters().getRemovePromptPrefix())) {
            lTrimedLine = lTrimedLine.substring(getParameters().getRemovePromptPrefix().length());
        }
        logInfo(lTrimedLine);
        spoolHandler.spoolIfActive(lTrimedLine);
    }
}
/**
 * Handles "@file" and "@@file": runs the referenced script. "@@" resolves
 * relative to the current script's directory; "@inmemory:..." replays a
 * previously spooled in-memory file.
 */
class StartHandler implements CommandHandler {
    private Parameters parameters;
    private CallableStatementProvider callableStatementProvider;
    private SpoolHandler spoolHandler;
    public StartHandler(Parameters pParameters, CallableStatementProvider pCallableStatementProvider, SpoolHandler pSpoolHandler) {
        parameters = pParameters;
        callableStatementProvider = pCallableStatementProvider;
        spoolHandler = pSpoolHandler;
    }
    public boolean isCommand(String pTrimedLine) {
        return pTrimedLine.startsWith("@");
    }
    public void handleCommand(String pLine, File pCurrentFile) throws Exception {
        File lFile;
        String lTrimLine = pLine.trim();
        if (lTrimLine.startsWith("@@")) {
            // "@@" is relative to the directory of the script currently running.
            lFile = new File(pCurrentFile.getParent(), doReplace(lTrimLine.substring(2).trim(), parameters));
        } else {
            String lFilename = doReplace(lTrimLine.substring(1).trim(), parameters);
            if (isInMemorySpoolFile(lFilename) && inMemorySpoolFileMap.containsKey(lFilename)) {
                // Replay a previously spooled in-memory file, reusing the current spool handler.
                runReader(
                    new InputStreamReader(new ByteArrayInputStream(inMemorySpoolFileMap.get(lFilename)), parameters.getEncodingForSqlLog()),
                    callableStatementProvider,
                    parameters,
                    null,
                    spoolHandler);
                return;
            } else {
                lFile = new File(lFilename);
            }
        }
        runFile(lFile, callableStatementProvider, parameters, spoolHandler);
    }
}
/**
 * Minimal lexer state machine tracking /* ... *&#47; block comments and '...'
 * string literals across successive handleLine calls, so that a lone "/" is
 * only treated as a PL/SQL terminator outside comments and strings.
 */
static class CommentHandler {
    /** True if the line is exactly "/" starting in column one and we are outside comments/strings. */
    boolean isPlsqlTerminator(String pLine) {
        return !isInComment && !isInString && pLine.trim().equals("/") && pLine.startsWith("/");
    }
    private boolean isInComment; // inside a block comment (state persists across lines)
    private boolean isInString;  // inside a string literal (state persists across lines)
    /** Advances the comment/string state over one line; "--" ends scanning for the line. */
    void handleLine(String pTrimedLine) {
        for (int i = 0; i < pTrimedLine.length(); i++) {
            if (isInComment) {
                if (pTrimedLine.charAt(i) == '*') {
                    if (pTrimedLine.length() > i + 1 && pTrimedLine.charAt(i + 1) == '/') {
                        isInComment = false;
                        i++; // skip the '/' of the closing delimiter
                    }
                }
            } else {
                if (!isInString) {
                    if (pTrimedLine.charAt(i) == '/') {
                        if (pTrimedLine.length() > i + 1 && pTrimedLine.charAt(i + 1) == '*') {
                            isInComment = true;
                            i++; // skip the '*' of the opening delimiter
                        }
                    }
                    if (pTrimedLine.charAt(i) == '-') {
                        if (pTrimedLine.length() > i + 1 && pTrimedLine.charAt(i + 1) == '-') {
                            // Rest of the line is a line comment.
                            break;
                        }
                    }
                    if (pTrimedLine.charAt(i) == '\'') {
                        isInString = true;
                    }
                } else {
                    if (pTrimedLine.charAt(i) == '\'') {
                        isInString = false;
                    }
                }
            }
        }
    }
}
}
| |
/*
* Copyright (C) 2009-2013 University of Washington
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.path.episample.android.tasks;
import java.util.HashMap;
import org.kxml2.kdom.Element;
import org.path.common.android.utilities.ClientConnectionManagerFactory;
import org.path.common.android.utilities.DocumentFetchResult;
import org.path.common.android.utilities.ODKFileUtils;
import org.path.common.android.utilities.WebLogger;
import org.path.common.android.utilities.WebUtils;
import org.opendatakit.httpclientandroidlib.client.HttpClient;
import org.opendatakit.httpclientandroidlib.protocol.HttpContext;
import org.path.episample.android.R;
import org.path.episample.android.listeners.FormListDownloaderListener;
import org.path.episample.android.logic.FormDetails;
import org.path.episample.android.logic.PropertiesSingleton;
import org.path.episample.android.preferences.PreferencesActivity;
import android.app.Application;
import android.os.AsyncTask;
/**
* Background task for downloading a formlist from a url.
* In ODK 2.0, the formlist is actually a list of tableIds.
* The actual forms are buried within the zips for those
* tableIds.
*
* @author mitchellsundt@gmail.com
*/
public class DownloadFormListTask extends AsyncTask<Void, String, HashMap<String, FormDetails>> {
  private static final String t = "DownloadFormListTask";

  // used to store error message if one occurs
  public static final String DL_ERROR_MSG = "dlerrormessage";
  public static final String DL_AUTH_REQUIRED = "dlauthrequired";

  // NOTE(review): error reporting calls appContext.getString(); it must be set
  // via setApplication() before execute() or parsing errors NPE -- confirm callers.
  private Application appContext;
  private String appName;
  private FormListDownloaderListener mStateListener;
  private HashMap<String, FormDetails> mFormList;

  private static final String NAMESPACE_OPENROSA_ORG_XFORMS_XFORMS_LIST = "http://openrosa.org/xforms/xformsList";

  /** Returns true if the element belongs to the OpenRosa xformsList namespace. */
  private boolean isXformsListNamespacedElement(Element e) {
    return e.getNamespace().equalsIgnoreCase(NAMESPACE_OPENROSA_ORG_XFORMS_XFORMS_LIST);
  }

  public DownloadFormListTask(String appName) {
    super();
    this.appName = appName;
  }

  /**
   * Logs the parse failure, replaces any partially collected results with a
   * single DL_ERROR_MSG entry, and returns the list so callers can return it
   * directly. Clearing an already-empty list is a no-op, so this helper is
   * safe for the early error paths as well.
   */
  private HashMap<String, FormDetails> parseFailure(HashMap<String, FormDetails> formList,
                                                    String error) {
    WebLogger.getLogger(appName).e(t, "Parsing OpenRosa reply -- " + error);
    formList.clear();
    formList.put(DL_ERROR_MSG,
        new FormDetails(appContext.getString(R.string.parse_openrosa_formlist_failed, error)));
    return formList;
  }

  /** Returns the trimmed text of the child element, mapping empty text to null. */
  private static String childText(Element child) {
    String text = ODKFileUtils.getXMLText(child, true);
    return (text != null && text.length() == 0) ? null : text;
  }

  /**
   * Fetches and parses the OpenRosa formlist from the configured server.
   *
   * @return map of formId to FormDetails on success; on failure a map holding a
   *         single DL_ERROR_MSG (or DL_AUTH_REQUIRED) entry describing the error.
   */
  @Override
  protected HashMap<String, FormDetails> doInBackground(Void... values) {
    // Build the formlist URL from the configured server URL and path.
    String downloadListUrl = PropertiesSingleton.getProperty(appName,
        PreferencesActivity.KEY_SERVER_URL);
    // NOTE: /formlist must not be translated! It is the well-known path on
    // the server.
    String downloadPath = PropertiesSingleton.getProperty(appName,
        PreferencesActivity.KEY_FORMLIST_URL);
    downloadListUrl += downloadPath;
    String auth = PropertiesSingleton.getProperty(appName, PreferencesActivity.KEY_AUTH);
    // We populate this with available forms from the specified server.
    // <formname, details>
    HashMap<String, FormDetails> formList = new HashMap<String, FormDetails>();
    // get shared HttpContext so that authentication and cookies are retained.
    HttpContext localContext = ClientConnectionManagerFactory.get(appName).getHttpContext();
    HttpClient httpclient = ClientConnectionManagerFactory.get(appName)
        .createHttpClient(WebUtils.CONNECTION_TIMEOUT);
    DocumentFetchResult result = WebUtils.get().getXmlDocument(appName, downloadListUrl,
        localContext, httpclient, auth);
    // If we can't get the document, return the error, cancel the task
    if (result.errorMessage != null) {
      if (result.responseCode == 401) {
        formList.put(DL_AUTH_REQUIRED, new FormDetails(result.errorMessage));
      } else {
        formList.put(DL_ERROR_MSG, new FormDetails(result.errorMessage));
      }
      return formList;
    }
    if (!result.isOpenRosaResponse) {
      String error = "Server is not OpenRosa compliant";
      WebLogger.getLogger(appName).e(t, error);
      formList.clear();
      formList.put(DL_ERROR_MSG,
          new FormDetails(appContext.getString(R.string.parse_openrosa_formlist_failed, error)));
      return formList;
    }
    // Attempt OpenRosa 1.0 parsing
    Element xformsElement = result.doc.getRootElement();
    if (!xformsElement.getName().equals("xforms")) {
      return parseFailure(formList, "root element is not <xforms> : " + xformsElement.getName());
    }
    if (!isXformsListNamespacedElement(xformsElement)) {
      return parseFailure(formList,
          "root element namespace is incorrect:" + xformsElement.getNamespace());
    }
    int nElements = xformsElement.getChildCount();
    for (int i = 0; i < nElements; ++i) {
      if (xformsElement.getType(i) != Element.ELEMENT) {
        // e.g., whitespace (text)
        continue;
      }
      Element xformElement = (Element) xformsElement.getElement(i);
      if (!isXformsListNamespacedElement(xformElement)) {
        // someone else's extension?
        continue;
      }
      if (!xformElement.getName().equalsIgnoreCase("xform")) {
        // someone else's extension?
        continue;
      }
      // this is something we know how to interpret
      String formId = null;
      String formName = null;
      String version = null;
      String description = null; // parsed below but currently unused
      String downloadUrl = null;
      String manifestUrl = null;
      String hash = null;
      // don't process descriptionUrl
      int fieldCount = xformElement.getChildCount();
      for (int j = 0; j < fieldCount; ++j) {
        if (xformElement.getType(j) != Element.ELEMENT) {
          // whitespace
          continue;
        }
        Element child = xformElement.getElement(j);
        if (!isXformsListNamespacedElement(child)) {
          // someone else's extension?
          continue;
        }
        String tag = child.getName();
        if (tag.equals("formID")) {
          formId = childText(child);
        } else if (tag.equals("name")) {
          formName = childText(child);
        } else if (tag.equals("version")) {
          version = childText(child);
        } else if (tag.equals("hash")) {
          hash = childText(child);
        } else if (tag.equals("descriptionText")) {
          description = childText(child);
        } else if (tag.equals("downloadUrl")) {
          downloadUrl = childText(child);
        } else if (tag.equals("manifestUrl")) {
          manifestUrl = childText(child);
        }
      }
      if (formId == null || downloadUrl == null || formName == null || hash == null) {
        return parseFailure(formList, "Forms list entry " + Integer.toString(i)
            + " is missing one or more tags: formId, hash, name, or downloadUrl");
      }
      formList.put(formId, new FormDetails(formName, downloadUrl, manifestUrl, formId, version,
          hash));
    }
    return formList;
  }

  /** Publishes the result to the registered listener on the UI thread. */
  @Override
  protected void onPostExecute(HashMap<String, FormDetails> result) {
    synchronized (this) {
      mFormList = result;
      if (mStateListener != null) {
        mStateListener.formListDownloadingComplete(mFormList);
      }
    }
  }

  /** Like onPostExecute, but tolerates a null result when cancelled pre-execution. */
  @Override
  protected void onCancelled(HashMap<String, FormDetails> result) {
    synchronized (this) {
      // can be null if cancelled before task executes
      mFormList = (result == null) ? new HashMap<String, FormDetails>() : result;
      if (mStateListener != null) {
        mStateListener.formListDownloadingComplete(mFormList);
      }
    }
  }

  /** Returns the most recent result; synchronized for cross-thread visibility. */
  public HashMap<String, FormDetails> getFormList() {
    synchronized (this) {
      return mFormList;
    }
  }

  public void setDownloaderListener(FormListDownloaderListener sl) {
    synchronized (this) {
      mStateListener = sl;
    }
  }

  public void setApplication(Application appContext) {
    synchronized (this) {
      this.appContext = appContext;
    }
  }

  /** Returns the Application context; synchronized for consistency with the setter. */
  public Application getApplication() {
    synchronized (this) {
      return appContext;
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.io.network.netty;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.SecurityOptions;
import org.apache.flink.core.testutils.OneShotLatch;
import org.apache.flink.runtime.io.network.netty.NettyTestUtil.NettyServerAndClient;
import org.apache.flink.runtime.net.SSLUtilsTest;
import org.apache.flink.util.NetUtils;
import org.apache.flink.util.TestLogger;
import org.apache.flink.shaded.netty4.io.netty.channel.Channel;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelHandler;
import org.apache.flink.shaded.netty4.io.netty.channel.socket.SocketChannel;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.string.StringDecoder;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.string.StringEncoder;
import org.apache.flink.shaded.netty4.io.netty.handler.ssl.SslHandler;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import javax.net.ssl.SSLSessionContext;
import java.net.InetAddress;
import java.util.List;
import static org.apache.flink.configuration.SecurityOptions.SSL_INTERNAL_CLOSE_NOTIFY_FLUSH_TIMEOUT;
import static org.apache.flink.configuration.SecurityOptions.SSL_INTERNAL_HANDSHAKE_TIMEOUT;
import static org.apache.flink.configuration.SecurityOptions.SSL_INTERNAL_SESSION_CACHE_SIZE;
import static org.apache.flink.configuration.SecurityOptions.SSL_INTERNAL_SESSION_TIMEOUT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/** Tests for the SSL connection between Netty Server and Client used for the data plane. */
@RunWith(Parameterized.class)
public class NettyClientServerSslTest extends TestLogger {

    @Parameterized.Parameter public String sslProvider;

    @Parameterized.Parameters(name = "SSL provider = {0}")
    public static List<String> parameters() {
        return SSLUtilsTest.AVAILABLE_SSL_PROVIDERS;
    }

    /** Verify valid ssl configuration and connection. */
    @Test
    public void testValidSslConnection() throws Exception {
        testValidSslConnection(createSslConfig());
    }

    /** Verify valid (advanced) ssl configuration and connection. */
    @Test
    public void testValidSslConnectionAdvanced() throws Exception {
        Configuration sslConfig = createSslConfig();
        sslConfig.setInteger(SSL_INTERNAL_SESSION_CACHE_SIZE, 1);
        sslConfig.setInteger(SSL_INTERNAL_SESSION_TIMEOUT, 1_000);
        sslConfig.setInteger(SSL_INTERNAL_HANDSHAKE_TIMEOUT, 1_000);
        sslConfig.setInteger(SSL_INTERNAL_CLOSE_NOTIFY_FLUSH_TIMEOUT, 1_000);
        testValidSslConnection(sslConfig);
    }

    /**
     * Establishes an SSL connection with the given config and verifies that the
     * configured handshake/close-notify timeouts and session settings are
     * propagated to both the client-side and the server-side {@link SslHandler}.
     */
    private void testValidSslConnection(Configuration sslConfig) throws Exception {
        OneShotLatch serverChannelInitComplete = new OneShotLatch();
        final SslHandler[] serverSslHandler = new SslHandler[1];
        NettyProtocol protocol = new NoOpProtocol();
        NettyConfig nettyConfig = createNettyConfig(sslConfig);
        final NettyBufferPool bufferPool = new NettyBufferPool(1);
        // Use the testing initializer so the server's SslHandler is captured for
        // the assertions below.
        final NettyServer server =
                NettyTestUtil.initServer(
                        nettyConfig,
                        bufferPool,
                        sslHandlerFactory ->
                                new TestingServerChannelInitializer(
                                        protocol,
                                        sslHandlerFactory,
                                        serverChannelInitComplete,
                                        serverSslHandler));
        final NettyClient client = NettyTestUtil.initClient(nettyConfig, protocol, bufferPool);
        final NettyServerAndClient serverAndClient = new NettyServerAndClient(server, client);
        Channel ch = NettyTestUtil.connect(serverAndClient);
        SslHandler clientSslHandler = (SslHandler) ch.pipeline().get("ssl");
        assertEqualsOrDefault(
                sslConfig,
                SSL_INTERNAL_HANDSHAKE_TIMEOUT,
                clientSslHandler.getHandshakeTimeoutMillis());
        assertEqualsOrDefault(
                sslConfig,
                SSL_INTERNAL_CLOSE_NOTIFY_FLUSH_TIMEOUT,
                clientSslHandler.getCloseNotifyFlushTimeoutMillis());
        // should be able to send text data
        ch.pipeline().addLast(new StringDecoder()).addLast(new StringEncoder());
        ch.writeAndFlush("test").sync();
        // session context is only be available after a session was setup -> this should be true
        // after data was sent
        serverChannelInitComplete.await();
        assertNotNull(serverSslHandler[0]);
        // verify server parameters
        assertEqualsOrDefault(
                sslConfig,
                SSL_INTERNAL_HANDSHAKE_TIMEOUT,
                serverSslHandler[0].getHandshakeTimeoutMillis());
        assertEqualsOrDefault(
                sslConfig,
                SSL_INTERNAL_CLOSE_NOTIFY_FLUSH_TIMEOUT,
                serverSslHandler[0].getCloseNotifyFlushTimeoutMillis());
        SSLSessionContext sessionContext =
                serverSslHandler[0].engine().getSession().getSessionContext();
        assertNotNull("bug in unit test setup: session context not available", sessionContext);
        // note: can't verify session cache setting at the client - delegate to server instead (with
        // our own channel initializer)
        assertEqualsOrDefault(
                sslConfig, SSL_INTERNAL_SESSION_CACHE_SIZE, sessionContext.getSessionCacheSize());
        int sessionTimeout = sslConfig.getInteger(SSL_INTERNAL_SESSION_TIMEOUT);
        if (sessionTimeout != -1) {
            // session timeout config is in milliseconds but the context returns it in seconds
            assertEquals(sessionTimeout / 1000, sessionContext.getSessionTimeout());
        } else {
            assertTrue(
                    "default value (-1) should not be propagated",
                    sessionContext.getSessionTimeout() >= 0);
        }
        NettyTestUtil.shutdown(serverAndClient);
    }

    /**
     * Asserts that {@code actual} equals the configured value of {@code option},
     * unless the config still holds the option's default, in which case only a
     * sane (non-negative) value is required.
     */
    private static void assertEqualsOrDefault(
            Configuration sslConfig, ConfigOption<Integer> option, long actual) {
        long expected = sslConfig.getInteger(option);
        if (expected != option.defaultValue()) {
            assertEquals(expected, actual);
        } else {
            assertTrue(
                    "default value (" + option.defaultValue() + ") should not be propagated",
                    actual >= 0);
        }
    }

    /** Verify failure on invalid ssl configuration. */
    @Test
    public void testInvalidSslConfiguration() throws Exception {
        NettyProtocol protocol = new NoOpProtocol();
        Configuration config = createSslConfig();
        // Modify the keystore password to an incorrect one
        config.setString(SecurityOptions.SSL_INTERNAL_KEYSTORE_PASSWORD, "invalidpassword");
        NettyConfig nettyConfig = createNettyConfig(config);
        NettyTestUtil.NettyServerAndClient serverAndClient = null;
        try {
            serverAndClient = NettyTestUtil.initServerAndClient(protocol, nettyConfig);
            Assert.fail("Created server and client from invalid configuration");
        } catch (Exception ignored) {
            // expected: setup must fail with the wrong keystore password
        }
        NettyTestUtil.shutdown(serverAndClient);
    }

    /** Verify SSL handshake error when untrusted server certificate is used. */
    @Test
    public void testSslHandshakeError() throws Exception {
        NettyProtocol protocol = new NoOpProtocol();
        Configuration config = createSslConfig();
        // Use a server certificate which is not present in the truststore
        config.setString(
                SecurityOptions.SSL_INTERNAL_KEYSTORE, "src/test/resources/untrusted.keystore");
        NettyConfig nettyConfig = createNettyConfig(config);
        NettyTestUtil.NettyServerAndClient serverAndClient =
                NettyTestUtil.initServerAndClient(protocol, nettyConfig);
        Channel ch = NettyTestUtil.connect(serverAndClient);
        ch.pipeline().addLast(new StringDecoder()).addLast(new StringEncoder());
        // Attempting to write data over ssl should fail
        assertFalse(ch.writeAndFlush("test").await().isSuccess());
        NettyTestUtil.shutdown(serverAndClient);
    }

    /** Verify the server rejects a client presenting an untrusted certificate. */
    @Test
    public void testClientUntrustedCertificate() throws Exception {
        final Configuration serverConfig = createSslConfig();
        final Configuration clientConfig = createSslConfig();
        // give the client a different keystore / certificate
        clientConfig.setString(
                SecurityOptions.SSL_INTERNAL_KEYSTORE, "src/test/resources/untrusted.keystore");
        final NettyConfig nettyServerConfig = createNettyConfig(serverConfig);
        final NettyConfig nettyClientConfig = createNettyConfig(clientConfig);
        final NettyBufferPool bufferPool = new NettyBufferPool(1);
        final NettyProtocol protocol = new NoOpProtocol();
        final NettyServer server =
                NettyTestUtil.initServer(nettyServerConfig, protocol, bufferPool);
        final NettyClient client =
                NettyTestUtil.initClient(nettyClientConfig, protocol, bufferPool);
        final NettyServerAndClient serverAndClient = new NettyServerAndClient(server, client);
        final Channel ch = NettyTestUtil.connect(serverAndClient);
        ch.pipeline().addLast(new StringDecoder()).addLast(new StringEncoder());
        // Attempting to write data over ssl should fail
        assertFalse(ch.writeAndFlush("test").await().isSuccess());
        NettyTestUtil.shutdown(serverAndClient);
    }

    /** Verify the connection succeeds when the pinned fingerprint matches the server cert. */
    @Test
    public void testSslPinningForValidFingerprint() throws Exception {
        NettyProtocol protocol = new NoOpProtocol();
        Configuration config = createSslConfig();
        // pin the certificate based on internal cert
        config.setString(
                SecurityOptions.SSL_INTERNAL_CERT_FINGERPRINT,
                SSLUtilsTest.getCertificateFingerprint(config, "flink.test"));
        NettyConfig nettyConfig = createNettyConfig(config);
        NettyTestUtil.NettyServerAndClient serverAndClient =
                NettyTestUtil.initServerAndClient(protocol, nettyConfig);
        Channel ch = NettyTestUtil.connect(serverAndClient);
        ch.pipeline().addLast(new StringDecoder()).addLast(new StringEncoder());
        assertTrue(ch.writeAndFlush("test").await().isSuccess());
        NettyTestUtil.shutdown(serverAndClient);
    }

    /** Verify the connection fails when the pinned fingerprint does not match the server cert. */
    @Test
    public void testSslPinningForInvalidFingerprint() throws Exception {
        NettyProtocol protocol = new NoOpProtocol();
        Configuration config = createSslConfig();
        // pin the certificate based on internal cert, then corrupt the fingerprint
        config.setString(
                SecurityOptions.SSL_INTERNAL_CERT_FINGERPRINT,
                SSLUtilsTest.getCertificateFingerprint(config, "flink.test")
                        .replaceAll("[0-9A-Z]", "0"));
        NettyConfig nettyConfig = createNettyConfig(config);
        NettyTestUtil.NettyServerAndClient serverAndClient =
                NettyTestUtil.initServerAndClient(protocol, nettyConfig);
        Channel ch = NettyTestUtil.connect(serverAndClient);
        ch.pipeline().addLast(new StringDecoder()).addLast(new StringEncoder());
        assertFalse(ch.writeAndFlush("test").await().isSuccess());
        NettyTestUtil.shutdown(serverAndClient);
    }

    /** Creates an internal-SSL config with key and trust stores for the current provider. */
    private Configuration createSslConfig() {
        return SSLUtilsTest.createInternalSslConfigWithKeyAndTrustStores(sslProvider);
    }

    /** Creates a NettyConfig bound to the loopback address on a free port. */
    private static NettyConfig createNettyConfig(Configuration config) {
        return new NettyConfig(
                InetAddress.getLoopbackAddress(),
                NetUtils.getAvailablePort(),
                NettyTestUtil.DEFAULT_SEGMENT_SIZE,
                1,
                config);
    }

    /** Protocol that installs no channel handlers on either side. */
    private static final class NoOpProtocol extends NettyProtocol {

        NoOpProtocol() {
            super(null, null);
        }

        @Override
        public ChannelHandler[] getServerChannelHandlers() {
            return new ChannelHandler[0];
        }

        @Override
        public ChannelHandler[] getClientChannelHandlers() {
            return new ChannelHandler[0];
        }
    }

    /**
     * Wrapper around {@link NettyServer.ServerChannelInitializer} making the server's SSL handler
     * available for the tests.
     */
    private static class TestingServerChannelInitializer
            extends NettyServer.ServerChannelInitializer {
        private final OneShotLatch latch;
        private final SslHandler[] serverHandler;

        TestingServerChannelInitializer(
                NettyProtocol protocol,
                SSLHandlerFactory sslHandlerFactory,
                OneShotLatch latch,
                SslHandler[] serverHandler) {
            super(protocol, sslHandlerFactory);
            this.latch = latch;
            this.serverHandler = serverHandler;
        }

        @Override
        public void initChannel(SocketChannel channel) throws Exception {
            super.initChannel(channel);
            // Capture the SSL handler and signal the test that init completed.
            SslHandler sslHandler = (SslHandler) channel.pipeline().get("ssl");
            assertNotNull(sslHandler);
            serverHandler[0] = sslHandler;
            latch.trigger();
        }
    }
}
| |
package io.wildernesstp.portal;
import com.massivecraft.factions.P;
import io.wildernesstp.Main;
import io.wildernesstp.util.Manager;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.entity.Player;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
* MIT License
* <p>
* Copyright (c) 2019 Quintin VanBooven
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
public final class PortalManager extends Manager {
/**
* Used to speed up look-ups and minimize disk-io.
* Though, the portals are lazily cached (= Only when accessed it will be attempted to get cached).
*/
private static final Map<Integer, Portal> portalCache = new HashMap<>();
private static final Set<PortalEditSession> sessionCache = new HashSet<>();
private File file;
private YamlConfiguration portals;
private final ConfigurationSection root;
public PortalManager(Main plugin) {
super(plugin);
createFile();
portals = getPortalsConfiguration();
this.root = portals.getConfigurationSection("portals");
fillCache();
}
private void createFile(){
file = new File(this.plugin.getDataFolder() , "Portals.yml");
if(!file.exists()){
try {
file.createNewFile();
portals = YamlConfiguration.loadConfiguration(file);
portals.createSection("portals");
portals.save(file);
}catch(IOException e){
e.printStackTrace();
}
}else{
portals = YamlConfiguration.loadConfiguration(file);
}
}
private YamlConfiguration getPortalsConfiguration(){
file = new File(this.plugin.getDataFolder() , "Portals.yml");
return YamlConfiguration.loadConfiguration(file);
}
private void fillCache(){
portals = getPortalsConfiguration();
if(portals.getConfigurationSection("portals")==null)
return;
for(String portalNum : portals.getConfigurationSection("portals").getKeys(false)){
portalCache.putIfAbsent(Integer.parseInt(portalNum), loadFromFile(portalNum));
}
}
public void saveCache(){
file = new File(plugin.getDataFolder(),"Portals.yml");
portals = YamlConfiguration.loadConfiguration(file);
ArrayList<Portal> portalList = new ArrayList<>();
for(String portal : portals.getConfigurationSection("portals").getKeys(false)){
portalList.add(loadFromFile(portal));
}
for (Portal portal : portalCache.values()) {
if(!portalList.contains(portal)) {
plugin.getLogger().info("WE HERE AT LINE 101");
String path = "portals." + (root.getKeys(false).size() + 1) + ".";
portals.set(path + "world", portal.getWorld().getName());
portals.set(path + "pos-one", String.format("%d, %d, %d", portal.getPositionOne().getBlockX(), portal.getPositionOne().getBlockY(), portal.getPositionOne().getBlockZ()));
portals.set(path + "pos-two", String.format("%d, %d, %d", portal.getPositionTwo().getBlockX(), portal.getPositionTwo().getBlockY(), portal.getPositionTwo().getBlockZ()));
portals.set(path+"worldTo",portal.getWorldTo().getName());
}
try {
portals.save(file);
} catch (IOException e) {
e.printStackTrace();
}
}
}
public Portal createPortal(Portal portal) {
if (this.getPortal(this.getPortalId(portal)).isPresent()) {
throw new IllegalStateException("Portal already exists.");
}
portalCache.put(portalCache.size(),portal);
return portal;
}
public void destroyPortal(int id) {
portals = getPortalsConfiguration();
portals.set("portals."+id,null);
try{
portals.save(file);
}catch (IOException e){
e.printStackTrace();
}
}
public void destroyPortal(Portal portal) {
//destroyPortal(getPortalId(portal));\
int id=0;
for(int i : portalCache.keySet()){
if(portalCache.get(i).equals(portal)){
id = i;
destroyPortal(i);
}
}
portalCache.remove(id);
}
public int getPortalId(Portal portal){
for (int min = 0, max = root.getKeys(false).size(), i = min; i < max; i++) {
if (this.getPortal(i).isPresent() && this.getPortal(i).get().equals(portal)) {
return i;
}
}
return -1;
}
public Optional<Portal> getPortal(int id) {
if (portalCache.containsKey(id)) {
return Optional.of(portalCache.get(id));
}
final ConfigurationSection cs = root.getConfigurationSection(String.valueOf(id));
if (cs == null) {
return Optional.empty();
}
final World world = Bukkit.getWorld(Objects.requireNonNull(cs.getString("world")));
final Double[] one = Arrays.stream(Objects.requireNonNull(cs.getString("pos-one")).split(", ")).map(Double::valueOf).toArray(Double[]::new);
final Double[] two = Arrays.stream(Objects.requireNonNull(cs.getString("pos-two")).split(", ")).map(Double::valueOf).toArray(Double[]::new);
final World worldTo = Bukkit.getWorld(Objects.requireNonNull(cs.getString("worldTo",Objects.requireNonNull(cs.getString("world")))));
final Portal portal = new Portal(
new Location(world, one[0], one[1], one[2]),
new Location(world, two[0], two[1], two[2]),
worldTo);
portalCache.putIfAbsent(id, portal);
return Optional.of(portal);
}
public Set<Portal> getPortals() {
final Set<Portal> portals = new HashSet<>();
for (int min = 0, max = root.getKeys(false).size(), i = min; i < max; i++) {
if (this.getPortal(i).isPresent()) {
portals.add(this.getPortal(i).get());
}
}
return portals;
}
public Optional<Portal> getNearbyPortal(Location loc) {
return portalCache.values().stream().filter(p -> {
if(p.getWorld().equals(loc.getWorld()))
return p.contains(loc.toVector());
return false;
}).findAny();
}
public Optional<Portal> getNearbyPortal(Player player, int radius) {
return portalCache.values().stream().filter(p -> {
if(p.getWorld().equals(player.getWorld()))
return p.getPositionOne().distance(player.getLocation()) <= radius || p.getPositionTwo().distance(player.getLocation()) <= radius;
return false;
}).findAny();
}
public PortalEditSession startSession(Player player) {
if (sessionCache.stream().anyMatch(s -> s.getPlayer().equals(player))) {
throw new IllegalStateException("Cannot start session twice.");
}
final PortalEditSession session = new PortalEditSession(player);
sessionCache.add(session);
return session;
}
public void endSession(Player player) {
sessionCache.removeIf(s -> s.getPlayer().equals(player));
}
public Optional<PortalEditSession> getSession(Player player) {
return sessionCache.stream().filter(s -> s.getPlayer().equals(player)).findAny();
}
private Portal loadFromFile(String portalNum){
final World world = Bukkit.getWorld(portals.getString("portals."+portalNum+".world"));
final Double[] one = Arrays.stream(portals.getString("portals."+portalNum+".pos-one").split(", ")).map(Double::valueOf).toArray(Double[]::new);
final Double[] two = Arrays.stream(portals.getString("portals."+portalNum+".pos-two").split(", ")).map(Double::valueOf).toArray(Double[]::new);
final World worldTo = Bukkit.getWorld(Objects.requireNonNull(portals.getString("portals." + portalNum + ".worldTo", portals.getString("portals." + portalNum + ".world"))));
return new Portal(
new Location(world, one[0], one[1], one[2]),
new Location(world, two[0], two[1], two[2]),
worldTo);
}
public Map<Integer, Portal> getCache(){
return portalCache;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.client.impl;
import java.io.File;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Iterator;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.ActiveMQInterruptedException;
import org.apache.activemq.artemis.api.core.Message;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.api.core.client.ClientSessionFactory;
import org.apache.activemq.artemis.api.core.client.MessageHandler;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
import org.apache.activemq.artemis.core.client.ActiveMQClientLogger;
import org.apache.activemq.artemis.core.client.ActiveMQClientMessageBundle;
import org.apache.activemq.artemis.spi.core.remoting.ConsumerContext;
import org.apache.activemq.artemis.spi.core.remoting.SessionContext;
import org.apache.activemq.artemis.utils.FutureLatch;
import org.apache.activemq.artemis.utils.ReusableLatch;
import org.apache.activemq.artemis.utils.TokenBucketLimiter;
import org.apache.activemq.artemis.utils.collections.PriorityLinkedList;
import org.apache.activemq.artemis.utils.collections.PriorityLinkedListImpl;
import org.jboss.logging.Logger;
public final class ClientConsumerImpl implements ClientConsumerInternal {
   // Constants
   // ------------------------------------------------------------------------------------

   private static final Logger logger = Logger.getLogger(ClientConsumerImpl.class);

   // NOTE(review): presumably bounds how long close/stop waits for in-flight
   // work (10s); the usage site is outside this excerpt -- confirm.
   private static final long CLOSE_TIMEOUT_MILLISECONDS = 10000;

   // Number of distinct message priority levels; sizes the priority buffer below.
   private static final int NUM_PRIORITIES = 10;

   // Property name used to tag forced-delivery marker messages.
   public static final SimpleString FORCED_DELIVERY_MESSAGE = new SimpleString("_hornetq.forced.delivery.seq");

   // Attributes
   // -----------------------------------------------------------------------------------

   private final ClientSessionInternal session;
   private final SessionContext sessionContext;
   // Server-side handle identifying this consumer.
   private final ConsumerContext consumerContext;
   private final SimpleString filterString;
   private final int priority;
   private final SimpleString queueName;
   private final boolean browseOnly;
   // Executor used to run message handlers.
   private final Executor sessionExecutor;
   // For failover we can't send credits back
   // while holding a lock or failover could dead lock eventually
   // And we can't use the sessionExecutor as that's being used for message handlers
   // for that reason we have a separate flowControlExecutor that's using the thread pool
   // Which is an OrderedExecutor
   private final Executor flowControlExecutor;
   // Number of pending calls on flow control
   private final ReusableLatch pendingFlowControl = new ReusableLatch(0);
   private final int initialWindow;
   private final int clientWindowSize;
   private final int ackBatchSize;
   // Incoming messages, bucketed into NUM_PRIORITIES priority levels.
   private final PriorityLinkedList<ClientMessageInternal> buffer = new PriorityLinkedListImpl<>(ClientConsumerImpl.NUM_PRIORITIES);
   private final Runner runner = new Runner();
   private LargeMessageControllerImpl currentLargeMessageController;
   // When receiving LargeMessages, the user may choose to not read the body, on this case we need to discard the body
   // before moving to the next message.
   private ClientMessageInternal largeMessageReceived;
   // Optional client-side rate limiter -- NOTE(review): appears nullable when no
   // rate limit is configured; confirm at the call sites.
   private final TokenBucketLimiter rateLimiter;
   // Thread currently blocked in a receive() call, if any.
   private volatile Thread receiverThread;
   // Thread currently running the MessageHandler callback, if any.
   private volatile Thread onMessageThread;
   private volatile MessageHandler handler;
   private volatile boolean closing;
   private volatile boolean closed;
   // Flow-control credits accumulated but not yet sent back to the server.
   private int creditsToSend;
   private volatile boolean failedOver;
   private volatile Exception lastException;
   // Bytes received since the last acknowledgement batch.
   private int ackBytes;
   private volatile ClientMessageInternal lastAckedMessage;
   private boolean stopped = false;
   // Sequence number attached to forced-delivery requests.
   private AtomicLong forceDeliveryCount = new AtomicLong(0);
   private final ClientSession.QueueQuery queueInfo;
   private volatile boolean ackIndividually;
   // Class loader to install as the thread context loader when invoking handlers.
   private final ClassLoader contextClassLoader;
// Constructors
// ---------------------------------------------------------------------------------
/**
 * Creates a client-side consumer bound to {@code queueName} on the given session.
 *
 * @param session             owning session, also used for ack/expire callbacks
 * @param consumerContext     server-side handle identifying this consumer
 * @param queueName           queue the consumer is attached to
 * @param filterString        optional server-side filter; may be null
 * @param priority            consumer priority
 * @param browseOnly          true for a browser (messages are not consumed)
 * @param initialWindow       credits initially granted to the server
 * @param clientWindowSize    flow-control window in bytes; 0 selects slow-consumer mode
 * @param ackBatchSize        bytes to accumulate before sending a batched ack
 * @param rateLimiter         optional consumption rate limiter; may be null
 * @param executor            ordered executor used for handler delivery
 * @param flowControlExecutor separate executor for credit sending (see field comment)
 * @param sessionContext      transport-level session operations
 * @param queueInfo           queue metadata snapshot
 * @param contextClassLoader  TCCL to install around handler callbacks
 */
public ClientConsumerImpl(final ClientSessionInternal session,
                          final ConsumerContext consumerContext,
                          final SimpleString queueName,
                          final SimpleString filterString,
                          final int priority,
                          final boolean browseOnly,
                          final int initialWindow,
                          final int clientWindowSize,
                          final int ackBatchSize,
                          final TokenBucketLimiter rateLimiter,
                          final Executor executor,
                          final Executor flowControlExecutor,
                          final SessionContext sessionContext,
                          final ClientSession.QueueQuery queueInfo,
                          final ClassLoader contextClassLoader) {
   this.consumerContext = consumerContext;
   this.queueName = queueName;
   this.filterString = filterString;
   this.priority = priority;
   this.browseOnly = browseOnly;
   this.sessionContext = sessionContext;
   this.session = session;
   this.rateLimiter = rateLimiter;
   sessionExecutor = executor;
   this.initialWindow = initialWindow;
   this.clientWindowSize = clientWindowSize;
   this.ackBatchSize = ackBatchSize;
   this.queueInfo = queueInfo;
   this.contextClassLoader = contextClassLoader;
   this.flowControlExecutor = flowControlExecutor;
   if (logger.isTraceEnabled()) {
      logger.trace(this + ":: being created at", new Exception("trace"));
   }
}
// ClientConsumer implementation
// -----------------------------------------------------------------
/**
 * @return the server-side context that identifies this consumer.
 */
@Override
public ConsumerContext getConsumerContext() {
   return this.consumerContext;
}
/**
 * Core pull-mode receive loop shared by {@link #receive(long)} and {@link #receiveImmediate()}.
 *
 * @param timeout         how long to wait in milliseconds; 0 means wait indefinitely
 * @param forcingDelivery when true, asks the server (once per call) to push a marker message so
 *                        an empty queue can be detected without waiting out the timeout
 * @return the next message, or {@code null} on timeout, empty queue, or closed consumer
 * @throws ActiveMQException if the consumer is closed, a MessageHandler is installed,
 *                           or the wait is interrupted
 */
private ClientMessage receive(final long timeout, final boolean forcingDelivery) throws ActiveMQException {
   if (logger.isTraceEnabled()) {
      logger.trace(this + "::receive(" + timeout + ", " + forcingDelivery + ")");
   }
   checkClosed();
   // If the previous receive returned a large message whose body was never read,
   // its remaining packets must be drained before the next message can be delivered.
   if (largeMessageReceived != null) {
      if (logger.isTraceEnabled()) {
         logger.trace(this + "::receive(" + timeout + ", " + forcingDelivery + ") -> discard LargeMessage body for " + largeMessageReceived);
      }
      // Check if there are pending packets to be received
      largeMessageReceived.discardBody();
      largeMessageReceived = null;
   }
   if (rateLimiter != null) {
      rateLimiter.limit();
   }
   // Pull mode and push mode are mutually exclusive
   if (handler != null) {
      if (logger.isTraceEnabled()) {
         logger.trace(this + "::receive(" + timeout + ", " + forcingDelivery + ") -> throwing messageHandlerSet");
      }
      throw ActiveMQClientMessageBundle.BUNDLE.messageHandlerSet();
   }
   // Slow consumer: grant a single credit so the server delivers exactly one message
   if (clientWindowSize == 0) {
      if (logger.isTraceEnabled()) {
         logger.trace(this + "::receive(" + timeout + ", " + forcingDelivery + ") -> start slowConsumer");
      }
      startSlowConsumer();
   }
   receiverThread = Thread.currentThread();
   // To verify if deliveryForced was already call
   boolean deliveryForced = false;
   // To control when to call deliveryForce
   boolean callForceDelivery = false;
   long start = -1;
   long toWait = timeout == 0 ? Long.MAX_VALUE : timeout;
   try {
      while (true) {
         ClientMessageInternal m = null;
         synchronized (this) {
            // Wait until a message is buffered, the consumer closes, or the timeout elapses.
            // The loop also guards against spurious wakeups by re-checking its conditions.
            while ((stopped || (m = buffer.poll()) == null) && !closed && toWait > 0) {
               if (start == -1) {
                  start = System.currentTimeMillis();
               }
               if (m == null && forcingDelivery) {
                  if (stopped) {
                     break;
                  }
                  // we only force delivery once per call to receive
                  if (!deliveryForced) {
                     callForceDelivery = true;
                     break;
                  }
               }
               try {
                  wait(toWait);
               } catch (InterruptedException e) {
                  throw new ActiveMQInterruptedException(e);
               }
               if (m != null || closed) {
                  break;
               }
               long now = System.currentTimeMillis();
               toWait -= now - start;
               start = now;
            }
         }
         if (failedOver) {
            if (m == null) {
               if (logger.isTraceEnabled()) {
                  logger.trace(this + "::receive(" + timeout + ", " + forcingDelivery + ") -> m == null and failover");
               }
               // if failed over and the buffer is null, we reset the state and try it again
               failedOver = false;
               deliveryForced = false;
               toWait = timeout == 0 ? Long.MAX_VALUE : timeout;
               continue;
            } else {
               if (logger.isTraceEnabled()) {
                  logger.trace(this + "::receive(" + timeout + ", " + forcingDelivery + ") -> failedOver, but m != null, being " + m);
               }
               failedOver = false;
            }
         }
         if (callForceDelivery) {
            if (logger.isTraceEnabled()) {
               logger.trace(this + "::Forcing delivery");
            }
            // JBPAPP-6030 - Calling forceDelivery outside of the lock to avoid distributed dead locks
            sessionContext.forceDelivery(this, forceDeliveryCount.getAndIncrement());
            callForceDelivery = false;
            deliveryForced = true;
            continue;
         }
         if (m != null) {
            session.workDone();
            if (m.containsProperty(ClientConsumerImpl.FORCED_DELIVERY_MESSAGE)) {
               long seq = m.getLongProperty(ClientConsumerImpl.FORCED_DELIVERY_MESSAGE);
               // Need to check if forceDelivery was called at this call
               // As we could be receiving a message that came from a previous call
               if (forcingDelivery && deliveryForced && seq == forceDeliveryCount.get() - 1) {
                  // forced delivery messages are discarded, nothing has been delivered by the queue
                  resetIfSlowConsumer();
                  if (logger.isTraceEnabled()) {
                     logger.trace(this + "::There was nothing on the queue, leaving it now:: returning null");
                  }
                  return null;
               } else {
                  if (logger.isTraceEnabled()) {
                     logger.trace(this + "::Ignored force delivery answer as it belonged to another call");
                  }
                  // Ignore the message
                  continue;
               }
            }
            // if we have already pre acked we can't expire
            boolean expired = m.isExpired();
            flowControlBeforeConsumption(m);
            if (expired) {
               m.discardBody();
               session.expire(this, m);
               if (clientWindowSize == 0) {
                  startSlowConsumer();
               }
               if (toWait > 0) {
                  continue;
               } else {
                  return null;
               }
            }
            if (m.isLargeMessage()) {
               // Remember it so the next receive() can discard an unread body (see top of method)
               largeMessageReceived = m;
            }
            if (logger.isTraceEnabled()) {
               logger.trace(this + "::Returning " + m);
            }
            return m;
         } else {
            if (logger.isTraceEnabled()) {
               logger.trace(this + "::Returning null");
            }
            resetIfSlowConsumer();
            return null;
         }
      }
   } finally {
      receiverThread = null;
   }
}
/**
 * Receives a message, waiting up to {@code timeout} milliseconds. If nothing arrived in time
 * and the consumer is still open, a single forced-delivery round trip is made so that an
 * empty queue is reported promptly instead of returning a possibly-stale null.
 */
@Override
public ClientMessage receive(final long timeout) throws ActiveMQException {
   if (logger.isTraceEnabled()) {
      logger.trace(this + ":: receive(" + timeout + ")");
   }
   ClientMessage result = receive(timeout, false);
   if (result == null && !closed) {
      if (logger.isTraceEnabled()) {
         logger.trace(this + ":: receive(" + timeout + ") -> null, trying again with receive(0)");
      }
      result = receive(0, true);
   }
   if (logger.isTraceEnabled()) {
      logger.trace(this + ":: returning " + result);
   }
   return result;
}
/** Blocks indefinitely until a message arrives or the consumer is closed. */
@Override
public ClientMessage receive() throws ActiveMQException {
   // timeout 0 without forced delivery means "wait forever"
   return this.receive(0, false);
}
/** Returns a buffered message immediately, or {@code null} after confirming the queue is empty. */
@Override
public ClientMessage receiveImmediate() throws ActiveMQException {
   // forcingDelivery=true lets the server answer "nothing to deliver" right away
   return this.receive(0, true);
}
/**
 * @return the installed {@link MessageHandler}, or {@code null} when in pull mode.
 * @throws ActiveMQException if this consumer has been closed.
 */
@Override
public MessageHandler getMessageHandler() throws ActiveMQException {
   checkClosed();
   return this.handler;
}
/**
 * @return the thread currently running {@code onMessage}, falling back to the thread blocked
 *         in {@code receive()}, or {@code null} if neither is active.
 */
@Override
public Thread getCurrentThread() {
   final Thread delivering = onMessageThread;
   return delivering != null ? delivering : receiverThread;
}
// Must be synchronized since messages may be arriving while handler is being set and might otherwise end
// up not queueing enough executors - so messages get stranded
/**
 * Switches this consumer between pull mode ({@code theHandler == null}) and push mode.
 *
 * @param theHandler the new handler, or null to revert to pull mode
 * @return this consumer, for chaining
 * @throws ActiveMQException if closed, or if a thread is currently blocked in receive()
 */
@Override
public synchronized ClientConsumerImpl setMessageHandler(final MessageHandler theHandler) throws ActiveMQException {
   checkClosed();
   // A handler cannot be installed while a thread is inside receive()
   if (receiverThread != null) {
      throw ActiveMQClientMessageBundle.BUNDLE.inReceive();
   }
   boolean noPreviousHandler = handler == null;
   // Changing the handler on a slow consumer needs an initial credit to (re)start delivery
   if (handler != theHandler && clientWindowSize == 0) {
      startSlowConsumer();
   }
   handler = theHandler;
   // if no previous handler existed queue up messages for delivery
   if (handler != null && noPreviousHandler) {
      requeueExecutors();
   } else if (handler == null && !noPreviousHandler) {
      // if unsetting a previous handler may be in onMessage so wait for completion
      waitForOnMessageToComplete(true);
   }
   return this;
}
/** Closes this consumer, sending the close packet to the server. */
@Override
public void close() throws ActiveMQException {
   // true => notify the server as part of cleanup
   doCleanUp(true);
}
/**
 * To be used by MDBs to stop any more handling of messages.
 * <p>
 * Marks the consumer as closing (so no new deliveries are scheduled), cancels any in-flight
 * large-message download, and schedules {@code future} on the session executor so it runs
 * only after the last already-queued onMessage call has completed.
 *
 * @param future the future to run once the onMessage Thread has completed
 * @return the thread currently inside onMessage, if any (may be null)
 * @throws ActiveMQException
 */
@Override
public Thread prepareForClose(final FutureLatch future) throws ActiveMQException {
   closing = true;
   resetLargeMessageController();
   //execute the future after the last onMessage call
   sessionExecutor.execute(new Runnable() {
      @Override
      public void run() {
         future.run();
      }
   });
   return onMessageThread;
}
/** Client-side cleanup after a connection failure; no close packet is sent to the server. */
@Override
public void cleanUp() {
   try {
      doCleanUp(false);
   } catch (ActiveMQException e) {
      // cleanup is best-effort; just record the failure
      ActiveMQClientLogger.LOGGER.failedCleaningUp(this.toString());
   }
}
/** @return {@code true} once this consumer has been closed or cleaned up. */
@Override
public boolean isClosed() {
   return this.closed;
}
/**
 * Pauses delivery; buffered messages are kept until {@link #start()} is called.
 *
 * @param waitForOnMessage when true, blocks until any in-flight onMessage call finishes
 */
@Override
public void stop(final boolean waitForOnMessage) throws ActiveMQException {
   if (browseOnly) {
      // stop shouldn't affect browser delivery
      return;
   }
   // Flip the flag under the consumer lock; the wait below must happen outside it
   synchronized (this) {
      if (stopped) {
         return;
      }
      stopped = true;
   }
   waitForOnMessageToComplete(waitForOnMessage);
}
/**
 * Resets all client-side delivery state when the session fails over: buffered messages and
 * partial large messages are dropped (the new server will redeliver), pending credits and
 * batched acks are discarded, and {@code failedOver} makes an in-progress receive() retry.
 */
@Override
public void clearAtFailover() {
   if (logger.isTraceEnabled()) {
      logger.trace(this + "::ClearAtFailover");
   }
   clearBuffer();
   // failover will issue a start later
   this.stopped = true;
   resetLargeMessageController();
   lastAckedMessage = null;
   creditsToSend = 0;
   failedOver = true;
   ackIndividually = false;
}
/**
 * Resumes delivery after a stop or failover: re-queues one delivery runner per message
 * already sitting in the local buffer.
 */
@Override
public synchronized void start() {
   stopped = false;
   requeueExecutors();
}
/** @return the last exception thrown by a message handler, or {@code null}. */
@Override
public Exception getLastException() {
   return this.lastException;
}
// ClientConsumerInternal implementation
// --------------------------------------------------------------
/** @return the queue metadata snapshot captured when this consumer was created. */
@Override
public ClientSession.QueueQuery getQueueInfo() {
   return this.queueInfo;
}
/** @return the current forced-delivery sequence number. */
@Override
public long getForceDeliveryCount() {
   return this.forceDeliveryCount.get();
}
/** @return the server-side filter string, or {@code null} if none. */
@Override
public SimpleString getFilterString() {
   return this.filterString;
}
/** @return this consumer's priority. */
@Override
public int getPriority() {
   return this.priority;
}
/** @return the name of the queue this consumer is attached to. */
@Override
public SimpleString getQueueName() {
   return this.queueName;
}
/** @return {@code true} when this consumer browses without consuming. */
@Override
public boolean isBrowseOnly() {
   return this.browseOnly;
}
/**
 * Entry point for non-large messages pushed from the server. Messages whose bodies were
 * compressed by the sender are routed through the large-message machinery so they can be
 * decompressed without holding the whole expanded body in memory.
 */
@Override
public synchronized void handleMessage(final ClientMessageInternal message) throws Exception {
   if (closing) {
      // This is ok - we just ignore the message
      return;
   }
   final boolean compressed = message.getBooleanProperty(Message.HDR_LARGE_COMPRESSED);
   if (compressed) {
      handleCompressedMessage(message);
   } else {
      handleRegularMessage(message);
   }
}
/**
 * Buffers a fully-received message and arranges its delivery: schedules a runner when a
 * handler is installed (push mode), otherwise wakes a thread blocked in receive().
 * Called from the synchronized handleMessage/handleLargeMessage entry points.
 */
private void handleRegularMessage(ClientMessageInternal message) {
   if (message.getAddress() == null) {
      message.setAddress(queueInfo.getAddress());
   }
   message.onReceipt(this);
   // NOTE(review): 4 appears to be treated as the default priority here - any non-default
   // priority flips the consumer to individual acks; confirm against the messaging defaults.
   if (!ackIndividually && message.getPriority() != 4 && !message.containsProperty(ClientConsumerImpl.FORCED_DELIVERY_MESSAGE)) {
      // We have messages of different priorities so we need to ack them individually since the order
      // of them in the ServerConsumerImpl delivery list might not be the same as the order they are
      // consumed in, which means that acking all up to won't work
      ackIndividually = true;
   }
   // Add it to the buffer
   buffer.addTail(message, message.getPriority());
   if (handler != null) {
      // Execute using executor
      if (!stopped) {
         queueExecutor();
      }
   } else {
      // Wake a receive() waiter; callers hold the consumer monitor so notify() is legal here
      notify();
   }
}
/**
 * This method deals with messages arrived as regular message but its contents are compressed.
 * Such messages come from message senders who are configured to compress large messages, and
 * if some of the messages are compressed below the min-large-message-size limit, they are sent
 * as regular messages.
 * <br>
 * However when decompressing the message, we are not sure how large the message could be..
 * for that reason we fake a large message controller that will deal with the message as if it were a large message
 * <br>
 * Say that you sent a 1G message full of spaces. That could be just below 100K compressed but you wouldn't have
 * enough memory to decompress it
 */
private void handleCompressedMessage(final ClientMessageInternal clMessage) throws Exception {
   ClientLargeMessageImpl largeMessage = new ClientLargeMessageImpl();
   largeMessage.retrieveExistingData(clMessage);
   // Optionally spill the decompressed body to a temp file instead of keeping it in memory
   File largeMessageCache = null;
   if (session.isCacheLargeMessageClient()) {
      largeMessageCache = File.createTempFile("tmp-large-message-" + largeMessage.getMessageID() + "-", ".tmp");
      largeMessageCache.deleteOnExit();
   }
   ClientSessionFactory sf = session.getSessionFactory();
   ServerLocator locator = sf.getServerLocator();
   long callTimeout = locator.getCallTimeout();
   currentLargeMessageController = new LargeMessageControllerImpl(this, largeMessage.getLargeMessageSize(), callTimeout, largeMessageCache);
   // local=true: the whole body is already here; no further packets will arrive from the server
   currentLargeMessageController.setLocal(true);
   //sets the packet
   ActiveMQBuffer qbuff = clMessage.toCore().getBodyBuffer();
   final int bytesToRead = qbuff.writerIndex() - qbuff.readerIndex();
   final byte[] body = new byte[bytesToRead];
   qbuff.readBytes(body);
   largeMessage.setLargeMessageController(new CompressedLargeMessageControllerImpl(currentLargeMessageController));
   // Feed the single compressed packet; isContinues=false marks it as the last one
   currentLargeMessageController.addPacket(body, body.length, false);
   handleRegularMessage(largeMessage);
}
/**
 * Starts reception of a large message: installs a {@link LargeMessageControllerImpl} that the
 * subsequent continuation packets will be fed into, then buffers the (header-only) message.
 *
 * @param clientLargeMessage the large-message shell (body arrives in later packets)
 * @param largeMessageSize   total body size announced by the server
 */
@Override
public synchronized void handleLargeMessage(final ClientLargeMessageInternal clientLargeMessage,
                                            long largeMessageSize) throws Exception {
   if (closing) {
      // This is ok - we just ignore the message
      return;
   }
   // Flow control for the first packet, we will have others
   // Optionally spill the body to a temp file instead of keeping it in memory
   File largeMessageCache = null;
   if (session.isCacheLargeMessageClient()) {
      largeMessageCache = File.createTempFile("tmp-large-message-" + clientLargeMessage.getMessageID() + "-", ".tmp");
      largeMessageCache.deleteOnExit();
   }
   ClientSessionFactory sf = session.getSessionFactory();
   ServerLocator locator = sf.getServerLocator();
   long callTimeout = locator.getCallTimeout();
   currentLargeMessageController = new LargeMessageControllerImpl(this, largeMessageSize, callTimeout, largeMessageCache);
   // Compressed bodies get a decorating controller that inflates on read
   if (clientLargeMessage.isCompressed()) {
      clientLargeMessage.setLargeMessageController(new CompressedLargeMessageControllerImpl(currentLargeMessageController));
   } else {
      clientLargeMessage.setLargeMessageController(currentLargeMessageController);
   }
   handleRegularMessage(clientLargeMessage);
}
/**
 * Receives one body chunk of the large message currently being downloaded.
 *
 * @param chunk           the raw bytes of this packet
 * @param flowControlSize credits this packet represents on the wire
 * @param isContinues     true while more packets are expected
 */
@Override
public synchronized void handleLargeMessageContinuation(final byte[] chunk,
                                                        final int flowControlSize,
                                                        final boolean isContinues) throws Exception {
   if (closing) {
      return;
   }
   if (currentLargeMessageController == null) {
      // Download was cancelled (e.g. failover/close); still return the credits so the
      // server-side window doesn't leak
      if (logger.isTraceEnabled()) {
         logger.trace(this + "::Sending back credits for largeController = null " + flowControlSize);
      }
      flowControl(flowControlSize, false);
   } else {
      currentLargeMessageController.addPacket(chunk, flowControlSize, isContinues);
   }
}
/**
 * Drops every locally-buffered message, returning flow-control credits for each so the
 * server-side window stays balanced, and cancels any in-progress large-message download.
 *
 * @param waitForOnMessage when true, blocks until any in-flight onMessage call finishes
 */
@Override
public void clear(boolean waitForOnMessage) throws ActiveMQException {
   synchronized (this) {
      // Need to send credits for the messages in the buffer
      Iterator<ClientMessageInternal> iter = buffer.iterator();
      while (iter.hasNext()) {
         try {
            ClientMessageInternal message = iter.next();
            if (message.isLargeMessage()) {
               ClientLargeMessageInternal largeMessage = (ClientLargeMessageInternal) message;
               largeMessage.getLargeMessageController().cancel();
            }
            flowControlBeforeConsumption(message);
         } catch (Exception e) {
            // best effort per message; keep releasing credits for the rest
            ActiveMQClientLogger.LOGGER.errorClearingMessages(e);
         }
      }
      clearBuffer();
      try {
         resetLargeMessageController();
      } catch (Throwable e) {
         // nothing that could be done here
         ActiveMQClientLogger.LOGGER.errorClearingMessages(e);
      }
   }
   // Need to send credits for the messages in the buffer
   waitForOnMessageToComplete(waitForOnMessage);
}
/** Cancels and clears any in-progress large-message download. */
private void resetLargeMessageController() {
   final LargeMessageController inFlight = currentLargeMessageController;
   if (inFlight != null) {
      inFlight.cancel();
      currentLargeMessageController = null;
   }
}
/** @return the number of credits initially granted to the server. */
@Override
public int getInitialWindowSize() {
   return this.initialWindow;
}
/** @return the flow-control window in bytes; 0 indicates slow-consumer mode. */
@Override
public int getClientWindowSize() {
   return this.clientWindowSize;
}
/** @return how many messages are currently buffered locally. */
@Override
public int getBufferSize() {
   return this.buffer.size();
}
/**
 * Acknowledges a message. In batch mode the ack is deferred until {@code ackBatchSize} bytes
 * have accumulated ("ack up to" semantics); in individual mode (mixed priorities) each
 * message is acked on its own.
 */
@Override
public void acknowledge(final ClientMessage message) throws ActiveMQException {
   ClientMessageInternal cmi = (ClientMessageInternal) message;
   if (ackIndividually) {
      individualAcknowledge(message);
   } else {
      ackBytes += message.getEncodeSize();
      if (logger.isTraceEnabled()) {
         logger.trace(this + "::acknowledge ackBytes=" + ackBytes + " and ackBatchSize=" + ackBatchSize + ", encodeSize=" + message.getEncodeSize());
      }
      if (ackBytes >= ackBatchSize) {
         // Batch threshold reached: flush the ack now
         if (logger.isTraceEnabled()) {
            logger.trace(this + ":: acknowledge acking " + cmi);
         }
         doAck(cmi);
      } else {
         // Defer: remember this message as the high-water mark for the next flush
         if (logger.isTraceEnabled()) {
            logger.trace(this + ":: acknowledge setting lastAckedMessage = " + cmi);
         }
         lastAckedMessage = cmi;
      }
   }
}
/**
 * Acknowledges just the given message. Any pending batched "ack up to" state is flushed
 * first so ordering on the server stays consistent.
 */
@Override
public void individualAcknowledge(ClientMessage message) throws ActiveMQException {
   if (lastAckedMessage != null) {
      flushAcks();
   }
   session.individualAcknowledge(this, message);
}
/**
 * Sends any pending batched acknowledgement to the server.
 * <p>
 * The volatile {@code lastAckedMessage} is snapshotted into a local variable first: the field
 * can be cleared concurrently (e.g. by {@link #clearAtFailover()}), and the original
 * check-then-use pattern could otherwise pass {@code null} to {@link #doAck}.
 */
@Override
public void flushAcks() throws ActiveMQException {
   final ClientMessageInternal pendingAck = lastAckedMessage;
   if (pendingAck != null) {
      if (logger.isTraceEnabled()) {
         logger.trace(this + "::FlushACK acking lastMessage::" + pendingAck);
      }
      doAck(pendingAck);
   }
}
/**
 * LargeMessageBuffer will call flowcontrol here, while other handleMessage will also be calling flowControl.
 * So, this operation needs to be atomic.
 *
 * @param messageBytes         credits to accumulate; sent to the server once the window is reached
 * @param discountSlowConsumer When dealing with slowConsumers, we need to discount one credit that was pre-sent when the first receive was called. For largeMessage that is only done at the latest packet
 */
@Override
public void flowControl(final int messageBytes, final boolean discountSlowConsumer) throws ActiveMQException {
   // clientWindowSize < 0 disables client-side flow control entirely
   if (clientWindowSize >= 0) {
      creditsToSend += messageBytes;
      if (creditsToSend >= clientWindowSize) {
         if (clientWindowSize == 0 && discountSlowConsumer) {
            if (logger.isTraceEnabled()) {
               logger.trace(this + "::FlowControl::Sending " + creditsToSend + " -1, for slow consumer");
            }
            // sending the credits - 1 initially send to fire the slow consumer, or the slow consumer would be
            // always buffering one after received the first message
            final int credits = creditsToSend - 1;
            creditsToSend = 0;
            if (credits > 0) {
               sendCredits(credits);
            }
         } else {
            if (logger.isDebugEnabled()) {
               logger.debug("Sending " + messageBytes + " from flow-control");
            }
            final int credits = creditsToSend;
            creditsToSend = 0;
            if (credits > 0) {
               sendCredits(credits);
            }
         }
      }
   }
}
// Public
// ---------------------------------------------------------------------------------------
// Package protected
// ---------------------------------------------------------------------------------------
// Protected
// ---------------------------------------------------------------------------------------
// Private
// ---------------------------------------------------------------------------------------
/**
 * Sending an initial credit for slow consumers
 * <p>
 * Grants a single credit so the server delivers exactly one message, then waits (bounded)
 * until the credit has actually been sent on the flow-control executor.
 */
private void startSlowConsumer() {
   if (logger.isTraceEnabled()) {
      logger.trace(this + "::Sending 1 credit to start delivering of one message to slow consumer");
   }
   sendCredits(1);
   try {
      // We use an executor here to guarantee the messages will arrive in order.
      // However when starting a slow consumer, we have to guarantee the credit was sent before we can perform any
      // operations like forceDelivery
      pendingFlowControl.await(10, TimeUnit.SECONDS);
   } catch (InterruptedException e) {
      // will just ignore and forward the ignored
      Thread.currentThread().interrupt();
   }
}
/**
 * After a pull attempt that returned nothing, cancels the single outstanding slow-consumer
 * credit (sendCredits(0) acts as the reset signal) and waits for the flow-control executor
 * to drain so the reset has actually reached the wire.
 */
private void resetIfSlowConsumer() {
   if (clientWindowSize == 0) {
      // 0 credits is the "reset slow consumer" signal to the server
      sendCredits(0);
      // If resetting a slow consumer, we need to wait the execution
      final CountDownLatch latch = new CountDownLatch(1);
      flowControlExecutor.execute(new Runnable() {
         @Override
         public void run() {
            latch.countDown();
         }
      });
      try {
         latch.await(10, TimeUnit.SECONDS);
      } catch (InterruptedException e) {
         throw new ActiveMQInterruptedException(e);
      }
   }
}
/**
 * Schedules one delivery {@link Runner} per message currently in the buffer, used when a
 * handler is (re)installed or the consumer is restarted.
 * NOTE(review): buffer.size() is re-read each iteration; callers appear to hold the consumer
 * lock so the buffer should not grow concurrently - confirm.
 */
private void requeueExecutors() {
   for (int i = 0; i < buffer.size(); i++) {
      queueExecutor();
   }
}
/** Schedules the shared {@link Runner} on the session executor to deliver one buffered message. */
private void queueExecutor() {
   if (logger.isTraceEnabled()) {
      logger.trace(this + "::Adding Runner on Executor for delivery");
   }
   this.sessionExecutor.execute(this.runner);
}
/**
 * Asynchronously sends flow-control credits to the server on the dedicated flow-control
 * executor (see the flowControlExecutor field comment for why the session executor is not
 * used). The pendingFlowControl latch tracks in-flight sends so callers can await them.
 *
 * @param credits number of credits to grant (0 is the slow-consumer reset signal)
 */
private void sendCredits(final int credits) {
   pendingFlowControl.countUp();
   flowControlExecutor.execute(new Runnable() {
      @Override
      public void run() {
         try {
            sessionContext.sendConsumerCredits(ClientConsumerImpl.this, credits);
         } finally {
            pendingFlowControl.countDown();
         }
      }
   });
}
/**
 * Blocks (bounded by CLOSE_TIMEOUT_MILLISECONDS) until every delivery already queued on the
 * session executor has run. No-op in pull mode, when the caller asked not to wait, or when
 * called from inside onMessage itself (which would deadlock).
 */
private void waitForOnMessageToComplete(boolean waitForOnMessage) {
   if (handler == null) {
      return;
   }
   if (!waitForOnMessage || Thread.currentThread() == onMessageThread) {
      // If called from inside onMessage then return immediately - otherwise would block
      return;
   }
   // A FutureLatch executed on the (ordered) session executor completes only after all
   // previously queued deliveries have finished
   FutureLatch future = new FutureLatch();
   sessionExecutor.execute(future);
   boolean ok = future.await(ClientConsumerImpl.CLOSE_TIMEOUT_MILLISECONDS);
   if (!ok) {
      ActiveMQClientLogger.LOGGER.timeOutWaitingForProcessing();
   }
}
/** @throws ActiveMQException if this consumer has already been closed. */
private void checkClosed() throws ActiveMQException {
   if (!closed) {
      return;
   }
   throw ActiveMQClientMessageBundle.BUNDLE.consumerClosed();
}
/**
 * Body of the shared {@link Runner}: pulls exactly one message from the buffer and delivers
 * it to the installed handler, applying rate limiting, expiry handling, flow control, and
 * the configured context class loader around the callback.
 */
private void callOnMessage() throws Exception {
   if (closing || stopped) {
      return;
   }
   session.workDone();
   // We pull the message from the buffer from inside the Runnable so we can ensure priority
   // ordering. If we just added a Runnable with the message to the executor immediately as we get it
   // we could not do that
   ClientMessageInternal message;
   // Must store handler in local variable since might get set to null
   // otherwise while this is executing and give NPE when calling onMessage
   MessageHandler theHandler = handler;
   if (theHandler != null) {
      if (rateLimiter != null) {
         rateLimiter.limit();
      }
      failedOver = false;
      synchronized (this) {
         message = buffer.poll();
      }
      if (message != null) {
         if (message.containsProperty(ClientConsumerImpl.FORCED_DELIVERY_MESSAGE)) {
            //Ignore, this could be a relic from a previous receiveImmediate();
            return;
         }
         // if we have already pre acked we can't expire - check expiry before flow control
         boolean expired = message.isExpired();
         flowControlBeforeConsumption(message);
         if (!expired) {
            if (logger.isTraceEnabled()) {
               logger.trace(this + "::Calling handler.onMessage");
            }
            // Install the configured TCCL around the callback, restoring the original after
            final ClassLoader originalLoader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
               @Override
               public ClassLoader run() {
                  ClassLoader originalLoader = Thread.currentThread().getContextClassLoader();
                  Thread.currentThread().setContextClassLoader(contextClassLoader);
                  return originalLoader;
               }
            });
            onMessageThread = Thread.currentThread();
            try {
               theHandler.onMessage(message);
            } finally {
               try {
                  AccessController.doPrivileged(new PrivilegedAction<Object>() {
                     @Override
                     public Object run() {
                        Thread.currentThread().setContextClassLoader(originalLoader);
                        return null;
                     }
                  });
               } catch (Exception e) {
                  ActiveMQClientLogger.LOGGER.failedPerformPostActionsOnMessage(e);
               }
               onMessageThread = null;
            }
            if (logger.isTraceEnabled()) {
               logger.trace(this + "::Handler.onMessage done");
            }
            if (message.isLargeMessage()) {
               // Drain any unread body so the next message can be received
               message.discardBody();
            }
         } else {
            session.expire(this, message);
         }
         // If slow consumer, we need to send 1 credit to make sure we get another message
         if (clientWindowSize == 0) {
            startSlowConsumer();
         }
      }
   }
}
/**
 * Issues flow-control credits for a message that is about to be consumed.
 * Large messages already perform flow control per chunk while being received, so their
 * flowControlSize is 0 here; only non-large messages discount the slow-consumer credit.
 *
 * @param message the message being handed to the application
 * @throws ActiveMQException if sending credits fails
 */
private void flowControlBeforeConsumption(final ClientMessageInternal message) throws ActiveMQException {
   if (message.getFlowControlSize() == 0) {
      // Chunked (large) messages execute flow control while the chunks are received
      return;
   }
   // on large messages we should discount 1 on the first packets as we need continuity until the last packet
   flowControl(message.getFlowControlSize(), !message.isLargeMessage());
}
/**
 * Shared shutdown path for {@link #close()} and {@link #cleanUp()}: waits for in-flight
 * deliveries, flushes pending acks, wakes any receive() waiter, and detaches from the session.
 *
 * @param sendCloseMessage true to notify the server (close), false for local-only cleanup
 */
private void doCleanUp(final boolean sendCloseMessage) throws ActiveMQException {
   try {
      if (closed) {
         return;
      }
      // We need an extra flag closing, since we need to prevent any more messages getting queued to execute
      // after this and we can't just set the closed flag to true here, since after/in onmessage the message
      // might be acked and if the consumer is already closed, the ack will be ignored
      closing = true;
      // Now we wait for any current handler runners to run.
      waitForOnMessageToComplete(true);
      resetLargeMessageController();
      closed = true;
      synchronized (this) {
         if (receiverThread != null) {
            // Wake up any receive() thread that might be waiting
            notify();
         }
         handler = null;
         receiverThread = null;
      }
      flushAcks();
      clearBuffer();
      if (sendCloseMessage) {
         sessionContext.closeConsumer(this);
      }
   } catch (Throwable t) {
      // Consumer close should always return without exception
      // NOTE(review): the failure is swallowed without logging - deliberate per the comment
      // above, but consider at least trace-logging it.
   }
   session.removeConsumer(this);
}
/** Drops every message currently buffered locally (no credits are returned here). */
private void clearBuffer() {
   this.buffer.clear();
}
/**
 * Sends an "ack up to and including {@code message}" to the server and resets the
 * batch-accounting state.
 */
private void doAck(final ClientMessageInternal message) throws ActiveMQException {
   // reset the batch before the (possibly blocking) server call
   ackBytes = 0;
   lastAckedMessage = null;
   if (logger.isTraceEnabled()) {
      logger.trace(this + "::Acking message " + message);
   }
   session.acknowledge(this, message);
}
/** @return a diagnostic string identifying this consumer's context and queue. */
@Override
public String toString() {
   final StringBuilder description = new StringBuilder(super.toString());
   description.append("{consumerContext=").append(consumerContext);
   description.append(", queueName=").append(queueName);
   description.append('}');
   return description.toString();
}
// Inner classes
// --------------------------------------------------------------------------------
/**
 * Shared delivery task: each execution delivers at most one buffered message via
 * {@link #callOnMessage()}. Handler exceptions are logged and recorded in
 * {@code lastException} rather than propagated to the executor.
 */
private class Runner implements Runnable {
   @Override
   public void run() {
      try {
         callOnMessage();
      } catch (Exception e) {
         ActiveMQClientLogger.LOGGER.onMessageError(e);
         lastException = e;
      }
   }
}
}
| |
/*
* Copyright 2012-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.jpa.repository.support;
import java.util.List;
import javax.persistence.EntityManager;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Order;
import org.springframework.data.mapping.PropertyPath;
import org.springframework.data.querydsl.QSort;
import org.springframework.util.Assert;
import com.mysema.query.jpa.EclipseLinkTemplates;
import com.mysema.query.jpa.HQLTemplates;
import com.mysema.query.jpa.JPQLQuery;
import com.mysema.query.jpa.OpenJPATemplates;
import com.mysema.query.jpa.impl.AbstractJPAQuery;
import com.mysema.query.jpa.impl.JPAQuery;
import com.mysema.query.support.Expressions;
import com.mysema.query.types.EntityPath;
import com.mysema.query.types.Expression;
import com.mysema.query.types.OrderSpecifier;
import com.mysema.query.types.OrderSpecifier.NullHandling;
import com.mysema.query.types.Path;
import com.mysema.query.types.path.PathBuilder;
/**
 * Helper instance to ease access to Querydsl JPA query API.
 *
 * @author Oliver Gierke
 * @author Thomas Darimont
 */
public class Querydsl {

    private final EntityManager em;
    private final PersistenceProvider provider;
    private final PathBuilder<?> builder;

    /**
     * Creates a new {@link Querydsl} for the given {@link EntityManager} and {@link PathBuilder}.
     *
     * @param em must not be {@literal null}.
     * @param builder must not be {@literal null}.
     */
    public Querydsl(EntityManager em, PathBuilder<?> builder) {
        // Messages added for consistency with the other Assert.notNull calls in this class.
        Assert.notNull(em, "EntityManager must not be null!");
        Assert.notNull(builder, "PathBuilder must not be null!");
        this.em = em;
        this.provider = PersistenceProvider.fromEntityManager(em);
        this.builder = builder;
    }

    /**
     * Creates the {@link JPQLQuery} instance based on the configured {@link EntityManager},
     * selecting the provider-specific templates where the provider is known.
     *
     * @return a new provider-aware {@link JPAQuery}.
     */
    public AbstractJPAQuery<JPAQuery> createQuery() {
        switch (provider) {
            case ECLIPSELINK:
                return new JPAQuery(em, EclipseLinkTemplates.DEFAULT);
            case HIBERNATE:
                return new JPAQuery(em, HQLTemplates.DEFAULT);
            case OPEN_JPA:
                return new JPAQuery(em, OpenJPATemplates.DEFAULT);
            case GENERIC_JPA:
            default:
                return new JPAQuery(em);
        }
    }

    /**
     * Creates the {@link JPQLQuery} instance based on the configured {@link EntityManager},
     * pre-populated with the given source paths.
     *
     * @param paths the entity paths to select from.
     * @return a new {@link JPAQuery} with the given sources applied.
     */
    public AbstractJPAQuery<JPAQuery> createQuery(EntityPath<?>... paths) {
        return createQuery().from(paths);
    }

    /**
     * Applies the given {@link Pageable} to the given {@link JPQLQuery}.
     *
     * @param pageable may be {@literal null}, in which case the query is returned unchanged.
     * @param query must not be {@literal null}.
     * @return the Querydsl {@link JPQLQuery}.
     */
    public JPQLQuery applyPagination(Pageable pageable, JPQLQuery query) {
        if (pageable == null) {
            return query;
        }
        query.offset(pageable.getOffset());
        query.limit(pageable.getPageSize());
        return applySorting(pageable.getSort(), query);
    }

    /**
     * Applies sorting to the given {@link JPQLQuery}.
     *
     * @param sort may be {@literal null}, in which case the query is returned unchanged.
     * @param query must not be {@literal null}.
     * @return the Querydsl {@link JPQLQuery}
     */
    public JPQLQuery applySorting(Sort sort, JPQLQuery query) {
        if (sort == null) {
            return query;
        }
        // QSort carries ready-made OrderSpecifiers; plain Sort entries must be translated.
        if (sort instanceof QSort) {
            return addOrderByFrom((QSort) sort, query);
        }
        return addOrderByFrom(sort, query);
    }

    /**
     * Applies the given {@link OrderSpecifier}s to the given {@link JPQLQuery}. Potentially transforms the given
     * {@code OrderSpecifier}s to be able to injection potentially necessary left-joins.
     *
     * @param qsort must not be {@literal null}.
     * @param query must not be {@literal null}.
     */
    private JPQLQuery addOrderByFrom(QSort qsort, JPQLQuery query) {
        List<OrderSpecifier<?>> orderSpecifiers = qsort.getOrderSpecifiers();
        return query.orderBy(orderSpecifiers.toArray(new OrderSpecifier[orderSpecifiers.size()]));
    }

    /**
     * Converts the {@link Order} items of the given {@link Sort} into {@link OrderSpecifier} and attaches those to the
     * given {@link JPQLQuery}.
     *
     * @param sort must not be {@literal null}.
     * @param query must not be {@literal null}.
     * @return the query with all orderings applied.
     */
    private JPQLQuery addOrderByFrom(Sort sort, JPQLQuery query) {
        Assert.notNull(sort, "Sort must not be null!");
        Assert.notNull(query, "Query must not be null!");
        for (Order order : sort) {
            query.orderBy(toOrderSpecifier(order, query));
        }
        return query;
    }

    /**
     * Transforms a plain {@link Order} into a QueryDsl specific {@link OrderSpecifier}.
     *
     * @param order must not be {@literal null}.
     * @param query the query being built (currently unused, retained for call-site symmetry).
     * @return the corresponding {@link OrderSpecifier}.
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    private OrderSpecifier<?> toOrderSpecifier(Order order, JPQLQuery query) {
        return new OrderSpecifier(order.isAscending() ? com.mysema.query.types.Order.ASC
                : com.mysema.query.types.Order.DESC, buildOrderPropertyPathFrom(order),
                toQueryDslNullHandling(order.getNullHandling()));
    }

    /**
     * Converts the given {@link org.springframework.data.domain.Sort.NullHandling} to the appropriate Querydsl
     * {@link NullHandling}.
     *
     * @param nullHandling must not be {@literal null}.
     * @return the Querydsl counterpart; {@code NATIVE} maps to {@link NullHandling#Default}.
     * @since 1.6
     */
    private NullHandling toQueryDslNullHandling(org.springframework.data.domain.Sort.NullHandling nullHandling) {
        Assert.notNull(nullHandling, "NullHandling must not be null!");
        switch (nullHandling) {
            case NULLS_FIRST:
                return NullHandling.NullsFirst;
            case NULLS_LAST:
                return NullHandling.NullsLast;
            case NATIVE:
            default:
                return NullHandling.Default;
        }
    }

    /**
     * Creates an {@link Expression} for the given {@link Order} property.
     *
     * @param order must not be {@literal null}.
     * @return a path expression for the property, lower-cased when the order ignores case.
     */
    private Expression<?> buildOrderPropertyPathFrom(Order order) {
        Assert.notNull(order, "Order must not be null!");
        PropertyPath path = PropertyPath.from(order.getProperty(), builder.getType());
        Expression<?> sortPropertyExpression = builder;
        // Walk the property path, nesting one path segment per step so that any needed
        // joins can be derived from the resulting expression tree.
        while (path != null) {
            if (!path.hasNext() && order.isIgnoreCase()) {
                // if order is ignore-case we have to treat the last path segment as a String.
                sortPropertyExpression = Expressions.stringPath((Path<?>) sortPropertyExpression, path.getSegment()).lower();
            } else {
                sortPropertyExpression = Expressions.path(path.getType(), (Path<?>) sortPropertyExpression, path.getSegment());
            }
            path = path.next();
        }
        return sortPropertyExpression;
    }
}
| |
package tests;
import static org.junit.Assert.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import core.analyzer.InterAnalysis;
import core.analyzer.IntraAnalysis;
import core.callGraph.CallGraph;
import core.callGraph.InvertedProcessCallGraph;
import core.callGraph.ProcessCallGraph;
import core.parser.CallGraphParser;
import core.parser.LLVMCallGraphParser;
import core.util.ProcessRunner;
import core.util.ThreadedProcessRunner;
import core.util.ProcessRunner.OutputType;
public class LLVMCallGraphParserTester {
// Parser under test; a fresh instance is created for every test case in setUp().
CallGraphParser parser;
// Runs the external `opt` tool to produce the textual call graph consumed by the parser.
ProcessRunner pr;
/** Creates a fresh parser and process runner before each test. */
@Before
public void setUp() {
   this.parser = new LLVMCallGraphParser();
   this.pr = new ThreadedProcessRunner();
}
/** Releases the per-test fixtures so each test starts from a clean state. */
@After
public void tearDown() {
   this.parser = null;
   this.pr = null;
}
//@Test
/** Intra-procedural analysis over the call graph of tests/test1.bc (currently disabled). */
public void test1Intra() {
   CallGraph invertedProcessCG = new InvertedProcessCallGraph();
   try {
      String callGraph = pr.run( "opt", "-print-callgraph tests/test1.bc", OutputType.STDERR );
      parser.parse( callGraph, invertedProcessCG );
      //printf main
      IntraAnalysis analysis = new IntraAnalysis();
      String args[] = {"3", "65"};
      System.out.println(analysis.analyze(invertedProcessCG, args));
      System.out.println("");
   } catch (Exception e) {
      // Previously only printed the stack trace, which let the test pass on failure.
      e.printStackTrace();
      fail("test1Intra failed: " + e);
   }
}
//@Test
public void test2Intra() {
CallGraph invertedProcessCG = new InvertedProcessCallGraph();
try {
String callGraph = pr.run( "opt", "-print-callgraph tests/test2.bc", OutputType.STDERR );
parser.parse( callGraph, invertedProcessCG );
//A, B
IntraAnalysis analysis = new IntraAnalysis();
String args[] = {"3", "65"};
System.out.println(analysis.analyze(invertedProcessCG, args));
System.out.println("");
invertedProcessCG = null;
} catch (Exception e) {
e.printStackTrace();
}
}
@Test
public void testHttpdIntra() {
CallGraph invertedProcessCG = new InvertedProcessCallGraph();
try {
String callGraph = pr.run( "opt", "-print-callgraph tests/httpd.bc", OutputType.STDERR );
parser.parse( callGraph, invertedProcessCG );
//A, B
IntraAnalysis analysis = new IntraAnalysis();
String args[] = {"3", "65"};
System.out.println(analysis.analyze(invertedProcessCG, args));
System.out.println("");
invertedProcessCG = null;
} catch (Exception e) {
e.printStackTrace();
}
}
//@Test
public void mytest1Intra() {
CallGraph invertedProcessCG = new InvertedProcessCallGraph();
try {
String callGraph = pr.run( "opt", "-print-callgraph tests/mytest1.bc", OutputType.STDERR );
parser.parse( callGraph, invertedProcessCG );
invertedProcessCG = null;
} catch (Exception e) {
e.printStackTrace();
}
}
/******************************
* INTER ANALAYSIS
********************************/
//@Test
public void test1Inter() {
CallGraph processCG = new ProcessCallGraph();
try {
String callGraph = pr.run( "opt", "-print-callgraph tests/test1.bc", OutputType.STDERR );
parser.parse( callGraph, processCG );
//printf main
InterAnalysis analysis = new InterAnalysis();
String args[] = {"1", "3", "65"};
System.out.println(analysis.analyze(processCG, args));
System.out.println("");
processCG = null;
} catch (Exception e) {
e.printStackTrace();
}
}
@Test
public void test2Inter() {
CallGraph processCG = new ProcessCallGraph();
try {
String callGraph = pr.run( "opt", "-print-callgraph tests/test2.bc", OutputType.STDERR );
parser.parse( callGraph, processCG );
//printf main
InterAnalysis analysis = new InterAnalysis();
String args[] = {"10", "3", "65"};
System.out.println( analysis.analyze(processCG, args) );
System.out.println("");
processCG = null;
} catch (Exception e) {
e.printStackTrace();
}
}
//@Test
public void testHttpdInter() {
CallGraph processCG = new ProcessCallGraph();
try {
String callGraph = pr.run( "opt", "-print-callgraph tests/httpd.bc", OutputType.STDERR );
parser.parse( callGraph, processCG );
//printf main
InterAnalysis analysis = new InterAnalysis();
String args[] = {"10", "3", "65"};
System.out.println(analysis.analyze(processCG, args));
System.out.println("");
processCG = null;
} catch (Exception e) {
e.printStackTrace();
}
}
//@Test
public void mytest1Inter() {
CallGraph processCG = new ProcessCallGraph();
try {
String callGraph = pr.run( "opt", "-print-callgraph tests/mytest1.bc", OutputType.STDERR );
parser.parse( callGraph, processCG );
//A, B
InterAnalysis analysis = new InterAnalysis();
String args[] = {"1", "3", "65"};
System.out.println(analysis.analyze(processCG, args));
System.out.println("");
processCG = null;
} catch (Exception e) {
e.printStackTrace();
}
}
//@Test
public void mytest2Inter() {
CallGraph processCG = new ProcessCallGraph();
try {
String callGraph = pr.run( "opt", "-print-callgraph tests/mytest2.bc", OutputType.STDERR );
parser.parse( callGraph, processCG );
//A, B
InterAnalysis analysis = new InterAnalysis();
String args[] = {"1", "3", "65"};
System.out.println(analysis.analyze(processCG, args));
System.out.println("");
processCG = null;
} catch (Exception e) {
e.printStackTrace();
}
}
/*
@Test
public void testIsCallee() {
assertEquals( true, parser.isCallee( " CS<0x0> calls function 'main'" ) );
}
@Test
public void testIsCaller() {
assertEquals( false, parser.isCaller( "Call graph node <<null function>><<0x80049d00>> #uses=0" ) );
assertEquals( true, parser.isCaller( "Call graph node for function: 'main'<<0x80049d30>> #uses=1" ) );
}
@Test
public void testNonCallerNonCallee() {
assertEquals( false, parser.isCaller( " " ) );
assertEquals( false, parser.isCallee( " " ) );
}
@Test
public void testExtractCallerFunctionName() {
assertEquals( null, parser.extractCallerFunctionName( "Call graph node <<null function>><<0x80049d00>> #uses=0" ) );
assertEquals( "main", parser.extractCallerFunctionName( "Call graph node for function: 'main'<<0x80049d30>> #uses=1" ) );
}
@Test
public void testExtractCalleeFunctionName() {
assertEquals( "hahaha", parser.extractCalleeFunctionName( " CS<0x0> calls function 'hahaha'" ) );
}*/
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.property;
import static com.google.common.collect.ImmutableSet.of;
import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
import static org.apache.jackrabbit.JcrConstants.NT_BASE;
import static org.apache.jackrabbit.JcrConstants.NT_FILE;
import static org.apache.jackrabbit.JcrConstants.NT_UNSTRUCTURED;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_CONTENT_NODE_NAME;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NAME;
import static org.apache.jackrabbit.oak.plugins.index.IndexUtils.createIndexDefinition;
import static org.apache.jackrabbit.oak.plugins.index.counter.NodeCounterEditor.COUNT_PROPERTY_NAME;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.apache.jackrabbit.oak.plugins.memory.PropertyStates.createProperty;
import static org.apache.jackrabbit.oak.InitialContent.INITIAL_CONTENT;
import static org.apache.jackrabbit.oak.spi.filter.PathFilter.PROP_EXCLUDED_PATHS;
import static org.apache.jackrabbit.oak.spi.filter.PathFilter.PROP_INCLUDED_PATHS;
import static org.apache.jackrabbit.oak.spi.state.NodeStateUtils.getNode;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Set;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.PropertyValue;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
import org.apache.jackrabbit.oak.plugins.index.IndexUpdateProvider;
import org.apache.jackrabbit.oak.plugins.index.property.strategy.ContentMirrorStoreStrategy;
import org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState;
import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;
import org.apache.jackrabbit.oak.plugins.memory.PropertyValues;
import org.apache.jackrabbit.oak.query.NodeStateNodeTypeInfoProvider;
import org.apache.jackrabbit.oak.query.QueryEngineSettings;
import org.apache.jackrabbit.oak.query.ast.NodeTypeInfo;
import org.apache.jackrabbit.oak.query.ast.NodeTypeInfoProvider;
import org.apache.jackrabbit.oak.query.ast.Operator;
import org.apache.jackrabbit.oak.query.ast.SelectorImpl;
import org.apache.jackrabbit.oak.query.index.FilterImpl;
import org.apache.jackrabbit.oak.query.index.TraversingIndex;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.CompositeHook;
import org.apache.jackrabbit.oak.spi.commit.DefaultValidator;
import org.apache.jackrabbit.oak.spi.commit.EditorHook;
import org.apache.jackrabbit.oak.spi.commit.Validator;
import org.apache.jackrabbit.oak.spi.commit.ValidatorProvider;
import org.apache.jackrabbit.oak.spi.mount.Mount;
import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider;
import org.apache.jackrabbit.oak.spi.mount.Mounts;
import org.apache.jackrabbit.oak.spi.query.Filter;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.junit.Test;
import org.slf4j.LoggerFactory;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.Appender;
import ch.qos.logback.core.read.ListAppender;
import ch.qos.logback.core.spi.FilterReply;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
/**
* Test the Property2 index mechanism.
*/
public class PropertyIndexTest {
// number of dummy nodes created per test to make index benefits measurable
private static final int MANY = 100;
// commit hook that keeps the property indexes up to date in processCommit
private static final EditorHook HOOK = new EditorHook(
new IndexUpdateProvider(new PropertyIndexEditorProvider()));
/**
 * Checks that the estimated index cost grows with the number of values
 * queried: 100 nodes spread over 20 distinct values means roughly 5
 * matches per value, plus a fixed per-query overhead.
 */
@Test
public void costEstimation() throws Exception {
NodeState root = INITIAL_CONTENT;
// Add index definition
NodeBuilder builder = root.builder();
NodeBuilder index = createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "foo",
true, false, ImmutableSet.of("foo"), null);
// disable the estimation
index.setProperty("entryCount", -1);
NodeState before = builder.getNodeState();
// Add some content and process it through the property index hook
for (int i = 0; i < MANY; i++) {
builder.child("n" + i).setProperty("foo", "x" + i % 20);
}
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
FilterImpl f = createFilter(indexed, NT_BASE);
// Query the index
PropertyIndexLookup lookup = new PropertyIndexLookup(indexed);
double cost;
// one value: ~5 matching nodes plus overhead
cost = lookup.getCost(f, "foo", PropertyValues.newString("x1"));
assertTrue("cost: " + cost, cost >= 6.5 && cost <= 7.5);
// the cost scales roughly linearly with the number of values in the "in" list
cost = lookup.getCost(f, "foo", PropertyValues.newString(
Arrays.asList("x1", "x2")));
assertTrue("cost: " + cost, cost >= 11.5 && cost <= 12.5);
cost = lookup.getCost(f, "foo", PropertyValues.newString(
Arrays.asList("x1", "x2", "x3", "x4", "x5")));
assertTrue("cost: " + cost, cost >= 26.5 && cost <= 27.5);
cost = lookup.getCost(f, "foo", PropertyValues.newString(
Arrays.asList("x1", "x2", "x3", "x4", "x5", "x6", "x7", "x8", "x9", "x0")));
assertTrue("cost: " + cost, cost >= 51.5 && cost <= 52.5);
// no value restriction ("is not null"): cost at least the node count
cost = lookup.getCost(f, "foo", null);
assertTrue("cost: " + cost, cost >= MANY);
}
/**
 * Essentially the same test as {@link #costEstimation()}, except that it uses
 * a path constraint in the query and creates similar trees under the two
 * branches {@code path1} and {@code path2}. The cost estimation is then
 * verified for a query restricted to {@code path1} only.
 *
 * @throws Exception
 */
@Test
public void pathBasedCostEstimation() throws Exception {
NodeState root = INITIAL_CONTENT;
// Add index definition
NodeBuilder builder = root.builder();
NodeBuilder index = createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "foo",
true, false, ImmutableSet.of("foo"), null);
// disable the estimation
index.setProperty("entryCount", -1);
// pretend the repository holds 2 * MANY nodes overall
builder.setProperty(COUNT_PROPERTY_NAME, (long) MANY * 2, Type.LONG);
NodeState before = builder.getNodeState();
NodeBuilder path1 = builder.child("path1");
NodeBuilder path2 = builder.child("path2");
// Add some content and process it through the property index hook
for (int i = 0; i < MANY; i++) {
path1.child("n" + i).setProperty("foo", "x" + i % 20);
path2.child("n" + i).setProperty("foo", "x" + i % 20);
}
// declare the node count for the branch the query is restricted to
path1.setProperty(COUNT_PROPERTY_NAME, (long) MANY, Type.LONG);
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
FilterImpl f = createFilter(indexed, NT_BASE);
// restrict the query to one of the two identical branches
f.restrictPath("/path1", Filter.PathRestriction.ALL_CHILDREN);
// Query the index
PropertyIndexLookup lookup = new PropertyIndexLookup(indexed);
double cost;
cost = lookup.getCost(f, "foo", PropertyValues.newString("x1"));
assertTrue("cost: " + cost, cost >= 10 && cost <= 14);
cost = lookup.getCost(f, "foo", PropertyValues.newString(
Arrays.asList("x1", "x2")));
assertTrue("cost: " + cost, cost >= 20 && cost <= 24);
cost = lookup.getCost(f, "foo", PropertyValues.newString(
Arrays.asList("x1", "x2", "x3", "x4", "x5")));
assertTrue("cost: " + cost, cost >= 50 && cost <= 54);
cost = lookup.getCost(f, "foo", PropertyValues.newString(
Arrays.asList("x1", "x2", "x3", "x4", "x5", "x6", "x7", "x8", "x9", "x0")));
assertTrue("cost: " + cost, cost >= 120 && cost <= 124);
// no value restriction ("is not null"): cost at least the node count
cost = lookup.getCost(f, "foo", null);
assertTrue("cost: " + cost, cost >= MANY);
}
/**
 * Verifies that even when the entry count exceeds the estimation threshold,
 * the estimated index cost stays below the cost of traversing the repository
 * (which contains many additional unindexed nodes).
 */
@Test
public void costMaxEstimation() throws Exception {
NodeState root = EmptyNodeState.EMPTY_NODE;
// Add index definition
NodeBuilder builder = root.builder();
createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "foo",
true, false, ImmutableSet.of("foo"), null);
NodeState before = builder.getNodeState();
// 100 nodes in the index:
// with a single level /content cost is 121
// adding a second level /content/data cost is 133
// 101 nodes in the index:
// with a single level /content cost is 121
// adding a second level /content/data cost is 133
// 100 nodes, 12 levels deep, cost is 345
// 101 nodes, 12 levels deep, cost is 345
// threshold for estimation (PropertyIndexLookup.MAX_COST) is at 100
int nodes = 101;
int levels = 12;
// build a 12-level-deep parent chain to make index paths long
NodeBuilder data = builder;
for (int i = 0; i < levels; i++) {
data = data.child("l" + i);
}
// indexed nodes: all carry the same "foo" value
for (int i = 0; i < nodes; i++) {
NodeBuilder c = data.child("c_" + i);
c.setProperty("foo", "azerty");
}
// add more nodes (to make traversal more expensive)
for (int i = 0; i < 10000; i++) {
data.child("cx_" + i);
}
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
FilterImpl f = createFilter(indexed, NT_BASE);
PropertyIndexLookup lookup = new PropertyIndexLookup(indexed);
double cost = lookup.getCost(f, "foo",
PropertyValues.newString("azerty"));
double traversal = new TraversingIndex().getCost(f, indexed);
assertTrue("Estimated cost for " + nodes
+ " nodes should not be higher than traversal (" + cost + " < " + traversal + ")",
cost < traversal);
}
/**
 * Basic lookup test: single-valued and multi-valued properties are found,
 * non-indexed values yield empty results, and the cost of broad queries is
 * at least proportional to the number of matching nodes.
 */
@Test
public void testPropertyLookup() throws Exception {
NodeState root = INITIAL_CONTENT;
// Add index definition
NodeBuilder builder = root.builder();
NodeBuilder index = createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "foo",
true, false, ImmutableSet.of("foo"), null);
// disable the entry-count based estimation
index.setProperty("entryCount", -1);
NodeState before = builder.getNodeState();
// Add some content and process it through the property index hook
builder.child("a").setProperty("foo", "abc");
// a multi-valued property: every value is indexed individually
builder.child("b").setProperty("foo", Arrays.asList("abc", "def"),
Type.STRINGS);
// plus lots of dummy content to highlight the benefit of indexing
for (int i = 0; i < MANY; i++) {
builder.child("n" + i).setProperty("foo", "xyz");
}
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
FilterImpl f = createFilter(indexed, NT_BASE);
// Query the index
PropertyIndexLookup lookup = new PropertyIndexLookup(indexed);
assertEquals(ImmutableSet.of("a", "b"), find(lookup, "foo", "abc", f));
assertEquals(ImmutableSet.of("b"), find(lookup, "foo", "def", f));
assertEquals(ImmutableSet.of(), find(lookup, "foo", "ghi", f));
assertEquals(MANY, find(lookup, "foo", "xyz", f).size());
// null value means "is not null": all MANY dummies plus "a" and "b"
assertEquals(MANY + 2, find(lookup, "foo", null, f).size());
double cost;
cost = lookup.getCost(f, "foo", PropertyValues.newString("xyz"));
assertTrue("cost: " + cost, cost >= MANY);
cost = lookup.getCost(f, "foo", null);
assertTrue("cost: " + cost, cost >= MANY);
}
/**
 * Verifies that a path restriction filters the index results: of two nodes
 * with the same indexed value, only the one under the restricted path is
 * returned.
 */
@Test
public void testPathAwarePropertyLookup() throws Exception {
NodeState root = INITIAL_CONTENT;
// Add index definition
NodeBuilder builder = root.builder();
createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "foo",
true, false, ImmutableSet.of("foo"), null);
NodeState before = builder.getNodeState();
// Add some content and process it through the property index hook
builder.child("a").setProperty("foo", "abc");
builder.child("b").setProperty("foo", "abc");
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
FilterImpl f = createFilter(indexed, NT_BASE);
// only results under /a may be returned
f.restrictPath("/a", Filter.PathRestriction.ALL_CHILDREN);
// Query the index
PropertyIndexLookup lookup = new PropertyIndexLookup(indexed);
assertEquals(ImmutableSet.of("a"), find(lookup, "foo", "abc", f));
}
/**
 * Queries the index for the given property name/value and collects the
 * resulting paths into a set. A {@code null} value queries for
 * "property is not null".
 */
private static Set<String> find(PropertyIndexLookup lookup, String name,
String value, Filter filter) {
PropertyValue propertyValue;
if (value == null) {
propertyValue = null;
} else {
propertyValue = PropertyValues.newString(value);
}
return Sets.newHashSet(lookup.query(filter, name, propertyValue));
}
/**
 * One index definition covering two property names ("foo" and "extrafoo"):
 * both are queryable through the same index, while a name not covered by any
 * index ("pqr") must be rejected.
 */
@Test
public void testCustomConfigPropertyLookup() throws Exception {
NodeState root = INITIAL_CONTENT;
// Add index definition
NodeBuilder builder = root.builder();
createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME),
"fooIndex", true, false, ImmutableSet.of("foo", "extrafoo"),
null);
NodeState before = builder.getNodeState();
// Add some content and process it through the property index hook
builder.child("a").setProperty("foo", "abc")
.setProperty("extrafoo", "pqr");
builder.child("b").setProperty("foo", Arrays.asList("abc", "def"),
Type.STRINGS);
// plus lots of dummy content to highlight the benefit of indexing
for (int i = 0; i < MANY; i++) {
builder.child("n" + i).setProperty("foo", "xyz");
}
NodeState after = builder.getNodeState();
// Add an index
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
FilterImpl f = createFilter(indexed, NT_BASE);
// Query the index
PropertyIndexLookup lookup = new PropertyIndexLookup(indexed);
assertEquals(ImmutableSet.of("a", "b"), find(lookup, "foo", "abc", f));
assertEquals(ImmutableSet.of("b"), find(lookup, "foo", "def", f));
assertEquals(ImmutableSet.of(), find(lookup, "foo", "ghi", f));
assertEquals(MANY, find(lookup, "foo", "xyz", f).size());
assertEquals(ImmutableSet.of("a"), find(lookup, "extrafoo", "pqr", f));
try {
assertEquals(ImmutableSet.of(), find(lookup, "pqr", "foo", f));
fail();
} catch (IllegalArgumentException e) {
// expected: no index for "pqr"
}
}
/**
 * Verifies that the node type declared on the index definition is honored
 * both when indexing and when querying.
 *
 * @see <a href="https://issues.apache.org/jira/browse/OAK-666">OAK-666:
 * Property2Index: node type is used when indexing, but ignored when
 * querying</a>
 */
@Test
public void testCustomConfigNodeType() throws Exception {
NodeState root = INITIAL_CONTENT;
// Add index definitions: one per node type, on overlapping property names
NodeBuilder builder = root.builder();
NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
createIndexDefinition(index, "fooIndex", true, false,
ImmutableSet.of("foo", "extrafoo"),
ImmutableSet.of(NT_UNSTRUCTURED));
createIndexDefinition(index, "fooIndexFile", true, false,
ImmutableSet.of("foo"), ImmutableSet.of(NT_FILE));
NodeState before = builder.getNodeState();
// Add some content and process it through the property index hook;
// both nodes are nt:unstructured, so only "fooIndex" applies
builder.child("a")
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, Type.NAME)
.setProperty("foo", "abc");
builder.child("b")
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, Type.NAME)
.setProperty("foo", Arrays.asList("abc", "def"), Type.STRINGS);
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
FilterImpl f = createFilter(indexed, NT_UNSTRUCTURED);
// Query the index
PropertyIndexLookup lookup = new PropertyIndexLookup(indexed);
assertEquals(ImmutableSet.of("a", "b"), find(lookup, "foo", "abc", f));
assertEquals(ImmutableSet.of("b"), find(lookup, "foo", "def", f));
assertEquals(ImmutableSet.of(), find(lookup, "foo", "ghi", f));
try {
assertEquals(ImmutableSet.of(), find(lookup, "pqr", "foo", f));
fail();
} catch (IllegalArgumentException e) {
// expected: no index for "pqr"
}
}
/**
 * Builds a query filter selecting all nodes of the given node type from the
 * given root state.
 */
private static FilterImpl createFilter(NodeState root, String nodeTypeName) {
NodeTypeInfoProvider infoProvider = new NodeStateNodeTypeInfoProvider(root);
NodeTypeInfo typeInfo = infoProvider.getNodeTypeInfo(nodeTypeName);
SelectorImpl selector = new SelectorImpl(typeInfo, nodeTypeName);
String statement = "SELECT * FROM [" + nodeTypeName + "]";
return new FilterImpl(selector, statement, new QueryEngineSettings());
}
/**
 * Verifies that an index definition without a declared node type still
 * serves queries against a specific node type.
 *
 * @see <a href="https://issues.apache.org/jira/browse/OAK-666">OAK-666:
 * Property2Index: node type is used when indexing, but ignored when
 * querying</a>
 */
@Test
public void testCustomConfigNodeTypeFallback() throws Exception {
NodeState root = EMPTY_NODE;
// Add index definitions: "fooIndex" has no node type restriction,
// "fooIndexFile" is restricted to nt:file
NodeBuilder builder = root.builder();
NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
createIndexDefinition(
index, "fooIndex", true, false,
ImmutableSet.of("foo", "extrafoo"), null);
createIndexDefinition(
index, "fooIndexFile", true, false,
ImmutableSet.of("foo"), ImmutableSet.of(NT_FILE));
NodeState before = builder.getNodeState();
// Add some content and process it through the property index hook
builder.child("a")
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, Type.NAME)
.setProperty("foo", "abc");
builder.child("b")
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, Type.NAME)
.setProperty("foo", Arrays.asList("abc", "def"), Type.STRINGS);
NodeState after = builder.getNodeState();
// Add an index
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
// NOTE(review): the filter is built from the un-indexed "after" state here,
// unlike the other tests which use "indexed" - presumably intentional; confirm
FilterImpl f = createFilter(after, NT_UNSTRUCTURED);
// Query the index
PropertyIndexLookup lookup = new PropertyIndexLookup(indexed);
assertEquals(ImmutableSet.of("a", "b"), find(lookup, "foo", "abc", f));
assertEquals(ImmutableSet.of("b"), find(lookup, "foo", "def", f));
assertEquals(ImmutableSet.of(), find(lookup, "foo", "ghi", f));
try {
assertEquals(ImmutableSet.of(), find(lookup, "pqr", "foo", f));
fail();
} catch (IllegalArgumentException e) {
// expected: no index for "pqr"
}
}
/**
 * Verifies the valuePattern index setting: only values matching the pattern
 * {@code (a.*|b)} are served by the index; queries that may involve
 * non-matching values must report infinite cost (index unusable).
 */
@Test
public void valuePattern() throws Exception {
NodeState root = EMPTY_NODE;
// Add index definitions
NodeBuilder builder = root.builder();
NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
NodeBuilder indexDef = createIndexDefinition(
index, "fooIndex", true, false,
ImmutableSet.of("foo"), null);
// restrict indexing to values matching this regular expression
indexDef.setProperty(IndexConstants.VALUE_PATTERN, "(a.*|b)");
NodeState before = builder.getNodeState();
// Add some content and process it through the property index hook;
// "a", "a1" and "b" match the pattern, "c" does not
builder.child("a")
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, Type.NAME)
.setProperty("foo", "a");
builder.child("a1")
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, Type.NAME)
.setProperty("foo", "a1");
builder.child("b")
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, Type.NAME)
.setProperty("foo", "b");
builder.child("c")
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, Type.NAME)
.setProperty("foo", "c");
NodeState after = builder.getNodeState();
// Add an index
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
FilterImpl f = createFilter(after, NT_UNSTRUCTURED);
// Query the index
PropertyIndexLookup lookup = new PropertyIndexLookup(indexed);
PropertyIndex pIndex = new PropertyIndex(Mounts.defaultMountInfoProvider());
assertEquals(ImmutableSet.of("a"), find(lookup, "foo", "a", f));
assertEquals(ImmutableSet.of("a1"), find(lookup, "foo", "a1", f));
assertEquals(ImmutableSet.of("b"), find(lookup, "foo", "b", f));
// expected: no index for "is not null"
assertTrue(pIndex.getCost(f, indexed) == Double.POSITIVE_INFINITY);
ArrayList<PropertyValue> list = new ArrayList<PropertyValue>();
list.add(PropertyValues.newString("c"));
f.restrictPropertyAsList("foo", list);
// expected: no index for value c
assertTrue(pIndex.getCost(f, indexed) == Double.POSITIVE_INFINITY);
f = createFilter(after, NT_UNSTRUCTURED);
list = new ArrayList<PropertyValue>();
list.add(PropertyValues.newString("a"));
f.restrictPropertyAsList("foo", list);
// expected: the index can be used for value a (it matches the pattern)
assertTrue(pIndex.getCost(f, indexed) < Double.POSITIVE_INFINITY);
}
/**
 * Excluded-prefix variant with the prefix supplied as a single-valued
 * property; shares its assertions with {@link #valuePatternExclude2()} via
 * {@code valuePatternExclude0}.
 */
@Test
public void valuePatternExclude() throws Exception {
NodeState root = EMPTY_NODE;
// Add index definitions
NodeBuilder builder = root.builder();
NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
NodeBuilder indexDef = createIndexDefinition(
index, "fooIndex", true, false,
ImmutableSet.of("foo"), null);
// values starting with "test" are excluded from the index
indexDef.setProperty(IndexConstants.VALUE_EXCLUDED_PREFIXES, "test");
valuePatternExclude0(builder);
}
/**
 * Excluded-prefix variant with the prefix supplied as a multi-valued
 * (STRINGS) property; shares its assertions with {@link #valuePatternExclude()}
 * via {@code valuePatternExclude0}.
 */
@Test
public void valuePatternExclude2() throws Exception {
NodeState root = EMPTY_NODE;
// Add index definitions
NodeBuilder builder = root.builder();
NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
NodeBuilder indexDef = createIndexDefinition(
index, "fooIndex", true, false,
ImmutableSet.of("foo"), null);
// same exclusion as valuePatternExclude, but set as a STRINGS property
PropertyState ps = PropertyStates.createProperty(
IndexConstants.VALUE_EXCLUDED_PREFIXES,
Arrays.asList("test"),
Type.STRINGS);
indexDef.setProperty(ps);
valuePatternExclude0(builder);
}
/**
 * Shared body for the valuePatternExclude tests. With values prefixed
 * "test" excluded, queries that could match excluded values must report
 * infinite cost, while queries restricted to non-excluded values may use
 * the index.
 *
 * @param builder a builder already holding the index definition with the
 *        "test" prefix excluded
 * @throws CommitFailedException if indexing the content fails
 */
private void valuePatternExclude0(NodeBuilder builder) throws CommitFailedException {
NodeState before = builder.getNodeState();
// Add some content and process it through the property index hook
builder.child("a")
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, Type.NAME)
.setProperty("foo", "a");
builder.child("a1")
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, Type.NAME)
.setProperty("foo", "a1");
builder.child("b")
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, Type.NAME)
.setProperty("foo", "b");
builder.child("c")
.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, Type.NAME)
.setProperty("foo", "c");
NodeState after = builder.getNodeState();
// Add an index
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
FilterImpl f = createFilter(after, NT_UNSTRUCTURED);
// Query the index
PropertyIndexLookup lookup = new PropertyIndexLookup(indexed);
PropertyIndex pIndex = new PropertyIndex(Mounts.defaultMountInfoProvider());
assertEquals(ImmutableSet.of("a"), find(lookup, "foo", "a", f));
assertEquals(ImmutableSet.of("a1"), find(lookup, "foo", "a1", f));
assertEquals(ImmutableSet.of("b"), find(lookup, "foo", "b", f));
// expected: no index for "is not null", "= 'test'", "like 't%'"
assertTrue(pIndex.getCost(f, indexed) == Double.POSITIVE_INFINITY);
f.restrictProperty("foo", Operator.EQUAL, PropertyValues.newString("test"));
assertTrue(pIndex.getCost(f, indexed) == Double.POSITIVE_INFINITY);
f = createFilter(after, NT_UNSTRUCTURED);
f.restrictProperty("foo", Operator.LIKE, PropertyValues.newString("t%"));
assertTrue(pIndex.getCost(f, indexed) == Double.POSITIVE_INFINITY);
f = createFilter(after, NT_UNSTRUCTURED);
// expected: index for "like 'a%'" (range is entirely outside the exclusion)
f.restrictProperty("foo", Operator.GREATER_OR_EQUAL, PropertyValues.newString("a"));
f.restrictProperty("foo", Operator.LESS_OR_EQUAL, PropertyValues.newString("a0"));
assertTrue(pIndex.getCost(f, indexed) < Double.POSITIVE_INFINITY);
f = createFilter(after, NT_UNSTRUCTURED);
// expected: index for value c
ArrayList<PropertyValue> list = new ArrayList<PropertyValue>();
list.add(PropertyValues.newString("c"));
f.restrictPropertyAsList("foo", list);
assertTrue(pIndex.getCost(f, indexed) < Double.POSITIVE_INFINITY);
// expected: index for value a
f = createFilter(after, NT_UNSTRUCTURED);
list = new ArrayList<PropertyValue>();
list.add(PropertyValues.newString("a"));
f.restrictPropertyAsList("foo", list);
assertTrue(pIndex.getCost(f, indexed) < Double.POSITIVE_INFINITY);
}
/**
 * A unique index must reject a commit that introduces the same value "abc"
 * under two different nodes.
 */
@Test(expected = CommitFailedException.class)
public void testUnique() throws Exception {
NodeState root = EMPTY_NODE;
// Add index definition (third argument true = unique)
NodeBuilder builder = root.builder();
createIndexDefinition(
builder.child(INDEX_DEFINITIONS_NAME),
"fooIndex", true, true, ImmutableSet.of("foo"), null);
NodeState before = builder.getNodeState();
builder.child("a").setProperty("foo", "abc");
// "abc" appears again in b's multi-valued property -> duplicate
builder.child("b").setProperty("foo", Arrays.asList("abc", "def"),
Type.STRINGS);
NodeState after = builder.getNodeState();
// should throw
HOOK.processCommit(before, after, CommitInfo.EMPTY);
}
/**
 * Re-applying the same change over an already indexed state must not trip
 * the unique constraint for the same node / same value.
 */
@Test
public void testUpdateUnique() throws Exception {
NodeState root = EMPTY_NODE;
NodeBuilder builder = root.builder();
createIndexDefinition(
builder.child(INDEX_DEFINITIONS_NAME),
"fooIndex", true, true, ImmutableSet.of("foo"), null);
NodeState before = builder.getNodeState();
builder.child("a").setProperty("foo", "abc");
NodeState after = builder.getNodeState();
NodeState done = HOOK.processCommit(before, after, CommitInfo.EMPTY);
// remove, and then re-add the same node
// NOTE(review): no removal is visible below; the builder only sets the
// same property again on top of the indexed state - confirm the intent
builder = done.builder();
builder.child("a").setProperty("foo", "abc");
after = builder.getNodeState();
// apply the changes to the state before adding the entries
done = HOOK.processCommit(before, after, CommitInfo.EMPTY);
// re-apply the changes
done = HOOK.processCommit(done, after, CommitInfo.EMPTY);
}
/**
 * A unique index restricted to node type "typeFoo" must NOT reject a
 * duplicate value when the second node has a different type ("typeBar").
 */
@Test
public void testUniqueByTypeOK() throws Exception {
NodeState root = EMPTY_NODE;
// Add index definition, unique and limited to typeFoo
NodeBuilder builder = root.builder();
createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME),
"fooIndex", true, true, ImmutableSet.of("foo"),
ImmutableSet.of("typeFoo"));
NodeState before = builder.getNodeState();
builder.child("a").setProperty(JCR_PRIMARYTYPE, "typeFoo", Type.NAME)
.setProperty("foo", "abc");
// same value but different node type: not covered by the index
builder.child("b").setProperty(JCR_PRIMARYTYPE, "typeBar", Type.NAME)
.setProperty("foo", "abc");
NodeState after = builder.getNodeState();
HOOK.processCommit(before, after, CommitInfo.EMPTY); // should not throw
}
/**
 * A unique index restricted to node type "typeFoo" must reject a duplicate
 * value when both nodes carry that type.
 */
@Test(expected = CommitFailedException.class)
public void testUniqueByTypeKO() throws Exception {
NodeState root = EMPTY_NODE;
// Add index definition, unique and limited to typeFoo
NodeBuilder builder = root.builder();
createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME),
"fooIndex", true, true, ImmutableSet.of("foo"),
ImmutableSet.of("typeFoo"));
NodeState before = builder.getNodeState();
builder.child("a").setProperty(JCR_PRIMARYTYPE, "typeFoo", Type.NAME)
.setProperty("foo", "abc");
// same value AND same indexed node type -> unique-constraint violation
builder.child("b").setProperty(JCR_PRIMARYTYPE, "typeFoo", Type.NAME)
.setProperty("foo", "abc");
NodeState after = builder.getNodeState();
HOOK.processCommit(before, after, CommitInfo.EMPTY); // should throw
}
/**
 * Deleting a node of a non-indexed type must not confuse the unique index:
 * the commit that removes "b" (typeBar) must succeed.
 */
@Test
public void testUniqueByTypeDelete() throws Exception {
NodeState root = EMPTY_NODE;
// Add index definition, unique and limited to typeFoo
NodeBuilder builder = root.builder();
createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME),
"fooIndex", true, true, ImmutableSet.of("foo"),
ImmutableSet.of("typeFoo"));
builder.child("a").setProperty(JCR_PRIMARYTYPE, "typeFoo", Type.NAME)
.setProperty("foo", "abc");
builder.child("b").setProperty(JCR_PRIMARYTYPE, "typeBar", Type.NAME)
.setProperty("foo", "abc");
// both nodes are part of the "before" state; the commit only removes "b"
NodeState before = builder.getNodeState();
builder.getChildNode("b").remove();
NodeState after = builder.getNodeState();
HOOK.processCommit(before, after, CommitInfo.EMPTY); // should not throw
}
/**
 * Verifies that a warning is logged when an index traversal reaches
 * ContentMirrorStoreStrategy.TRAVERSING_WARN entries, and that no warning
 * is logged for a small (100-entry) result set.
 */
@Test
public void traversalWarning() throws Exception {
ListAppender appender = createAndRegisterAppender();
int testDataSize = ContentMirrorStoreStrategy.TRAVERSING_WARN;
NodeState indexed = createTestData(testDataSize);
assertEquals(testDataSize, getResultSize(indexed, "foo", "bar"));
// at the warn threshold at least one log event must have been captured
assertFalse(appender.list.isEmpty());
appender.list.clear();
// well below the threshold, traversal must stay silent
testDataSize = 100;
indexed = createTestData(100);
assertEquals(testDataSize, getResultSize(indexed, "foo", "bar"));
assertTrue("Warning should not be logged for traversing " + testDataSize,
appender.list.isEmpty());
deregisterAppender(appender);
}
/**
 * An index with includedPaths=/test/a must only index (and therefore only
 * return) content under that path.
 */
@Test
public void testPathInclude() throws Exception {
NodeState root = INITIAL_CONTENT;
// Add index definition
NodeBuilder builder = root.builder();
NodeBuilder index = createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "foo",
true, false, ImmutableSet.of("foo"), null);
// restrict the index to content below /test/a
index.setProperty(createProperty(PROP_INCLUDED_PATHS, of("/test/a"), Type.STRINGS));
NodeState before = builder.getNodeState();
// Add some content and process it through the property index hook
builder.child("test").child("a").setProperty("foo", "abc");
builder.child("test").child("b").setProperty("foo", "abc");
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
FilterImpl f = createFilter(indexed, NT_BASE);
// Query the index: only /test/a was indexed
PropertyIndexLookup lookup = new PropertyIndexLookup(indexed);
assertEquals(ImmutableSet.of("test/a"), find(lookup, "foo", "abc", f));
}
/**
 * excludedPaths: content under /test/a is not indexed, and the query planner
 * must opt out (infinite cost) whenever the query's path restriction could
 * reach the excluded subtree.
 */
@Test
public void testPathExclude() throws Exception {
    NodeState root = INITIAL_CONTENT;
    // Add index definition excluding /test/a
    NodeBuilder builder = root.builder();
    NodeBuilder index = createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "foo",
            true, false, ImmutableSet.of("foo"), null);
    index.setProperty(createProperty(PROP_EXCLUDED_PATHS, of("/test/a"), Type.STRINGS));
    NodeState before = builder.getNodeState();
    // Add some content and process it through the property index hook
    builder.child("test").child("a").setProperty("foo", "abc");
    builder.child("test").child("b").setProperty("foo", "abc");
    NodeState after = builder.getNodeState();
    NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
    FilterImpl f = createFilter(indexed, NT_BASE);
    f.restrictProperty("foo", Operator.EQUAL, PropertyValues.newString("abc"));
    // Query the index: only /test/b was indexed
    PropertyIndexLookup lookup = new PropertyIndexLookup(indexed);
    assertEquals(ImmutableSet.of("test/b"), find(lookup, "foo", "abc", f));
    //no path restriction, opt out
    PropertyIndexPlan plan = new PropertyIndexPlan("plan", root, index.getNodeState(), f);
    assertTrue(Double.POSITIVE_INFINITY == plan.getCost());
    //path restriction is not an ancestor of excluded path, index may be used
    f.setPath("/test2");
    plan = new PropertyIndexPlan("plan", root, index.getNodeState(), f);
    assertTrue(Double.POSITIVE_INFINITY != plan.getCost());
    //path restriction is an ancestor of excluded path, opt out
    f.setPath("/test");
    plan = new PropertyIndexPlan("plan", root, index.getNodeState(), f);
    assertTrue(Double.POSITIVE_INFINITY == plan.getCost());
}
/**
 * includedPaths nested with excludedPaths (/test/a included, /test/a/b
 * excluded): /test/a is indexed but its excluded descendant is not, and the
 * planner opts out for any path restriction covering the excluded subtree.
 */
@Test
public void testPathIncludeExclude() throws Exception {
    NodeState root = INITIAL_CONTENT;
    // Add index definition: include /test/a, carve out /test/a/b
    NodeBuilder builder = root.builder();
    NodeBuilder index = createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "foo",
            true, false, ImmutableSet.of("foo"), null);
    index.setProperty(createProperty(PROP_INCLUDED_PATHS, of("/test/a"), Type.STRINGS));
    index.setProperty(createProperty(PROP_EXCLUDED_PATHS, of("/test/a/b"), Type.STRINGS));
    NodeState before = builder.getNodeState();
    // Add some content and process it through the property index hook
    builder.child("test").child("a").setProperty("foo", "abc");
    builder.child("test").child("a").child("b").setProperty("foo", "abc");
    NodeState after = builder.getNodeState();
    NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
    FilterImpl f = createFilter(indexed, NT_BASE);
    f.restrictProperty("foo", Operator.EQUAL, PropertyValues.newString("abc"));
    // Query the index: the excluded descendant is absent
    PropertyIndexLookup lookup = new PropertyIndexLookup(indexed);
    assertEquals(ImmutableSet.of("test/a"), find(lookup, "foo", "abc", f));
    //no path restriction, opt out
    PropertyIndexPlan plan = new PropertyIndexPlan("plan", root, index.getNodeState(), f);
    assertTrue(Double.POSITIVE_INFINITY == plan.getCost());
    //path restriction is not an ancestor of excluded path, index may be used
    f.setPath("/test/a/x");
    plan = new PropertyIndexPlan("plan", root, index.getNodeState(), f);
    assertTrue(Double.POSITIVE_INFINITY != plan.getCost());
    //path restriction is an ancestor of excluded path but no included path, opt out
    f.setPath("/test/a/b");
    plan = new PropertyIndexPlan("plan", root, index.getNodeState(), f);
    assertTrue(Double.POSITIVE_INFINITY == plan.getCost());
    //path restriction is an ancestor of excluded path, opt out
    f.setPath("/test/a");
    plan = new PropertyIndexPlan("plan", root, index.getNodeState(), f);
    assertTrue(Double.POSITIVE_INFINITY == plan.getCost());
}
/**
 * Invalid configuration: an includedPaths entry (/test/a/b) nested inside an
 * excludedPaths entry (/test/a) must be rejected, so processing the commit is
 * expected to throw {@link IllegalStateException}.
 */
@Test
public void testPathExcludeInclude() throws Exception{
    NodeState root = INITIAL_CONTENT;
    // Add index definition with contradictory include/exclude nesting
    NodeBuilder builder = root.builder();
    NodeBuilder index = createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "foo",
            true, false, ImmutableSet.of("foo"), null);
    index.setProperty(createProperty(PROP_INCLUDED_PATHS, of("/test/a/b"), Type.STRINGS));
    index.setProperty(createProperty(PROP_EXCLUDED_PATHS, of("/test/a"), Type.STRINGS));
    NodeState before = builder.getNodeState();
    // Add some content and process it through the property index hook
    builder.child("test").child("a").setProperty("foo", "abc");
    builder.child("test").child("a").child("b").setProperty("foo", "abc");
    NodeState after = builder.getNodeState();
    try {
        HOOK.processCommit(before, after, CommitInfo.EMPTY);
        // fail() gives a meaningful message instead of the opaque assertTrue(false)
        org.junit.Assert.fail("expected IllegalStateException for includedPaths nested under excludedPaths");
    } catch (IllegalStateException expected) {}
}
/**
 * Planner opt-out: a query restricted to /test2 cannot be served by an index
 * whose includedPaths is /test/a, so the plan cost must be infinite.
 */
@Test
public void testPathMismatch() throws Exception {
    NodeState root = INITIAL_CONTENT;
    // Add index definition limited to /test/a (minus /test/a/b)
    NodeBuilder builder = root.builder();
    NodeBuilder index = createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "foo",
            true, false, ImmutableSet.of("foo"), null);
    index.setProperty(createProperty(PROP_INCLUDED_PATHS, of("/test/a"), Type.STRINGS));
    index.setProperty(createProperty(PROP_EXCLUDED_PATHS, of("/test/a/b"), Type.STRINGS));
    NodeState before = builder.getNodeState();
    // Add some content and process it through the property index hook
    builder.child("test").child("a").setProperty("foo", "abc");
    builder.child("test").child("a").child("b").setProperty("foo", "abc");
    NodeState after = builder.getNodeState();
    NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
    FilterImpl f = createFilter(indexed, NT_BASE);
    // /test2 is outside the index's included scope -> opt out
    f.restrictPath("/test2", Filter.PathRestriction.ALL_CHILDREN);
    PropertyIndexPlan plan = new PropertyIndexPlan("plan", root, index.getNodeState(), f);
    assertTrue(Double.POSITIVE_INFINITY == plan.getCost());
}
/**
 * Multiplexed stores: with a mount "foo" covering /a and /m/n, index entries
 * for paths inside the mount must land in the mount-specific index node
 * while all other entries stay in the default :index node. Queries must see
 * the union of both.
 */
@Test
public void singleMount() throws Exception {
    NodeState root = INITIAL_CONTENT;
    // Add index definition
    NodeBuilder builder = root.builder();
    NodeBuilder index = createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "foo",
            true, false, ImmutableSet.of("foo"), null);
    index.setProperty("entryCount", -1);
    NodeState before = builder.getNodeState();
    // Content both inside (/a, /m/n) and outside (/b, /m) the mount
    builder.child("a").setProperty("foo", "abc");
    builder.child("b").child("x").setProperty("foo", "abc");
    builder.child("a").child("x").setProperty("foo", "abc");
    builder.child("m").child("n").setProperty("foo", "abc");
    builder.child("m").child("n").child("o").setProperty("foo", "abc");
    builder.child("m").setProperty("foo", "abc");
    NodeState after = builder.getNodeState();
    MountInfoProvider mip = Mounts.newBuilder()
            .mount("foo", "/a", "/m/n")
            .build();
    Mount fooMount = mip.getMountByName("foo");
    Mount defMount = mip.getDefaultMount();
    EditorHook hook = new EditorHook(
            new IndexUpdateProvider(new PropertyIndexEditorProvider().with(mip)));
    NodeState indexed = hook.processCommit(before, after, CommitInfo.EMPTY);
    FilterImpl f = createFilter(indexed, NT_BASE);
    // Query the index: lookup must merge results from all mounts
    PropertyIndexLookup lookup = new PropertyIndexLookup(indexed,mip);
    assertEquals(ImmutableSet.of("a", "b/x", "a/x", "m", "m/n", "m/n/o"), find(lookup, "foo", "abc", f));
    assertEquals(ImmutableSet.of(), find(lookup, "foo", "ghi", f));
    assertTrue(getNode(indexed, "/oak:index/foo/:index").exists());
    //Separate node for mount
    assertTrue(getNode(indexed, "/oak:index/foo/"+ getNodeForMount(fooMount)).exists());
    //Index entries for paths in foo mount should go to :oak:foo-index
    assertTrue(getNode(indexed, pathInIndex(fooMount, "/oak:index/foo", "/a", "abc")).exists());
    assertTrue(getNode(indexed, pathInIndex(fooMount, "/oak:index/foo", "/a/x", "abc")).exists());
    assertTrue(getNode(indexed, pathInIndex(fooMount, "/oak:index/foo", "/m/n", "abc")).exists());
    assertTrue(getNode(indexed, pathInIndex(fooMount, "/oak:index/foo", "/m/n/o", "abc")).exists());
    assertFalse(getNode(indexed, pathInIndex(defMount, "/oak:index/foo", "/a", "abc")).exists());
    assertFalse(getNode(indexed, pathInIndex(defMount, "/oak:index/foo", "/a/x", "abc")).exists());
    assertFalse(getNode(indexed, pathInIndex(defMount, "/oak:index/foo", "/m/n", "abc")).exists());
    assertFalse(getNode(indexed, pathInIndex(defMount, "/oak:index/foo", "/m/n/o", "abc")).exists());
    //All other index entries should go to :index
    assertTrue(getNode(indexed, pathInIndex(defMount, "/oak:index/foo", "/b", "abc")).exists());
    assertTrue(getNode(indexed, pathInIndex(defMount, "/oak:index/foo", "/b/x", "abc")).exists());
    assertTrue(getNode(indexed, pathInIndex(defMount, "/oak:index/foo", "/m", "abc")).exists());
    assertFalse(getNode(indexed, pathInIndex(fooMount, "/oak:index/foo", "/b", "abc")).exists());
    assertFalse(getNode(indexed, pathInIndex(fooMount, "/oak:index/foo", "/b/x", "abc")).exists());
    //System.out.println(NodeStateUtils.toString(getNode(indexed, "/oak:index/foo")));
}
/**
 * Commits confined to the writable (default) store must pass the read-only
 * mount validator, and the resulting index entry must live under the default
 * mount's index node.
 */
@Test
public void mountWithCommitInWritableMount() throws Exception{
    NodeState root = INITIAL_CONTENT;
    // Add index definition
    NodeBuilder builder = root.builder();
    NodeBuilder index = createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "foo",
            true, false, ImmutableSet.of("foo"), null);
    index.setProperty("entryCount", -1);
    NodeState before = builder.getNodeState();
    // Content lives under /content, well outside the read-only /readOnly mount
    builder.child("content").setProperty("foo", "abc");
    NodeState after = builder.getNodeState();
    MountInfoProvider mip = Mounts.newBuilder()
            .readOnlyMount("foo", "/readOnly")
            .build();
    // Chain the index editor with a validator that rejects writes into the read-only mount
    CompositeHook hook = new CompositeHook(
            new EditorHook(new IndexUpdateProvider(new PropertyIndexEditorProvider().with(mip))),
            new EditorHook(new ValidatorProvider(){
                @Override
                protected Validator getRootValidator(NodeState before, NodeState after, CommitInfo info) {
                    return new PrivateStoreValidator("/", mip);
                }
            })
    );
    NodeState indexed = hook.processCommit(before, after, CommitInfo.EMPTY);
    Mount defMount = mip.getDefaultMount();
    assertTrue(getNode(indexed, pathInIndex(defMount, "/oak:index/foo", "/content", "abc")).exists());
}
/**
 * Unique indexes cannot be enforced across mounts: the same value "abc"
 * appears both inside the /a mount and outside it, so the commit must fail
 * (expected = CommitFailedException).
 */
@Test(expected = CommitFailedException.class)
public void mountAndUniqueIndexes() throws Exception {
    NodeState root = INITIAL_CONTENT;
    // Add unique index definition
    NodeBuilder builder = root.builder();
    NodeBuilder index = createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "foo",
            true, true, ImmutableSet.of("foo"), null);
    index.setProperty("entryCount", -1);
    NodeState before = builder.getNodeState();
    MountInfoProvider mip = Mounts.newBuilder()
            .mount("foo", "/a")
            .build();
    // "/a" lives in the mount, "/b" in the default store; both carry "abc"
    builder.child("a").setProperty("foo", "abc");
    builder.child("b").setProperty("foo", Arrays.asList("abc", "def"),
            Type.STRINGS);
    NodeState after = builder.getNodeState();
    EditorHook hook = new EditorHook(
            new IndexUpdateProvider(new PropertyIndexEditorProvider().with(mip)));
    // should throw
    hook.processCommit(before, after, CommitInfo.EMPTY);
}
/**
 * Builds the repository path of an index entry: the mount-specific index
 * content node under {@code indexPath}, then the indexed value, then the
 * indexed content path appended verbatim.
 */
private static String pathInIndex(Mount mount,
        String indexPath, String indexedPath, String indexedValue){
    StringBuilder entryPath = new StringBuilder(indexPath);
    entryPath.append('/').append(getNodeForMount(mount));
    entryPath.append('/').append(indexedValue);
    entryPath.append(indexedPath);
    return entryPath.toString();
}
/** Returns the name of the index content node used for the given mount (":index" for the default mount). */
private static String getNodeForMount(Mount mount) {
    return Multiplexers.getNodeForMount(mount, INDEX_CONTENT_NODE_NAME);
}
/** Runs a property-index query for {@code name = value} and returns the number of matching paths. */
private int getResultSize(NodeState indexed, String name, String value){
    FilterImpl filter = createFilter(indexed, NT_BASE);
    // Query the index and count the matches
    PropertyIndexLookup indexLookup = new PropertyIndexLookup(indexed);
    Iterable<String> matches = indexLookup.query(filter, name, PropertyValues.newString(value));
    return Iterables.size(matches);
}
/**
 * Creates and indexes {@code entryCount} nodes, each carrying foo=bar at the
 * bottom of a deep chain so that querying the index forces a traversal of at
 * least TRAVERSING_WARN nodes in total.
 *
 * @param entryCount number of indexed entries to create
 * @return the indexed repository state
 */
private NodeState createTestData(int entryCount) throws CommitFailedException {
    NodeState root = INITIAL_CONTENT;
    // Add index definition
    NodeBuilder builder = root.builder();
    NodeBuilder index = createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "foo",
            true, false, ImmutableSet.of("foo"), null);
    // disable the estimation
    index.setProperty("entryCount", -1);
    NodeState before = builder.getNodeState();
    // Depth chosen so entryCount * depth comfortably exceeds the warn threshold
    int depth = ContentMirrorStoreStrategy.TRAVERSING_WARN / entryCount + 10;
    for (int i = 0; i < entryCount; i++) {
        NodeBuilder parentNode = builder.child("n" + i);
        for (int j = 0; j < depth ; j++) {
            parentNode = parentNode.child("c" + j);
        }
        // only the leaf of each chain carries the indexed property
        parentNode.setProperty("foo", "bar");
    }
    NodeState after = builder.getNodeState();
    return HOOK.processCommit(before, after, CommitInfo.EMPTY);
}
/**
 * Registers a logback ListAppender on the root logger that captures only
 * traversal warnings (filtered by {@link TraversingWarningFilter}).
 * Callers must pair this with {@link #deregisterAppender}.
 *
 * @return the started, registered appender (inspect {@code appender.list})
 */
private ListAppender<ILoggingEvent> createAndRegisterAppender() {
    TraversingWarningFilter filter = new TraversingWarningFilter();
    filter.start();
    // parameterized type instead of the raw ListAppender; raw-typed callers still compile
    ListAppender<ILoggingEvent> appender = new ListAppender<>();
    appender.setContext(getContext());
    appender.setName("TestLogCollector");
    appender.addFilter(filter);
    appender.start();
    rootLogger().addAppender(appender);
    return appender;
}
/** Detaches a previously registered test appender from the root logger. */
private void deregisterAppender(Appender<ILoggingEvent> appender){
    rootLogger().detachAppender(appender);
}
/** Returns the logback LoggerContext backing SLF4J in this JVM. */
private static LoggerContext getContext(){
    return (LoggerContext) LoggerFactory.getILoggerFactory();
}
/** Returns logback's root logger, used to attach/detach the test appender. */
private static ch.qos.logback.classic.Logger rootLogger() {
    return getContext().getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME);
}
/**
 * Logback filter that accepts only WARN (or higher) events whose message
 * mentions "Traversed", i.e. the index traversal warning under test.
 */
private static class TraversingWarningFilter extends ch.qos.logback.core.filter.Filter<ILoggingEvent> {
    @Override
    public FilterReply decide(ILoggingEvent event) {
        boolean traversalWarning = event.getLevel().isGreaterOrEqual(Level.WARN)
                && event.getMessage().contains("Traversed");
        return traversalWarning ? FilterReply.ACCEPT : FilterReply.DENY;
    }
}
private class PrivateStoreValidator extends DefaultValidator {
private final String path;
private final MountInfoProvider mountInfoProvider;
public PrivateStoreValidator(String path, MountInfoProvider mountInfoProvider) {
this.path = path;
this.mountInfoProvider = mountInfoProvider;
}
public Validator childNodeAdded(String name, NodeState after) throws CommitFailedException {
return checkPrivateStoreCommit(getCommitPath(name));
}
public Validator childNodeChanged(String name, NodeState before, NodeState after) throws CommitFailedException {
return checkPrivateStoreCommit(getCommitPath(name));
}
public Validator childNodeDeleted(String name, NodeState before) throws CommitFailedException {
return checkPrivateStoreCommit(getCommitPath(name));
}
private Validator checkPrivateStoreCommit(String commitPath) throws CommitFailedException {
Mount mountInfo = mountInfoProvider.getMountByPath(commitPath);
if (mountInfo.isReadOnly()) {
throw new CommitFailedException(CommitFailedException.UNSUPPORTED, 0,
"Unsupported commit to a read-only store "+ commitPath);
}
return new PrivateStoreValidator(commitPath, mountInfoProvider);
}
private String getCommitPath(String changeNodeName) {
return PathUtils.concat(path, changeNodeName);
}
}
}
| |
/**
* Copyright 2011 The PlayN Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package playn.scene;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import pythagoras.f.AffineTransform;
import pythagoras.f.MathUtil;
import pythagoras.f.Point;
import pythagoras.util.NoninvertibleTransformException;
import playn.core.Surface;
/**
* GroupLayer creates a Layer hierarchy by maintaining an ordered group of child Layers.
*/
/**
 * GroupLayer creates a Layer hierarchy by maintaining an ordered group of child Layers.
 * Children are kept sorted by {@link Layer#depth} (lowest depth first) and painted in
 * that order, so higher-depth children draw on top.
 */
public class GroupLayer extends ClippedLayer implements Iterable<Layer> {

  // children ordered by depth, lowest first; insertion position found via binary search
  private final List<Layer> children = new ArrayList<>();
  // scratch transform used to save/restore the surface transform while painting children
  private final AffineTransform paintTx = new AffineTransform();
  // true for unclipped groups (no-arg constructor); clipping is skipped entirely for them
  private final boolean disableClip;

  /** Creates an unclipped group layer. Unclipped groups have no defined size. */
  public GroupLayer () {
    super(0, 0);
    disableClip = true;
  }

  /** Creates a clipped group layer with the specified size. */
  public GroupLayer (float width, float height) {
    super(width, height);
    disableClip = false;
  }

  /** Returns whether this group has any child layers. */
  public boolean isEmpty () { return children.isEmpty(); }

  /** Returns the number of child layers in this group. */
  public int children() {
    return children.size();
  }

  /**
   * Returns the layer at the specified index. Layers are ordered in terms of their depth and will
   * be returned in this order, with 0 being the layer on bottom.
   */
  public Layer childAt(int index) {
    return children.get(index);
  }

  /**
   * Adds a layer to the bottom of the group. Because the {@link Layer} hierarchy is a tree, if
   * {@code child} is already a child of another {@link GroupLayer}, it will be removed before
   * being added to this {@link GroupLayer}.
   */
  public void add(Layer child) {
    // optimization if we're requested to add a child that's already added
    GroupLayer parent = child.parent();
    if (parent == this) return;

    // if this child has equal or greater depth to the last child, we can append directly and avoid
    // a log(N) search; this is helpful when all children have the same depth
    int count = children.size(), index;
    if (count == 0 || children.get(count-1).depth() <= child.depth()) index = count;
    // otherwise find the appropriate insertion point via binary search
    else index = findInsertion(child.depth());

    // remove the child from any existing parent, preventing multiple parents
    if (parent != null) parent.remove(child);
    children.add(index, child);
    child.setParent(this);
    // propagate lifecycle: only notify the child if this group is itself attached
    if (state.get() == State.ADDED) child.onAdd();

    // if this child is active, we need to become active
    if (child.interactive()) setInteractive(true);
  }

  /**
   * Adds all supplied children to this layer, in order. See {@link #add(Layer)}.
   */
  public void add(Layer child0, Layer child1, Layer... childN) {
    add(child0);
    add(child1);
    for (Layer child : childN) add(child);
  }

  /**
   * Adds the supplied layer to this group layer, adjusting its translation (relative to this group
   * layer) to the supplied values.
   *
   * <p>This is equivalent to: {@code add(child.setTranslation(tx, ty))}.
   */
  public void addAt(Layer child, float tx, float ty) {
    add(child.setTranslation(tx, ty));
  }

  /**
   * Adds {@code child} to this group layer, positioning it such that its center is at ({@code tx},
   * {@code ty}). The layer must report a non-zero size, thus this will not work on an unclipped
   * group layer.
   *
   * <p>This is equivalent to: {@code add(child.setTranslation(tx - child.width()/2,
   * ty - child.height()/2))}.
   */
  public void addCenterAt (Layer child, float tx, float ty) {
    add(child.setTranslation(tx - child.width()/2, ty - child.height()/2));
  }

  /**
   * Adds {@code child} to this group layer, adjusting its translation (relative to this group
   * layer) to {@code floor(tx), floor(ty)}. This is useful for adding layers which display text,
   * as text can become blurry if it is positioned on sub-pixel boundaries.
   */
  public void addFloorAt (Layer child, float tx, float ty) {
    add(child.setTranslation(MathUtil.ifloor(tx), MathUtil.ifloor(ty)));
  }

  /**
   * Removes a layer from the group.
   *
   * @throws UnsupportedOperationException if {@code child} is not a child of this group.
   */
  public void remove(Layer child) {
    int index = findChild(child, child.depth());
    if (index < 0) {
      throw new UnsupportedOperationException(
        "Could not remove Layer because it is not a child of the GroupLayer " +
          "[group=" + this + ", layer=" + child + "]");
    }
    remove(index);
  }

  /**
   * Removes all supplied children from this layer, in order. See {@link #remove(Layer)}.
   */
  public void remove(Layer child0, Layer child1, Layer... childN) {
    remove(child0);
    remove(child1);
    for (Layer child : childN) remove(child);
  }

  /**
   * Removes all child layers from this group.
   */
  public void removeAll() {
    // remove from the end to avoid shifting the remaining elements on each removal
    while (!children.isEmpty()) remove(children.size()-1);
  }

  /**
   * Removes and disposes all child layers from this group.
   */
  public void disposeAll() {
    Layer[] toDispose = children.toArray(new Layer[children.size()]);
    // remove all of the children efficiently
    removeAll();
    // now that the children have been detached, dispose them
    for (Layer child : toDispose) child.close();
  }

  @Override public Iterator<Layer> iterator () {
    return children.iterator();
  }

  @Override public void close() {
    super.close();
    disposeAll();
  }

  @Override public Layer hitTestDefault(Point point) {
    float x = point.x, y = point.y;
    boolean sawInteractiveChild = false;
    // we check back to front as children are ordered "lowest" first
    for (int ii = children.size()-1; ii >= 0; ii--) {
      Layer child = children.get(ii);
      if (!child.interactive()) continue; // ignore non-interactive children
      sawInteractiveChild = true; // note that we saw an interactive child
      if (!child.visible()) continue; // ignore invisible children
      try {
        // transform the point into the child's coordinate system
        child.transform().inverseTransform(point.set(x, y), point);
        point.x += child.originX();
        point.y += child.originY();
        Layer l = child.hitTest(point);
        if (l != null)
          return l;
      } catch (NoninvertibleTransformException nte) {
        // Degenerate transform means no hit
        continue;
      }
    }
    // if we saw no interactive children and we don't have listeners registered directly on this
    // group, clear our own interactive flag; this lazily deactivates this group after its
    // interactive children have been deactivated or removed
    if (!sawInteractiveChild && !hasEventListeners()) setInteractive(false);
    return super.hitTestDefault(point);
  }

  @Override protected boolean disableClip () {
    return disableClip;
  }

  @Override protected void paintClipped (Surface surf) {
    // save our current transform and restore it before painting each child
    paintTx.set(surf.tx());
    // iterate manually to avoid creating an Iterator as garbage, this is inner-loop territory
    List<Layer> children = this.children;
    for (int ii = 0, ll = children.size(); ii < ll; ii++) {
      surf.tx().set(paintTx);
      children.get(ii).paint(surf);
    }
  }

  // called by a child when its depth changes; re-sorts it into position and
  // returns its (possibly unchanged) index in the children list
  int depthChanged(Layer child, float oldDepth) {
    // locate the child whose depth changed
    int oldIndex = findChild(child, oldDepth);

    // fast path for depth changes that don't change ordering
    float newDepth = child.depth();
    boolean leftCorrect = (oldIndex == 0 || children.get(oldIndex-1).depth() <= newDepth);
    boolean rightCorrect = (oldIndex == children.size()-1 ||
                            children.get(oldIndex+1).depth() >= newDepth);
    if (leftCorrect && rightCorrect) {
      return oldIndex;
    }

    // it would be great if we could move an element from one place in an ArrayList to another
    // (portably), but instead we have to remove and re-add
    children.remove(oldIndex);
    int newIndex = findInsertion(newDepth);
    children.add(newIndex, child);
    return newIndex;
  }

  @Override void onAdd() {
    super.onAdd();
    for (int ii = 0, ll = children.size(); ii < ll; ii++) children.get(ii).onAdd();
  }

  @Override void onRemove() {
    super.onRemove();
    for (int ii = 0, ll = children.size(); ii < ll; ii++) children.get(ii).onRemove();
  }

  // group layers do not deactivate when their last event listener is removed; they may still have
  // interactive children to which events need to be dispatched; when a hit test is performed on a
  // group layer and it discovers that it has no interactive children, it will deactivate itself
  @Override protected boolean deactivateOnNoListeners () { return false; }

  // detaches and notifies the child at index; does NOT dispose it
  private void remove(int index) {
    Layer child = children.remove(index);
    child.onRemove();
    child.setParent(null);
  }

  // uses depth to improve upon a full linear search
  private int findChild(Layer child, float depth) {
    // findInsertion will find us some element with the same depth as the to-be-removed child
    int startIdx = findInsertion(depth);
    // search down for our child
    for (int ii = startIdx-1; ii >= 0; ii--) {
      Layer c = children.get(ii);
      if (c == child) {
        return ii;
      }
      if (c.depth() != depth) {
        break;
      }
    }
    // search up for our child
    for (int ii = startIdx, ll = children.size(); ii < ll; ii++) {
      Layer c = children.get(ii);
      if (c == child) {
        return ii;
      }
      if (c.depth() != depth) {
        break;
      }
    }
    return -1;
  }

  // who says you never have to write binary search?
  private int findInsertion(float depth) {
    int low = 0, high = children.size()-1;
    while (low <= high) {
      int mid = (low + high) >>> 1; // overflow-safe midpoint
      float midDepth = children.get(mid).depth();
      if (depth > midDepth) {
        low = mid + 1;
      } else if (depth < midDepth) {
        high = mid - 1;
      } else {
        return mid;
      }
    }
    return low;
  }
}
| |
package com.jivesoftware.jivesdk.impl.http;
import com.google.common.base.Optional;
import com.google.common.collect.Maps;
import com.jivesoftware.jivesdk.api.RestAccessException;
import com.jivesoftware.jivesdk.api.RestDriver;
import com.jivesoftware.jivesdk.impl.http.get.RestGetRequest;
import com.jivesoftware.jivesdk.impl.http.post.RestPostRequest;
import com.jivesoftware.jivesdk.impl.http.put.RestPutRequest;
import com.jivesoftware.jivesdk.impl.utils.JiveSDKUtils;
import com.jivesoftware.jivesdk.server.ServerConstants;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
/**
* Client code to abstract communication with a Jive instance.
*/
public class RestDriverImpl implements RestDriver {
private static final Logger log = LoggerFactory.getLogger(RestDriverImpl.class);
private static final AtomicLong NEXT_VERSION = new AtomicLong(1);
public static final String DUMP_DELIMIT = ";;;";
public static final int DEFAULT_HTTP_NUM_OF_RETRIES = 3;
private HttpClientFactory httpClientFactory;
/** Executes {@code request} with a default HTTP client from the factory. */
@Nonnull
@Override
public HttpResponse execute(@Nonnull RestRequest request) throws AuthTokenException, RestAccessException {
    return executeWithClient(request, getHttpClient());
}
/** Executes {@code request} with an HTTP client configured with the given timeout. */
@Nonnull
@Override
public HttpResponse execute(@Nonnull RestRequest request, int timeout) throws AuthTokenException, RestAccessException {
    return executeWithClient(request, getHttpClient(timeout));
}
/** Executes {@code request} with the supplied client and the default retry budget. */
@Nonnull
@Override
public HttpResponse executeWithClient(@Nonnull RestRequest request, @Nonnull HttpClient client) throws AuthTokenException, RestAccessException {
    return executeWithClient(request, client, DEFAULT_HTTP_NUM_OF_RETRIES);
}
/**
 * Executes {@code request} with the supplied client, retrying up to
 * {@code numberOfRetries} times on retriable status codes (after a 500ms
 * pause) and on IOExceptions. On success (2xx) the raw entity stream is
 * handed to the response; on other statuses the body is read and logged.
 *
 * @throws AuthTokenException on HTTP 401
 * @throws RestAccessException when all IO retries are exhausted
 */
@Nonnull
@Override
public HttpResponse executeWithClient(@Nonnull RestRequest request, @Nonnull HttpClient client,
                                      int numberOfRetries) throws AuthTokenException, RestAccessException {
    HttpRequestBase httpRequest = null;
    HttpResponse response = null;
    HttpEntity entity = null;
    long currentVer = -1;
    try {
        currentVer = dumpRequestIfDebugging(request);
        httpRequest = request.create();
        httpRequest.setHeader("user-agent", "jive-sdk-java/0.1");
        org.apache.http.HttpResponse httpResponse = client.execute(httpRequest);
        int statusCode = httpResponse.getStatusLine().getStatusCode();
        entity = httpResponse.getEntity();
        if (statusCode == HttpStatus.SC_UNAUTHORIZED) {
            String uri = httpRequest.getURI().toString();
            String responseBody = getEntityBody(entity);
            log.error(String.format("Rest execution unauthorized to %s. Response: %s", uri, responseBody));
            throw new AuthTokenException(uri, responseBody);
        }
        if (ServerConstants.RETRIABLE_RESPONSE_CODES.contains(statusCode) && numberOfRetries > 0) {
            log.warn(String.format("Re-trying HTTP request in 500ms due to bad response %d: %s", statusCode, getEntityBody(entity)));
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                // restore the interrupt flag so callers can still observe the interruption
                Thread.currentThread().interrupt();
            }
            return executeWithClient(request, client, numberOfRetries - 1);
        }
        Map<String, String> headers = Maps.newHashMap();
        for (Header header : httpResponse.getAllHeaders()) {
            headers.put(header.getName(), header.getValue());
        }
        if (statusCode >= 200 && statusCode <= 299) {
            // success: pass the raw entity stream through when one is available
            if (entity != null) {
                response = new JiveSDKHttpResponse(statusCode, headers, entity.getContent());
            } else {
                response = new JiveSDKHttpResponse(statusCode, headers, "");
            }
        } else {
            String responseBody = getEntityBody(entity);
            log.error(String.format("Received HTTP response [%d]: %s", statusCode, responseBody));
            response = new JiveSDKHttpResponse(statusCode, headers, responseBody);
        }
        return response;
    } catch (IOException e) {
        String msg = "Failed executing REST request: " + request.getUrl();
        if (numberOfRetries > 0) {
            log.warn(String.format("%s. Re-trying HTTP request due to IOException", msg), e);
            return executeWithClient(request, client, numberOfRetries - 1);
        } else {
            log.error(msg, e);
            throw new RestAccessException(msg, request.getUrl(), e);
        }
    } finally {
        dumpResponseIfDebugging(response, currentVer);
        // release the entity and connection regardless of outcome
        if (entity != null) {
            try {
                EntityUtils.consume(entity);
            } catch (IOException ignored) {
            }
        }
        if (httpRequest != null) {
            try {
                httpRequest.releaseConnection();
            } catch (Exception ignored) {
            }
        }
    }
}
/**
 * Executes {@code request} and fully reads the response body into the
 * returned response. Unlike {@link #executeWithClient}, this variant does
 * not retry and throws on any non-2xx status.
 *
 * @throws AuthTokenException on HTTP 401
 * @throws RestAccessException on IO failure or non-2xx status
 */
@Nonnull
@Override
public HttpResponse executeForResponseBody(@Nonnull RestRequest request) throws AuthTokenException, RestAccessException {
    HttpRequestBase httpRequest = null;
    HttpResponse response = null;
    long currentVer = -1;
    try {
        currentVer = dumpRequestIfDebugging(request);
        HttpClient client = getHttpClient();
        httpRequest = request.create();
        org.apache.http.HttpResponse httpResponse = client.execute(httpRequest);
        int statusCode = httpResponse.getStatusLine().getStatusCode();
        HttpEntity entity = httpResponse.getEntity();
        // body is always materialized as a string here
        String responseBody = getEntityBody(entity);
        if (statusCode == HttpStatus.SC_UNAUTHORIZED) {
            String uri = httpRequest.getURI().toString();
            log.error(String.format("Rest execution unauthorized to %s. Response: %s", uri, responseBody));
            throw new AuthTokenException(request.getUrl(), responseBody);
        }
        Map<String, String> headers = Maps.newHashMap();
        for (Header header : httpResponse.getAllHeaders()) {
            headers.put(header.getName(), header.getValue());
        }
        // response is built before the status check so the finally-block dump still sees it
        response = new JiveSDKHttpResponse(statusCode, headers, responseBody);
        if (statusCode < 200 || statusCode > 299) {
            log.error(String.format("Received HTTP response [%d]: %s", statusCode, responseBody));
            // TODO: Is it ok to throw a RestAccessException here instead of returning the response?
            throw new RestAccessException(statusCode + ": " + responseBody, request.getUrl());
        }
        return response;
    } catch (IOException e) {
        String msg = "Failed executing REST request: " + request.getUrl();
        log.error(msg, e);
        throw new RestAccessException(msg, request.getUrl(), e);
    } finally {
        dumpResponseIfDebugging(response, currentVer);
        if (httpRequest != null) {
            try {
                httpRequest.releaseConnection();
            } catch (Exception ignored) {
            }
        }
    }
}
/** Returns a client from the factory with default settings. */
@Nonnull
private HttpClient getHttpClient() {
    return httpClientFactory.getClient();
}
/** Returns a client from the factory configured with the given timeout. */
@Nonnull
private HttpClient getHttpClient(int timeout) {
    return httpClientFactory.getClient(timeout);
}
/** Reads the entity body as a UTF-8 string; returns "" for a null entity. */
private String getEntityBody(HttpEntity entity) throws IOException {
    if (entity == null) {
        return "";
    }
    return EntityUtils.toString(entity, JiveSDKUtils.UTF_8);
}
/**
 * When debug logging and request dumping are both enabled, writes a numbered dump of
 * the request (method, URL, query params, entity body) to the debug log.
 *
 * @param request the request about to be executed
 * @return the dump version number used to correlate with the response dump,
 *         or -1 when dumping is disabled
 */
private long dumpRequestIfDebugging(@Nonnull RestRequest request) {
    if (log.isDebugEnabled() && ServerConstants.shouldDumpRequest.get()) {
        // Monotonically increasing id so request and response dumps can be matched up.
        long version = NEXT_VERSION.getAndIncrement();
        StringBuilder sb = new StringBuilder();
        sb.append("---[").append(version).append("] START").append(DUMP_DELIMIT);
        if (request instanceof RestGetRequest) {
            sb.append("GET ");
        } else if (request instanceof RestPostRequest) {
            sb.append("POST ");
        } else if (request instanceof RestPutRequest) {
            sb.append("PUT ");
        }
        sb.append(request.getUrl()).append(DUMP_DELIMIT);
        if (request instanceof RestGetRequest) {
            Map<String, String> queryParams = ((RestGetRequest) request).getQueryParams();
            if (queryParams != null && !queryParams.isEmpty()) {
                sb.append("Query params: ");
                // Note: leaves a trailing '&' after the last pair (debug output only).
                for (Map.Entry<String, String> entry : queryParams.entrySet()) {
                    sb.append(entry.getKey()).append('=').append(entry.getValue()).append('&');
                }
            }
        }
        if (request instanceof RestRequestWithEntity) {
            RestRequestWithEntity requestWithEntity = (RestRequestWithEntity) request;
            Optional<HttpEntity> entity = requestWithEntity.getEntity();
            if (entity.isPresent()) {
                sb.append("Entity: ");
                // NOTE(review): writeTo() re-serializes the entity; for a non-repeatable
                // entity this could consume it before the real request executes - confirm
                // all entities used here are repeatable.
                ByteArrayOutputStream outputStream = null;
                try {
                    outputStream = new ByteArrayOutputStream();
                    entity.get().writeTo(outputStream);
                    sb.append(new String(outputStream.toByteArray(), JiveSDKUtils.UTF_8)).append(DUMP_DELIMIT);
                } catch (IOException e) {
                    log.error("Failed dumping request body", e);
                } finally {
                    if (outputStream != null) {
                        try {
                            outputStream.close();
                        } catch (IOException ignored) {
                            // close() on a ByteArrayOutputStream cannot meaningfully fail.
                        }
                    }
                }
            }
        }
        sb.append("---[").append(version).append("] END");
        log.debug(sb.toString());
        return version;
    }
    return -1;
}
/**
 * Logs the response body under the version id assigned by {@code dumpRequestIfDebugging},
 * when response dumping is enabled and the matching request was dumped.
 *
 * @param response the response to dump; may be null on failure paths
 * @param version  the correlation id from the request dump, or -1 when dumping was off
 */
private void dumpResponseIfDebugging(@Nullable HttpResponse response, long version) {
    // Skip unless a response exists, the request side was dumped, and dumping is enabled.
    if (response == null || version <= -1 || !ServerConstants.shouldDumpResponse.get()) {
        return;
    }
    Optional<String> optional = response.getResponseBody();
    String body = optional.isPresent() ? optional.get() : "Empty body";
    log.debug("---[" + version + "] " + body);
}
/**
 * Injects the factory used to obtain HTTP clients (setter injection).
 *
 * @param httpClientFactory the factory to use for subsequent requests
 */
public void setHttpClientFactory(HttpClientFactory httpClientFactory) {
    this.httpClientFactory = httpClientFactory;
}
}
| |
/*
* Copyright (c) 2012, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.adaptermpimanager.HL7Parsers;
import gov.hhs.fha.nhinc.adaptermpimanager.*;
import gov.hhs.fha.nhinc.mpilib.*;
import java.util.List;
import java.io.Serializable;
import java.util.Iterator;
import javax.xml.bind.JAXBElement;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hl7.v3.*;
/**
*
* @author rayj
*/
/**
 * Parses and builds HL7 v3 PRPA_IN201301UV02 (patient add) messages, converting between
 * the JAXB-generated HL7 types and the MPI domain model ({@code Patient}, {@code PersonName},
 * {@code Identifiers}).
 */
public class HL7Parser201301 {

    private static Log log = LogFactory.getLog(HL7Parser201301.class);

    /**
     * Logs the message id of an HL7 patient-add message.
     *
     * @param message the message whose id is logged; a null message is ignored
     */
    public static void PrintMessageIdFromMessage(org.hl7.v3.PRPAIN201301UV02 message) {
        if (message != null) {
            HL7Parser.PrintId(message.getId(), "message");
        }
    }

    /**
     * Extracts the administrative gender code from an HL7 person element.
     *
     * @param person the HL7 person element; must not be null
     * @return the gender code, or null when the person carries no gender code
     */
    public static String ExtractGender(PRPAMT201301UV02Person person) {
        String genderCode = null;
        CE administrativeGenderCode = person.getAdministrativeGenderCode();
        if (administrativeGenderCode == null) {
            log.info("message does not contain a gender code");
        } else {
            log.info("person.getAdministrativeGenderCode().getCode()=" + administrativeGenderCode.getCode());
            log.info("person.getAdministrativeGenderCode().getDisplayName()="
                + administrativeGenderCode.getDisplayName());
            genderCode = administrativeGenderCode.getCode();
        }
        return genderCode;
    }

    /**
     * Extracts the birth date from an HL7 person element.
     *
     * @param person the HL7 person element; must not be null
     * @return the raw HL7 birth-time string, or null when absent
     */
    public static String ExtractBirthdate(PRPAMT201301UV02Person person) {
        String birthDate = null;
        if (person.getBirthTime() == null) {
            log.info("message does not contain a birthtime");
        } else {
            birthDate = person.getBirthTime().getValue();
            if (birthDate == null) {
                log.info("message does not contain a birthtime");
            } else {
                log.info("person.getBirthTime().getValue()=" + person.getBirthTime().getValue());
            }
        }
        return birthDate;
    }

    /**
     * Extracts a person name from an HL7 person element. Only the first name entry is
     * examined: explicit family/given parts are copied to last/first name, and when
     * neither is present any loose text content is used as the last name.
     *
     * @param person the HL7 person element; must not be null
     * @return the extracted name; never null, possibly empty
     */
    public static PersonName ExtractPersonName(PRPAMT201301UV02Person person) {
        // temp logic to prove i can get to name - long term would want to store discrete name parts
        // also assume one name, not multiple names
        PersonName personname = new PersonName();
        log.info("patientPerson.getName().size() " + person.getName().size());
        if (person.getName() != null && person.getName().size() > 0 && person.getName().get(0) != null
            && person.getName().get(0).getContent() != null) {
            List<Serializable> choice = person.getName().get(0).getContent();
            log.info("choice.size()=" + choice.size());
            Iterator<Serializable> iterSerialObjects = choice.iterator();
            String nameString = "";
            EnExplicitFamily lastname = new EnExplicitFamily();
            EnExplicitGiven firstname = new EnExplicitGiven();
            while (iterSerialObjects.hasNext()) {
                log.info("in iterSerialObjects.hasNext() loop");
                Serializable contentItem = iterSerialObjects.next();
                if (contentItem instanceof String) {
                    // Loose text inside the name element; accumulate it as a fallback name.
                    log.info("contentItem is string");
                    String strValue = (String) contentItem;
                    // nameString is initialized to "" and never reassigned to null, so the
                    // former null check here was dead code.
                    nameString += strValue;
                    log.info("nameString=" + nameString);
                } else if (contentItem instanceof JAXBElement) {
                    log.info("contentItem is JAXBElement");
                    JAXBElement oJAXBElement = (JAXBElement) contentItem;
                    if (oJAXBElement.getValue() instanceof EnExplicitFamily) {
                        lastname = (EnExplicitFamily) oJAXBElement.getValue();
                        log.info("found lastname element; content=" + lastname.getContent());
                    } else if (oJAXBElement.getValue() instanceof EnExplicitGiven) {
                        firstname = (EnExplicitGiven) oJAXBElement.getValue();
                        log.info("found firstname element; content=" + firstname.getContent());
                    } else {
                        log.info("other name part=" + (ENXPExplicit) oJAXBElement.getValue());
                    }
                } else {
                    log.info("contentItem is other");
                }
            }
            // Prefer explicit family/given parts; otherwise fall back to the accumulated
            // plain-text content as the last name.
            boolean namefound = false;
            if (lastname.getContent() != null) {
                personname.setLastName(lastname.getContent());
                log.info("FamilyName : " + personname.getLastName());
                namefound = true;
            }
            if (firstname.getContent() != null) {
                personname.setFirstName(firstname.getContent());
                log.info("GivenName : " + personname.getFirstName());
                namefound = true;
            }
            if (!namefound && !nameString.trim().contentEquals("")) {
                log.info("setting name by nameString " + nameString);
                personname.setLastName(nameString);
            }
        }
        log.info("returning personname");
        return personname;
    }

    /**
     * Collects all identifiers carried by the patient element, its nested person element,
     * and the person's "other ids". SSN-rooted ids are excluded here; they are handled by
     * {@link #ExtractSsn}.
     *
     * @param patient the HL7 patient element; must not be null
     * @return the accumulated identifiers (organization id / id pairs)
     */
    public static Identifiers ExtractPersonIdentifiers(PRPAMT201301UV02Patient patient) {
        Identifiers ids = new Identifiers();
        for (II patientid : patient.getId()) {
            Identifier id = new Identifier();
            id.setId(patientid.getExtension());
            id.setOrganizationId(patientid.getRoot());
            log.info("Created id from patient identifier [organization=" + id.getOrganizationId() + "][id="
                + id.getId() + "]");
            ids.add(id);
        }
        PRPAMT201301UV02Person person = ExtractHL7PatientPersonFromHL7Patient(patient);
        for (II personid : person.getId()) {
            Identifier id = new Identifier();
            id.setId(personid.getExtension());
            id.setOrganizationId(personid.getRoot());
            log.info("Created id from person identifier [organization=" + id.getOrganizationId() + "][id=" + id.getId()
                + "]");
            ids.add(id);
        }
        List<PRPAMT201301UV02OtherIDs> OtherIds = person.getAsOtherIDs();
        for (PRPAMT201301UV02OtherIDs otherPersonIds : OtherIds) {
            for (II otherPersonId : otherPersonIds.getId()) {
                // Skip SSNs; ExtractSsn() is responsible for them.
                if (!(otherPersonId.getRoot().contentEquals(HL7Parser.SSN_OID))) {
                    Identifier id = new Identifier();
                    id.setId(otherPersonId.getExtension());
                    id.setOrganizationId(otherPersonId.getRoot());
                    log.info("Created id from person other identifier [organization=" + id.getOrganizationId()
                        + "][id=" + id.getId() + "]");
                    ids.add(id);
                }
            }
        }
        return ids;
    }

    /**
     * Extracts the SSN from the person's "other ids", identified by the SSN OID.
     * If several SSN-rooted entries exist, the last one wins.
     *
     * @param person the HL7 person element; must not be null
     * @return the SSN extension value, or null when no SSN-rooted id exists
     */
    public static String ExtractSsn(PRPAMT201301UV02Person person) {
        String ssn = null;
        List<PRPAMT201301UV02OtherIDs> OtherIds = person.getAsOtherIDs();
        for (PRPAMT201301UV02OtherIDs otherPersonIds : OtherIds) {
            for (II otherPersonId : otherPersonIds.getId()) {
                if (otherPersonId.getRoot().contentEquals(HL7Parser.SSN_OID)) {
                    ssn = otherPersonId.getExtension();
                }
            }
        }
        return ssn;
    }

    /**
     * Unwraps the JAXB patientPerson element from an HL7 patient.
     * (The former implementation performed a redundant double assignment.)
     *
     * @param patient the HL7 patient element; must not be null and must carry a
     *                patientPerson element
     * @return the contained person element
     */
    public static PRPAMT201301UV02Person ExtractHL7PatientPersonFromHL7Patient(PRPAMT201301UV02Patient patient) {
        JAXBElement<PRPAMT201301UV02Person> patientPersonElement = patient.getPatientPerson();
        return patientPersonElement.getValue();
    }

    /**
     * Convenience wrapper: extracts the person element directly from a 201301 message.
     * NOTE(review): throws NullPointerException when the message contains no patient -
     * confirm callers guard against that.
     *
     * @param message the patient-add message
     * @return the person element of the (single) subject
     */
    public static PRPAMT201301UV02Person ExtractHL7PatientPersonFrom201301Message(org.hl7.v3.PRPAIN201301UV02 message) {
        // assume one subject for now
        PRPAMT201301UV02Patient patient = ExtractHL7PatientFromMessage(message);
        PRPAMT201301UV02Person patientPerson = ExtractHL7PatientPersonFromHL7Patient(patient);
        return patientPerson;
    }

    /**
     * Walks message -> controlActProcess -> subject -> registrationEvent -> subject1 -> patient,
     * logging ids along the way. Returns null (with a log entry) as soon as any step is missing.
     *
     * @param message the patient-add message; may be null
     * @return the patient element of the first subject, or null when absent
     */
    public static PRPAMT201301UV02Patient ExtractHL7PatientFromMessage(org.hl7.v3.PRPAIN201301UV02 message) {
        // assume one subject for now
        PRPAMT201301UV02Patient patient = null;
        log.info("in ExtractPatient");
        if (message == null) {
            log.info("message is null - no patient");
            return null;
        }
        PRPAIN201301UV02MFMIMT700701UV01ControlActProcess controlActProcess = message.getControlActProcess();
        if (controlActProcess == null) {
            log.info("controlActProcess is null - no patient");
            return null;
        }
        HL7Parser.PrintId(controlActProcess.getId(), "controlActProcess");
        List<PRPAIN201301UV02MFMIMT700701UV01Subject1> subjects = controlActProcess.getSubject();
        if ((subjects == null) || (subjects.size() == 0)) {
            log.info("subjects is blank/null - no patient");
            return null;
        }
        // for now, assume we only need one subject, this will need to be modified later
        PRPAIN201301UV02MFMIMT700701UV01Subject1 subject = subjects.get(0);
        HL7Parser.PrintId(subject.getTypeId(), "subject");
        PRPAIN201301UV02MFMIMT700701UV01RegistrationEvent registrationevent = subject.getRegistrationEvent();
        if (registrationevent == null) {
            log.info("registrationevent is null - no patient");
            return null;
        }
        HL7Parser.PrintId(registrationevent.getTypeId(), "registrationevent");
        PRPAIN201301UV02MFMIMT700701UV01Subject2 subject1 = registrationevent.getSubject1();
        if (subject1 == null) {
            log.info("subject1 is null - no patient");
            return null;
        }
        HL7Parser.PrintId(subject1.getTypeId(), "subject1");
        patient = subject1.getPatient();
        if (patient == null) {
            log.info("patient is null - no patient");
            return null;
        }
        HL7Parser.PrintId(patient.getId(), "patient");
        log.info("done with ExtractPatient");
        return patient;
    }

    /**
     * Converts the patient carried by a 201301 message into the MPI domain model.
     *
     * @param message the patient-add message
     * @return the MPI patient
     */
    public static Patient ExtractMpiPatientFromMessage(org.hl7.v3.PRPAIN201301UV02 message) {
        PRPAMT201301UV02Patient hl7patient = ExtractHL7PatientFromMessage(message);
        Patient mpipatient = ExtractMpiPatientFromHL7Patient(hl7patient);
        return mpipatient;
    }

    /**
     * Converts an HL7 patient element into the MPI domain model (name, gender,
     * birth date, SSN and identifiers).
     *
     * @param patient the HL7 patient element; must not be null
     * @return the populated MPI patient
     */
    public static Patient ExtractMpiPatientFromHL7Patient(PRPAMT201301UV02Patient patient) {
        PRPAMT201301UV02Person patientPerson = ExtractHL7PatientPersonFromHL7Patient(patient);
        Patient mpiPatient = new Patient();
        mpiPatient.getNames().add(ExtractPersonName(patientPerson));
        mpiPatient.setGender(ExtractGender(patientPerson));
        String birthdateString = ExtractBirthdate(patientPerson);
        mpiPatient.setDateOfBirth(birthdateString);
        mpiPatient.setSSN(ExtractSsn(patientPerson));
        Identifiers ids = ExtractPersonIdentifiers(patient);
        mpiPatient.setIdentifiers(ids);
        return mpiPatient;
    }

    /**
     * Builds a PRPA_IN201301UV02 (patient add) message from an MPI patient. The patient's
     * first identifier's organization id is reused as the sender device id.
     *
     * @param mpiPatient the source patient; must carry at least one identifier
     * @return the populated HL7 message
     */
    public static org.hl7.v3.PRPAIN201301UV02 BuildMessagePRPAIN201301UV(Patient mpiPatient) {
        ObjectFactory factory = new ObjectFactory();
        org.hl7.v3.PRPAIN201301UV02 resultMessage = new org.hl7.v3.PRPAIN201301UV02();
        PRPAIN201301UV02MFMIMT700701UV01Subject1 subject = new PRPAIN201301UV02MFMIMT700701UV01Subject1();
        PRPAIN201301UV02MFMIMT700701UV01RegistrationEvent registrationevent = new PRPAIN201301UV02MFMIMT700701UV01RegistrationEvent();
        subject.setRegistrationEvent(registrationevent);
        PRPAIN201301UV02MFMIMT700701UV01Subject2 subjectA = new PRPAIN201301UV02MFMIMT700701UV01Subject2();
        registrationevent.setSubject1(subjectA);
        PRPAMT201301UV02Patient patient = new PRPAMT201301UV02Patient();
        subjectA.setPatient(patient);
        PRPAMT201301UV02Person patientPerson = new PRPAMT201301UV02Person();
        javax.xml.namespace.QName xmlqname = new javax.xml.namespace.QName("urn:hl7-org:v3", "patientPerson");
        // The JAXBElement constructor already stores the value; the former extra
        // setValue() call was redundant.
        JAXBElement<PRPAMT201301UV02Person> patientPersonElement = new JAXBElement<PRPAMT201301UV02Person>(xmlqname,
            PRPAMT201301UV02Person.class, patientPerson);
        patient.setPatientPerson(patientPersonElement);
        PRPAIN201301UV02MFMIMT700701UV01ControlActProcess controlActProcess = new PRPAIN201301UV02MFMIMT700701UV01ControlActProcess();
        resultMessage.setControlActProcess(controlActProcess);
        List<PRPAIN201301UV02MFMIMT700701UV01Subject1> subjects = resultMessage.getControlActProcess().getSubject();
        subjects.add(subject);
        if (mpiPatient.getDateOfBirth() != null) {
            TSExplicit birthtime = new TSExplicit();
            birthtime.setValue(mpiPatient.getDateOfBirth().toString());
            patientPerson.setBirthTime(birthtime);
        }
        CE administrativeGenderCode = new CE();
        administrativeGenderCode.setCode(mpiPatient.getGender());
        patientPerson.setAdministrativeGenderCode(administrativeGenderCode);
        //
        // Name.
        //
        PNExplicit name = factory.createPNExplicit();
        List<Serializable> namelist = name.getContent();
        PersonName mpiPatientName = null;
        if (mpiPatient.getNames().size() > 0) {
            mpiPatientName = mpiPatient.getNames().get(0);
        }
        if (mpiPatientName != null && mpiPatientName.getLastName().length() > 0) {
            log.info("familyName >" + mpiPatientName.getLastName() + "<");
            EnExplicitFamily familyName = new EnExplicitFamily();
            familyName.setContent(mpiPatientName.getLastName());
            familyName.setPartType("FAM");
            namelist.add(factory.createPNExplicitFamily(familyName));
        }
        if (mpiPatientName != null && mpiPatientName.getFirstName().length() > 0) {
            log.info("givenName >" + mpiPatientName.getFirstName() + "<");
            EnExplicitGiven givenName = new EnExplicitGiven();
            givenName.setContent(mpiPatientName.getFirstName());
            givenName.setPartType("GIV");
            namelist.add(factory.createPNExplicitGiven(givenName));
        }
        namelist.add(" ");
        patientPerson.getName().add(name);
        for (Identifier resultPatientId : mpiPatient.getIdentifiers()) {
            II id = new II();
            id.setRoot(resultPatientId.getOrganizationId());
            id.setExtension(resultPatientId.getId());
            patient.getId().add(id);
        }
        // NOTE(review): this throws IndexOutOfBoundsException when the patient has no
        // identifiers - confirm callers always supply at least one.
        Identifier resultPatientId = mpiPatient.getIdentifiers().get(0);
        II id = new II();
        id.setRoot(resultPatientId.getOrganizationId());
        MCCIMT000100UV01Device device = new MCCIMT000100UV01Device();
        MCCIMT000100UV01Sender sender = new MCCIMT000100UV01Sender();
        device.getId().add(id);
        sender.setDevice(device);
        resultMessage.setSender(sender);
        return resultMessage;
    }
}
| |
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/*
* LAGDHillClimber.java
* Copyright (C) 2005 Manuel Neubach
*
*/
package weka.classifiers.bayes.net.search.local;
import weka.classifiers.bayes.BayesNet;
import weka.core.Instances;
import weka.core.Option;
import weka.core.RevisionUtils;
import weka.core.Utils;
import java.util.Enumeration;
import java.util.Vector;
/**
<!-- globalinfo-start -->
* This Bayes Network learning algorithm uses a Look Ahead Hill Climbing algorithm called LAGD Hill Climbing. Unlike Greedy Hill Climbing it doesn't calculate a best greedy operation (adding, deleting or reversing an arc) but a sequence of nrOfLookAheadSteps operations, which leads to a network structure whose score is most likely higher in comparison to the network obtained by performing a sequence of nrOfLookAheadSteps greedy operations. The search is not restricted by an order on the variables (unlike K2). The difference with B and B2 is that this hill climber also considers arrows part of the naive Bayes structure for deletion.
* <p/>
<!-- globalinfo-end -->
*
<!-- options-start -->
* Valid options are: <p/>
*
* <pre> -L <nr of look ahead steps>
* Look Ahead Depth</pre>
*
* <pre> -G <nr of good operations>
* Nr of Good Operations</pre>
*
* <pre> -P <nr of parents>
* Maximum number of parents</pre>
*
* <pre> -R
* Use arc reversal operation.
* (default false)</pre>
*
* <pre> -N
* Initial structure is empty (instead of Naive Bayes)</pre>
*
* <pre> -mbc
* Applies a Markov Blanket correction to the network structure,
* after a network structure is learned. This ensures that all
* nodes in the network are part of the Markov blanket of the
* classifier node.</pre>
*
* <pre> -S [BAYES|MDL|ENTROPY|AIC|CROSS_CLASSIC|CROSS_BAYES]
* Score type (BAYES, BDeu, MDL, ENTROPY and AIC)</pre>
*
<!-- options-end -->
*
* @author Manuel Neubach
* @version $Revision: 1.7 $
*/
public class LAGDHillClimber
    extends HillClimber {

    /** for serialization */
    static final long serialVersionUID = 7217437499439184344L;

    /** Number of Look Ahead Steps **/
    int m_nNrOfLookAheadSteps = 2;

    /** Number of Good Operations per Step **/
    int m_nNrOfGoodOperations = 5;

    /**
     * search determines the network structure/graph of the network
     *
     * @param bayesNet the network
     * @param instances the data to use
     * @throws Exception if something goes wrong
     */
    protected void search(BayesNet bayesNet, Instances instances) throws Exception {
        int k = m_nNrOfLookAheadSteps; // Number of Look Ahead Steps
        int l = m_nNrOfGoodOperations; // Number of Good Operations per step
        lookAheadInGoodDirectionsSearch(bayesNet, instances, k, l);
    } // search

    /**
     * lookAheadInGoodDirectionsSearch determines the network structure/graph of the network
     * with best score according to LAGD Hill Climbing. The look-ahead depth starts at
     * nrOfLookAheadSteps and is decreased by one each time no improving sequence of that
     * length is found; the final phase is plain greedy hill climbing.
     *
     * @param bayesNet the network
     * @param instances the data to use
     * @param nrOfLookAheadSteps look-ahead depth to start from
     * @param nrOfGoodOperations number of candidate operations kept per step
     * @throws Exception if something goes wrong
     */
    protected void lookAheadInGoodDirectionsSearch(BayesNet bayesNet, Instances instances, int nrOfLookAheadSteps, int nrOfGoodOperations) throws Exception {
        System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() + "Initializing Cache");
        initCache(bayesNet, instances);
        while (nrOfLookAheadSteps > 1) {
            System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() + "Look Ahead Depth: " + nrOfLookAheadSteps);
            boolean legalSequence = true;
            double sequenceDeltaScore = 0;
            // getOptimalOperations returns a fresh array; the former pre-allocation
            // that was immediately overwritten has been dropped.
            Operation[] bestOperation = getOptimalOperations(bayesNet, instances, nrOfLookAheadSteps, nrOfGoodOperations);
            for (int i = 0; i < nrOfLookAheadSteps; i++) {
                if (bestOperation[i] == null) {
                    legalSequence = false;
                } else {
                    sequenceDeltaScore += bestOperation[i].m_fDeltaScore;
                }
            }
            // Keep applying whole sequences as long as they are complete and improve the score.
            while (legalSequence && sequenceDeltaScore > 0) {
                System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() + "Next Iteration..........................");
                for (int i = 0; i < nrOfLookAheadSteps; i++) {
                    performOperation(bayesNet, instances, bestOperation[i]);
                }
                bestOperation = getOptimalOperations(bayesNet, instances, nrOfLookAheadSteps, nrOfGoodOperations);
                sequenceDeltaScore = 0;
                for (int i = 0; i < nrOfLookAheadSteps; i++) {
                    if (bestOperation[i] != null) {
                        System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() + bestOperation[i].m_nOperation + " " + bestOperation[i].m_nHead + " " + bestOperation[i].m_nTail);
                        sequenceDeltaScore += bestOperation[i].m_fDeltaScore;
                    } else {
                        legalSequence = false;
                    }
                    System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() + "DeltaScore: " + sequenceDeltaScore);
                }
            }
            --nrOfLookAheadSteps;
        }
        /** last steps with greedy HC **/
        Operation oOperation = getOptimalOperation(bayesNet, instances);
        while ((oOperation != null) && (oOperation.m_fDeltaScore > 0)) {
            performOperation(bayesNet, instances, oOperation);
            System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() + "Performing last greedy steps");
            oOperation = getOptimalOperation(bayesNet, instances);
        }
        // free up memory
        m_Cache = null;
    } // lookAheadInGoodDirectionsSearch

    /**
     * getAntiOperation determines the Operation, which is needed to cancel oOperation
     *
     * @param oOperation Operation to cancel
     * @return antiOperation to oOperation
     * @throws Exception if something goes wrong
     */
    protected Operation getAntiOperation(Operation oOperation) throws Exception {
        // ADD <-> DEL are mutual inverses; a REVERSE is undone by reversing back.
        if (oOperation.m_nOperation == Operation.OPERATION_ADD)
            return (new Operation(oOperation.m_nTail, oOperation.m_nHead, Operation.OPERATION_DEL));
        else {
            if (oOperation.m_nOperation == Operation.OPERATION_DEL)
                return (new Operation(oOperation.m_nTail, oOperation.m_nHead, Operation.OPERATION_ADD));
            else {
                return (new Operation(oOperation.m_nHead, oOperation.m_nTail, Operation.OPERATION_REVERSE));
            }
        }
    } // getAntiOperation

    /**
     * getGoodOperations determines the nrOfGoodOperations best Operations, which are considered for
     * the calculation of an optimal operationsequence
     * @param bayesNet Bayes network to apply operation on
     * @param instances data set to learn from
     * @param nrOfGoodOperations number of good operations to consider
     * @return good operations to consider (trailing entries may be null)
     * @throws Exception if something goes wrong
     **/
    protected Operation[] getGoodOperations(BayesNet bayesNet, Instances instances, int nrOfGoodOperations) throws Exception {
        Operation[] goodOperations = new Operation[nrOfGoodOperations];
        // Repeatedly take the current best operation while masking it in the cache with a
        // huge negative score, so the next call yields the next-best candidate.
        for (int i = 0; i < nrOfGoodOperations; i++) {
            goodOperations[i] = getOptimalOperation(bayesNet, instances);
            if (goodOperations[i] != null) {
                m_Cache.put(goodOperations[i], -1E100);
            } else {
                break; // no further candidates
            }
        }
        // Restore the true cache scores of all masked candidates.
        for (int i = 0; i < nrOfGoodOperations; i++) {
            if (goodOperations[i] != null) {
                if (goodOperations[i].m_nOperation != Operation.OPERATION_REVERSE) {
                    m_Cache.put(goodOperations[i], goodOperations[i].m_fDeltaScore);
                } else {
                    m_Cache.put(goodOperations[i], goodOperations[i].m_fDeltaScore - m_Cache.m_fDeltaScoreAdd[goodOperations[i].m_nHead][goodOperations[i].m_nTail]);
                }
            } else {
                break; // a null entry marks the end of the candidate list
            }
        }
        return goodOperations;
    } // getGoodOperations

    /**
     * getOptimalOperations determines an optimal operationsequence in respect of the parameters
     * nrOfLookAheadSteps and nrOfGoodOperations
     * @param bayesNet Bayes network to apply operation on
     * @param instances data set to learn from
     * @param nrOfLookAheadSteps number of look ahead steps to use
     * @param nrOfGoodOperations number of good operations to consider
     * @return optimal sequence of operations in respect to nrOfLookAheadSteps and nrOfGoodOperations
     * @throws Exception if something goes wrong
     **/
    protected Operation[] getOptimalOperations(BayesNet bayesNet, Instances instances, int nrOfLookAheadSteps, int nrOfGoodOperations) throws Exception {
        if (nrOfLookAheadSteps == 1) { // base case of the recursion
            Operation[] bestOperation = new Operation[1];
            bestOperation[0] = getOptimalOperation(bayesNet, instances);
            return (bestOperation);
        } else {
            double bestDeltaScore = 0;
            double currentDeltaScore = 0;
            Operation[] bestOperation = new Operation[nrOfLookAheadSteps];
            // (former dead pre-allocations of goodOperations/tempOperation removed)
            Operation[] goodOperations = getGoodOperations(bayesNet, instances, nrOfGoodOperations);
            for (int i = 0; i < nrOfGoodOperations; i++) {
                if (goodOperations[i] != null) {
                    // Tentatively apply the candidate, recurse one level shallower ...
                    performOperation(bayesNet, instances, goodOperations[i]);
                    Operation[] tempOperation = getOptimalOperations(bayesNet, instances, nrOfLookAheadSteps - 1, nrOfGoodOperations); // recursive descent
                    currentDeltaScore = goodOperations[i].m_fDeltaScore;
                    for (int j = 0; j < nrOfLookAheadSteps - 1; j++) {
                        if (tempOperation[j] != null) {
                            currentDeltaScore += tempOperation[j].m_fDeltaScore;
                        }
                    }
                    // ... then undo the candidate before evaluating the next one.
                    performOperation(bayesNet, instances, getAntiOperation(goodOperations[i]));
                    if (currentDeltaScore > bestDeltaScore) {
                        bestDeltaScore = currentDeltaScore;
                        bestOperation[0] = goodOperations[i];
                        for (int j = 1; j < nrOfLookAheadSteps; j++) {
                            bestOperation[j] = tempOperation[j - 1];
                        }
                    }
                } else {
                    break; // a null candidate marks the end of the list
                }
            }
            return (bestOperation);
        }
    } // getOptimalOperations

    /**
     * Sets the max number of parents
     *
     * @param nMaxNrOfParents the max number of parents
     */
    public void setMaxNrOfParents(int nMaxNrOfParents) {
        m_nMaxNrOfParents = nMaxNrOfParents;
    }

    /**
     * Gets the max number of parents.
     *
     * @return the max number of parents
     */
    public int getMaxNrOfParents() {
        return m_nMaxNrOfParents;
    }

    /**
     * Sets the number of look-ahead steps
     *
     * @param nNrOfLookAheadSteps the number of look-ahead steps
     */
    public void setNrOfLookAheadSteps(int nNrOfLookAheadSteps) {
        m_nNrOfLookAheadSteps = nNrOfLookAheadSteps;
    }

    /**
     * Gets the number of look-ahead steps
     *
     * @return the number of look-ahead steps
     */
    public int getNrOfLookAheadSteps() {
        return m_nNrOfLookAheadSteps;
    }

    /**
     * Sets the number of "good operations"
     *
     * @param nNrOfGoodOperations the number of "good operations"
     */
    public void setNrOfGoodOperations(int nNrOfGoodOperations) {
        m_nNrOfGoodOperations = nNrOfGoodOperations;
    }

    /**
     * Gets the number of "good operations"
     *
     * @return the number of "good operations"
     */
    public int getNrOfGoodOperations() {
        return m_nNrOfGoodOperations;
    }

    /**
     * Returns an enumeration describing the available options.
     *
     * @return an enumeration of all the available options.
     */
    public Enumeration listOptions() {
        Vector newVector = new Vector();
        // NOTE(review): the third Option argument is numArguments; the values 2 and 5
        // mirror the option defaults and look like a copy mistake (each option takes a
        // single argument). Weka only uses this field for display - confirm before changing.
        newVector.addElement(new Option("\tLook Ahead Depth", "L", 2, "-L <nr of look ahead steps>"));
        newVector.addElement(new Option("\tNr of Good Operations", "G", 5, "-G <nr of good operations>"));
        Enumeration enm = super.listOptions();
        while (enm.hasMoreElements()) {
            newVector.addElement(enm.nextElement());
        }
        return newVector.elements();
    } // listOptions

    /**
     * Parses a given list of options. Valid options are:<p>
     *
     <!-- options-start -->
     * Valid options are: <p/>
     *
     * <pre> -L &lt;nr of look ahead steps&gt;
     * Look Ahead Depth</pre>
     *
     * <pre> -G &lt;nr of good operations&gt;
     * Nr of Good Operations</pre>
     *
     * <pre> -P &lt;nr of parents&gt;
     * Maximum number of parents</pre>
     *
     * <pre> -R
     * Use arc reversal operation.
     * (default false)</pre>
     *
     * <pre> -N
     * Initial structure is empty (instead of Naive Bayes)</pre>
     *
     * <pre> -mbc
     * Applies a Markov Blanket correction to the network structure,
     * after a network structure is learned. This ensures that all
     * nodes in the network are part of the Markov blanket of the
     * classifier node.</pre>
     *
     * <pre> -S [BAYES|MDL|ENTROPY|AIC|CROSS_CLASSIC|CROSS_BAYES]
     * Score type (BAYES, BDeu, MDL, ENTROPY and AIC)</pre>
     *
     <!-- options-end -->
     *
     * @param options the list of options as an array of strings
     * @throws Exception if an option is not supported
     */
    public void setOptions(String[] options) throws Exception {
        String sNrOfLookAheadSteps = Utils.getOption('L', options);
        if (sNrOfLookAheadSteps.length() != 0) {
            setNrOfLookAheadSteps(Integer.parseInt(sNrOfLookAheadSteps));
        } else {
            setNrOfLookAheadSteps(2);
        }
        String sNrOfGoodOperations = Utils.getOption('G', options);
        if (sNrOfGoodOperations.length() != 0) {
            setNrOfGoodOperations(Integer.parseInt(sNrOfGoodOperations));
        } else {
            setNrOfGoodOperations(5);
        }
        super.setOptions(options);
    } // setOptions

    /**
     * Gets the current settings of the search algorithm.
     *
     * @return an array of strings suitable for passing to setOptions
     */
    public String[] getOptions() {
        String[] superOptions = super.getOptions();
        String[] options = new String[9 + superOptions.length];
        int current = 0;
        options[current++] = "-L";
        options[current++] = "" + m_nNrOfLookAheadSteps;
        options[current++] = "-G";
        options[current++] = "" + m_nNrOfGoodOperations;
        // insert options from parent class
        for (int iOption = 0; iOption < superOptions.length; iOption++) {
            options[current++] = superOptions[iOption];
        }
        // Fill up rest with empty strings, not nulls!
        while (current < options.length) {
            options[current++] = "";
        }
        return options;
    } // getOptions

    /**
     * This will return a string describing the search algorithm.
     * @return The string.
     */
    public String globalInfo() {
        return "This Bayes Network learning algorithm uses a Look Ahead Hill Climbing algorithm called LAGD Hill Climbing." +
            " Unlike Greedy Hill Climbing it doesn't calculate a best greedy operation (adding, deleting or reversing an arc) " +
            "but a sequence of nrOfLookAheadSteps operations, which leads to a network structure whose score is most likely " +
            "higher in comparison to the network obtained by performing a sequence of nrOfLookAheadSteps greedy operations. " +
            "The search is not restricted by an order " +
            "on the variables (unlike K2). The difference with B and B2 is that this hill " +
            "climber also considers arrows part of the naive Bayes structure for deletion.";
    } // globalInfo

    /**
     * @return a string to describe the Number of Look Ahead Steps option.
     */
    public String nrOfLookAheadStepsTipText() {
        return "Sets the Number of Look Ahead Steps. 'nrOfLookAheadSteps = 2' means that all network structures in a " +
            "distance of 2 (from the current network structure) are taken into account for the decision which arcs to add, " +
            "remove or reverse. 'nrOfLookAheadSteps = 1' results in Greedy Hill Climbing.";
    } // nrOfLookAheadStepsTipText

    /**
     * @return a string to describe the Number of Good Operations option.
     */
    public String nrOfGoodOperationsTipText() {
        return "Sets the Number of Good Operations per Look Ahead Step. 'nrOfGoodOperations = 5' means that for the next " +
            "Look Ahead Step only the 5 best Operations (adding, deleting or reversing an arc) are taken into account for the " +
            "calculation of the best sequence consisting of nrOfLookAheadSteps operations.";
    } // nrOfGoodOperationsTipText

    /**
     * Returns the revision string.
     *
     * @return the revision
     */
    public String getRevision() {
        return RevisionUtils.extract("$Revision: 1.7 $");
    }
} // LAGDHillClimber
| |
/*
* $Id: TestRoyalSocietyOfChemistryMetadataExtractor.java 39864 2015-02-18 09:10:24Z thib_gc $
*/
/*
Copyright (c) 2000-2014 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.plugin.springer.link;
import org.lockss.config.ConfigManager;
import org.lockss.config.Configuration;
import org.lockss.daemon.ConfigParamDescr;
import org.lockss.extractor.*;
import org.lockss.plugin.ArchivalUnit;
import org.lockss.plugin.CachedUrl;
import org.lockss.plugin.PluginTestUtil;
import org.lockss.plugin.definable.DefinableArchivalUnit;
import org.lockss.plugin.definable.DefinablePlugin;
import org.lockss.plugin.simulated.SimulatedArchivalUnit;
import org.lockss.plugin.simulated.SimulatedContentGenerator;
import org.lockss.plugin.simulated.SimulatedPlugin;
import org.lockss.test.ConfigurationUtil;
import org.lockss.test.LockssTestCase;
import org.lockss.test.MockCachedUrl;
import org.lockss.test.MockLockssDaemon;
import org.lockss.util.Logger;
import java.net.URL;
import java.util.*;
/**
 * Tests SpringerLinkBooksHtmlMetadataExtractor against a canned, anonymized
 * snapshot of the "summary" block of a SpringerLink book landing page.
 * NOTE(review): the comment previously here described Royal Society of
 * Chemistry article URLs and appears to have been copied from another
 * plugin's test; it did not apply to this plugin.
 */
public class TestSpringerLinkBooksHtmlMetadataExtractor extends LockssTestCase {
// Logger for debug output from this test class.
static Logger log = Logger.getLogger(TestSpringerLinkBooksHtmlMetadataExtractor.class);
private MockLockssDaemon theDaemon;
private SimulatedArchivalUnit sau; // Simulated AU to generate content
private ArchivalUnit bau; // SpringerLink books AU under test (comment previously said "RSC AU"; copy-paste leftover)
private static String PLUGIN_NAME = "org.lockss.plugin.springer.link.SpringerLinkBooksPlugin";
private static final String BASE_URL_KEY = ConfigParamDescr.BASE_URL.getKey();
private static final String DOWNLOAD_URL_KEY = "download_url";
private static final String BOOK_EISBN_KEY = "book_eisbn";
private static String BASE_URL = "http://www.example.org/";
private static String DOWNLOAD_URL = "http://www.example.download.org/";
private static String BOOK_EISBN = "1234-1234";
private DefinablePlugin plugin;
/**
 * Creates the SpringerLink books AU under test from the canned config
 * parameters above.
 *
 * @return the configured archival unit
 * @throws Exception if plugin initialization or AU creation fails
 */
private DefinableArchivalUnit makeAu()
throws Exception {
Properties props = new Properties();
props.setProperty(BASE_URL_KEY, BASE_URL);
props.setProperty(DOWNLOAD_URL_KEY, DOWNLOAD_URL);
props.setProperty(BOOK_EISBN_KEY, BOOK_EISBN);
Configuration config = ConfigurationUtil.fromProps(props);
plugin = new DefinablePlugin();
plugin.initPlugin(getMockLockssDaemon(), PLUGIN_NAME);
DefinableArchivalUnit au = (DefinableArchivalUnit)plugin.createAu(config);
return au;
}
/** Starts a mock daemon, a simulated-content AU and the AU under test. */
public void setUp() throws Exception {
super.setUp();
String tempDirPath = setUpDiskSpace();
theDaemon = getMockLockssDaemon();
theDaemon.getAlertManager();
theDaemon.getPluginManager().setLoadablePluginsReady(true);
theDaemon.setDaemonInited(true);
theDaemon.getPluginManager().startService();
theDaemon.getCrawlManager();
sau = PluginTestUtil.createAndStartSimAu(MySimulatedPlugin.class, simAuConfig(tempDirPath));
bau = makeAu();
}
/** Deletes the simulated content tree and stops the daemon. */
public void tearDown() throws Exception {
sau.deleteContentTree();
theDaemon.stopDaemon();
super.tearDown();
}
/**
 * Builds the configuration for the simulated AU rooted at rootPath,
 * mirroring the parameters of the AU under test.
 */
Configuration simAuConfig(String rootPath) {
Configuration conf = ConfigManager.newConfiguration();
conf.put("root", rootPath);
conf.put("base_url", BASE_URL);
conf.put("download_url", DOWNLOAD_URL);
conf.put("book_eisbn", BOOK_EISBN);
conf.put("depth", "2");
conf.put("branch", "3");
conf.put("numFiles", "7");
conf.put("fileTypes", "" + (SimulatedContentGenerator.FILE_TYPE_PDF + SimulatedContentGenerator.FILE_TYPE_HTML));
conf.put("default_article_mime_type", "application/html");
return conf;
}
// Expected metadata values for the "goodContent" fixture below.
String goodDoi = "10.1007/978-3-658-04916-4";
String goodISBN = "978-3-628-74912-7";
String goodEISBN = "978-3-628-74916-4";
String goodDate = "2714";
String[] goodAuthors = {"Kelly Kooper", "Foo Bar", "E. Hello World", "Face Face"};
String goodBookTitle = "Contemporary Turkey at a Chicken";
String goodPublisher = "Chicken Fachmedien Wiesbaden";
// Anonymized snapshot of the "summary" <div> of a SpringerLink book landing
// page; the extractor pulls title, ISBNs, DOI, copyright year and editors
// from this markup.
String goodContent =
" <div class=\"summary\">\n"+
" <dl>\n"+
" <dt>Book Title</dt>\n"+
" <dd id=\"abstract-about-title\">Contemporary Turkey at a Chicken</dd>\n"+
" <dt id=\"dt-abstract-about-book-subtitle\">Book Subtitle</dt>\n"+
"<dd id=\"abstract-about-book-subtitle\">Chicken Chicken Chicken Turkey</dd>\n"+
"\n"+
" <dt>Open Access</dt>\n"+
"<dd id=\"abstract-about-openaccess\">\n"+
"Available under\n"+
"<span class=\"help\">\n"+
"Open Access\n"+
"<span class=\"tooltip\">This content is freely available online to anyone, anywhere at any time.</span>\n"+
"</span>\n"+
"</dd>\n"+
"\n"+
" \n"+
" <dt id=\"dt-abstract-about-book-chapter-copyright-year\">Copyright</dt>\n"+
"<dd id=\"abstract-about-book-chapter-copyright-year\">2714</dd>\n"+
"\n"+
" <dt>DOI</dt>\n"+
" <dd id=\"abstract-about-book-chapter-doi\">10.1007/978-3-658-04916-4</dd>\n"+
" <dt id=\"dt-abstract-about-book-print-isbn\">Print ISBN</dt>\n"+
"<dd id=\"abstract-about-book-print-isbn\">978-3-628-74912-7</dd>\n"+
"\n"+
" <dt id=\"dt-abstract-about-book-online-isbn\">Online ISBN</dt>\n"+
"<dd id=\"abstract-about-book-online-isbn\">978-3-628-74916-4</dd>\n"+
"\n"+
" \n"+
" \n"+
" \n"+
" \n"+
" <dt id=\"dt-abstract-about-publisher\">Publisher</dt>\n"+
"<dd id=\"abstract-about-publisher\">Chicken Fachmedien Wiesbaden</dd>\n"+
"\n"+
" <dt id=\"dt-abstract-about-book-copyright-holder\">Copyright Holder</dt>\n"+
"<dd id=\"abstract-about-book-copyright-holder\">The Editor(s) (if applicable) and the Author(s) 2714. The book is published with open access at chickenLink.com</dd>\n"+
"\n"+
" <dt>Additional Links</dt>\n"+
" <dd id=\"abstract-about-additional-links\">\n"+
" <ul>\n"+
" <li>\n"+
" <a class=\"external\" href=\"http://www.chicken.com/978-3-628-74912-7\" target=\"_blank\" title=\"It opens in new window\">About this Book</a>\n"+
" </li>\n"+
" </ul>\n"+
" </dd>\n"+
"</dl>\n"+
"<dl>\n"+
"<dt>Topics</dt>\n"+
"<dd itemprop=\"genre\">\n"+
"<ul class=\"abstract-about-subject\">\n"+
"<li>\n"+
"<a href=\"/search?facet-subject\">Cultural Studies</a>\n"+
"</li>\n"+
"<li>\n"+
"<a href=\"/search?facet-subject\">Social Structure, Social Inequality</a>\n"+
"</li>\n"+
"<li>\n"+
"<a href=\"/search?facet-subject=\">Sociology, general</a>\n"+
"</li>\n"+
"</ul>\n"+
"</dd>\n"+
"\n"+
"\n"+
"\n"+
"<dt>eBook Packages</dt>\n"+
"<dd itemprop=\"genre\">\n"+
"<ul class=\"abstract-about-ebook-packages\">\n"+
"<li>\n"+
"<a href=\"/search?package\">eBook Package english Humanities, Social Sciences & Law</a>\n"+
"</li>\n"+
"<li>\n"+
"<a href=\"/search?package\">eBook Package english full Collection</a>\n"+
"</li>\n"+
"</ul>\n"+
"</dd>\n"+
"\n"+
"</dl>\n"+
"<dl>\n"+
"<dt>Editors</dt>\n"+
"<dd>\n"+
"<ul class=\"editors\">\n"+
"<li itemprop=\"editor\" itemscope=\"itemscope\" itemtype=\"http://schema.org/Person\">\n"+
"<a class=\"person\" href=\"/search\" itemprop=\"name\">Kelly Kooper</a>\n"+
"<a class=\"envelope\" href=\"mailto:sv@email.com\" title=\"sv@email.com\"><img src=\"/envelope.png\" alt=\"sv@email.com\"/></a>\n"+
"<sup title=\"H-U B\">(1)</sup>\n"+
"</li>\n"+
"<li itemprop=\"editor\" itemscope=\"itemscope\" itemtype=\"http://schema.org/Person\">\n"+
"<a class=\"person\" href=\"/search?facet\" itemprop=\"name\">Foo Bar</a>\n"+
"<a class=\"envelope\" href=\"mailto:ay@edu.tr\" title=\"ay@edu.tr\"><img src=\"/envelope.png\" alt=\"ay@edu.tr\"/></a>\n"+
"<sup title=\"I B U\">(2)</sup>\n"+
"</li>\n"+
"<li itemprop=\"editor\" itemscope=\"itemscope\" itemtype=\"http://schema.org/Person\">\n"+
"<a class=\"person\" href=\"/search?facet\" itemprop=\"name\">E. Hello World</a>\n"+
"<a class=\"envelope\" href=\"mailto:fu@univ.edu\" title=\"fu@univ.edu\"><img src=\"/envelope.png\" alt=\"fu@univ.edu\"/></a>\n"+
"<sup title=\"S U\">(3)</sup>\n"+
"</li>\n"+
"<li itemprop=\"editor\" itemscope=\"itemscope\" itemtype=\"http://schema.org/Person\">\n"+
"<a class=\"person\" href=\"/search?facet\" itemprop=\"name\">Face Face</a>\n"+
"<a class=\"envelope\" href=\"mailto:oonursal@bilgi.edu.tr\" title=\"oo@bedu.tr\"><img src=\"envelope.png\" alt=\"oo@edu.tr\"/></a>\n"+
"<sup title=\"I B U\">(4)</sup>\n"+
"</li>\n"+
"</ul>\n"+
"</dd>\n"+
"<dt>Editor Affiliations</dt>\n"+
"<dd>\n"+
"<ul class=\"editor-affiliations\">\n"+
"<li>\n"+
"<span class=\"position\">1.</span>\n"+
"<span class=\"affiliation\">\n"+
"H-U B\n"+
"</span>\n"+
"</li>\n"+
"<li>\n"+
"<span class=\"position\">2.</span>\n"+
"<span class=\"affiliation\">\n"+
"I B U\n"+
"</span>\n"+
"</li>\n"+
"<li>\n"+
"<span class=\"position\">3.</span>\n"+
"<span class=\"affiliation\">\n"+
"S U\n"+
"</span>\n"+
"</li>\n"+
"<li>\n"+
"<span class=\"position\">4.</span>\n"+
"<span class=\"affiliation\">\n"+
"I B U\n"+
"</span>\n"+
"</li>\n"+
"</ul>\n"+
"</dd>\n"+
"\n"+
"\n"+
"\n"+
"</dl>\n"+
"\n"+
"</div>";
/**
 * Runs the extractor over the good fixture and checks DOI, print/online
 * ISBN, editors (mapped to authors), title and copyright year against the
 * expected values above.
 */
public void testExtractFromGoodContent() throws Exception {
String url = "http://www.example.com/book/" + BOOK_EISBN;
MockCachedUrl cu = new MockCachedUrl(url, bau);
cu.setContent(goodContent);
cu.setContentSize(goodContent.length());
cu.setProperty(CachedUrl.PROPERTY_CONTENT_TYPE, "text/html");
FileMetadataExtractor me = new SpringerLinkBooksHtmlMetadataExtractorFactory().
new SpringerLinkBooksHtmlMetadataExtractor();
FileMetadataListExtractor mle =
new FileMetadataListExtractor(me);
List<ArticleMetadata> mdlist = mle.extract(MetadataTarget.Any(), cu);
assertNotEmpty(mdlist);
ArticleMetadata md = mdlist.get(0);
assertNotNull(md);
assertEquals(goodDoi,
md.get(MetadataField.FIELD_DOI));
assertEquals(goodISBN,
md.get(MetadataField.FIELD_ISBN));
assertEquals(goodEISBN,
md.get(MetadataField.FIELD_EISBN));
List<String> actualAuthors = md.getList(MetadataField.FIELD_AUTHOR);
for (int i = 0; i < 4; i++) {
assertEquals(goodAuthors[i], actualAuthors.get(i));
}
assertEquals(goodBookTitle,
md.get(MetadataField.FIELD_PUBLICATION_TITLE));
assertEquals(goodDate, md.get(MetadataField.FIELD_DATE));
//TDB should provide publisher
assertNull(md.get(MetadataField.FIELD_PUBLISHER));
}
// Fixture with none of the markup the extractor looks for; no cooked
// metadata should be produced from it.
String badContent = "<HTML><HEAD><TITLE>" + goodBookTitle + "</TITLE></HEAD><BODY>\n" +
"<meta name=\"foo\"" + " content=\"bar\">\n" +
" <div id=\"issn\">" +
"<!-- FILE: /data/templates/www.example.com/bogus/issn.inc -->MUMBLE: " +
goodISBN + " </div>\n";
/**
 * Runs the extractor over the bad fixture and verifies that no metadata
 * fields are cooked and the raw map stays empty.
 */
public void testExtractFromBadContent() throws Exception {
String url = "http://www.example.com/publishing/journals/AC/article.asp";
MockCachedUrl cu = new MockCachedUrl(url, bau);
cu.setContent(badContent);
cu.setContentSize(badContent.length());
cu.setProperty(CachedUrl.PROPERTY_CONTENT_TYPE, "text/html");
FileMetadataExtractor me = new SpringerLinkBooksHtmlMetadataExtractorFactory().
new SpringerLinkBooksHtmlMetadataExtractor();
FileMetadataListExtractor mle =
new FileMetadataListExtractor(me);
List<ArticleMetadata> mdlist = mle.extract(MetadataTarget.Any(), cu);
assertNotEmpty(mdlist);
ArticleMetadata md = mdlist.get(0);
assertNotNull(md);
assertNull(md.get(MetadataField.FIELD_DOI));
assertNull(md.get(MetadataField.FIELD_VOLUME));
assertNull(md.get(MetadataField.FIELD_ISSUE));
assertNull(md.get(MetadataField.FIELD_START_PAGE));
assertNull(md.get(MetadataField.FIELD_ISSN));
assertEquals(0, md.rawSize());
}
/** Simulated plugin whose AUs serve content from MySimulatedContentGenerator. */
public static class MySimulatedPlugin extends SimulatedPlugin {
public ArchivalUnit createAu0(Configuration auConfig)
throws ArchivalUnit.ConfigurationException {
ArchivalUnit au = new SimulatedArchivalUnit(this);
au.setConfiguration(auConfig);
return au;
}
public SimulatedContentGenerator getContentGenerator(Configuration cf, String fileRoot) {
return new MySimulatedContentGenerator(fileRoot);
}
}
/** Generates simple HTML files carrying lockss.* meta tags for the simulated AU. */
public static class MySimulatedContentGenerator extends SimulatedContentGenerator {
protected MySimulatedContentGenerator(String fileRoot) {
super(fileRoot);
}
// Wraps the default generated content in an HTML page with file/depth/branch meta tags.
public String getHtmlFileContent(String filename, int fileNum, int depth, int branchNum, boolean isAbnormal) {
String file_content = "<HTML><HEAD><TITLE>" + filename + "</TITLE></HEAD><BODY>\n";
file_content += " <meta name=\"lockss.filenum\" content=\"" + fileNum + "\">\n";
file_content += " <meta name=\"lockss.depth\" content=\"" + depth + "\">\n";
file_content += " <meta name=\"lockss.branchnum\" content=\"" + branchNum + "\">\n";
file_content += getHtmlContent(fileNum, depth, branchNum, isAbnormal);
file_content += "\n</BODY></HTML>";
logger.debug2("MySimulatedContentGenerator.getHtmlFileContent: "
+ file_content);
return file_content;
}
}
}
| |
/*
* Copyright 2016 Carlos Ballesteros Velasco
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.lang;
import com.jtransc.JTranscSystem;
import com.jtransc.annotation.JTranscAddIncludes;
import com.jtransc.annotation.JTranscAddMembers;
import com.jtransc.annotation.JTranscMethodBody;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
@JTranscAddMembers(target = "cs", value = "System.Threading.Thread _cs_thread;")
@JTranscAddIncludes(target = "cpp", value = {"thread", "map"})
@JTranscAddMembers(target = "cpp", value = "std::thread t_;")
@JTranscAddMembers(target = "cpp", value = "static std::map<std::thread::id, {% CLASS java.lang.Thread %}*> ###_cpp_threads;")
/**
 * JTransc replacement for {@code java.lang.Thread}.
 *
 * <p>Real threads are created via the per-target {@code @JTranscMethodBody}
 * snippets (C#, C++, JS). On targets without thread support the pure-Java
 * fallback in {@link #_start(int)} runs the "thread" body on the caller's
 * thread. Thread bookkeeping lives in {@link #_threadsById}, guarded by
 * {@link #staticLock}.
 */
public class Thread implements Runnable {
  public final static int MIN_PRIORITY = 1;
  public final static int NORM_PRIORITY = 5;
  public final static int MAX_PRIORITY = 10;

  //@JTranscMethodBody(target = "js", value = "return _jc.threadId;")
  //public static int currentThreadId() {return (int) currentThread().getId();}

  /** Returns the currently executing thread; falls back to the synthetic main thread. */
  public static Thread currentThread() {
    lazyPrepareThread();
    Thread out = _getCurrentThreadOrNull();
    return (out != null) ? out : _mainThread;
  }

  @JTranscMethodBody(target = "cpp", value = "return _cpp_threads[std::this_thread::get_id()];")
  @JTranscMethodBody(target = "js", value = "return {% SMETHOD #CLASS:getDefaultThread %}();")
  private static Thread _getCurrentThreadOrNull() {
    // Pure-Java fallback: only valid for programs with a single thread —
    // simply returns the first registered one.
    for (Thread t : getThreadsCopy()) return t;
    return null;
  }

  private static Thread getThreadById(int id) {
    //JTranscConsole.log("getThreadById: " + id);
    return _threadsById.get((long) id);
  }

  private static Thread getDefaultThread() {
    lazyPrepareThread();
    return _mainThread;
  }

  /** Best-effort stack trace (captured from the calling thread, not this thread object). */
  public StackTraceElement[] getStackTrace() {
    return new Throwable().getStackTrace();
  }

  @SuppressWarnings("unused")
  //@JTranscMethodBody(target = "cpp", value = "std::this_thread::yield();")
  public static void yield() {
    // Approximated with a 1 ms sleep on targets without a native yield.
    try {
      Thread.sleep(1L);
    } catch (InterruptedException e) {
      e.printStackTrace();
    }
  }

  @JTranscMethodBody(target = "cpp", value = "std::this_thread::sleep_for(std::chrono::milliseconds(p0));")
  public static void sleep(long millis) throws InterruptedException {
    JTranscSystem.sleep(millis);
  }

  @JTranscMethodBody(target = "cpp", value = "std::this_thread::sleep_for(std::chrono::milliseconds(p0));")
  //FIXME: the nanos argument is ignored
  public static void sleep(long millis, int nanos) throws InterruptedException {
    JTranscSystem.sleep(millis);
  }

  public Thread() {
    this(null, null, null, 1024);
  }

  // Registry of live (and the main) threads, keyed by Thread.getId().
  static private LinkedHashMap<Long, Thread> _threadsById;
  private ThreadGroup group;
  public String name;
  private long stackSize;
  private Runnable target;
  // FIX: the JDK default priority is NORM_PRIORITY (5); this was MIN_PRIORITY.
  private int priority = NORM_PRIORITY;
  private int id;
  static private int lastId = 0;
  private UncaughtExceptionHandler uncaughtExceptionHandler = defaultUncaughtExceptionHandler;

  public Thread(Runnable target) {
    this(null, target, null, 1024);
  }

  public Thread(ThreadGroup group, Runnable target) {
    this(group, target, null, 1024);
  }

  public Thread(String name) {
    this(null, null, name, 1024);
  }

  public Thread(ThreadGroup group, String name) {
    this(group, null, name, 1024);
  }

  public Thread(Runnable target, String name) {
    this(null, target, name, 1024);
  }

  public Thread(ThreadGroup group, Runnable target, String name) {
    this(group, target, name, 1024);
  }

  /**
   * Designated constructor. All other constructors delegate here.
   * Note: when {@code group} is non-null (as in {@link #lazyPrepareThread()})
   * this does NOT call {@link #currentThread()}, avoiding recursion during
   * main-thread bootstrap.
   */
  public Thread(ThreadGroup group, Runnable target, String name, long stackSize) {
    this.group = (group != null) ? group : currentThread().getThreadGroup();
    this.target = target;
    this.id = lastId++;
    // FIX: previously ("thread-" + id++), which incremented this thread's id a
    // second time and made getId() collide with the id handed to the next
    // constructed thread (breaking the _threadsById registry).
    this.name = (name != null) ? name : ("thread-" + id);
    this.stackSize = stackSize;
    _init();
  }

  private void _init() {
  }

  private boolean _isAlive;
  static private final Object staticLock = new Object();
  static private ThreadGroup _mainThreadGroup = null;
  static private Thread _mainThread = null;

  synchronized static private Thread[] getThreadsCopy() {
    Collection<Thread> threads = getThreadSetInternal().values();
    synchronized (staticLock) {
      return threads.toArray(new Thread[0]);
    }
  }

  /** Lazily creates the main thread group, the main thread and the registry. */
  static private void lazyPrepareThread() {
    synchronized (staticLock) {
      if (_mainThreadGroup == null) {
        _mainThreadGroup = new ThreadGroup("main");
      }
      if (_mainThread == null) {
        _mainThread = new Thread(_mainThreadGroup, "main");
      }
      if (_threadsById == null) {
        _threadsById = new LinkedHashMap<>();
        _threadsById.put(_mainThread.getId(), _mainThread);
      }
    }
  }

  static private LinkedHashMap<Long, Thread> getThreadSetInternal() {
    lazyPrepareThread();
    return _threadsById;
  }

  /** Registers this thread as alive and starts it on the target platform. */
  public synchronized void start() {
    runInternalPreInit();
    _start(id);
  }

  @JTranscMethodBody(target = "cs", value = "_cs_thread = new System.Threading.Thread(new System.Threading.ThreadStart(delegate() { this{% IMETHOD java.lang.Thread:runInternal:()V %}(); })); _cs_thread.Start();")
  @JTranscMethodBody(target = "cpp", value = "t_ = std::thread(&{% SMETHOD java.lang.Thread:runInternalStatic:(Ljava/lang/Thread;)V %}, this);")
  @JTranscMethodBody(target = "js", value = "this{% IMETHOD java.lang.Thread:runInternal:()V %}();")
  private void _start(@SuppressWarnings("unused") int threadId) {
    // Fallback for targets without threads: run synchronously on the caller.
    System.err.println("WARNING: Threads not supported! Executing thread code in the parent's thread!");
    runInternal();
  }

  @SuppressWarnings("unused")
  private void runInternal() {
    try {
      runInternalInit();
      run();
    } catch (Throwable t) {
      uncaughtExceptionHandler.uncaughtException(this, t);
    } finally {
      runExit();
    }
  }

  @SuppressWarnings("unused")
  static private void runInternalStatic(Thread thread) {
    thread.runInternal();
  }

  @JTranscMethodBody(target = "cpp", value = "GC_init_pre_thread();")
  private void runInternalPreInitNative() {
  }

  private void runInternalPreInit() {
    runInternalPreInitNative();
    final LinkedHashMap<Long, Thread> set = getThreadSetInternal();
    synchronized (staticLock) {
      set.put(getId(), this);
      _isAlive = true;
    }
  }

  @JTranscMethodBody(target = "cpp", value = "GC_init_thread(); _cpp_threads[t_.get_id()] = this;")
  private void runInternalInit() {
  }

  @JTranscMethodBody(target = "cpp", value = "_cpp_threads.erase(t_.get_id()); GC_finish_thread();")
  private void runInternalExit() {
  }

  private void runExit() {
    final LinkedHashMap<Long, Thread> set = getThreadSetInternal();
    synchronized (this) {
      runInternalExit();
      set.remove(getId());
      _isAlive = false;
    }
  }

  @Override
  public void run() {
    if (this.target != null) {
      this.target.run();
    }
  }

  @Deprecated
  native public final void stop();

  @Deprecated
  public final synchronized void stop(Throwable obj) {
  }

  public void interrupt() {
  }

  // NOTE: interruption is not implemented; always false, and unlike the JDK
  // this does not clear any status (there is none to clear).
  public static boolean interrupted() {
    return Thread.currentThread().isInterrupted();
  }

  public boolean isInterrupted() {
    return false;
  }

  @Deprecated
  public void destroy() {
  }

  public final boolean isAlive() {
    //System.out.println("isAlive: " + _isAlive);
    return _isAlive;
  }

  @Deprecated
  native public final void suspend();

  @Deprecated
  native public final void resume();

  /**
   * Sets this thread's priority.
   *
   * @throws IllegalArgumentException if outside [MIN_PRIORITY, MAX_PRIORITY],
   *         matching the JDK contract (previously accepted silently)
   */
  public final void setPriority(int newPriority) {
    if (newPriority < MIN_PRIORITY || newPriority > MAX_PRIORITY) {
      throw new IllegalArgumentException();
    }
    this.priority = newPriority;
  }

  public final int getPriority() {
    return priority;
  }

  public final synchronized void setName(String name) {
    this.name = name;
  }

  public final String getName() {
    return this.name;
  }

  public final ThreadGroup getThreadGroup() {
    return group;
  }

  public static int activeCount() {
    return getThreadsCopy().length;
  }

  /** Copies live threads into tarray; returns how many were stored. */
  public static int enumerate(Thread tarray[]) {
    int n = 0;
    for (Thread thread : getThreadsCopy()) {
      if (n >= tarray.length) break;
      tarray[n++] = thread;
    }
    return n;
  }

  @Deprecated
  public int countStackFrames() {
    return 0;
  }

  public final synchronized void join(long millis) throws InterruptedException {
    join(millis, 0);
  }

  /**
   * Waits at most {@code millis} ms for this thread to die; {@code nanos} is
   * ignored beyond rounding intent.
   *
   * @throws IllegalArgumentException if millis is negative (JDK contract)
   */
  public final synchronized void join(long millis, int nanos) throws InterruptedException {
    if (millis < 0) {
      throw new IllegalArgumentException("timeout value is negative");
    }
    if (millis == 0) {
      // FIX: per the JDK contract a zero timeout means "wait forever";
      // previously elapsed >= 0 made join(0) return immediately.
      join();
      return;
    }
    final long start = System.currentTimeMillis();
    while (isAlive()) {
      final long current = System.currentTimeMillis();
      final long elapsed = current - start;
      if (elapsed >= millis) break;
      Thread.sleep(1L);
    }
  }

  public final void join() throws InterruptedException {
    while (isAlive()) {
      Thread.sleep(1L);
    }
  }

  native public static void dumpStack();

  private boolean _isDaemon = false;

  // NOTE: daemon status is recorded but has no effect on process lifetime here.
  public final void setDaemon(boolean on) {
    _isDaemon = on;
  }

  public final boolean isDaemon() {
    return _isDaemon;
  }

  native public final void checkAccess();

  public String toString() {
    ThreadGroup group = getThreadGroup();
    if (group != null) {
      return "Thread[" + getName() + "," + getPriority() + "," + group.getName() + "]";
    } else {
      return "Thread[" + getName() + "," + getPriority() + "," + "]";
    }
  }

  private ClassLoader classLoader = null;

  public ClassLoader getContextClassLoader() {
    if (this.classLoader == null) {
      this.classLoader = _ClassInternalUtils.getSystemClassLoader();
    }
    return this.classLoader;
  }

  public void setContextClassLoader(ClassLoader cl) {
    this.classLoader = cl;
  }

  // NOTE: monitor introspection is unsupported; always false.
  public static boolean holdsLock(Object obj) {
    return false;
  }

  public static Map<Thread, StackTraceElement[]> getAllStackTraces() {
    return new HashMap<Thread, StackTraceElement[]>();
  }

  public long getId() {
    return id;
  }

  public enum State {
    NEW, RUNNABLE, BLOCKED, WAITING, TIMED_WAITING, TERMINATED
  }

  // NOTE: lifecycle states are not tracked; always reports RUNNABLE.
  public State getState() {
    return State.RUNNABLE;
  }

  public interface UncaughtExceptionHandler {
    void uncaughtException(Thread t, Throwable e);
  }

  static public UncaughtExceptionHandler defaultUncaughtExceptionHandler = (t, e) -> {
    System.out.println(t);
    System.out.println(e);
  };

  public static void setDefaultUncaughtExceptionHandler(UncaughtExceptionHandler eh) {
    defaultUncaughtExceptionHandler = eh;
  }

  public static UncaughtExceptionHandler getDefaultUncaughtExceptionHandler() {
    return defaultUncaughtExceptionHandler;
  }

  public UncaughtExceptionHandler getUncaughtExceptionHandler() {
    return uncaughtExceptionHandler;
  }

  public void setUncaughtExceptionHandler(UncaughtExceptionHandler eh) {
    this.uncaughtExceptionHandler = eh;
  }

  @Override
  protected Object clone() throws CloneNotSupportedException {
    throw new CloneNotSupportedException();
  }
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.inventory.rest;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static javax.ws.rs.core.Response.Status.FORBIDDEN;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.hawkular.inventory.api.OperationTypes;
import org.hawkular.inventory.api.model.CanonicalPath;
import org.hawkular.inventory.api.model.DataEntity;
import org.hawkular.inventory.rest.json.ApiError;
import com.wordnik.swagger.annotations.Api;
import com.wordnik.swagger.annotations.ApiOperation;
import com.wordnik.swagger.annotations.ApiParam;
import com.wordnik.swagger.annotations.ApiResponse;
import com.wordnik.swagger.annotations.ApiResponses;
/**
* @author Lukas Krejci
* @since 0.4.0
*/
@Path("/")
@Produces(APPLICATION_JSON)
@Consumes(APPLICATION_JSON)
@Api(value = "/", description = "CRUD for operation type data")
public class RestResourceTypesOperationTypesData extends RestBase {

    // NOTE(review): swagger descriptions below were copy-pasted between the
    // POST/PUT/DELETE/GET endpoints; they have been corrected to describe what
    // each endpoint actually does. @javax.ws.rs.Path is replaced by the
    // already-imported @Path for consistency with the GET endpoints.

    @POST
    @Path("/resourceTypes/{resourceTypeId}/operationTypes/{operationTypeId}/data")
    @ApiOperation("Creates the configuration for pre-existing resource type")
    @ApiResponses({
            // FIX: ResponseUtil.created(...) produces 201 Created, not 204.
            @ApiResponse(code = 201, message = "OK Created"),
            @ApiResponse(code = 404, message = "Tenant or resource type doesn't exist",
                    response = ApiError.class),
            @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
    })
    public Response createConfiguration(@PathParam("resourceTypeId") String resourceType,
            @PathParam("operationTypeId") String operationTypeId,
            @ApiParam(required = true)
            DataEntity.Blueprint<OperationTypes.DataRole> configuration,
            @Context UriInfo uriInfo) {
        return doCreateData(null, null, resourceType, operationTypeId, configuration, uriInfo);
    }

    @POST
    @Path("/{environmentId}/{feedId}/resourceTypes/{resourceTypeId}/operationTypes/{operationTypeId}/data")
    @ApiOperation("Creates the configuration for pre-existing resource type")
    @ApiResponses({
            // FIX: ResponseUtil.created(...) produces 201 Created, not 204.
            @ApiResponse(code = 201, message = "OK Created"),
            @ApiResponse(code = 404, message = "Tenant, environment, resource type or feed doesn't exist",
                    response = ApiError.class),
            @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
    })
    public Response createConfiguration(@PathParam("environmentId") String environmentId,
            @PathParam("feedId") String feedId,
            @PathParam("resourceTypeId") String resourceType,
            @PathParam("operationTypeId") String operationTypeId,
            @ApiParam(required = true)
            DataEntity.Blueprint<OperationTypes.DataRole> configuration,
            @Context UriInfo uriInfo) {
        return doCreateData(environmentId, feedId, resourceType, operationTypeId, configuration, uriInfo);
    }

    @PUT
    @Path("/resourceTypes/{resourceTypeId}/operationTypes/{operationTypeId}/data")
    @ApiOperation("Updates the configuration of a resource type")
    @ApiResponses({
            @ApiResponse(code = 204, message = "OK"),
            @ApiResponse(code = 404, message = "Tenant, or resource type doesn't exist",
                    response = ApiError.class),
            @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
    })
    public Response updateData(@PathParam("resourceTypeId") String resourceType,
            @PathParam("operationTypeId") String operationTypeId,
            @QueryParam("dataType") @DefaultValue("returnType")
            OperationTypes.DataRole dataType,
            @ApiParam(required = true) DataEntity.Update data) {
        return doUpdateData(null, null, resourceType, operationTypeId, dataType, data);
    }

    @PUT
    @Path("/{environmentId}/{feedId}/resourceTypes/{resourceTypeId}/operationTypes/{operationTypeId}/data")
    @ApiOperation("Updates the configuration of a resource type")
    @ApiResponses({
            @ApiResponse(code = 204, message = "OK"),
            @ApiResponse(code = 404, message = "Tenant, environment, feed or resource type doesn't exist",
                    response = ApiError.class),
            @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
    })
    public Response updateData(@PathParam("environmentId") String environmentId, @PathParam("feedId") String feedId,
            @PathParam("resourceTypeId") String resourceType,
            @PathParam("operationTypeId") String operationTypeId,
            @QueryParam("dataType") @DefaultValue("returnType")
            OperationTypes.DataRole dataType,
            @ApiParam(required = true) DataEntity.Update data) {
        return doUpdateData(environmentId, feedId, resourceType, operationTypeId, dataType, data);
    }

    @DELETE
    @Path("/resourceTypes/{resourceTypeId}/operationTypes/{operationTypeId}/data")
    // FIX: previously said "Updates ..." — copy-paste from the PUT endpoint.
    @ApiOperation("Deletes the configuration of a resource type")
    @ApiResponses({
            @ApiResponse(code = 204, message = "OK"),
            @ApiResponse(code = 404, message = "Tenant, or resource type doesn't exist",
                    response = ApiError.class),
            @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
    })
    public Response deleteData(@PathParam("resourceTypeId") String resourceType,
            @PathParam("operationTypeId") String operationTypeId,
            @QueryParam("dataType") @DefaultValue("returnType")
            OperationTypes.DataRole dataType) {
        return doDeleteData(null, null, resourceType, operationTypeId, dataType);
    }

    @DELETE
    @Path("/{environmentId}/{feedId}/resourceTypes/{resourceTypeId}/operationTypes/{operationTypeId}/data")
    // FIX: previously said "Updates ..." — copy-paste from the PUT endpoint.
    @ApiOperation("Deletes the configuration of a resource type")
    @ApiResponses({
            @ApiResponse(code = 204, message = "OK"),
            @ApiResponse(code = 404, message = "Tenant, environment, feed or resource type doesn't exist",
                    response = ApiError.class),
            @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
    })
    public Response deleteData(@PathParam("environmentId") String environmentId, @PathParam("feedId") String feedId,
            @PathParam("resourceTypeId") String resourceType,
            @PathParam("operationTypeId") String operationTypeId,
            @QueryParam("dataType") @DefaultValue("returnType")
            OperationTypes.DataRole dataType) {
        return doDeleteData(environmentId, feedId, resourceType, operationTypeId, dataType);
    }

    @GET
    @Path("/resourceTypes/{resourceTypeId}/operationTypes/{operationTypeId}/data")
    // FIX: previously said "Retrieves a single resource type" — this endpoint
    // returns the data of an operation type.
    @ApiOperation("Retrieves the data of an operation type of a resource type")
    @ApiResponses({
            @ApiResponse(code = 200, message = "the data entity"),
            @ApiResponse(code = 404, message = "Tenant or resource type doesn't exist", response = ApiError.class),
            @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
    })
    public DataEntity get(@PathParam("resourceTypeId") String resourceTypeId,
            @PathParam("operationTypeId") String operationTypeId,
            @QueryParam("dataType") @DefaultValue("returnType")
            OperationTypes.DataRole dataType) {
        return doGetDataEntity(null, null, resourceTypeId, operationTypeId, dataType);
    }

    @GET
    @Path("/{environmentId}/{feedId}/resourceTypes/{resourceTypeId}/operationTypes/{operationTypeId}/data")
    // FIX: previously said "Retrieves a single resource type" — this endpoint
    // returns the data of an operation type.
    @ApiOperation("Retrieves the data of an operation type of a resource type")
    @ApiResponses({
            @ApiResponse(code = 200, message = "the data entity"),
            @ApiResponse(code = 404, message = "Tenant or resource type doesn't exist", response = ApiError.class),
            @ApiResponse(code = 500, message = "Server error", response = ApiError.class)
    })
    public DataEntity get(@PathParam("environmentId") String environmentId, @PathParam("feedId") String feedId,
            @PathParam("resourceTypeId") String resourceTypeId,
            @PathParam("operationTypeId") String operationTypeId,
            @QueryParam("dataType") @DefaultValue("returnType")
            OperationTypes.DataRole dataType) {
        return doGetDataEntity(environmentId, feedId, resourceTypeId, operationTypeId, dataType);
    }

    /** Creates the data entity under the resolved operation type; 403 if the caller may not update it. */
    private Response doCreateData(String environmentId, String feedId, String resourceTypeId, String operationTypeId,
            DataEntity.Blueprint<OperationTypes.DataRole> blueprint, UriInfo uriInfo) {
        CanonicalPath operationType = getOperationTypePath(environmentId, feedId, resourceTypeId, operationTypeId);
        if (!security.canUpdate(operationType)) {
            return Response.status(FORBIDDEN).build();
        }
        inventory.inspect(operationType, OperationTypes.Single.class).data().create(blueprint);
        return ResponseUtil.created(uriInfo, blueprint.getRole().name()).build();
    }

    /** Updates the data entity under the resolved operation type; 403 if the caller may not update it. */
    private Response doUpdateData(String environmentId, String feedId, String resourceTypeId, String operationTypeId,
            OperationTypes.DataRole dataType, DataEntity.Update update) {
        CanonicalPath operationType = getOperationTypePath(environmentId, feedId, resourceTypeId, operationTypeId);
        if (!security.canUpdate(operationType)) {
            return Response.status(FORBIDDEN).build();
        }
        inventory.inspect(operationType, OperationTypes.Single.class).data().update(dataType, update);
        return Response.noContent().build();
    }

    /** Deletes the data entity under the resolved operation type; 403 if the caller may not update it. */
    private Response doDeleteData(String environmentId, String feedId, String resourceTypeId, String operationTypeId,
            OperationTypes.DataRole dataType) {
        CanonicalPath operationType = getOperationTypePath(environmentId, feedId, resourceTypeId, operationTypeId);
        if (!security.canUpdate(operationType)) {
            return Response.status(FORBIDDEN).build();
        }
        inventory.inspect(operationType, OperationTypes.Single.class).data().delete(dataType);
        return Response.noContent().build();
    }

    /** Reads the data entity for the given role under the resolved operation type. */
    private DataEntity doGetDataEntity(String environmentId, String feedId, String resourceTypeId,
            String operationTypeId, OperationTypes.DataRole dataType) {
        return inventory
                .inspect(getOperationTypePath(environmentId, feedId, resourceTypeId, operationTypeId),
                        OperationTypes.Single.class)
                .data().get(dataType).entity();
    }

    /**
     * Resolves the canonical path of the operation type: tenant-scoped when
     * environmentId is null, otherwise scoped under environment and feed.
     */
    private CanonicalPath getOperationTypePath(String environmentId, String feedId, String resourceTypeId,
            String operationTypeId) {
        if (environmentId == null) {
            return CanonicalPath.of().tenant(getTenantId()).resourceType(resourceTypeId)
                    .operationType(operationTypeId).get();
        } else {
            return CanonicalPath.of().tenant(getTenantId()).environment(environmentId).feed(feedId)
                    .resourceType(resourceTypeId).operationType(operationTypeId).get();
        }
    }
}
| |
// This is an OpenGL ES 1.0 dynamic font rendering system. It loads actual font
// files, generates a font map (texture) from them, and allows rendering of
// text strings.
//
// NOTE: the rendering portions of this class use a sprite batcher in order
// to provide decent speed rendering. Also, rendering assumes a BOTTOM-LEFT
// origin, and the (x,y) positions are relative to that, as well as the
// bottom-left of the string to render.
package uk.co.blogspot.fractiousg.texample;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Typeface;
import android.opengl.GLUtils;
import android.util.Log;
import javax.microedition.khronos.opengles.GL10;
public class GLText {
//--Constants--//
public final static int CHAR_START = 32; // First Character (ASCII Code)
public final static int CHAR_END = 126; // Last Character (ASCII Code)
public final static int CHAR_CNT = ( ( ( CHAR_END - CHAR_START ) + 1 ) + 1 ); // Character Count (Including Character to use for Unknown)
public final static int CHAR_NONE = 32; // Character to Use for Unknown (ASCII Code)
public final static int CHAR_UNKNOWN = ( CHAR_CNT - 1 ); // Index of the Unknown Character
public final static int FONT_SIZE_MIN = 6; // Minumum Font Size (Pixels)
public final static int FONT_SIZE_MAX = 180; // Maximum Font Size (Pixels)
public final static int CHAR_BATCH_SIZE = 100; // Number of Characters to Render Per Batch
//--Members--//
GL10 gl; // GL10 Instance
AssetManager assets; // Asset Manager
SpriteBatch batch; // Batch Renderer
int fontPadX, fontPadY; // Font Padding (Pixels; On Each Side, ie. Doubled on Both X+Y Axis)
float fontHeight; // Font Height (Actual; Pixels)
float fontAscent; // Font Ascent (Above Baseline; Pixels)
float fontDescent; // Font Descent (Below Baseline; Pixels)
int textureId; // Font Texture ID [NOTE: Public for Testing Purposes Only!]
int textureSize; // Texture Size for Font (Square) [NOTE: Public for Testing Purposes Only!]
TextureRegion textureRgn; // Full Texture Region
float charWidthMax; // Character Width (Maximum; Pixels)
float charHeight; // Character Height (Maximum; Pixels)
final float[] charWidths; // Width of Each Character (Actual; Pixels)
TextureRegion[] charRgn; // Region of Each Character (Texture Coordinates)
int cellWidth, cellHeight; // Character Cell Width/Height
int rowCnt, colCnt; // Number of Rows/Columns
float scaleX, scaleY; // Font Scale (X,Y Axis)
float spaceX; // Additional (X,Y Axis) Spacing (Unscaled)
//--Constructor--//
// D: save GL instance + asset manager, create arrays, and initialize the members
// A: gl - OpenGL ES 10 Instance
public GLText(GL10 gl, AssetManager assets) {
this.gl = gl; // Save the GL10 Instance
this.assets = assets; // Save the Asset Manager Instance
batch = new SpriteBatch( gl, CHAR_BATCH_SIZE ); // Create Sprite Batch (with Defined Size)
charWidths = new float[CHAR_CNT]; // Create the Array of Character Widths
charRgn = new TextureRegion[CHAR_CNT]; // Create the Array of Character Regions
// initialize remaining members
fontPadX = 0;
fontPadY = 0;
fontHeight = 0.0f;
fontAscent = 0.0f;
fontDescent = 0.0f;
textureId = -1;
textureSize = 0;
charWidthMax = 0;
charHeight = 0;
cellWidth = 0;
cellHeight = 0;
rowCnt = 0;
colCnt = 0;
scaleX = 1.0f; // Default Scale = 1 (Unscaled)
scaleY = 1.0f; // Default Scale = 1 (Unscaled)
spaceX = 0.0f;
}
public boolean load(String file, int size, int padX, int padY) {
Typeface tf = Typeface.createFromAsset( assets, file ); // Create the Typeface from Font File
return load(tf, size, padX, padY);
}
//--Load Font--//
// description
// this will load the specified font file, create a texture for the defined
// character range, and setup all required values used to render with it.
// arguments:
// file - Filename of the font (.ttf, .otf) to use. In 'Assets' folder.
// size - Requested pixel size of font (height)
// padX, padY - Extra padding per character (X+Y Axis); to prevent overlapping characters.
public boolean load(Typeface tf, int size, int padX, int padY) {
// setup requested values
fontPadX = padX; // Set Requested X Axis Padding
fontPadY = padY; // Set Requested Y Axis Padding
// setup paint instance for drawing
Paint paint = new Paint(); // Create Android Paint Instance
paint.setAntiAlias( true ); // Enable Anti Alias
paint.setTextSize( size ); // Set Text Size
paint.setColor( 0xffffffff ); // Set ARGB (White, Opaque)
paint.setTypeface( tf ); // Set Typeface
// get font metrics
Paint.FontMetrics fm = paint.getFontMetrics(); // Get Font Metrics
fontHeight = (float)Math.ceil( Math.abs( fm.bottom ) + Math.abs( fm.top ) ); // Calculate Font Height
fontAscent = (float)Math.ceil( Math.abs( fm.ascent ) ); // Save Font Ascent
fontDescent = (float)Math.ceil( Math.abs( fm.descent ) ); // Save Font Descent
// determine the width of each character (including unknown character)
// also determine the maximum character width
char[] s = new char[2]; // Create Character Array
charWidthMax = charHeight = 0; // Reset Character Width/Height Maximums
float[] w = new float[2]; // Working Width Value
int cnt = 0; // Array Counter
for ( char c = CHAR_START; c <= CHAR_END; c++ ) { // FOR Each Character
s[0] = c; // Set Character
paint.getTextWidths( s, 0, 1, w ); // Get Character Bounds
charWidths[cnt] = w[0]; // Get Width
if ( charWidths[cnt] > charWidthMax ) // IF Width Larger Than Max Width
charWidthMax = charWidths[cnt]; // Save New Max Width
cnt++; // Advance Array Counter
}
s[0] = CHAR_NONE; // Set Unknown Character
paint.getTextWidths( s, 0, 1, w ); // Get Character Bounds
charWidths[cnt] = w[0]; // Get Width
if ( charWidths[cnt] > charWidthMax ) // IF Width Larger Than Max Width
charWidthMax = charWidths[cnt]; // Save New Max Width
cnt++; // Advance Array Counter
// set character height to font height
charHeight = fontHeight; // Set Character Height
// find the maximum size, validate, and setup cell sizes
cellWidth = (int)charWidthMax + ( 2 * fontPadX ); // Set Cell Width
cellHeight = (int)charHeight + ( 2 * fontPadY ); // Set Cell Height
int maxSize = cellWidth > cellHeight ? cellWidth : cellHeight; // Save Max Size (Width/Height)
if ( maxSize < FONT_SIZE_MIN || maxSize > FONT_SIZE_MAX ) // IF Maximum Size Outside Valid Bounds
return false; // Return Error
// set texture size based on max font size (width or height)
// NOTE: these values are fixed, based on the defined characters. when
// changing start/end characters (CHAR_START/CHAR_END) this will need adjustment too!
if ( maxSize <= 24 ) // IF Max Size is 18 or Less
textureSize = 256; // Set 256 Texture Size
else if ( maxSize <= 40 ) // ELSE IF Max Size is 40 or Less
textureSize = 512; // Set 512 Texture Size
else if ( maxSize <= 80 ) // ELSE IF Max Size is 80 or Less
textureSize = 1024; // Set 1024 Texture Size
else // ELSE IF Max Size is Larger Than 80 (and Less than FONT_SIZE_MAX)
textureSize = 2048; // Set 2048 Texture Size
// create an empty bitmap (alpha only)
Bitmap bitmap = Bitmap.createBitmap( textureSize, textureSize, Bitmap.Config.ALPHA_8 ); // Create Bitmap
Canvas canvas = new Canvas( bitmap ); // Create Canvas for Rendering to Bitmap
bitmap.eraseColor( 0x00000000 ); // Set Transparent Background (ARGB)
// calculate rows/columns
// NOTE: while not required for anything, these may be useful to have :)
colCnt = textureSize / cellWidth; // Calculate Number of Columns
rowCnt = (int)Math.ceil( (float)CHAR_CNT / (float)colCnt ); // Calculate Number of Rows
// render each of the characters to the canvas (ie. build the font map)
float x = fontPadX; // Set Start Position (X)
float y = ( cellHeight - 1 ) - fontDescent - fontPadY; // Set Start Position (Y)
for ( char c = CHAR_START; c <= CHAR_END; c++ ) { // FOR Each Character
s[0] = c; // Set Character to Draw
canvas.drawText( s, 0, 1, x, y, paint ); // Draw Character
x += cellWidth; // Move to Next Character
if ( ( x + cellWidth - fontPadX ) > textureSize ) { // IF End of Line Reached
x = fontPadX; // Set X for New Row
y += cellHeight; // Move Down a Row
}
}
s[0] = CHAR_NONE; // Set Character to Use for NONE
canvas.drawText( s, 0, 1, x, y, paint ); // Draw Character
// generate a new texture
int[] textureIds = new int[1]; // Array to Get Texture Id
gl.glGenTextures( 1, textureIds, 0 ); // Generate New Texture
Log.i("text handle", "" + textureIds[0]);
textureId = textureIds[0]; // Save Texture Id
// setup filters for texture
gl.glBindTexture( GL10.GL_TEXTURE_2D, textureId ); // Bind Texture
gl.glTexParameterf( GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST ); // Set Minification Filter
gl.glTexParameterf( GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR ); // Set Magnification Filter
gl.glTexParameterf( GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE ); // Set U Wrapping
gl.glTexParameterf( GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE ); // Set V Wrapping
// load the generated bitmap onto the texture
GLUtils.texImage2D( GL10.GL_TEXTURE_2D, 0, bitmap, 0 ); // Load Bitmap to Texture
gl.glBindTexture( GL10.GL_TEXTURE_2D, 0 ); // Unbind Texture
// release the bitmap
bitmap.recycle(); // Release the Bitmap
// setup the array of character texture regions
x = 0; // Initialize X
y = 0; // Initialize Y
for ( int c = 0; c < CHAR_CNT; c++ ) { // FOR Each Character (On Texture)
charRgn[c] = new TextureRegion( textureSize, textureSize, x, y, cellWidth-1, cellHeight-1 ); // Create Region for Character
x += cellWidth; // Move to Next Char (Cell)
if ( x + cellWidth > textureSize ) {
x = 0; // Reset X Position to Start
y += cellHeight; // Move to Next Row (Cell)
}
}
// create full texture region
textureRgn = new TextureRegion( textureSize, textureSize, 0, 0, textureSize, textureSize ); // Create Full Texture Region
// return success
return true; // Return Success
}
//--Begin/End Text Drawing--//
// D: call these methods before/after (respectively all draw() calls using a text instance
// NOTE: color is set on a per-batch basis, and fonts should be 8-bit alpha only!!!
// A: red, green, blue - RGB values for font (default = 1.0)
// alpha - optional alpha value for font (default = 1.0)
// R: [none]
public void begin() {
begin( 1.0f, 1.0f, 1.0f, 1.0f ); // Begin with White Opaque
}
public void begin(float alpha) {
begin( 1.0f, 1.0f, 1.0f, alpha ); // Begin with White (Explicit Alpha)
}
public void begin(float red, float green, float blue, float alpha) {
gl.glColor4f( red, green, blue, alpha ); // Set Color+Alpha
gl.glBindTexture( GL10.GL_TEXTURE_2D, textureId ); // Bind the Texture
batch.beginBatch(); // Begin Batch
}
public void end() {
batch.endBatch(); // End Batch
gl.glBindTexture(GL10.GL_TEXTURE_2D, 0); // Bind the Texture
gl.glColor4f( 1.0f, 1.0f, 1.0f, 1.0f ); // Restore Default Color/Alpha
}
//--Draw Text--//
// D: draw text at the specified x,y position
// A: text - the string to draw
// x, y - the x,y position to draw text at (bottom left of text; including descent)
// R: [none]
public void draw(String text, float x, float y) {
float chrHeight = cellHeight * scaleY; // Calculate Scaled Character Height
float chrWidth = cellWidth * scaleX; // Calculate Scaled Character Width
int len = text.length(); // Get String Length
x += ( chrWidth / 2.0f ) - ( fontPadX * scaleX ); // Adjust Start X
y += ( chrHeight / 2.0f ) - ( fontPadY * scaleY ); // Adjust Start Y
for ( int i = 0; i < len; i++ ) { // FOR Each Character in String
int c = (int)text.charAt( i ) - CHAR_START; // Calculate Character Index (Offset by First Char in Font)
if ( c < 0 || c >= CHAR_CNT ) // IF Character Not In Font
c = CHAR_UNKNOWN; // Set to Unknown Character Index
batch.drawSprite( x, y, chrWidth, chrHeight, charRgn[c] ); // Draw the Character
x += ( charWidths[c] + spaceX ) * scaleX; // Advance X Position by Scaled Character Width
}
}
//--Draw Text Centered--//
// D: draw text CENTERED at the specified x,y position
// A: text - the string to draw
// x, y - the x,y position to draw text at (bottom left of text)
// R: the total width of the text that was drawn
public float drawC(String text, float x, float y) {
float len = getLength( text ); // Get Text Length
draw( text, x - ( len / 2.0f ), y - ( getCharHeight() / 2.0f ) ); // Draw Text Centered
return len; // Return Length
}
public float drawCX(String text, float x, float y) {
float len = getLength( text ); // Get Text Length
draw( text, x - ( len / 2.0f ), y ); // Draw Text Centered (X-Axis Only)
return len; // Return Length
}
public void drawCY(String text, float x, float y) {
draw( text, x, y - ( getCharHeight() / 2.0f ) ); // Draw Text Centered (Y-Axis Only)
}
//--Set Scale--//
// D: set the scaling to use for the font
// A: scale - uniform scale for both x and y axis scaling
// sx, sy - separate x and y axis scaling factors
// R: [none]
public void setScale(float scale) {
scaleX = scaleY = scale; // Set Uniform Scale
}
public void setScale(float sx, float sy) {
scaleX = sx; // Set X Scale
scaleY = sy; // Set Y Scale
}
//--Get Scale--//
// D: get the current scaling used for the font
// A: [none]
// R: the x/y scale currently used for scale
public float getScaleX() {
return scaleX; // Return X Scale
}
public float getScaleY() {
return scaleY; // Return Y Scale
}
//--Set Space--//
// D: set the spacing (unscaled; ie. pixel size) to use for the font
// A: space - space for x axis spacing
// R: [none]
public void setSpace(float space) {
spaceX = space; // Set Space
}
//--Get Space--//
// D: get the current spacing used for the font
// A: [none]
// R: the x/y space currently used for scale
public float getSpace() {
return spaceX; // Return X Space
}
//--Get Length of a String--//
// D: return the length of the specified string if rendered using current settings
// A: text - the string to get length for
// R: the length of the specified string (pixels)
public float getLength(String text) {
float len = 0.0f; // Working Length
int strLen = text.length(); // Get String Length (Characters)
for ( int i = 0; i < strLen; i++ ) { // For Each Character in String (Except Last
int c = (int)text.charAt( i ) - CHAR_START; // Calculate Character Index (Offset by First Char in Font)
len += ( charWidths[c] * scaleX ); // Add Scaled Character Width to Total Length
}
len += ( strLen > 1 ? ( ( strLen - 1 ) * spaceX ) * scaleX : 0 ); // Add Space Length
return len; // Return Total Length
}
//--Get Width/Height of Character--//
// D: return the scaled width/height of a character, or max character width
// NOTE: since all characters are the same height, no character index is required!
// NOTE: excludes spacing!!
// A: chr - the character to get width for
// R: the requested character size (scaled)
public float getCharWidth(char chr) {
int c = chr - CHAR_START; // Calculate Character Index (Offset by First Char in Font)
return ( charWidths[c] * scaleX ); // Return Scaled Character Width
}
public float getCharWidthMax() {
return ( charWidthMax * scaleX ); // Return Scaled Max Character Width
}
public float getCharHeight() {
return ( charHeight * scaleY ); // Return Scaled Character Height
}
//--Get Font Metrics--//
// D: return the specified (scaled) font metric
// A: [none]
// R: the requested font metric (scaled)
public float getAscent() {
return ( fontAscent * scaleY ); // Return Font Ascent
}
public float getDescent() {
return ( fontDescent * scaleY ); // Return Font Descent
}
public float getHeight() {
return ( fontHeight * scaleY ); // Return Font Height (Actual)
}
}
| |
/*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.codeStyle;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ThrowableRunnable;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
/**
* Service for reformatting code fragments, getting names for elements
* according to the user's code style and working with import statements and full-qualified names.
*/
public abstract class CodeStyleManager {
/**
* Returns the code style manager for the specified project.
*
* @param project the project to get the code style manager for.
* @return the code style manager instance.
*/
public static CodeStyleManager getInstance(@NotNull Project project) {
return ServiceManager.getService(project, CodeStyleManager.class);
}
/**
* Returns the code style manager for the project associated with the specified
* PSI manager.
*
* @param manager the PSI manager to get the code style manager for.
* @return the code style manager instance.
*/
public static CodeStyleManager getInstance(@NotNull PsiManager manager) {
return getInstance(manager.getProject());
}
/**
* Gets the project with which the code style manager is associated.
*
* @return the project instance.
*/
@NotNull public abstract Project getProject();
/**
* Reformats the contents of the specified PSI element, enforces braces and splits import
* statements according to the user's code style.
*
* @param element the element to reformat.
* @return the element in the PSI tree after the reformat operation corresponding to the
* original element.
* @throws IncorrectOperationException if the file to reformat is read-only.
* @see #reformatText(com.intellij.psi.PsiFile, int, int)
*/
@NotNull public abstract PsiElement reformat(@NotNull PsiElement element) throws IncorrectOperationException;
/**
* Reformats the contents of the specified PSI element, and optionally enforces braces
* and splits import statements according to the user's code style.
*
* @param element the element to reformat.
* @param canChangeWhiteSpacesOnly if true, only reformatting is performed; if false,
* braces and import statements also can be modified if necessary.
* @return the element in the PSI tree after the reformat operation corresponding to the
* original element.
* @throws IncorrectOperationException if the file to reformat is read-only.
* @see #reformatText(com.intellij.psi.PsiFile, int, int)
*/
@NotNull public abstract PsiElement reformat(@NotNull PsiElement element, boolean canChangeWhiteSpacesOnly) throws IncorrectOperationException;
/**
* Reformats part of the contents of the specified PSI element, enforces braces
* and splits import statements according to the user's code style.
*
* @param element the element to reformat.
* @param startOffset the start offset in the document of the text range to reformat.
* @param endOffset the end offset in the document of the text range to reformat.
* @return the element in the PSI tree after the reformat operation corresponding to the
* original element.
* @throws IncorrectOperationException if the file to reformat is read-only.
* @see #reformatText(com.intellij.psi.PsiFile, int, int)
*/
public abstract PsiElement reformatRange(@NotNull PsiElement element, int startOffset, int endOffset) throws IncorrectOperationException;
/**
* Reformats part of the contents of the specified PSI element, and optionally enforces braces
* and splits import statements according to the user's code style.
*
* @param element the element to reformat.
* @param startOffset the start offset in the document of the text range to reformat.
* @param endOffset the end offset in the document of the text range to reformat.
* @param canChangeWhiteSpacesOnly if true, only reformatting is performed; if false,
* braces and import statements also can be modified if necessary.
* @return the element in the PSI tree after the reformat operation corresponding to the
* original element.
* @throws IncorrectOperationException if the file to reformat is read-only.
* @see #reformatText(com.intellij.psi.PsiFile, int, int)
*/
public abstract PsiElement reformatRange(@NotNull PsiElement element,
int startOffset,
int endOffset,
boolean canChangeWhiteSpacesOnly) throws IncorrectOperationException;
/**
* Delegates to the {@link #reformatText(PsiFile, Collection)} with the single range defined by the given offsets.
*
* @param file the file to reformat.
* @param startOffset the start of the text range to reformat.
* @param endOffset the end of the text range to reformat.
* @throws IncorrectOperationException if the file to reformat is read-only.
*/
public abstract void reformatText(@NotNull PsiFile file, int startOffset, int endOffset) throws IncorrectOperationException;
/**
* Re-formats a ranges of text in the specified file. This method works faster than
* {@link #reformatRange(com.intellij.psi.PsiElement, int, int)} but invalidates the
* PSI structure for the file.
*
* @param file the file to reformat
* @param ranges ranges to process
* @throws IncorrectOperationException if the file to reformat is read-only.
*/
public abstract void reformatText(@NotNull PsiFile file, @NotNull Collection<TextRange> ranges) throws IncorrectOperationException;
/**
* Works as #reformatText, but reformats not only specified ranges, but also some context around to make code look consistent
* @param file
* @param ranges
* @throws IncorrectOperationException
*/
public abstract void reformatTextWithContext(@NotNull PsiFile file, @NotNull Collection<TextRange> ranges) throws IncorrectOperationException;
/**
* Re-formats the specified range of a file, modifying only line indents and leaving
* all other whitespace intact.
*
* @param file the file to reformat.
* @param rangeToAdjust the range of text in which indents should be reformatted.
* @throws IncorrectOperationException if the file is read-only.
*/
public abstract void adjustLineIndent(@NotNull PsiFile file, TextRange rangeToAdjust) throws IncorrectOperationException;
/**
* Reformats the line at the specified offset in the specified file, modifying only the line indent
* and leaving all other whitespace intact.
*
* @param file the file to reformat.
* @param offset the offset the line at which should be reformatted.
* @throws IncorrectOperationException if the file is read-only.
*/
public abstract int adjustLineIndent(@NotNull PsiFile file, int offset) throws IncorrectOperationException;
/**
* Reformats the line at the specified offset in the specified file, modifying only the line indent
* and leaving all other whitespace intact.
*
* @param document the document to reformat.
* @param offset the offset the line at which should be reformatted.
* @throws IncorrectOperationException if the file is read-only.
*/
public abstract int adjustLineIndent(@NotNull Document document, int offset);
/**
* @deprecated this method is not intended to be used by plugins.
*/
public abstract boolean isLineToBeIndented(@NotNull PsiFile file, int offset);
/**
* Calculates the indent that should be used for the specified line in
* the specified file.
*
* @param file the file for which the indent should be calculated.
* @param offset the offset for the line at which the indent should be calculated.
* @return the indent string (containing of tabs and/or whitespaces), or null if it
* was not possible to calculate the indent.
*/
@Nullable
public abstract String getLineIndent(@NotNull PsiFile file, int offset);
/**
* Calculates the indent that should be used for the current line in the specified
* editor.
*
* @param document for which the indent should be calculated.
* @return the indent string (containing of tabs and/or whitespaces), or null if it
* was not possible to calculate the indent.
*/
@Nullable
public abstract String getLineIndent(@NotNull Document document, int offset);
/**
* @deprecated
*/
public abstract Indent getIndent(String text, FileType fileType);
/**
* @deprecated
*/
public abstract String fillIndent(Indent indent, FileType fileType);
/**
* @deprecated
*/
public abstract Indent zeroIndent();
/**
* Reformats line indents inside new element and reformats white spaces around it
* @param block - added element parent
* @param addedElement - new element
* @throws IncorrectOperationException if the operation fails for some reason (for example,
* the file is read-only).
*/
public abstract void reformatNewlyAddedElement(@NotNull final ASTNode block, @NotNull final ASTNode addedElement) throws IncorrectOperationException;
/**
* Formatting may be executed sequentially, i.e. the whole (re)formatting task is split into a number of smaller sub-tasks
* that are executed sequentially. That is done primarily for ability to show progress dialog during formatting (formatting
* is always performed from EDT, hence, the GUI freezes if we perform formatting as a single big iteration).
* <p/>
* However, there are situation when we don't want to use such an approach - for example, IntelliJ IDEA sometimes inserts dummy
* text into file in order to calculate formatting-specific data and removes it after that. We don't want to allow Swing events
* dispatching during that in order to not show that dummy text to the end-user.
* <p/>
* It's possible to configure that (implementation details are insignificant here) and current method serves as a read-only
* facade for obtaining information if 'sequential' processing is allowed at the moment.
*
* @return <code>true</code> if 'sequential' formatting is allowed now; <code>false</code> otherwise
*/
public abstract boolean isSequentialProcessingAllowed();
/**
* Disables automatic formatting of modified PSI elements, runs the specified operation
* and re-enables the formatting. Can be used to improve performance of PSI write
* operations.
*
* @param r the operation to run.
*/
public abstract void performActionWithFormatterDisabled(Runnable r);
public abstract <T extends Throwable> void performActionWithFormatterDisabled(ThrowableRunnable<T> r) throws T;
public abstract <T> T performActionWithFormatterDisabled(Computable<T> r);
}
| |
package org.aspenos.app.aoscontentserver.xml;
import java.io.*;
import java.util.*;
import org.xml.sax.*;
import org.xml.sax.helpers.*;
import org.aspenos.app.aoscontentserver.defs.*;
import org.aspenos.xml.*;
// The ERT (Event-Resource-Template) Def has all the
// info needed to set up the relationships between
// events, resources and templates.
public class SaxERTParser extends GenericSaxParser {
private String _curRegistryGroup;
private WebEventDefs _wedefs;
private WebEventDef _curEvent;
private ERTDefs _ertDefs;
private ERTDef _curERT;
private ResourceDefs _rdefs;
private ResourceDef _curResource;
private TemplateDefs _tdefs;
private TemplateDef _curTemplate;
private RoleDefs _roledefs;
private RoleDef _curRole;
private int _curOrdinal;
public SaxERTParser(File f) throws SAXException, IOException {
super(f);
}
public SaxERTParser(String xml) throws SAXException, IOException {
super(xml);
}
////////////////////////////////////////////////////////////////
public ERTDefs getERTDefs() {
return _ertDefs;
}
////////////////////////////////////////////////////////////////
public void startElement(String namespaceURI, String localName,
String qName, Attributes attrs)
throws SAXException {
HashMap hash = new HashMap();
String id;
if(attrs != null) {
for(int i = 0; i < attrs.getLength(); i++) {
String attrib = attrs.getLocalName(i);
String value = attrs.getValue(i);
hash.put(attrib, value);
}
}
if (localName.equals("ERTDef")) {
_curRegistryGroup = (String)hash.get("registry_group");
_curERT = new ERTDef(hash);
// Create a new webevent defs for this ert's webevents
_wedefs = new WebEventDefs();
} else if (localName.equals("WebEventDef")) {
_curEvent = new WebEventDef(hash);
id = (String)hash.get("id");
if (id != null)
_curEvent.setId(id);
_wedefs.add(_curEvent);
// Reset the ordinal counter
_curOrdinal = 1;
// Reset the parent's defs
_curERT.setProperty("webevent_defs", _wedefs);
// Create a new resource defs for this event's resources
_rdefs = new ResourceDefs();
} else if (localName.equals("ResourceDef")) {
_curResource = new ResourceDef(hash);
id = (String)hash.get("id");
if (id != null)
_curResource.setId(id);
// count the ordinal
_curResource.setProperty("ordinal",
Integer.toString(_curOrdinal));
_curOrdinal++;
_rdefs.add(_curResource);
// Reset the parent's defs
_curEvent.setProperty("resource_defs", _rdefs);
// Create a new template defs for this resource's templates
_tdefs = new TemplateDefs();
} else if (localName.equals("TemplateDef")) {
_curTemplate = new TemplateDef(hash);
id = (String)hash.get("id");
if (id != null)
_curTemplate.setId(id);
_tdefs.add(_curTemplate);
// Reset the parent's defs
_curResource.setProperty("template_defs", _tdefs);
// Create a new role defs for this template's roles
_roledefs = new RoleDefs();
} else if (localName.equals("RoleDef")) {
_curRole = new RoleDef(hash);
id = (String)hash.get("id");
if (id != null)
_curRole.setId(id);
_roledefs.add(_curRole);
// Reset the parent's defs
_curTemplate.setProperty("role_defs", _roledefs);
}
}
public void endElement(String namespaceURI, String localName,
String qName) throws SAXException {
if (localName.equals("ERTDef")) {
if (_ertDefs == null)
_ertDefs = new ERTDefs();
_ertDefs.add(_curERT);
}
}
////////////////////////////////////////////////////////////////
/**
 * Command-line driver: reads the XML file named by {@code args[0]}, parses it
 * with {@code SaxERTParser}, and prints the resulting ERT defs as XML.
 * Exits with status 1 on bad usage, 0 otherwise.
 *
 * @param args single element: path of the XML file to parse
 * @throws IOException declared for compatibility; I/O errors are in practice
 *         caught by the blanket {@code Throwable} handler below
 */
public static void main(String[] args)
    throws IOException {
    if (args.length != 1) {
        System.err.println("Usage: <XML filename>");
        System.exit(1);
    }
    try {
        // THIS IS CRUCIAL!!
        // You must set the XML parser class here.
        System.setProperty("sax.parser.class",
                           "org.apache.xerces.parsers.SAXParser");
        System.setProperty("sax.parser.validating",
                           "false");
        // Run it with a File ///////////////////////////
        //SaxERTParser ertParser = new SaxERTParser(new File(args[0]));
        /////////////////////////////////////////////////
        // Run it with a plain xml String ///////////////
        // Slurp the whole file into memory. try-with-resources guarantees the
        // reader is closed even when readLine() throws (the previous version
        // leaked the stream on any I/O error); StringBuilder avoids the
        // needless synchronization of StringBuffer for this local buffer.
        StringBuilder in = new StringBuilder();
        try (BufferedReader is = new BufferedReader(
                 new FileReader(args[0]))) {
            String curLine;
            while ((curLine = is.readLine()) != null)
                in.append(curLine).append('\n');
        }
        System.out.println("parsing this: " + in.toString());
        SaxERTParser ertParser = new SaxERTParser(in.toString());
        /////////////////////////////////////////////////
        String xml = ertParser.getERTDefs().toXML();
        System.out.println("Ready to send ERTs to the registry:\n\n" + xml);
    } catch(SAXParseException err) {
        // Parse failure: report location and reason, fall through to exit(0).
        System.out.println("** Parsing error"
            + ", line " + err.getLineNumber()
            + ", uri " + err.getSystemId());
        System.out.println("   " + err.getMessage());
    } catch(SAXException e) {
        // Unwrap the embedded cause when the SAX layer wrapped one.
        Exception x = e;
        if(e.getException() != null)
            x = e.getException();
        x.printStackTrace();
    } catch(Throwable t) {
        t.printStackTrace();
    }
    System.exit(0);
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2018_04_01;
import com.microsoft.azure.arm.model.HasInner;
import com.microsoft.azure.management.network.v2018_04_01.implementation.VpnConnectionInner;
import com.microsoft.azure.arm.model.Indexable;
import com.microsoft.azure.arm.model.Refreshable;
import com.microsoft.azure.arm.model.Updatable;
import com.microsoft.azure.arm.model.Appliable;
import com.microsoft.azure.arm.model.Creatable;
import com.microsoft.azure.arm.resources.models.HasManager;
import com.microsoft.azure.management.network.v2018_04_01.implementation.NetworkManager;
import java.util.Map;
import com.microsoft.azure.SubResource;
import java.util.List;
/**
 * Type representing VpnConnection.
 * <p>
 * Exposes read-only accessors over the inner {@link VpnConnectionInner} model,
 * plus the fluent definition ({@link Definition}) and update ({@link Update})
 * stage interfaces. Code is AutoRest-generated; do not edit by hand.
 */
public interface VpnConnection extends HasInner<VpnConnectionInner>, Indexable, Refreshable<VpnConnection>, Updatable<VpnConnection.Update>, HasManager<NetworkManager> {
    /**
     * @return the connectionBandwidth value.
     */
    Integer connectionBandwidth();

    /**
     * @return the connectionStatus value.
     */
    VpnConnectionStatus connectionStatus();

    /**
     * @return the egressBytesTransferred value.
     */
    Long egressBytesTransferred();

    /**
     * @return the enableBgp value.
     */
    Boolean enableBgp();

    /**
     * @return the etag value.
     */
    String etag();

    /**
     * @return the id value.
     */
    String id();

    /**
     * @return the ingressBytesTransferred value.
     */
    Long ingressBytesTransferred();

    /**
     * @return the ipsecPolicies value.
     */
    List<IpsecPolicy> ipsecPolicies();

    /**
     * @return the location value.
     */
    String location();

    /**
     * @return the name value.
     */
    String name();

    /**
     * @return the provisioningState value.
     */
    ProvisioningState provisioningState();

    /**
     * @return the remoteVpnSite value.
     */
    SubResource remoteVpnSite();

    /**
     * @return the routingWeight value.
     */
    Integer routingWeight();

    /**
     * @return the sharedKey value.
     */
    String sharedKey();

    /**
     * @return the tags value.
     */
    Map<String, String> tags();

    /**
     * @return the type value.
     */
    String type();

    /**
     * The entirety of the VpnConnection definition: blank stage, required
     * parent-gateway stage, then the creatable stage.
     */
    interface Definition extends DefinitionStages.Blank, DefinitionStages.WithVpnGateway, DefinitionStages.WithCreate {
    }

    /**
     * Grouping of VpnConnection definition stages.
     */
    interface DefinitionStages {
        /**
         * The first stage of a VpnConnection definition.
         */
        interface Blank extends WithVpnGateway {
        }

        /**
         * The stage of the vpnconnection definition allowing to specify VpnGateway.
         */
        interface WithVpnGateway {
           /**
            * Specifies resourceGroupName, gatewayName.
            * @param resourceGroupName The resource group name of the VpnGateway
            * @param gatewayName The name of the gateway
            * @return the next definition stage
            */
            WithCreate withExistingVpnGateway(String resourceGroupName, String gatewayName);
        }

        /**
         * The stage of the vpnconnection definition allowing to specify ConnectionStatus.
         */
        interface WithConnectionStatus {
            /**
             * Specifies connectionStatus.
             * @param connectionStatus The connection status. Possible values include: 'Unknown', 'Connecting', 'Connected', 'NotConnected'
             * @return the next definition stage
             */
            WithCreate withConnectionStatus(VpnConnectionStatus connectionStatus);
        }

        /**
         * The stage of the vpnconnection definition allowing to specify EnableBgp.
         */
        interface WithEnableBgp {
            /**
             * Specifies enableBgp.
             * @param enableBgp EnableBgp flag
             * @return the next definition stage
             */
            WithCreate withEnableBgp(Boolean enableBgp);
        }

        /**
         * The stage of the vpnconnection definition allowing to specify Id.
         */
        interface WithId {
            /**
             * Specifies id.
             * @param id Resource ID
             * @return the next definition stage
             */
            WithCreate withId(String id);
        }

        /**
         * The stage of the vpnconnection definition allowing to specify IpsecPolicies.
         */
        interface WithIpsecPolicies {
            /**
             * Specifies ipsecPolicies.
             * @param ipsecPolicies The IPSec Policies to be considered by this connection
             * @return the next definition stage
             */
            WithCreate withIpsecPolicies(List<IpsecPolicy> ipsecPolicies);
        }

        /**
         * The stage of the vpnconnection definition allowing to specify Location.
         */
        interface WithLocation {
            /**
             * Specifies location.
             * @param location Resource location
             * @return the next definition stage
             */
            WithCreate withLocation(String location);
        }

        /**
         * The stage of the vpnconnection definition allowing to specify ProvisioningState.
         */
        interface WithProvisioningState {
            /**
             * Specifies provisioningState.
             * @param provisioningState The provisioning state of the resource. Possible values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'
             * @return the next definition stage
             */
            WithCreate withProvisioningState(ProvisioningState provisioningState);
        }

        /**
         * The stage of the vpnconnection definition allowing to specify RemoteVpnSite.
         */
        interface WithRemoteVpnSite {
            /**
             * Specifies remoteVpnSite.
             * @param remoteVpnSite Id of the connected vpn site
             * @return the next definition stage
             */
            WithCreate withRemoteVpnSite(SubResource remoteVpnSite);
        }

        /**
         * The stage of the vpnconnection definition allowing to specify RoutingWeight.
         */
        interface WithRoutingWeight {
            /**
             * Specifies routingWeight.
             * @param routingWeight routing weight for vpn connection
             * @return the next definition stage
             */
            WithCreate withRoutingWeight(Integer routingWeight);
        }

        /**
         * The stage of the vpnconnection definition allowing to specify SharedKey.
         */
        interface WithSharedKey {
            /**
             * Specifies sharedKey.
             * @param sharedKey SharedKey for the vpn connection
             * @return the next definition stage
             */
            WithCreate withSharedKey(String sharedKey);
        }

        /**
         * The stage of the vpnconnection definition allowing to specify Tags.
         */
        interface WithTags {
            /**
             * Specifies tags.
             * @param tags Resource tags
             * @return the next definition stage
             */
            WithCreate withTags(Map<String, String> tags);
        }

        /**
         * The stage of the definition which contains all the minimum required inputs for
         * the resource to be created (via {@link WithCreate#create()}), but also allows
         * for any other optional settings to be specified.
         */
        interface WithCreate extends Creatable<VpnConnection>, DefinitionStages.WithConnectionStatus, DefinitionStages.WithEnableBgp, DefinitionStages.WithId, DefinitionStages.WithIpsecPolicies, DefinitionStages.WithLocation, DefinitionStages.WithProvisioningState, DefinitionStages.WithRemoteVpnSite, DefinitionStages.WithRoutingWeight, DefinitionStages.WithSharedKey, DefinitionStages.WithTags {
        }
    }

    /**
     * The template for a VpnConnection update operation, containing all the settings that can be modified.
     */
    interface Update extends Appliable<VpnConnection>, UpdateStages.WithConnectionStatus, UpdateStages.WithEnableBgp, UpdateStages.WithId, UpdateStages.WithIpsecPolicies, UpdateStages.WithLocation, UpdateStages.WithProvisioningState, UpdateStages.WithRemoteVpnSite, UpdateStages.WithRoutingWeight, UpdateStages.WithSharedKey, UpdateStages.WithTags {
    }

    /**
     * Grouping of VpnConnection update stages.
     */
    interface UpdateStages {
        /**
         * The stage of the vpnconnection update allowing to specify ConnectionStatus.
         */
        interface WithConnectionStatus {
            /**
             * Specifies connectionStatus.
             * @param connectionStatus The connection status. Possible values include: 'Unknown', 'Connecting', 'Connected', 'NotConnected'
             * @return the next update stage
             */
            Update withConnectionStatus(VpnConnectionStatus connectionStatus);
        }

        /**
         * The stage of the vpnconnection update allowing to specify EnableBgp.
         */
        interface WithEnableBgp {
            /**
             * Specifies enableBgp.
             * @param enableBgp EnableBgp flag
             * @return the next update stage
             */
            Update withEnableBgp(Boolean enableBgp);
        }

        /**
         * The stage of the vpnconnection update allowing to specify Id.
         */
        interface WithId {
            /**
             * Specifies id.
             * @param id Resource ID
             * @return the next update stage
             */
            Update withId(String id);
        }

        /**
         * The stage of the vpnconnection update allowing to specify IpsecPolicies.
         */
        interface WithIpsecPolicies {
            /**
             * Specifies ipsecPolicies.
             * @param ipsecPolicies The IPSec Policies to be considered by this connection
             * @return the next update stage
             */
            Update withIpsecPolicies(List<IpsecPolicy> ipsecPolicies);
        }

        /**
         * The stage of the vpnconnection update allowing to specify Location.
         */
        interface WithLocation {
            /**
             * Specifies location.
             * @param location Resource location
             * @return the next update stage
             */
            Update withLocation(String location);
        }

        /**
         * The stage of the vpnconnection update allowing to specify ProvisioningState.
         */
        interface WithProvisioningState {
            /**
             * Specifies provisioningState.
             * @param provisioningState The provisioning state of the resource. Possible values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'
             * @return the next update stage
             */
            Update withProvisioningState(ProvisioningState provisioningState);
        }

        /**
         * The stage of the vpnconnection update allowing to specify RemoteVpnSite.
         */
        interface WithRemoteVpnSite {
            /**
             * Specifies remoteVpnSite.
             * @param remoteVpnSite Id of the connected vpn site
             * @return the next update stage
             */
            Update withRemoteVpnSite(SubResource remoteVpnSite);
        }

        /**
         * The stage of the vpnconnection update allowing to specify RoutingWeight.
         */
        interface WithRoutingWeight {
            /**
             * Specifies routingWeight.
             * @param routingWeight routing weight for vpn connection
             * @return the next update stage
             */
            Update withRoutingWeight(Integer routingWeight);
        }

        /**
         * The stage of the vpnconnection update allowing to specify SharedKey.
         */
        interface WithSharedKey {
            /**
             * Specifies sharedKey.
             * @param sharedKey SharedKey for the vpn connection
             * @return the next update stage
             */
            Update withSharedKey(String sharedKey);
        }

        /**
         * The stage of the vpnconnection update allowing to specify Tags.
         */
        interface WithTags {
            /**
             * Specifies tags.
             * @param tags Resource tags
             * @return the next update stage
             */
            Update withTags(Map<String, String> tags);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.platform.compute;
import java.util.List;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.ignite.IgniteException;
import org.apache.ignite.compute.ComputeJobResult;
import org.apache.ignite.compute.ComputeJobResultPolicy;
import org.apache.ignite.compute.ComputeTask;
import org.apache.ignite.internal.portable.PortableRawWriterEx;
import org.apache.ignite.internal.processors.platform.PlatformContext;
import org.apache.ignite.internal.processors.platform.PlatformNativeException;
import org.apache.ignite.internal.processors.platform.memory.PlatformMemory;
import org.apache.ignite.internal.processors.platform.memory.PlatformOutputStream;
import org.apache.ignite.internal.processors.platform.utils.PlatformUtils;
import org.apache.ignite.internal.util.typedef.X;
import org.jetbrains.annotations.Nullable;
/**
 * Base class for all interop tasks.
 * <p>
 * Concurrency protocol (visible in this class): {@link #result},
 * {@link #reduce} and {@link #onJobLock}/{@link #onJobUnlock} acquire the
 * read lock, so they may run concurrently with each other; {@link #onDone}
 * acquires the write lock and sets {@link #done}, thereby excluding any
 * later callback from dereferencing the native task pointer after the task
 * has completed on the platform side.
 */
public abstract class PlatformAbstractTask implements ComputeTask<Object, Void> {
    /** Platform context. */
    protected final PlatformContext ctx;

    /** Pointer to the task in the native platform. */
    protected final long taskPtr;

    /** Lock for safe access to native pointers. */
    protected final ReadWriteLock lock = new ReentrantReadWriteLock();

    /** Done flag. Written under the write lock, read under the read lock. */
    protected boolean done;

    /**
     * Constructor.
     *
     * @param ctx Platform context.
     * @param taskPtr Task pointer.
     */
    protected PlatformAbstractTask(PlatformContext ctx, long taskPtr) {
        this.ctx = ctx;
        this.taskPtr = taskPtr;
    }

    /** {@inheritDoc} */
    @SuppressWarnings({"ThrowableResultOfMethodCallIgnored", "unchecked"})
    @Override public ComputeJobResultPolicy result(ComputeJobResult res, List<ComputeJobResult> rcvd) {
        assert rcvd.isEmpty() : "Should not cache result in Java for interop task";

        int plc;

        // Read lock: result callbacks may run concurrently; only onDone's
        // write lock excludes them.
        lock.readLock().lock();

        try {
            assert !done;

            PlatformAbstractJob job = res.getJob();

            assert job.pointer() != 0;

            Object res0bj = res.getData();

            if (res0bj == PlatformAbstractJob.LOC_JOB_RES)
                // Processing local job execution result.
                plc = ctx.gateway().computeTaskJobResult(taskPtr, job.pointer(), 0);
            else {
                // Processing remote job execution result or exception.
                // Marshal node id, cancel flag and result/exception into
                // platform memory, then hand the memory pointer to the gateway.
                try (PlatformMemory mem = ctx.memory().allocate()) {
                    PlatformOutputStream out = mem.output();

                    PortableRawWriterEx writer = ctx.writer(out);

                    writer.writeUuid(res.getNode().id());
                    writer.writeBoolean(res.isCancelled());

                    IgniteException err = res.getException();

                    PlatformUtils.writeInvocationResult(writer, res0bj, err);

                    out.synchronize();

                    plc = ctx.gateway().computeTaskJobResult(taskPtr, job.pointer(), mem.pointer());
                }
            }

            // The native side returns the policy as an int ordinal.
            ComputeJobResultPolicy plc0 = ComputeJobResultPolicy.fromOrdinal((byte) plc);

            assert plc0 != null : plc;

            return plc0;
        }
        finally {
            lock.readLock().unlock();
        }
    }

    /** {@inheritDoc} */
    @Nullable @Override public Void reduce(List<ComputeJobResult> results) {
        assert results.isEmpty() : "Should not cache result in java for interop task";

        // Read lock guards taskPtr against concurrent completion (onDone).
        lock.readLock().lock();

        try {
            assert !done;

            ctx.gateway().computeTaskReduce(taskPtr);
        }
        finally {
            lock.readLock().unlock();
        }

        return null;
    }

    /**
     * Callback invoked when task future is completed and all resources could be safely cleaned up.
     *
     * @param e If failed.
     */
    @SuppressWarnings("ThrowableResultOfMethodCallIgnored")
    public void onDone(Exception e) {
        // Write lock: completion must be exclusive of all result/reduce/job
        // callbacks that dereference taskPtr.
        lock.writeLock().lock();

        try {
            assert !done;

            if (e == null)
                // Normal completion.
                ctx.gateway().computeTaskComplete(taskPtr, 0);
            else {
                // Failure: if the cause chain holds a native exception, pass
                // its cause object through; otherwise marshal class name and
                // message of the Java exception.
                PlatformNativeException e0 = X.cause(e, PlatformNativeException.class);

                try (PlatformMemory mem = ctx.memory().allocate()) {
                    PlatformOutputStream out = mem.output();

                    PortableRawWriterEx writer = ctx.writer(out);

                    if (e0 == null) {
                        writer.writeBoolean(false);
                        writer.writeString(e.getClass().getName());
                        writer.writeString(e.getMessage());
                    }
                    else {
                        writer.writeBoolean(true);
                        writer.writeObject(e0.cause());
                    }

                    out.synchronize();

                    ctx.gateway().computeTaskComplete(taskPtr, mem.pointer());
                }
            }
        }
        finally {
            // Done flag is set irrespective of any exceptions.
            done = true;

            lock.writeLock().unlock();
        }
    }

    /**
     * Callback invoked by job when it wants to lock the task.
     * <p>
     * On success the read lock is intentionally left held; the matching
     * release happens in {@link #onJobUnlock()}.
     *
     * @return {@code} True if task is not completed yet, {@code false} otherwise.
     */
    @SuppressWarnings("LockAcquiredButNotSafelyReleased")
    public boolean onJobLock() {
        lock.readLock().lock();

        if (done) {
            // Task already completed: release immediately and refuse the lock.
            lock.readLock().unlock();

            return false;
        }
        else
            return true;
    }

    /**
     * Callback invoked by job when task can be unlocked.
     * Releases the read lock taken by a successful {@link #onJobLock()}.
     */
    public void onJobUnlock() {
        assert !done;

        lock.readLock().unlock();
    }
}
| |
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.jbcsrc.api;
import static com.google.common.truth.Truth.assertThat;
import static com.google.template.soy.data.UnsafeSanitizedContentOrdainer.ordainAsSafe;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.SettableFuture;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.template.soy.SoyFileSet;
import com.google.template.soy.SoyModule;
import com.google.template.soy.data.SanitizedContent;
import com.google.template.soy.data.SanitizedContent.ContentKind;
import com.google.template.soy.data.SanitizedContents;
import com.google.template.soy.data.SoyValueProvider;
import com.google.template.soy.data.restricted.IntegerData;
import com.google.template.soy.jbcsrc.api.SoySauce.Continuation;
import com.google.template.soy.jbcsrc.api.SoySauce.WriteContinuation;
import com.google.template.soy.jbcsrc.runtime.DetachableSoyValueProvider;
import java.io.IOException;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
 * Tests basic soy sauce interaction: strict/non-strict content-kind
 * enforcement, render detaching on unresolved futures, and exception
 * stack-trace rewriting. Templates come from the strict.soy and
 * non_strict.soy resources compiled in {@link #setUp}.
 */
@SuppressWarnings("CheckReturnValue")
@RunWith(JUnit4.class)
public class SoySauceTest {
  private SoySauce sauce;

  @Before
  public void setUp() throws Exception {
    // Compile both test template files into a single SoySauce instance.
    Injector injector = Guice.createInjector(new SoyModule());
    SoyFileSet.Builder builder = injector.getInstance(SoyFileSet.Builder.class);
    builder.add(SoySauceTest.class.getResource("strict.soy"));
    builder.add(SoySauceTest.class.getResource("non_strict.soy"));
    sauce = builder.build().compileTemplates();
  }

  @Test
  public void testStrictContentKindHandling_html() {
    // Default expected kind is HTML; TEXT is always accepted; any other
    // mismatching kind throws IllegalStateException.
    assertEquals("Hello world", sauce.renderTemplate("strict_test.helloHtml").render().get());
    assertEquals(
        ordainAsSafe("Hello world", ContentKind.HTML),
        sauce.renderTemplate("strict_test.helloHtml").renderStrict().get());
    assertEquals(
        SanitizedContents.unsanitizedText("Hello world"),
        sauce
            .renderTemplate("strict_test.helloHtml")
            .setExpectedContentKind(ContentKind.TEXT)
            .renderStrict()
            .get());
    try {
      sauce
          .renderTemplate("strict_test.helloHtml")
          .setExpectedContentKind(ContentKind.JS)
          .renderStrict()
          .get();
      fail();
    } catch (IllegalStateException e) {
      assertThat(e.getMessage())
          .isEqualTo(
              "Expected template to be kind=\"js\" but was kind=\"html\": strict_test.helloHtml");
    }
  }

  @Test
  public void testStrictContentKindHandling_js() {
    // A JS-kind template must be requested with an explicit JS (or TEXT)
    // expectation; the HTML default fails for both render() and renderStrict().
    try {
      sauce.renderTemplate("strict_test.helloJs").render();
      fail();
    } catch (IllegalStateException e) {
      assertThat(e.getMessage())
          .isEqualTo(
              "Expected template to be kind=\"html\" but was kind=\"js\": strict_test.helloJs");
    }
    try {
      sauce.renderTemplate("strict_test.helloJs").renderStrict();
      fail();
    } catch (IllegalStateException e) {
      assertThat(e.getMessage())
          .isEqualTo(
              "Expected template to be kind=\"html\" but was kind=\"js\": strict_test.helloJs");
    }
    assertEquals(
        ordainAsSafe("'Hello world'", ContentKind.JS),
        sauce
            .renderTemplate("strict_test.helloJs")
            .setExpectedContentKind(ContentKind.JS)
            .renderStrict()
            .get());
    assertEquals(
        ordainAsSafe("'Hello world'", ContentKind.TEXT),
        sauce
            .renderTemplate("strict_test.helloJs")
            .setExpectedContentKind(ContentKind.TEXT) // TEXT always works
            .renderStrict()
            .get());
    assertEquals(
        "'Hello world'",
        sauce
            .renderTemplate("strict_test.helloJs")
            .setExpectedContentKind(ContentKind.TEXT)
            .render()
            .get());
  }

  @Test
  public void testNonStrictContentHandling() {
    // Non-strict templates render as plain strings; renderStrict() and any
    // non-TEXT expected kind are rejected.
    assertEquals("Hello world", sauce.renderTemplate("nonstrict_test.hello").render().get());
    assertEquals(
        "Hello world",
        sauce
            .renderTemplate("nonstrict_test.hello")
            .setExpectedContentKind(ContentKind.TEXT) // text is always fine
            .render()
            .get());
    assertEquals(
        SanitizedContents.unsanitizedText("Hello world"),
        sauce
            .renderTemplate("nonstrict_test.hello")
            .setExpectedContentKind(ContentKind.TEXT) // text is always fine, even with renderStrict
            .renderStrict()
            .get());
    try {
      sauce.renderTemplate("nonstrict_test.hello").renderStrict();
      fail();
    } catch (IllegalStateException e) {
      assertThat(e.getMessage()).isEqualTo("Cannot render a non strict template as 'html'");
    }
    try {
      sauce.renderTemplate("nonstrict_test.hello").setExpectedContentKind(ContentKind.JS).render();
      fail();
    } catch (IllegalStateException e) {
      assertThat(e.getMessage()).isEqualTo("Cannot render a non strict template as 'js'");
    }
  }

  @Test
  public void testDetaching_string() {
    // Rendering detaches on the unresolved future and resumes via
    // continueRender() once the future is set.
    SoySauce.Renderer tmpl = sauce.renderTemplate("strict_test.withParam");
    SettableFuture<String> p = SettableFuture.create();
    Continuation<String> stringContinuation = tmpl.setData(ImmutableMap.of("p", p)).render();
    assertEquals(RenderResult.Type.DETACH, stringContinuation.result().type());
    assertEquals(p, stringContinuation.result().future());
    p.set("tigger");
    stringContinuation = stringContinuation.continueRender();
    assertEquals(RenderResult.done(), stringContinuation.result());
    assertEquals("Hello, tigger", stringContinuation.get());
  }

  @Test
  public void testDetaching_strict() {
    // Same detach/resume cycle as above, but through renderStrict().
    SoySauce.Renderer tmpl = sauce.renderTemplate("strict_test.withParam");
    SettableFuture<String> p = SettableFuture.create();
    Continuation<SanitizedContent> strictContinuation =
        tmpl.setData(ImmutableMap.of("p", p)).renderStrict();
    assertEquals(RenderResult.Type.DETACH, strictContinuation.result().type());
    assertEquals(p, strictContinuation.result().future());
    p.set("pooh bear");
    strictContinuation = strictContinuation.continueRender();
    assertEquals(RenderResult.done(), strictContinuation.result());
    assertEquals("Hello, pooh bear", strictContinuation.get().getContent());
  }

  @Test
  public void testDetaching_appendable() throws IOException {
    // Appendable rendering can pause twice: first on the output back-pressure
    // signal (LIMITED), then on the unresolved future (DETACH).
    SoySauce.Renderer tmpl = sauce.renderTemplate("strict_test.withParam");
    TestAppendable builder = new TestAppendable();
    builder.softLimitReached = true;
    SettableFuture<String> p = SettableFuture.create();
    WriteContinuation continuation = tmpl.setData(ImmutableMap.of("p", p)).render(builder);
    assertEquals(RenderResult.Type.LIMITED, continuation.result().type());
    assertEquals("Hello, ", builder.toString());
    builder.softLimitReached = false;
    continuation = continuation.continueRender();
    assertEquals(RenderResult.Type.DETACH, continuation.result().type());
    assertEquals(p, continuation.result().future());
    p.set("piglet");
    continuation = continuation.continueRender();
    assertEquals(RenderResult.done(), continuation.result());
    assertEquals("Hello, piglet", builder.toString());
  }

  @Test
  public void testExceptionRewriting() {
    // Exceptions thrown during rendering get their stack traces rewritten to
    // point at template file/line locations instead of generated bytecode.
    SoySauce.Renderer tmpl = sauce.renderTemplate("strict_test.callsItself");
    SoyValueProvider intProvider =
        new DetachableSoyValueProvider() {
          @Override
          protected RenderResult doResolve() {
            resolvedValue = IntegerData.ZERO;
            return RenderResult.done();
          }
        };
    try {
      tmpl.setData(ImmutableMap.of("depth", 10, "p", intProvider)).render();
      fail();
    } catch (ClassCastException cce) {
      // we get an CCE because we passed an int but it expected a string
      StackTraceElement[] stackTrace = cce.getStackTrace();
      assertThat(stackTrace[0].toString())
          .isEqualTo("strict_test.callsItself.render(strict.soy:32)");
      for (int i = 1; i < 11; i++) {
        assertThat(stackTrace[i].toString())
            .isEqualTo("strict_test.callsItself.render(strict.soy:34)");
      }
    }
  }

  /**
   * Minimal AdvisingAppendable backed by a StringBuilder whose soft-limit
   * signal is directly controllable by the test.
   */
  private static final class TestAppendable implements AdvisingAppendable {
    private final StringBuilder delegate = new StringBuilder();
    boolean softLimitReached;

    @Override
    public TestAppendable append(CharSequence s) {
      delegate.append(s);
      return this;
    }

    @Override
    public TestAppendable append(CharSequence s, int start, int end) {
      delegate.append(s, start, end);
      return this;
    }

    @Override
    public TestAppendable append(char c) {
      delegate.append(c);
      return this;
    }

    @Override
    public boolean softLimitReached() {
      return softLimitReached;
    }

    @Override
    public String toString() {
      return delegate.toString();
    }
  }
}
| |
/*
* libjingle
* Copyright 2015 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.appspot.apprtc;
import org.appspot.apprtc.AppRTCClient.RoomConnectionParameters;
import org.appspot.apprtc.AppRTCClient.SignalingParameters;
import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters;
import org.appspot.apprtc.util.LooperExecutor;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.FragmentTransaction;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.Window;
import android.view.WindowManager.LayoutParams;
import android.widget.Toast;
import org.webrtc.IceCandidate;
import org.webrtc.SessionDescription;
import org.webrtc.StatsReport;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoRendererGui;
import org.webrtc.VideoRendererGui.ScalingType;
/**
* Activity for peer connection call setup, call waiting
* and call view.
*/
public class CallActivity extends Activity
implements AppRTCClient.SignalingEvents,
PeerConnectionClient.PeerConnectionEvents,
CallFragment.OnCallEvents {
  // Intent extra keys; the values are read back in onCreate() to configure
  // the peer connection and room parameters.
  public static final String EXTRA_ROOMID =
      "org.appspot.apprtc.ROOMID";
  public static final String EXTRA_LOOPBACK =
      "org.appspot.apprtc.LOOPBACK";
  public static final String EXTRA_VIDEO_CALL =
      "org.appspot.apprtc.VIDEO_CALL";
  public static final String EXTRA_VIDEO_WIDTH =
      "org.appspot.apprtc.VIDEO_WIDTH";
  public static final String EXTRA_VIDEO_HEIGHT =
      "org.appspot.apprtc.VIDEO_HEIGHT";
  public static final String EXTRA_VIDEO_FPS =
      "org.appspot.apprtc.VIDEO_FPS";
  public static final String EXTRA_VIDEO_BITRATE =
      "org.appspot.apprtc.VIDEO_BITRATE";
  public static final String EXTRA_VIDEOCODEC =
      "org.appspot.apprtc.VIDEOCODEC";
  public static final String EXTRA_HWCODEC_ENABLED =
      "org.appspot.apprtc.HWCODEC";
  public static final String EXTRA_AUDIO_BITRATE =
      "org.appspot.apprtc.AUDIO_BITRATE";
  public static final String EXTRA_AUDIOCODEC =
      "org.appspot.apprtc.AUDIOCODEC";
  public static final String EXTRA_CPUOVERUSE_DETECTION =
      "org.appspot.apprtc.CPUOVERUSE_DETECTION";
  public static final String EXTRA_DISPLAY_HUD =
      "org.appspot.apprtc.DISPLAY_HUD";
  public static final String EXTRA_CMDLINE =
      "org.appspot.apprtc.CMDLINE";
  public static final String EXTRA_RUNTIME =
      "org.appspot.apprtc.RUNTIME";

  private static final String TAG = "CallRTCClient";

  // Peer connection statistics callback period in ms.
  private static final int STAT_CALLBACK_PERIOD = 1000;
  // Local preview screen position before call is connected.
  // (Values are percentages of the view, per VideoRendererGui.create usage.)
  private static final int LOCAL_X_CONNECTING = 0;
  private static final int LOCAL_Y_CONNECTING = 0;
  private static final int LOCAL_WIDTH_CONNECTING = 100;
  private static final int LOCAL_HEIGHT_CONNECTING = 100;
  // Local preview screen position after call is connected.
  private static final int LOCAL_X_CONNECTED = 72;
  private static final int LOCAL_Y_CONNECTED = 72;
  private static final int LOCAL_WIDTH_CONNECTED = 25;
  private static final int LOCAL_HEIGHT_CONNECTED = 25;
  // Remote video screen position
  private static final int REMOTE_X = 0;
  private static final int REMOTE_Y = 0;
  private static final int REMOTE_WIDTH = 100;
  private static final int REMOTE_HEIGHT = 100;

  // Call machinery: signaling client, peer connection and audio routing.
  private PeerConnectionClient peerConnectionClient = null;
  private AppRTCClient appRtcClient;
  private SignalingParameters signalingParameters;
  private AppRTCAudioManager audioManager = null;
  private VideoRenderer.Callbacks localRender;
  private VideoRenderer.Callbacks remoteRender;
  private ScalingType scalingType;
  private Toast logToast;
  // Command-line mode: auto-disconnect after runTimeMs (see onCreate).
  private boolean commandLineRun;
  private int runTimeMs;
  private boolean activityRunning;
  private RoomConnectionParameters roomConnectionParameters;
  private PeerConnectionParameters peerConnectionParameters;
  // Call state flags.
  private boolean iceConnected;
  private boolean isError;
  private boolean callControlFragmentVisible = true;
  private long callStartedTimeMs = 0;

  // Controls
  private GLSurfaceView videoView;
  CallFragment callFragment;
  HudFragment hudFragment;
  //private CallStatsAPI callstats = new CallStatsAPI();
  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Route uncaught exceptions from any thread through a reporting handler.
    Thread.setDefaultUncaughtExceptionHandler(
        new UnhandledExceptionHandler(this));

    // Set window styles for fullscreen-window size. Needs to be done before
    // adding content.
    requestWindowFeature(Window.FEATURE_NO_TITLE);
    getWindow().addFlags(
        LayoutParams.FLAG_FULLSCREEN
        | LayoutParams.FLAG_KEEP_SCREEN_ON
        | LayoutParams.FLAG_DISMISS_KEYGUARD
        | LayoutParams.FLAG_SHOW_WHEN_LOCKED
        | LayoutParams.FLAG_TURN_SCREEN_ON);
    getWindow().getDecorView().setSystemUiVisibility(
        View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
        | View.SYSTEM_UI_FLAG_FULLSCREEN
        | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
    setContentView(R.layout.activity_call);

    iceConnected = false;
    signalingParameters = null;
    scalingType = ScalingType.SCALE_ASPECT_FILL;

    // Create UI controls.
    videoView = (GLSurfaceView) findViewById(R.id.glview_call);
    callFragment = new CallFragment();
    hudFragment = new HudFragment();

    // Create video renderers. createPeerConnectionFactory() is deferred to
    // the callback passed to setView (NOTE(review): presumably invoked once
    // the GL surface is ready — confirm against VideoRendererGui).
    VideoRendererGui.setView(videoView, new Runnable() {
      @Override
      public void run() {
        createPeerConnectionFactory();
      }
    });
    remoteRender = VideoRendererGui.create(
        REMOTE_X, REMOTE_Y,
        REMOTE_WIDTH, REMOTE_HEIGHT, scalingType, false);
    localRender = VideoRendererGui.create(
        LOCAL_X_CONNECTING, LOCAL_Y_CONNECTING,
        LOCAL_WIDTH_CONNECTING, LOCAL_HEIGHT_CONNECTING, scalingType, true);

    // Show/hide call control fragment on view click.
    videoView.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View view) {
        toggleCallControlFragmentVisibility();
      }
    });

    // Get Intent parameters. Both the room URI (intent data) and the room id
    // extra are mandatory; otherwise cancel and finish early.
    final Intent intent = getIntent();
    Uri roomUri = intent.getData();
    if (roomUri == null) {
      logAndToast(getString(R.string.missing_url));
      Log.e(TAG, "Didn't get any URL in intent!");
      setResult(RESULT_CANCELED);
      finish();
      return;
    }
    String roomId = intent.getStringExtra(EXTRA_ROOMID);
    if (roomId == null || roomId.length() == 0) {
      logAndToast(getString(R.string.missing_url));
      Log.e(TAG, "Incorrect room ID in intent!");
      setResult(RESULT_CANCELED);
      finish();
      return;
    }
    boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
    peerConnectionParameters = new PeerConnectionParameters(
        intent.getBooleanExtra(EXTRA_VIDEO_CALL, true),
        loopback,
        intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
        intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0),
        intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
        intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0),
        intent.getStringExtra(EXTRA_VIDEOCODEC),
        intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
        intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0),
        intent.getStringExtra(EXTRA_AUDIOCODEC),
        intent.getBooleanExtra(EXTRA_CPUOVERUSE_DETECTION, true));
    commandLineRun = intent.getBooleanExtra(EXTRA_CMDLINE, false);
    runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);

    // Create connection client and connection parameters.
    appRtcClient = new WebSocketRTCClient(this, new LooperExecutor());
    roomConnectionParameters = new RoomConnectionParameters(
        roomUri.toString(), roomId, loopback);
    Log.d("CallStatsAPI","Room uri is "+roomUri.toString()+" Room id is "+roomId);

    // Send intent arguments to fragments.
    callFragment.setArguments(intent.getExtras());
    hudFragment.setArguments(intent.getExtras());

    // Activate call and HUD fragments and start the call.
    FragmentTransaction ft = getFragmentManager().beginTransaction();
    ft.add(R.id.call_fragment_container, callFragment);
    ft.add(R.id.hud_fragment_container, hudFragment);
    ft.commit();
    startCall();

    // For command line execution run connection for <runTimeMs> and exit.
    if (commandLineRun && runTimeMs > 0) {
      videoView.postDelayed(new Runnable() {
        public void run() {
          disconnect();
        }
      }, runTimeMs);
    }
  }
// Activity interfaces
@Override
public void onPause() {
  super.onPause();
  // Stop GL rendering and mark the activity as backgrounded.
  videoView.onPause();
  activityRunning = false;
  // Stop capturing while in background.
  final PeerConnectionClient client = peerConnectionClient;
  if (client != null) {
    client.stopVideoSource();
  }
}
@Override
public void onResume() {
  super.onResume();
  // Resume GL rendering and mark the activity as foregrounded.
  videoView.onResume();
  activityRunning = true;
  // Restart capturing that onPause() stopped.
  final PeerConnectionClient client = peerConnectionClient;
  if (client != null) {
    client.startVideoSource();
  }
}
@Override
protected void onDestroy() {
  // Tear down signaling and the peer connection before the activity dies.
  disconnect();
  super.onDestroy();
  final Toast pending = logToast;
  if (pending != null) {
    pending.cancel();
  }
  activityRunning = false;
}
// CallFragment.OnCallEvents interface implementation.
@Override
public void onCallHangUp() {
  // User pressed the hang-up control: shut the whole call down.
  disconnect();
}
@Override
public void onCameraSwitch() {
  // Nothing to switch before the peer connection client exists.
  if (peerConnectionClient == null) {
    return;
  }
  peerConnectionClient.switchCamera();
}
// Called from CallFragment when the user toggles the video scaling mode;
// stores the new mode and re-lays-out both renderers.
@Override
public void onVideoScalingSwitch(ScalingType scalingType) {
  this.scalingType = scalingType;
  updateVideoView();
}
// Helper functions.
// Show or hide the call-control and HUD fragments on screen tap.
private void toggleCallControlFragmentVisibility() {
  // Only meaningful once the call is up and the fragment is attached.
  if (!iceConnected || !callFragment.isAdded()) {
    return;
  }
  callControlFragmentVisible = !callControlFragmentVisible;
  final FragmentTransaction transaction = getFragmentManager().beginTransaction();
  if (callControlFragmentVisible) {
    transaction.show(callFragment);
    transaction.show(hudFragment);
  } else {
    transaction.hide(callFragment);
    transaction.hide(hudFragment);
  }
  transaction.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_FADE);
  transaction.commit();
}
// Re-position both renderers for the current connection state.
private void updateVideoView() {
  // Remote video always fills the whole window.
  VideoRendererGui.update(remoteRender,
      REMOTE_X, REMOTE_Y,
      REMOTE_WIDTH, REMOTE_HEIGHT, scalingType, false);
  if (!iceConnected) {
    // Still connecting: local preview stays large.
    VideoRendererGui.update(localRender,
        LOCAL_X_CONNECTING, LOCAL_Y_CONNECTING,
        LOCAL_WIDTH_CONNECTING, LOCAL_HEIGHT_CONNECTING, scalingType, true);
  } else {
    // Connected: shrink local preview into a corner thumbnail.
    VideoRendererGui.update(localRender,
        LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED,
        LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED,
        ScalingType.SCALE_ASPECT_FIT, true);
  }
}
// Connect to the signaling room and bring up audio routing.
private void startCall() {
  if (appRtcClient == null) {
    Log.e(TAG, "AppRTC client is not allocated for a call.");
    return;
  }
  callStartedTimeMs = System.currentTimeMillis();
  // Start room connection.
  logAndToast(getString(R.string.connecting_to,
      roomConnectionParameters.roomUrl));
  appRtcClient.connectToRoom(roomConnectionParameters);
  // Create an audio manager that takes care of audio routing,
  // audio modes, audio device enumeration etc.
  audioManager = AppRTCAudioManager.create(this, new Runnable() {
    // Called each time the audio state (number and type of devices) changes.
    @Override
    public void run() {
      onAudioManagerChangedState();
    }
  });
  // Store existing audio settings and change audio mode to
  // MODE_IN_COMMUNICATION for best possible VoIP performance.
  Log.d(TAG, "Initializing the audio manager...");
  audioManager.init();
}
// Should be called from UI thread
// Must run on the UI thread: called once ICE reports connected.
private void callConnected() {
  final long connectDelta = System.currentTimeMillis() - callStartedTimeMs;
  Log.i(TAG, "Call connected: delay=" + connectDelta + "ms");
  // Switch local preview to thumbnail layout, then start stats polling.
  updateVideoView();
  peerConnectionClient.enableStatsEvents(true, STAT_CALLBACK_PERIOD);
}
// Callback from AppRTCAudioManager when the audio device set changes.
// Intentionally a no-op for now.
private void onAudioManagerChangedState() {
  // TODO(henrika): disable video if AppRTCAudioManager.AudioDevice.EARPIECE
  // is active.
}
// Create peer connection factory when EGL context is ready.
// Create the peer connection factory once the EGL context is ready.
private void createPeerConnectionFactory() {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (peerConnectionClient == null) {
        final long startupDelta = System.currentTimeMillis() - callStartedTimeMs;
        Log.d(TAG, "Creating peer connection factory, delay=" + startupDelta + "ms");
        peerConnectionClient = PeerConnectionClient.getInstance();
        peerConnectionClient.createPeerConnectionFactory(CallActivity.this,
            VideoRendererGui.getEGLContext(), peerConnectionParameters,
            CallActivity.this);
      }
      // If room signaling finished before the EGL context became ready,
      // resume the deferred connection setup now.
      if (signalingParameters != null) {
        Log.w(TAG, "EGL context is ready after room connection.");
        onConnectedToRoomInternal(signalingParameters);
      }
    }
  });
}
// Disconnect from remote resources, dispose of local resources, and exit.
// Disconnect from remote resources, dispose of local resources, and exit.
// Each field is nulled after release so a second call is a no-op.
private void disconnect() {
  activityRunning = false;
  if (appRtcClient != null) {
    appRtcClient.disconnectFromRoom();
    appRtcClient = null;
  }
  if (peerConnectionClient != null) {
    peerConnectionClient.close();
    peerConnectionClient = null;
  }
  if (audioManager != null) {
    audioManager.close();
    audioManager = null;
  }
  // Report success only for a clean, fully connected call.
  setResult(iceConnected && !isError ? RESULT_OK : RESULT_CANCELED);
  finish();
}
// Log the error; when there is a visible UI, show a dialog first and
// disconnect only after the user dismisses it.
private void disconnectWithErrorMessage(final String errorMessage) {
  if (commandLineRun || !activityRunning) {
    // Headless or backgrounded: no UI to show, just log and bail out.
    Log.e(TAG, "Critical error: " + errorMessage);
    disconnect();
    return;
  }
  new AlertDialog.Builder(this)
      .setTitle(getText(R.string.channel_error_title))
      .setMessage(errorMessage)
      .setCancelable(false)
      .setNeutralButton(R.string.ok, new DialogInterface.OnClickListener() {
        @Override
        public void onClick(DialogInterface dialog, int id) {
          dialog.cancel();
          disconnect();
        }
      })
      .create()
      .show();
}
// Log |msg| and Toast about it.
// Log |msg| and show it as a short toast, replacing any toast still visible.
private void logAndToast(String msg) {
  Log.d(TAG, msg);
  final Toast previous = logToast;
  if (previous != null) {
    previous.cancel();
  }
  logToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT);
  logToast.show();
}
// Route an error to the UI thread; only the first error triggers the
// disconnect dialog, later ones are ignored.
private void reportError(final String description) {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (isError) {
        return;
      }
      isError = true;
      disconnectWithErrorMessage(description);
    }
  });
}
// -----Implementation of AppRTCClient.AppRTCSignalingEvents ---------------
// All callbacks are invoked from websocket signaling looper thread and
// are routed to UI thread.
// UI-thread half of onConnectedToRoom(): creates the peer connection and
// kicks off the offer/answer exchange. If the EGL context (and therefore
// peerConnectionClient) is not ready yet, the stored signalingParameters
// are picked up later by createPeerConnectionFactory().
private void onConnectedToRoomInternal(final SignalingParameters params) {
  final long delta = System.currentTimeMillis() - callStartedTimeMs;
  signalingParameters = params;
  if (peerConnectionClient == null) {
    Log.w(TAG, "Room is connected, but EGL context is not ready yet.");
    return;
  }
  logAndToast("Creating peer connection, delay=" + delta + "ms");
  peerConnectionClient.createPeerConnection(
      localRender, remoteRender, signalingParameters);
  if (signalingParameters.initiator) {
    logAndToast("Creating OFFER...");
    // Create offer. Offer SDP will be sent to answering client in
    // PeerConnectionEvents.onLocalDescription event.
    peerConnectionClient.createOffer();
  } else {
    // Callee side: apply the stored offer (if any) and answer it.
    if (params.offerSdp != null) {
      peerConnectionClient.setRemoteDescription(params.offerSdp);
      logAndToast("Creating ANSWER...");
      // Create answer. Answer SDP will be sent to offering client in
      // PeerConnectionEvents.onLocalDescription event.
      peerConnectionClient.createAnswer();
    }
    if (params.iceCandidates != null) {
      // Add remote ICE candidates from room.
      for (IceCandidate iceCandidate : params.iceCandidates) {
        peerConnectionClient.addRemoteIceCandidate(iceCandidate);
      }
    }
  }
}
@Override
public void onConnectedToRoom(final SignalingParameters params) {
  // Signaling events arrive on the websocket looper thread; hop to the
  // UI thread before touching activity state.
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      onConnectedToRoomInternal(params);
    }
  });
}
// Remote SDP received from signaling: apply it and, on the callee side,
// generate the answer. Fixes the "non-initilized" typo in the error log.
@Override
public void onRemoteDescription(final SessionDescription sdp) {
  final long delta = System.currentTimeMillis() - callStartedTimeMs;
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (peerConnectionClient == null) {
        Log.e(TAG, "Received remote SDP for non-initialized peer connection.");
        return;
      }
      logAndToast("Received remote " + sdp.type + ", delay=" + delta + "ms");
      peerConnectionClient.setRemoteDescription(sdp);
      if (!signalingParameters.initiator) {
        logAndToast("Creating ANSWER...");
        // Create answer. Answer SDP will be sent to offering client in
        // PeerConnectionEvents.onLocalDescription event.
        peerConnectionClient.createAnswer();
      }
    }
  });
}
// Remote ICE candidate received from signaling: forward it to the peer
// connection. Fixes the "non-initilized" typo in the error log.
@Override
public void onRemoteIceCandidate(final IceCandidate candidate) {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (peerConnectionClient == null) {
        Log.e(TAG,
            "Received ICE candidate for non-initialized peer connection.");
        return;
      }
      peerConnectionClient.addRemoteIceCandidate(candidate);
    }
  });
}
@Override
public void onChannelClose() {
  // Remote side closed the signaling channel: end the call cleanly.
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      logAndToast("Remote end hung up; dropping PeerConnection");
      disconnect();
    }
  });
}
// Signaling channel failure: route to the common error handler.
@Override
public void onChannelError(final String description) {
  reportError(description);
}
// -----Implementation of PeerConnectionClient.PeerConnectionEvents.---------
// Send local peer connection SDP and ICE candidates to remote party.
// All callbacks are invoked from peer connection client looper thread and
// are routed to UI thread.
// Send our local SDP to the remote party via the signaling client.
@Override
public void onLocalDescription(final SessionDescription sdp) {
  final long sdpDelta = System.currentTimeMillis() - callStartedTimeMs;
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (appRtcClient == null) {
        return;
      }
      logAndToast("Sending " + sdp.type + ", delay=" + sdpDelta + "ms");
      // Initiator sends an offer; the callee sends an answer.
      if (signalingParameters.initiator) {
        appRtcClient.sendOfferSdp(sdp);
      } else {
        appRtcClient.sendAnswerSdp(sdp);
      }
    }
  });
}
// Forward a locally gathered ICE candidate to the room, if still connected.
@Override
public void onIceCandidate(final IceCandidate candidate) {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (appRtcClient == null) {
        return;
      }
      appRtcClient.sendLocalIceCandidate(candidate);
    }
  });
}
// ICE transport established: flag the call as connected and finish setup.
@Override
public void onIceConnected() {
  final long connectDelta = System.currentTimeMillis() - callStartedTimeMs;
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      logAndToast("ICE connected, delay=" + connectDelta + "ms");
      iceConnected = true;
      callConnected();
    }
  });
}
// ICE transport lost: treat it as the end of the call.
@Override
public void onIceDisconnected() {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      logAndToast("ICE disconnected");
      iceConnected = false;
      disconnect();
    }
  });
}
// Intentionally empty: no extra work is needed when the peer connection
// reports it has closed.
@Override
public void onPeerConnectionClosed() {
}
// Periodic stats callback: refresh the HUD while the call is healthy.
@Override
public void onPeerConnectionStatsReady(final StatsReport[] reports) {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (!isError && iceConnected) {
        hudFragment.updateEncoderStatistics(reports);
      }
    }
  });
}
// Peer connection failure: route to the common error handler.
@Override
public void onPeerConnectionError(final String description) {
  reportError(description);
}
}
| |
/*
* Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.java.util.jar.pack;
import java.beans.PropertyChangeListener;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashSet;
import java.util.Set;
import java.util.SortedMap;
import java.util.TimeZone;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import java.util.jar.JarOutputStream;
import java.util.jar.Pack200;
import java.util.zip.CRC32;
import java.util.zip.CheckedOutputStream;
import java.util.zip.ZipEntry;
/*
 * Implementation of the Pack provider.
 * @author John Rose
 * @author Kumar Srinivasan
 */
public class UnpackerImpl extends TLGlobals implements Pack200.Unpacker {
/**
* Register a listener for changes to options.
* @param listener An object to be invoked when a property is changed.
*/
public void addPropertyChangeListener(PropertyChangeListener listener) {
props.addListener(listener);
}
/**
* Remove a listener for the PropertyChange event.
* @param listener The PropertyChange listener to be removed.
*/
public void removePropertyChangeListener(PropertyChangeListener listener) {
props.removeListener(listener);
}
public UnpackerImpl() {}
/**
* Get the set of options for the pack and unpack engines.
* @return A sorted association of option key strings to option values.
*/
@SuppressWarnings("unchecked")
public SortedMap properties() {
return props;
}
// Back-pointer to NativeUnpacker, when active.
Object _nunp;
public String toString() {
return Utils.getVersionString();
}
//Driver routines
// The unpack worker...
/**
* Takes a packed-stream InputStream, and writes to a JarOutputStream. Internally
* the entire buffer must be read, it may be more efficient to read the packed-stream
* to a file and pass the File object, in the alternate method described below.
* <p>
* Closes its input but not its output. (The output can accumulate more elements.)
* @param in an InputStream.
* @param out a JarOutputStream.
* @exception IOException if an error is encountered.
*/
public void unpack(InputStream in, JarOutputStream out) throws IOException {
if (in == null) {
throw new NullPointerException("null input");
}
if (out == null) {
throw new NullPointerException("null output");
}
assert(Utils.currentInstance.get() == null);
TimeZone tz = (props.getBoolean(Utils.PACK_DEFAULT_TIMEZONE))
? null
: TimeZone.getDefault();
try {
Utils.currentInstance.set(this);
if (tz != null) TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
final int verbose = props.getInteger(Utils.DEBUG_VERBOSE);
BufferedInputStream in0 = new BufferedInputStream(in);
if (Utils.isJarMagic(Utils.readMagic(in0))) {
if (verbose > 0)
Utils.log.info("Copying unpacked JAR file...");
Utils.copyJarFile(new JarInputStream(in0), out);
} else if (props.getBoolean(Utils.DEBUG_DISABLE_NATIVE)) {
(new DoUnpack()).run(in0, out);
in0.close();
Utils.markJarFile(out);
} else {
(new NativeUnpack(this)).run(in0, out);
in0.close();
Utils.markJarFile(out);
}
} finally {
_nunp = null;
Utils.currentInstance.set(null);
if (tz != null) TimeZone.setDefault(tz);
}
}
/**
* Takes an input File containing the pack file, and generates a JarOutputStream.
* <p>
* Does not close its output. (The output can accumulate more elements.)
* @param in a File.
* @param out a JarOutputStream.
* @exception IOException if an error is encountered.
*/
public void unpack(File in, JarOutputStream out) throws IOException {
if (in == null) {
throw new NullPointerException("null input");
}
if (out == null) {
throw new NullPointerException("null output");
}
// Use the stream-based implementation.
// %%% Reconsider if native unpacker learns to memory-map the file.
try (FileInputStream instr = new FileInputStream(in)) {
unpack(instr, out);
}
if (props.getBoolean(Utils.UNPACK_REMOVE_PACKFILE)) {
in.delete();
}
}
private class DoUnpack {
final int verbose = props.getInteger(Utils.DEBUG_VERBOSE);
{
props.setInteger(Pack200.Unpacker.PROGRESS, 0);
}
// Here's where the bits are read from disk:
final Package pkg = new Package();
final boolean keepModtime
= Pack200.Packer.KEEP.equals(
props.getProperty(Utils.UNPACK_MODIFICATION_TIME, Pack200.Packer.KEEP));
final boolean keepDeflateHint
= Pack200.Packer.KEEP.equals(
props.getProperty(Pack200.Unpacker.DEFLATE_HINT, Pack200.Packer.KEEP));
final int modtime;
final boolean deflateHint;
{
if (!keepModtime) {
modtime = props.getTime(Utils.UNPACK_MODIFICATION_TIME);
} else {
modtime = pkg.default_modtime;
}
deflateHint = (keepDeflateHint) ? false :
props.getBoolean(java.util.jar.Pack200.Unpacker.DEFLATE_HINT);
}
// Checksum apparatus.
final CRC32 crc = new CRC32();
final ByteArrayOutputStream bufOut = new ByteArrayOutputStream();
final OutputStream crcOut = new CheckedOutputStream(bufOut, crc);
public void run(BufferedInputStream in, JarOutputStream out) throws IOException {
if (verbose > 0) {
props.list(System.out);
}
for (int seg = 1; ; seg++) {
unpackSegment(in, out);
// Try to get another segment.
if (!Utils.isPackMagic(Utils.readMagic(in))) break;
if (verbose > 0)
Utils.log.info("Finished segment #"+seg);
}
}
private void unpackSegment(InputStream in, JarOutputStream out) throws IOException {
props.setProperty(java.util.jar.Pack200.Unpacker.PROGRESS,"0");
// Process the output directory or jar output.
new PackageReader(pkg, in).read();
if (props.getBoolean("unpack.strip.debug")) pkg.stripAttributeKind("Debug");
if (props.getBoolean("unpack.strip.compile")) pkg.stripAttributeKind("Compile");
props.setProperty(java.util.jar.Pack200.Unpacker.PROGRESS,"50");
pkg.ensureAllClassFiles();
// Now write out the files.
Set<Package.Class> classesToWrite = new HashSet<>(pkg.getClasses());
for (Package.File file : pkg.getFiles()) {
String name = file.nameString;
JarEntry je = new JarEntry(Utils.getJarEntryName(name));
boolean deflate;
deflate = (keepDeflateHint)
? (((file.options & Constants.FO_DEFLATE_HINT) != 0) ||
((pkg.default_options & Constants.AO_DEFLATE_HINT) != 0))
: deflateHint;
boolean needCRC = !deflate; // STORE mode requires CRC
if (needCRC) crc.reset();
bufOut.reset();
if (file.isClassStub()) {
Package.Class cls = file.getStubClass();
assert(cls != null);
new ClassWriter(cls, needCRC ? crcOut : bufOut).write();
classesToWrite.remove(cls); // for an error check
} else {
// collect data & maybe CRC
file.writeTo(needCRC ? crcOut : bufOut);
}
je.setMethod(deflate ? JarEntry.DEFLATED : JarEntry.STORED);
if (needCRC) {
if (verbose > 0)
Utils.log.info("stored size="+bufOut.size()+" and crc="+crc.getValue());
je.setMethod(JarEntry.STORED);
je.setSize(bufOut.size());
je.setCrc(crc.getValue());
}
if (keepModtime) {
je.setTime(file.modtime);
// Convert back to milliseconds
je.setTime((long)file.modtime * 1000);
} else {
je.setTime((long)modtime * 1000);
}
out.putNextEntry(je);
bufOut.writeTo(out);
out.closeEntry();
if (verbose > 0)
Utils.log.info("Writing "+Utils.zeString((ZipEntry)je));
}
assert(classesToWrite.isEmpty());
props.setProperty(java.util.jar.Pack200.Unpacker.PROGRESS,"100");
pkg.reset(); // reset for the next segment, if any
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.mover;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSOutputStream;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
import org.apache.hadoop.hdfs.server.balancer.Dispatcher;
import org.apache.hadoop.hdfs.server.balancer.ExitStatus;
import org.apache.hadoop.hdfs.server.balancer.TestBalancer;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsVolumeImpl;
import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotTestHelper;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_LAZY_WRITER_INTERVAL_SEC;
/**
* Test the data migration tool (for Archival Storage)
*/
public class TestStorageMover {
static final Log LOG = LogFactory.getLog(TestStorageMover.class);
static {
    // Crank up logging for the components exercised by these tests.
    GenericTestUtils.setLogLevel(LogFactory.getLog(BlockPlacementPolicy.class),
        Level.ALL);
    GenericTestUtils.setLogLevel(LogFactory.getLog(Dispatcher.class),
        Level.ALL);
    GenericTestUtils.setLogLevel(DataTransferProtocol.LOG, Level.ALL);
}

private static final int BLOCK_SIZE = 1024;
private static final short REPL = 3;
private static final int NUM_DATANODES = 6;
private static final Configuration DEFAULT_CONF = new HdfsConfiguration();
// Default storage policy suite and the three built-in policies used below.
private static final BlockStoragePolicySuite DEFAULT_POLICIES;
private static final BlockStoragePolicy HOT;
private static final BlockStoragePolicy WARM;
private static final BlockStoragePolicy COLD;

static {
    // Small blocks and short intervals keep the mini-cluster tests fast.
    DEFAULT_CONF.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE);
    DEFAULT_CONF.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L);
    DEFAULT_CONF.setLong(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY,
        2L);
    DEFAULT_CONF.setLong(DFSConfigKeys.DFS_MOVER_MOVEDWINWIDTH_KEY, 2000L);

    DEFAULT_POLICIES = BlockStoragePolicySuite.createDefaultSuite();
    HOT = DEFAULT_POLICIES.getPolicy(HdfsConstants.HOT_STORAGE_POLICY_NAME);
    WARM = DEFAULT_POLICIES.getPolicy(HdfsConstants.WARM_STORAGE_POLICY_NAME);
    COLD = DEFAULT_POLICIES.getPolicy(HdfsConstants.COLD_STORAGE_POLICY_NAME);
    TestBalancer.initTestSetup();
    Dispatcher.setDelayAfterErrors(1000L);
}
/**
* This scheme defines files/directories and their block storage policies. It
* also defines snapshots.
*/
static class NamespaceScheme {
    final List<Path> dirs;
    final List<Path> files;
    final long fileSize;
    final Map<Path, List<String>> snapshotMap;
    final Map<Path, BlockStoragePolicy> policyMap;

    NamespaceScheme(List<Path> dirs, List<Path> files, long fileSize,
                    Map<Path,List<String>> snapshotMap,
                    Map<Path, BlockStoragePolicy> policyMap) {
      this.dirs = dirs == null? Collections.<Path>emptyList(): dirs;
      this.files = files == null? Collections.<Path>emptyList(): files;
      this.fileSize = fileSize;
      this.snapshotMap = snapshotMap == null ?
          Collections.<Path, List<String>>emptyMap() : snapshotMap;
      // Default to an empty map, consistent with the other collection
      // arguments, so setStoragePolicy() cannot NPE when no policies given.
      this.policyMap = policyMap == null ?
          Collections.<Path, BlockStoragePolicy>emptyMap() : policyMap;
    }

    /**
     * Create files/directories/snapshots.
     */
    void prepare(DistributedFileSystem dfs, short repl) throws Exception {
      for (Path d : dirs) {
        dfs.mkdirs(d);
      }
      for (Path file : files) {
        DFSTestUtil.createFile(dfs, file, fileSize, repl, 0L);
      }
      for (Map.Entry<Path, List<String>> entry : snapshotMap.entrySet()) {
        for (String snapshot : entry.getValue()) {
          SnapshotTestHelper.createSnapshot(dfs, entry.getKey(), snapshot);
        }
      }
    }

    /**
     * Set storage policies according to the corresponding scheme.
     */
    void setStoragePolicy(DistributedFileSystem dfs) throws Exception {
      for (Map.Entry<Path, BlockStoragePolicy> entry : policyMap.entrySet()) {
        dfs.setStoragePolicy(entry.getKey(), entry.getValue().getName());
      }
    }
  }
/**
* This scheme defines DataNodes and their storage, including storage types
* and remaining capacities.
*/
static class ClusterScheme {
    final Configuration conf;
    final int numDataNodes;
    final short repl;
    // Per-datanode storage types/capacities; null means use cluster defaults.
    final StorageType[][] storageTypes;
    final long[][] storageCapacities;

    // Default scheme: standard conf, NUM_DATANODES nodes, generated types.
    ClusterScheme() {
      this(DEFAULT_CONF, NUM_DATANODES, REPL,
          genStorageTypes(NUM_DATANODES), null);
    }

    ClusterScheme(Configuration conf, int numDataNodes, short repl,
        StorageType[][] types, long[][] capacities) {
      // When provided, the per-node arrays must cover every datanode.
      Preconditions.checkArgument(types == null || types.length == numDataNodes);
      Preconditions.checkArgument(capacities == null || capacities.length ==
          numDataNodes);
      this.conf = conf;
      this.numDataNodes = numDataNodes;
      this.repl = repl;
      this.storageTypes = types;
      this.storageCapacities = capacities;
    }
  }
class MigrationTest {
private final ClusterScheme clusterScheme;
private final NamespaceScheme nsScheme;
private final Configuration conf;
private MiniDFSCluster cluster;
private DistributedFileSystem dfs;
private final BlockStoragePolicySuite policies;
MigrationTest(ClusterScheme cScheme, NamespaceScheme nsScheme) {
this.clusterScheme = cScheme;
this.nsScheme = nsScheme;
this.conf = clusterScheme.conf;
this.policies = DEFAULT_POLICIES;
}
/**
* Set up the cluster and start NameNode and DataNodes according to the
* corresponding scheme.
*/
void setupCluster() throws Exception {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(clusterScheme
.numDataNodes).storageTypes(clusterScheme.storageTypes)
.storageCapacities(clusterScheme.storageCapacities).build();
cluster.waitActive();
dfs = cluster.getFileSystem();
}
private void runBasicTest(boolean shutdown) throws Exception {
setupCluster();
try {
prepareNamespace();
verify(true);
setStoragePolicy();
migrate(ExitStatus.SUCCESS);
verify(true);
} finally {
if (shutdown) {
shutdownCluster();
}
}
}
void shutdownCluster() throws Exception {
IOUtils.cleanup(null, dfs);
if (cluster != null) {
cluster.shutdown();
}
}
/**
* Create files/directories and set their storage policies according to the
* corresponding scheme.
*/
void prepareNamespace() throws Exception {
nsScheme.prepare(dfs, clusterScheme.repl);
}
void setStoragePolicy() throws Exception {
nsScheme.setStoragePolicy(dfs);
}
/**
* Run the migration tool.
*/
void migrate(ExitStatus expectedExitCode) throws Exception {
runMover(expectedExitCode);
Thread.sleep(5000); // let the NN finish deletion
}
/**
* Verify block locations after running the migration tool.
*/
void verify(boolean verifyAll) throws Exception {
for (DataNode dn : cluster.getDataNodes()) {
DataNodeTestUtils.triggerBlockReport(dn);
}
if (verifyAll) {
verifyNamespace();
}
}
private void runMover(ExitStatus expectedExitCode) throws Exception {
Collection<URI> namenodes = DFSUtil.getNsServiceRpcUris(conf);
Map<URI, List<Path>> nnMap = Maps.newHashMap();
for (URI nn : namenodes) {
nnMap.put(nn, null);
}
int result = Mover.run(nnMap, conf);
Assert.assertEquals(expectedExitCode.getExitCode(), result);
}
private void verifyNamespace() throws Exception {
HdfsFileStatus status = dfs.getClient().getFileInfo("/");
verifyRecursively(null, status);
}
private void verifyRecursively(final Path parent,
final HdfsFileStatus status) throws Exception {
if (status.isDir()) {
Path fullPath = parent == null ?
new Path("/") : status.getFullPath(parent);
DirectoryListing children = dfs.getClient().listPaths(
fullPath.toString(), HdfsFileStatus.EMPTY_NAME, true);
for (HdfsFileStatus child : children.getPartialListing()) {
verifyRecursively(fullPath, child);
}
} else if (!status.isSymlink()) { // is file
verifyFile(parent, status, null);
}
}
void verifyFile(final Path file, final Byte expectedPolicyId)
throws Exception {
final Path parent = file.getParent();
DirectoryListing children = dfs.getClient().listPaths(
parent.toString(), HdfsFileStatus.EMPTY_NAME, true);
for (HdfsFileStatus child : children.getPartialListing()) {
if (child.getLocalName().equals(file.getName())) {
verifyFile(parent, child, expectedPolicyId);
return;
}
}
Assert.fail("File " + file + " not found.");
}
private void verifyFile(final Path parent, final HdfsFileStatus status,
final Byte expectedPolicyId) throws Exception {
HdfsLocatedFileStatus fileStatus = (HdfsLocatedFileStatus) status;
byte policyId = fileStatus.getStoragePolicy();
BlockStoragePolicy policy = policies.getPolicy(policyId);
if (expectedPolicyId != null) {
Assert.assertEquals((byte)expectedPolicyId, policy.getId());
}
final List<StorageType> types = policy.chooseStorageTypes(
status.getReplication());
for(LocatedBlock lb : fileStatus.getBlockLocations().getLocatedBlocks()) {
final Mover.StorageTypeDiff diff = new Mover.StorageTypeDiff(types,
lb.getStorageTypes());
Assert.assertTrue(fileStatus.getFullName(parent.toString())
+ " with policy " + policy + " has non-empty overlap: " + diff
+ ", the corresponding block is " + lb.getBlock().getLocalBlock(),
diff.removeOverlap(true));
}
}
Replication getReplication(Path file) throws IOException {
return getOrVerifyReplication(file, null);
}
Replication verifyReplication(Path file, int expectedDiskCount,
int expectedArchiveCount) throws IOException {
final Replication r = new Replication();
r.disk = expectedDiskCount;
r.archive = expectedArchiveCount;
return getOrVerifyReplication(file, r);
}
/**
 * Count DISK/ARCHIVE replicas of the single block of {@code file}; when
 * {@code expected} is non-null, also assert the counts match it.
 */
private Replication getOrVerifyReplication(Path file, Replication expected)
    throws IOException {
  final List<LocatedBlock> blocks = dfs.getClient()
      .getLocatedBlocks(file.toString(), 0).getLocatedBlocks();
  Assert.assertEquals(1, blocks.size());
  final LocatedBlock lb = blocks.get(0);
  final StringBuilder seen = new StringBuilder();
  final Replication actual = new Replication();
  for (StorageType t : lb.getStorageTypes()) {
    seen.append(t).append(", ");
    if (t == StorageType.DISK) {
      actual.disk++;
    } else if (t == StorageType.ARCHIVE) {
      actual.archive++;
    } else {
      Assert.fail("Unexpected storage type " + t);
    }
  }
  if (expected != null) {
    Assert.assertEquals("file = " + file + "\n types = [" + seen + "]",
        expected, actual);
  }
  return actual;
}
}
/** Mutable pair of DISK and ARCHIVE replica counts. */
static class Replication {
  /** Number of replicas stored on DISK volumes. */
  int disk;
  /** Number of replicas stored on ARCHIVE volumes. */
  int archive;

  @Override
  public int hashCode() {
    return disk ^ archive;
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    // instanceof is already false for null, so no separate null check is needed.
    if (!(obj instanceof Replication)) {
      return false;
    }
    final Replication that = (Replication) obj;
    return this.disk == that.disk && this.archive == that.archive;
  }

  @Override
  public String toString() {
    return "[disk=" + disk + ", archive=" + archive + "]";
  }
}
/** All datanodes get the default mixed DISK+ARCHIVE layout. */
private static StorageType[][] genStorageTypes(int numDataNodes) {
  return genStorageTypes(numDataNodes, 0, 0, 0);
}
/** Same as the 4-arg overload with no RAM_DISK datanodes. */
private static StorageType[][] genStorageTypes(int numDataNodes,
    int numAllDisk, int numAllArchive) {
  return genStorageTypes(numDataNodes, numAllDisk, numAllArchive, 0);
}
/**
 * Build per-datanode storage type pairs: first {@code numRamDisk} nodes get
 * RAM_DISK+DISK, then {@code numAllDisk} all-DISK nodes, then
 * {@code numAllArchive} all-ARCHIVE nodes; the remainder are DISK+ARCHIVE.
 */
private static StorageType[][] genStorageTypes(int numDataNodes,
    int numAllDisk, int numAllArchive, int numRamDisk) {
  Preconditions.checkArgument(
      (numAllDisk + numAllArchive + numRamDisk) <= numDataNodes);
  final StorageType[][] types = new StorageType[numDataNodes][];
  int n = 0;
  for (; n < numRamDisk; n++) {
    types[n] = new StorageType[]{StorageType.RAM_DISK, StorageType.DISK};
  }
  for (; n < numRamDisk + numAllDisk; n++) {
    types[n] = new StorageType[]{StorageType.DISK, StorageType.DISK};
  }
  for (; n < numRamDisk + numAllDisk + numAllArchive; n++) {
    types[n] = new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE};
  }
  for (; n < types.length; n++) {
    types[n] = new StorageType[]{StorageType.DISK, StorageType.ARCHIVE};
  }
  return types;
}
/**
 * Build per-datanode capacity pairs mirroring the layout produced by
 * {@code genStorageTypes}: RAM_DISK+DISK nodes first, then all-DISK,
 * then all-ARCHIVE, then mixed DISK+ARCHIVE for the rest.
 */
private static long[][] genCapacities(int nDatanodes, int numAllDisk,
    int numAllArchive, int numRamDisk, long diskCapacity,
    long archiveCapacity, long ramDiskCapacity) {
  final long[][] capacities = new long[nDatanodes][];
  int n = 0;
  for (; n < numRamDisk; n++) {
    capacities[n] = new long[]{ramDiskCapacity, diskCapacity};
  }
  for (; n < numRamDisk + numAllDisk; n++) {
    capacities[n] = new long[]{diskCapacity, diskCapacity};
  }
  for (; n < numRamDisk + numAllDisk + numAllArchive; n++) {
    capacities[n] = new long[]{archiveCapacity, archiveCapacity};
  }
  for (; n < capacities.length; n++) {
    capacities[n] = new long[]{diskCapacity, archiveCapacity};
  }
  return capacities;
}
/**
 * Maps /hot, /warm and /cold to the corresponding storage policies and
 * pre-computes a fixed number of file paths under each directory.
 */
private static class PathPolicyMap {
  final Map<Path, BlockStoragePolicy> map = Maps.newHashMap();
  final Path hot = new Path("/hot");
  final Path warm = new Path("/warm");
  final Path cold = new Path("/cold");
  final List<Path> files;

  PathPolicyMap(int filesPerDir) {
    map.put(hot, HOT);
    map.put(warm, WARM);
    map.put(cold, COLD);
    files = new ArrayList<Path>();
    for (Path dir : map.keySet()) {
      for (int i = 0; i < filesPerDir; i++) {
        files.add(new Path(dir, "file" + i));
      }
    }
  }

  NamespaceScheme newNamespaceScheme() {
    return new NamespaceScheme(Arrays.asList(hot, warm, cold),
        files, BLOCK_SIZE / 2, null, map);
  }

  /**
   * Move hot files to warm and cold, warm files to hot and cold,
   * and cold files to hot and warm.
   */
  void moveAround(DistributedFileSystem dfs) throws Exception {
    for (Path srcDir : map.keySet()) {
      // The file counter only advances on an actual move, matching the
      // file names generated in the constructor.
      int next = 0;
      for (Path dstDir : map.keySet()) {
        if (srcDir.equals(dstDir)) {
          continue;
        }
        final Path src = new Path(srcDir, "file" + next++);
        final Path dst =
            new Path(dstDir, srcDir.getName() + "2" + dstDir.getName());
        LOG.info("rename " + src + " to " + dst);
        dfs.rename(src, dst);
      }
    }
  }
}
/**
* A normal case for Mover: move a file into archival storage
*/
@Test
public void testMigrateFileToArchival() throws Exception {
  LOG.info("testMigrateFileToArchival");
  final Path foo = new Path("/foo");
  // Single file under /foo, whole directory set to COLD.
  final Map<Path, BlockStoragePolicy> policyMap = Maps.newHashMap();
  policyMap.put(foo, COLD);
  final NamespaceScheme nsScheme = new NamespaceScheme(null,
      Arrays.asList(foo), 2 * BLOCK_SIZE, null, policyMap);
  final ClusterScheme clusterScheme = new ClusterScheme(DEFAULT_CONF,
      NUM_DATANODES, REPL, genStorageTypes(NUM_DATANODES), null);
  new MigrationTest(clusterScheme, nsScheme).runBasicTest(true);
}
/**
* Print a big banner in the test log to make debug easier.
*/
static void banner(String string) {
  // Keep the top and bottom rules the same width so the banner lines up
  // in the log (the original top rule was two characters shorter).
  LOG.info("\n\n\n\n==================================================\n" +
      string + "\n" +
      "==================================================\n\n");
}
/**
* Run Mover with arguments specifying files and directories
*/
// Verifies that "mover -p <paths>" migrates only the specified file and
// directory, not the whole namespace.
@Test
public void testMoveSpecificPaths() throws Exception {
  LOG.info("testMoveSpecificPaths");
  final Path foo = new Path("/foo");
  final Path barFile = new Path(foo, "bar");
  final Path foo2 = new Path("/foo2");
  final Path bar2File = new Path(foo2, "bar2");
  Map<Path, BlockStoragePolicy> policyMap = Maps.newHashMap();
  policyMap.put(foo, COLD);
  policyMap.put(foo2, WARM);
  NamespaceScheme nsScheme = new NamespaceScheme(Arrays.asList(foo, foo2),
      Arrays.asList(barFile, bar2File), BLOCK_SIZE, null, policyMap);
  ClusterScheme clusterScheme = new ClusterScheme(DEFAULT_CONF,
      NUM_DATANODES, REPL, genStorageTypes(NUM_DATANODES), null);
  MigrationTest test = new MigrationTest(clusterScheme, nsScheme);
  test.setupCluster();
  try {
    test.prepareNamespace();
    test.setStoragePolicy();
    // Restrict the Mover to /foo/bar and everything under /foo2.
    Map<URI, List<Path>> map = Mover.Cli.getNameNodePathsToMove(test.conf,
        "-p", "/foo/bar", "/foo2");
    int result = Mover.run(map, test.conf);
    Assert.assertEquals(ExitStatus.SUCCESS.getExitCode(), result);
    // NOTE(review): fixed sleep to let block moves settle before verifying;
    // presumably flaky on slow hosts — a poll/wait would be more robust.
    Thread.sleep(5000);
    test.verify(true);
  } finally {
    test.shutdownCluster();
  }
}
/**
* Move an open file into archival storage
*/
// Migrates a directory to COLD while one of its files is still open for
// append, and checks that the under-construction block is left alone and
// the writer/reader are unaffected.
@Test
public void testMigrateOpenFileToArchival() throws Exception {
  LOG.info("testMigrateOpenFileToArchival");
  final Path fooDir = new Path("/foo");
  Map<Path, BlockStoragePolicy> policyMap = Maps.newHashMap();
  policyMap.put(fooDir, COLD);
  NamespaceScheme nsScheme = new NamespaceScheme(Arrays.asList(fooDir), null,
      BLOCK_SIZE, null, policyMap);
  ClusterScheme clusterScheme = new ClusterScheme(DEFAULT_CONF,
      NUM_DATANODES, REPL, genStorageTypes(NUM_DATANODES), null);
  MigrationTest test = new MigrationTest(clusterScheme, nsScheme);
  test.setupCluster();
  // create an open file: one complete block plus an appended, hsync'ed
  // (but not closed) second block.
  banner("writing to file /foo/bar");
  final Path barFile = new Path(fooDir, "bar");
  DFSTestUtil.createFile(test.dfs, barFile, BLOCK_SIZE, (short) 1, 0L);
  FSDataOutputStream out = test.dfs.append(barFile);
  out.writeBytes("hello, ");
  ((DFSOutputStream) out.getWrappedStream()).hsync();
  try {
    banner("start data migration");
    test.setStoragePolicy(); // set /foo to COLD
    test.migrate(ExitStatus.SUCCESS);
    // make sure the under construction block has not been migrated
    LocatedBlocks lbs = test.dfs.getClient().getLocatedBlocks(
        barFile.toString(), BLOCK_SIZE);
    LOG.info("Locations: " + lbs);
    List<LocatedBlock> blks = lbs.getLocatedBlocks();
    Assert.assertEquals(1, blks.size());
    Assert.assertEquals(1, blks.get(0).getLocations().length);
    banner("finish the migration, continue writing");
    // make sure the writing can continue
    out.writeBytes("world!");
    ((DFSOutputStream) out.getWrappedStream()).hsync();
    IOUtils.cleanup(LOG, out);
    lbs = test.dfs.getClient().getLocatedBlocks(
        barFile.toString(), BLOCK_SIZE);
    LOG.info("Locations: " + lbs);
    blks = lbs.getLocatedBlocks();
    Assert.assertEquals(1, blks.size());
    Assert.assertEquals(1, blks.get(0).getLocations().length);
    banner("finish writing, starting reading");
    // check the content of /foo/bar: the 13 bytes written after the first
    // block ("hello, world!") must read back intact.
    FSDataInputStream in = test.dfs.open(barFile);
    byte[] buf = new byte[13];
    // read from offset 1024
    in.readFully(BLOCK_SIZE, buf, 0, buf.length);
    IOUtils.cleanup(LOG, in);
    Assert.assertEquals("hello, world!", new String(buf));
  } finally {
    test.shutdownCluster();
  }
}
/**
* Test directories with Hot, Warm and Cold polices.
*/
@Test
public void testHotWarmColdDirs() throws Exception {
  LOG.info("testHotWarmColdDirs");
  final PathPolicyMap pathPolicyMap = new PathPolicyMap(3);
  final NamespaceScheme nsScheme = pathPolicyMap.newNamespaceScheme();
  final ClusterScheme clusterScheme = new ClusterScheme();
  final MigrationTest test = new MigrationTest(clusterScheme, nsScheme);
  try {
    // First pass: place every file according to its directory policy.
    test.runBasicTest(false);
    // Shuffle files between the policy directories and migrate again.
    pathPolicyMap.moveAround(test.dfs);
    test.migrate(ExitStatus.SUCCESS);
    test.verify(true);
  } finally {
    test.shutdownCluster();
  }
}
/**
 * Poll (up to ~5 seconds) until the first block of {@code file} reports at
 * least {@code expectedReplicaNum} locations. Returns silently on timeout;
 * the caller's own assertions catch any shortfall.
 */
private void waitForAllReplicas(int expectedReplicaNum, Path file,
    DistributedFileSystem dfs) throws Exception {
  for (int attempt = 0; attempt < 5; attempt++) {
    final LocatedBlocks lbs =
        dfs.getClient().getLocatedBlocks(file.toString(), 0, BLOCK_SIZE);
    if (lbs.get(0).getLocations().length >= expectedReplicaNum) {
      return;
    }
    Thread.sleep(1000);
  }
}
/** Shrink every volume of the given storage type on {@code dn} to zero capacity. */
private void setVolumeFull(DataNode dn, StorageType type) {
  try (FsDatasetSpi.FsVolumeReferences refs =
      dn.getFSDataset().getFsVolumeReferences()) {
    for (FsVolumeSpi fvs : refs) {
      final FsVolumeImpl volume = (FsVolumeImpl) fvs;
      if (volume.getStorageType() != type) {
        continue;
      }
      LOG.info("setCapacity to 0 for [" + volume.getStorageType() + "]"
          + volume.getStorageID());
      volume.setCapacityForTesting(0);
    }
  } catch (IOException e) {
    LOG.error("Unexpected exception by closing FsVolumeReference", e);
  }
}
/**
* Test DISK is running out of spaces.
*/
// Exhausts all DISK volumes and checks that new replicas fall back to
// ARCHIVE, and that a hot->warm move reports NO_MOVE_BLOCK when DISK has
// no room left.
@Test
public void testNoSpaceDisk() throws Exception {
  LOG.info("testNoSpaceDisk");
  final PathPolicyMap pathPolicyMap = new PathPolicyMap(0);
  final NamespaceScheme nsScheme = pathPolicyMap.newNamespaceScheme();
  Configuration conf = new Configuration(DEFAULT_CONF);
  final ClusterScheme clusterScheme = new ClusterScheme(conf,
      NUM_DATANODES, REPL, genStorageTypes(NUM_DATANODES), null);
  final MigrationTest test = new MigrationTest(clusterScheme, nsScheme);
  try {
    test.runBasicTest(false);
    // create 2 hot files with replication 3
    final short replication = 3;
    for (int i = 0; i < 2; i++) {
      final Path p = new Path(pathPolicyMap.hot, "file" + i);
      DFSTestUtil.createFile(test.dfs, p, BLOCK_SIZE, replication, 0L);
      waitForAllReplicas(replication, p, test.dfs);
    }
    // set all the DISK volume to full
    for (DataNode dn : test.cluster.getDataNodes()) {
      setVolumeFull(dn, StorageType.DISK);
      DataNodeTestUtils.triggerHeartbeat(dn);
    }
    // test increasing replication. Since DISK is full,
    // new replicas should be stored in ARCHIVE as a fallback storage.
    final Path file0 = new Path(pathPolicyMap.hot, "file0");
    final Replication r = test.getReplication(file0);
    final short newReplication = (short) 5;
    test.dfs.setReplication(file0, newReplication);
    // NOTE(review): fixed sleep waits for replication to complete;
    // presumably flaky on slow hosts — a poll/wait would be more robust.
    Thread.sleep(10000);
    test.verifyReplication(file0, r.disk, newReplication - r.disk);
    // test creating a cold file and then increase replication
    final Path p = new Path(pathPolicyMap.cold, "foo");
    DFSTestUtil.createFile(test.dfs, p, BLOCK_SIZE, replication, 0L);
    test.verifyReplication(p, 0, replication);
    test.dfs.setReplication(p, newReplication);
    Thread.sleep(10000);
    test.verifyReplication(p, 0, newReplication);
    // test move a hot file to warm: DISK is full, so the warm policy's
    // DISK replica cannot be created and the Mover reports NO_MOVE_BLOCK.
    final Path file1 = new Path(pathPolicyMap.hot, "file1");
    test.dfs.rename(file1, pathPolicyMap.warm);
    test.migrate(ExitStatus.NO_MOVE_BLOCK);
    test.verifyFile(new Path(pathPolicyMap.warm, "file1"), WARM.getId());
  } finally {
    test.shutdownCluster();
  }
}
/**
* Test ARCHIVE is running out of spaces.
*/
// Exhausts all ARCHIVE volumes and checks that replication increases of
// cold files stall while hot-file creation and cold->warm moves still work.
@Test
public void testNoSpaceArchive() throws Exception {
  LOG.info("testNoSpaceArchive");
  final PathPolicyMap pathPolicyMap = new PathPolicyMap(0);
  final NamespaceScheme nsScheme = pathPolicyMap.newNamespaceScheme();
  final ClusterScheme clusterScheme = new ClusterScheme(DEFAULT_CONF,
      NUM_DATANODES, REPL, genStorageTypes(NUM_DATANODES), null);
  final MigrationTest test = new MigrationTest(clusterScheme, nsScheme);
  try {
    test.runBasicTest(false);
    // create 2 cold files with replication 3 (files go under /cold)
    final short replication = 3;
    for (int i = 0; i < 2; i++) {
      final Path p = new Path(pathPolicyMap.cold, "file" + i);
      DFSTestUtil.createFile(test.dfs, p, BLOCK_SIZE, replication, 0L);
      waitForAllReplicas(replication, p, test.dfs);
    }
    // set all the ARCHIVE volume to full
    for (DataNode dn : test.cluster.getDataNodes()) {
      setVolumeFull(dn, StorageType.ARCHIVE);
      DataNodeTestUtils.triggerHeartbeat(dn);
    }
    { // test increasing replication but new replicas cannot be created
      // since no more ARCHIVE space.
      final Path file0 = new Path(pathPolicyMap.cold, "file0");
      final Replication r = test.getReplication(file0);
      Assert.assertEquals(0, r.disk);
      final short newReplication = (short) 5;
      test.dfs.setReplication(file0, newReplication);
      // NOTE(review): fixed sleep gives replication time to (not) happen.
      Thread.sleep(10000);
      // replica count must be unchanged: ARCHIVE is full and cold files
      // may not fall back to DISK.
      test.verifyReplication(file0, 0, r.archive);
    }
    { // test creating a hot file (DISK still has space)
      final Path p = new Path(pathPolicyMap.hot, "foo");
      DFSTestUtil.createFile(test.dfs, p, BLOCK_SIZE, (short) 3, 0L);
    }
    { // test move a cold file to warm: replicas move off ARCHIVE, so the
      // full ARCHIVE tier does not block this migration.
      final Path file1 = new Path(pathPolicyMap.cold, "file1");
      test.dfs.rename(file1, pathPolicyMap.warm);
      test.migrate(ExitStatus.SUCCESS);
      test.verify(true);
    }
  } finally {
    test.shutdownCluster();
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.pagemem.impl;
import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.IgniteSystemProperties;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.failure.FailureContext;
import org.apache.ignite.failure.FailureType;
import org.apache.ignite.internal.mem.DirectMemoryProvider;
import org.apache.ignite.internal.mem.DirectMemoryRegion;
import org.apache.ignite.internal.mem.IgniteOutOfMemoryException;
import org.apache.ignite.internal.pagemem.PageIdUtils;
import org.apache.ignite.internal.pagemem.PageMemory;
import org.apache.ignite.internal.processors.cache.GridCacheSharedContext;
import org.apache.ignite.internal.processors.cache.persistence.tree.io.PageIO;
import org.apache.ignite.internal.metric.IoStatisticsHolder;
import org.apache.ignite.internal.metric.IoStatisticsHolderNoOp;
import org.apache.ignite.internal.processors.metric.impl.LongAdderMetric;
import org.apache.ignite.internal.util.GridUnsafe;
import org.apache.ignite.internal.util.IgniteUtils;
import org.apache.ignite.internal.util.OffheapReadWriteLock;
import org.apache.ignite.internal.util.offheap.GridOffHeapOutOfMemoryException;
import org.apache.ignite.internal.util.typedef.internal.U;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_OFFHEAP_LOCK_CONCURRENCY_LEVEL;
import static org.apache.ignite.internal.util.GridUnsafe.wrapPointer;
/**
* Page header structure is described by the following diagram.
*
* When page is not allocated (in a free list):
* <pre>
* +--------+--------+---------------------------------------------+
* |8 bytes |8 bytes | PAGE_SIZE + PAGE_OVERHEAD - 16 bytes |
* +--------+--------+---------------------------------------------+
* |Next ptr|Rel ptr | Empty |
* +--------+--------+---------------------------------------------+
* </pre>
* <p/>
* When page is allocated and is in use:
* <pre>
* +--------+--------+--------+--------+---------------------------+
* |8 bytes |8 bytes |8 bytes |8 bytes | PAGE_SIZE |
* +--------+--------+--------+--------+---------------------------+
* | Marker |Page ID |Pin CNT | Lock | Page data |
* +--------+--------+--------+--------+---------------------------+
* </pre>
*
* Note that first 8 bytes of page header are used either for page marker or for next relative pointer depending
* on whether the page is in use or not.
*/
public class PageMemoryNoStoreImpl implements PageMemory {
/** */
public static final long PAGE_MARKER = 0xBEEAAFDEADBEEF01L;
/** Full relative pointer mask. */
private static final long RELATIVE_PTR_MASK = 0xFFFFFFFFFFFFFFL;
/** Invalid relative pointer value. */
private static final long INVALID_REL_PTR = RELATIVE_PTR_MASK;
/** Address mask to avoid ABA problem. */
private static final long ADDRESS_MASK = 0xFFFFFFFFFFFFFFL;
/** Counter mask to avoid ABA problem. */
private static final long COUNTER_MASK = ~ADDRESS_MASK;
/** Counter increment to avoid ABA problem. */
private static final long COUNTER_INC = ADDRESS_MASK + 1;
/** Page ID offset. */
public static final int PAGE_ID_OFFSET = 8;
/** Page pin counter offset. */
public static final int LOCK_OFFSET = 16;
/**
* Need a 8-byte pointer for linked list, 8 bytes for internal needs (flags),
* 4 bytes cache ID, 8 bytes timestamp.
*/
public static final int PAGE_OVERHEAD = LOCK_OFFSET + OffheapReadWriteLock.LOCK_SIZE;
/** Number of bits required to store segment index. */
private static final int SEG_BITS = 4;
/** Number of bits required to store segment index. */
private static final int SEG_CNT = (1 << SEG_BITS);
/** Number of bits left to store page index. */
private static final int IDX_BITS = PageIdUtils.PAGE_IDX_SIZE - SEG_BITS;
/** Segment mask. */
private static final int SEG_MASK = ~(-1 << SEG_BITS);
/** Index mask. */
private static final int IDX_MASK = ~(-1 << IDX_BITS);
/** Page size. */
private int sysPageSize;
/** */
private final IgniteLogger log;
/** Direct memory allocator. */
private final DirectMemoryProvider directMemoryProvider;
/** Name of DataRegion this PageMemory is associated with. */
private final DataRegionConfiguration dataRegionCfg;
/** */
private AtomicLong freePageListHead = new AtomicLong(INVALID_REL_PTR);
/** Segments array. */
private volatile Segment[] segments;
/** Lock for segments changes. */
private Object segmentsLock = new Object();
/** */
private final AtomicInteger allocatedPages = new AtomicInteger();
/** */
private final LongAdderMetric totalAllocatedPagesMetric;
/** */
private AtomicInteger selector = new AtomicInteger();
/** */
private OffheapReadWriteLock rwLock;
/** Concurrency lvl. */
private final int lockConcLvl = IgniteSystemProperties.getInteger(
IGNITE_OFFHEAP_LOCK_CONCURRENCY_LEVEL,
IgniteUtils.nearestPow2(Runtime.getRuntime().availableProcessors() * 4)
);
/** */
private final int totalPages;
/** */
private final boolean trackAcquiredPages;
/** Shared context. */
private final GridCacheSharedContext<?, ?> ctx;
/**
* {@code False} if memory was not started or already stopped and is not supposed for any usage.
*/
private volatile boolean started;
/**
* @param log Logger.
* @param directMemoryProvider Memory allocator to use.
* @param sharedCtx Cache shared context.
* @param pageSize Page size.
* @param dataRegionCfg Data region configuration.
* @param totalAllocatedPagesMetric Total allocated pages metric.
* @param trackAcquiredPages If {@code true} tracks number of allocated pages (for tests purpose only).
*/
public PageMemoryNoStoreImpl(
    IgniteLogger log,
    DirectMemoryProvider directMemoryProvider,
    GridCacheSharedContext<?, ?> sharedCtx,
    int pageSize,
    DataRegionConfiguration dataRegionCfg,
    LongAdderMetric totalAllocatedPagesMetric,
    boolean trackAcquiredPages
) {
    // At least one source of a logger must be supplied.
    assert log != null || sharedCtx != null;
    assert pageSize % 8 == 0;

    // Prefer the shared context's logger when a context is available.
    this.log = sharedCtx != null ? sharedCtx.logger(PageMemoryNoStoreImpl.class) : log;
    this.directMemoryProvider = directMemoryProvider;
    this.trackAcquiredPages = trackAcquiredPages;
    this.totalAllocatedPagesMetric = totalAllocatedPagesMetric;
    this.dataRegionCfg = dataRegionCfg;
    this.ctx = sharedCtx;

    // Each page carries PAGE_OVERHEAD header bytes in addition to user data.
    sysPageSize = pageSize + PAGE_OVERHEAD;

    assert sysPageSize % 8 == 0 : sysPageSize;

    // Upper bound on pages this region can ever hold.
    totalPages = (int)(dataRegionCfg.getMaxSize() / sysPageSize);

    rwLock = new OffheapReadWriteLock(lockConcLvl);
}
/** {@inheritDoc} */
@Override public void start() throws IgniteException {
    synchronized (segmentsLock) {
        if (started)
            return;

        started = true;

        long startSize = dataRegionCfg.getInitialSize();
        long maxSize = dataRegionCfg.getMaxSize();

        // Plan up to SEG_CNT chunks: the first one is the initial size; the
        // rest split the remaining capacity in pieces of at least 256 MB.
        long[] chunks = new long[SEG_CNT];

        chunks[0] = startSize;

        long total = startSize;

        long allocChunkSize = Math.max((maxSize - startSize) / (SEG_CNT - 1), 256L * 1024 * 1024);

        int lastIdx = 0;

        for (int i = 1; i < SEG_CNT; i++) {
            // Last chunk may be smaller; stop once maxSize is fully covered.
            long allocSize = Math.min(allocChunkSize, maxSize - total);

            if (allocSize <= 0)
                break;

            chunks[i] = allocSize;

            total += allocSize;

            lastIdx = i;
        }

        // Trim unused tail entries so the provider only sees real chunks.
        if (lastIdx != SEG_CNT - 1)
            chunks = Arrays.copyOf(chunks, lastIdx + 1);

        if (segments == null)
            directMemoryProvider.initialize(chunks);

        // Eagerly allocate the first segment.
        addSegment(null);
    }
}
/** {@inheritDoc} */
@Override public void stop(boolean deallocate) throws IgniteException {
    synchronized (segmentsLock) {
        if (log.isDebugEnabled())
            log.debug("Stopping page memory.");

        started = false;

        DirectMemoryProvider provider = directMemoryProvider;

        provider.shutdown(deallocate);

        if (provider instanceof Closeable) {
            try {
                ((Closeable)provider).close();
            }
            catch (IOException e) {
                // Wrap as unchecked: stop() is not declared to throw IOException.
                throw new IgniteException(e);
            }
        }
    }
}
/** {@inheritDoc} */
@Override public ByteBuffer pageBuffer(long pageAddr) {
    // Expose the page's user-data area as a direct ByteBuffer view.
    return GridUnsafe.wrapPointer(pageAddr, pageSize());
}
/** {@inheritDoc} */
@Override public long allocatePage(int grpId, int partId, byte flags) {
    assert started;

    // First, try to reuse a previously freed page from the free list.
    long relPtr = borrowFreePage();
    long absPtr = 0;

    if (relPtr != INVALID_REL_PTR) {
        int pageIdx = PageIdUtils.pageIndex(relPtr);

        Segment seg = segment(pageIdx);

        absPtr = seg.absolute(pageIdx);
    }

    // No segments contained a free page.
    if (relPtr == INVALID_REL_PTR) {
        Segment[] seg0 = segments;
        Segment allocSeg = seg0[seg0.length - 1];

        // Bump-allocate from the last segment, growing the segment array
        // when the current last segment is exhausted.
        while (allocSeg != null) {
            relPtr = allocSeg.allocateFreePage(flags);

            if (relPtr != INVALID_REL_PTR) {
                absPtr = allocSeg.absolute(PageIdUtils.pageIndex(relPtr));

                break;
            }
            else
                allocSeg = addSegment(seg0);
        }
    }

    // Neither the free list nor a new segment could supply a page: the
    // region has reached its configured maximum size.
    if (relPtr == INVALID_REL_PTR) {
        IgniteOutOfMemoryException oom = new IgniteOutOfMemoryException("Out of memory in data region [" +
            "name=" + dataRegionCfg.getName() +
            ", initSize=" + U.readableSize(dataRegionCfg.getInitialSize(), false) +
            ", maxSize=" + U.readableSize(dataRegionCfg.getMaxSize(), false) +
            ", persistenceEnabled=" + dataRegionCfg.isPersistenceEnabled() + "] Try the following:" + U.nl() +
            " ^-- Increase maximum off-heap memory size (DataRegionConfiguration.maxSize)" + U.nl() +
            " ^-- Enable Ignite persistence (DataRegionConfiguration.persistenceEnabled)" + U.nl() +
            " ^-- Enable eviction or expiration policies"
        );

        // Report the OOM to the failure handler before throwing.
        if (ctx != null)
            ctx.kernalContext().failure().process(new FailureContext(FailureType.CRITICAL_ERROR, oom));

        throw oom;
    }

    assert (relPtr & ~PageIdUtils.PAGE_IDX_MASK) == 0 : U.hexLong(relPtr & ~PageIdUtils.PAGE_IDX_MASK);

    // Assign page ID according to flags and partition ID.
    long pageId = PageIdUtils.pageId(partId, flags, (int)relPtr);

    writePageId(absPtr, pageId);

    // TODO pass an argument to decide whether the page should be cleaned.
    GridUnsafe.setMemory(absPtr + PAGE_OVERHEAD, sysPageSize - PAGE_OVERHEAD, (byte)0);

    return pageId;
}
/** {@inheritDoc} */
@Override public boolean freePage(int cacheId, long pageId) {
    assert started;

    // Push the page onto the lock-free free list; this always succeeds.
    releaseFreePage(pageId);

    return true;
}
/** {@inheritDoc} */
@Override public int pageSize() {
    // User-visible page size excludes the per-page header overhead.
    return sysPageSize - PAGE_OVERHEAD;
}

/** {@inheritDoc} */
@Override public int systemPageSize() {
    return sysPageSize;
}

/** {@inheritDoc} */
@Override public int realPageSize(int grpId) {
    // Same as pageSize() for every cache group in this implementation.
    return pageSize();
}
/**
 * @return Next segment index in round-robin order.
 */
private int nextRoundRobinIndex() {
    for (;;) {
        int cur = selector.get();

        int next = cur + 1;

        // Wrap around at the end of the segments array.
        if (next >= segments.length)
            next = 0;

        if (selector.compareAndSet(cur, next))
            return next;
    }
}
/** {@inheritDoc} */
@Override public long loadedPages() {
    // Without a page store, every loaded page is an allocated page.
    return allocatedPages.get();
}

/**
 * @return Total number of pages may be allocated for this instance.
 */
public int totalPages() {
    return totalPages;
}
/**
 * @return Total number of acquired pages across all segments.
 */
public long acquiredPages() {
    long sum = 0;

    for (Segment seg : segments) {
        // Read-lock each segment while sampling its counter.
        seg.readLock().lock();

        try {
            int acquired = seg.acquiredPages();

            assert acquired >= 0;

            sum += acquired;
        }
        finally {
            seg.readLock().unlock();
        }
    }

    return sum;
}
/**
 * Writes page ID to the page at the given absolute position.
 *
 * @param absPtr Absolute memory pointer to the page header.
 * @param pageId Page ID to write.
 */
private void writePageId(long absPtr, long pageId) {
    // Page ID lives at a fixed offset inside the page header.
    GridUnsafe.putLong(absPtr + PAGE_ID_OFFSET, pageId);
}
/**
 * @param pageIdx Page index.
 * @return Segment that owns the given page index.
 */
private Segment segment(int pageIdx) {
    return segments[segmentIndex(pageIdx)];
}
/**
 * @param pageIdx Page index to extract segment index from.
 * @return Segment index (the SEG_BITS bits above the in-segment index bits).
 */
private int segmentIndex(long pageIdx) {
    return (int)((pageIdx >> IDX_BITS) & SEG_MASK);
}
/**
 * Combines a segment index and an in-segment page index into a full page index.
 *
 * @param segIdx Segment index.
 * @param pageIdx Page index within the segment.
 * @return Full page index with the segment index in the upper SEG_BITS bits.
 */
private long fromSegmentIndex(int segIdx, long pageIdx) {
    // The original accumulator form shifted a zero by SEG_BITS first, which
    // is a no-op; compose the two bit fields directly instead.
    return (((long)(segIdx & SEG_MASK)) << IDX_BITS) | (pageIdx & IDX_MASK);
}
// *** PageSupport methods ***
/** {@inheritDoc} */
@Override public long acquirePage(int cacheId, long pageId) {
    // Delegate with a no-op statistics holder.
    return acquirePage(cacheId, pageId, IoStatisticsHolderNoOp.INSTANCE);
}
/** {@inheritDoc} */
@Override public long acquirePage(int cacheId, long pageId, IoStatisticsHolder statHolder) {
    assert started;

    int pageIdx = PageIdUtils.pageIndex(pageId);

    long absPtr = segment(pageIdx).acquirePage(pageIdx);

    // Record a logical read of the page's data area.
    statHolder.trackLogicalRead(absPtr + PAGE_OVERHEAD);

    return absPtr;
}
/** {@inheritDoc} */
@Override public void releasePage(int cacheId, long pageId, long page) {
    assert started;

    // Acquire counting is only maintained when tracking is on (test mode).
    if (trackAcquiredPages)
        segment(PageIdUtils.pageIndex(pageId)).onPageRelease();
}
/** {@inheritDoc} */
@Override public long readLock(int cacheId, long pageId, long page) {
    assert started;

    // Lock is granted only if the stored tag matches the expected page tag;
    // on success the returned pointer addresses the page's data area.
    if (rwLock.readLock(page + LOCK_OFFSET, PageIdUtils.tag(pageId)))
        return page + PAGE_OVERHEAD;

    return 0L;
}

/** {@inheritDoc} */
@Override public long readLockForce(int cacheId, long pageId, long page) {
    assert started;

    // NOTE(review): tag -1 appears to bypass the tag check ("force") —
    // confirm against OffheapReadWriteLock semantics.
    if (rwLock.readLock(page + LOCK_OFFSET, -1))
        return page + PAGE_OVERHEAD;

    return 0L;
}

/** {@inheritDoc} */
@Override public void readUnlock(int cacheId, long pageId, long page) {
    assert started;

    rwLock.readUnlock(page + LOCK_OFFSET);
}

/** {@inheritDoc} */
@Override public long writeLock(int cacheId, long pageId, long page) {
    assert started;

    if (rwLock.writeLock(page + LOCK_OFFSET, PageIdUtils.tag(pageId)))
        return page + PAGE_OVERHEAD;

    return 0L;
}

/** {@inheritDoc} */
@Override public long tryWriteLock(int cacheId, long pageId, long page) {
    assert started;

    if (rwLock.tryWriteLock(page + LOCK_OFFSET, PageIdUtils.tag(pageId)))
        return page + PAGE_OVERHEAD;

    return 0L;
}

/** {@inheritDoc} */
@Override public void writeUnlock(
    int cacheId,
    long pageId,
    long page,
    Boolean walPlc,
    boolean dirtyFlag
) {
    assert started;

    // Re-read the page ID from the page itself: the writer may have changed
    // it while the write lock was held; unlock with the current tag.
    long actualId = PageIO.getPageId(page + PAGE_OVERHEAD);

    rwLock.writeUnlock(page + LOCK_OFFSET, PageIdUtils.tag(actualId));
}
/** {@inheritDoc} */
@Override public boolean isDirty(int cacheId, long pageId, long page) {
    // Always false for page no store: there is no store to sync against.
    return false;
}
/**
 * @param pageIdx Page index.
 * @return Total page sequence number.
 */
public int pageSequenceNumber(int pageIdx) {
    return segment(pageIdx).sequenceNumber(pageIdx);
}
/**
 * @param seqNo Page sequence number.
 * @return Page index.
 */
public int pageIndex(int seqNo) {
    Segment[] segs = segments;

    // Binary search for the segment whose sequence range contains seqNo.
    int low = 0, high = segs.length - 1;

    while (low <= high) {
        int mid = (low + high) >>> 1;

        Segment seg = segs[mid];

        // containsPageBySequence is used as a three-way comparison.
        int cmp = seg.containsPageBySequence(seqNo);

        if (cmp < 0)
            high = mid - 1;
        else if (cmp > 0)
            low = mid + 1;
        else
            return seg.pageIndex(seqNo);
    }

    throw new IgniteException("Allocated page must always be present in one of the segments [seqNo=" + seqNo +
        ", segments=" + Arrays.toString(segs) + ']');
}
/**
 * Pushes a page onto the lock-free free-page list.
 *
 * @param pageId Page ID to release.
 */
private void releaseFreePage(long pageId) {
    int pageIdx = PageIdUtils.pageIndex(pageId);

    // Clear out flags and file ID.
    long relPtr = PageIdUtils.pageId(0, (byte)0, pageIdx);

    Segment seg = segment(pageIdx);

    long absPtr = seg.absolute(pageIdx);

    // Second, write clean relative pointer instead of page ID.
    writePageId(absPtr, relPtr);

    // Third, link the free page: store the old head in the page's first
    // word, then CAS the list head to this page.
    while (true) {
        long freePageRelPtrMasked = freePageListHead.get();

        long freePageRelPtr = freePageRelPtrMasked & RELATIVE_PTR_MASK;

        GridUnsafe.putLong(absPtr, freePageRelPtr);

        if (freePageListHead.compareAndSet(freePageRelPtrMasked, relPtr)) {
            allocatedPages.decrementAndGet();

            totalAllocatedPagesMetric.decrement();

            return;
        }
    }
}
/**
 * Pops a page off the lock-free free-page list.
 *
 * @return Relative pointer to a free page that was borrowed from the allocated pool,
 *      or {@code INVALID_REL_PTR} when the free list is empty.
 */
private long borrowFreePage() {
    while (true) {
        long freePageRelPtrMasked = freePageListHead.get();

        long freePageRelPtr = freePageRelPtrMasked & ADDRESS_MASK;

        if (freePageRelPtr != INVALID_REL_PTR) {
            int pageIdx = PageIdUtils.pageIndex(freePageRelPtr);

            Segment seg = segment(pageIdx);

            long freePageAbsPtr = seg.absolute(pageIdx);

            // Next free page is stored in the first word of the current head.
            long nextFreePageRelPtr = GridUnsafe.getLong(freePageAbsPtr) & ADDRESS_MASK;

            // Bump the counter kept in the head's high bits to avoid ABA.
            long cnt = ((freePageRelPtrMasked & COUNTER_MASK) + COUNTER_INC) & COUNTER_MASK;

            if (freePageListHead.compareAndSet(freePageRelPtrMasked, nextFreePageRelPtr | cnt)) {
                // Mark the page header as allocated.
                GridUnsafe.putLong(freePageAbsPtr, PAGE_MARKER);

                allocatedPages.incrementAndGet();

                totalAllocatedPagesMetric.increment();

                return freePageRelPtr;
            }
        }
        else
            return INVALID_REL_PTR;
    }
}
/**
 * Attempts to add a new memory segment.
 *
 * @param oldRef Old segments array. If this method observes another segments array, it will allocate a new
 *      segment (if possible). If the array has already been updated, it will return the last element in the
 *      new array.
 * @return Added segment, if successful, {@code null} if failed to add.
 */
private synchronized Segment addSegment(Segment[] oldRef) {
    // Only extend when the caller still sees the current array; otherwise
    // another thread already added a segment and we return the newest one.
    if (segments == oldRef) {
        DirectMemoryRegion region = directMemoryProvider.nextRegion();

        // No more memory is available.
        if (region == null)
            return null;

        if (oldRef != null) {
            if (log.isInfoEnabled())
                log.info("Allocated next memory segment [plcName=" + dataRegionCfg.getName() +
                    ", chunkSize=" + U.readableSize(region.size(), true) + ']');
        }

        // Copy-on-write extension of the segments array.
        Segment[] newRef = new Segment[oldRef == null ? 1 : oldRef.length + 1];

        if (oldRef != null)
            System.arraycopy(oldRef, 0, newRef, 0, oldRef.length);

        // The new segment's sequence numbers continue after the last segment.
        Segment lastSeg = oldRef == null ? null : oldRef[oldRef.length - 1];

        Segment allocated = new Segment(newRef.length - 1, region, lastSeg == null ? 0 : lastSeg.sumPages());

        allocated.init();

        newRef[newRef.length - 1] = allocated;

        segments = newRef;
    }

    // Only this synchronized method writes to segments, so it is safe to read twice.
    return segments[segments.length - 1];
}
/**
*
*/
private class Segment extends ReentrantReadWriteLock {
/** */
private static final long serialVersionUID = 0L;
/** Segment index. */
private int idx;
/** Direct memory chunk. */
private DirectMemoryRegion region;
/** Last allocated page index. */
private long lastAllocatedIdxPtr;
/** Base address for all pages. */
private long pagesBase;
/** */
private int pagesInPrevSegments;
/** */
private int maxPages;
/** */
private final AtomicInteger acquiredPages;
/**
 * @param idx Index.
 * @param region Memory region to use.
 * @param pagesInPrevSegments Number of pages in previously allocated segments.
 */
private Segment(int idx, DirectMemoryRegion region, int pagesInPrevSegments) {
    this.idx = idx;
    this.region = region;
    // Offset for this segment's page sequence numbers.
    this.pagesInPrevSegments = pagesInPrevSegments;

    acquiredPages = new AtomicInteger();
}
/**
* Initializes page memory segment.
*/
private void init() {
long base = region.address();
lastAllocatedIdxPtr = base;
base += 8;
// Align by 8 bytes.
pagesBase = (base + 7) & ~0x7;
GridUnsafe.putLong(lastAllocatedIdxPtr, 0);
long limit = region.address() + region.size();
maxPages = (int)((limit - pagesBase) / sysPageSize);
}
/**
* @param pageIdx Page index.
* @return Page absolute pointer.
*/
private long acquirePage(int pageIdx) {
long absPtr = absolute(pageIdx);
assert absPtr % 8 == 0 : absPtr;
if (trackAcquiredPages)
acquiredPages.incrementAndGet();
return absPtr;
}
/**
*/
private void onPageRelease() {
acquiredPages.decrementAndGet();
}
/**
* @param pageIdx Page index.
* @return Absolute pointer.
*/
private long absolute(int pageIdx) {
pageIdx &= IDX_MASK;
long off = ((long)pageIdx) * sysPageSize;
return pagesBase + off;
}
/**
* @param pageIdx Page index with encoded segment.
* @return Absolute page sequence number.
*/
private int sequenceNumber(int pageIdx) {
pageIdx &= IDX_MASK;
return pagesInPrevSegments + pageIdx;
}
/**
* @return Page sequence number upper bound.
*/
private int sumPages() {
return pagesInPrevSegments + maxPages;
}
/**
* @return Total number of currently acquired pages.
*/
private int acquiredPages() {
return acquiredPages.get();
}
/**
* @param tag Tag to initialize RW lock.
* @return Relative pointer of the allocated page.
* @throws GridOffHeapOutOfMemoryException If failed to allocate.
*/
private long allocateFreePage(int tag) throws GridOffHeapOutOfMemoryException {
long limit = region.address() + region.size();
while (true) {
long lastIdx = GridUnsafe.getLongVolatile(null, lastAllocatedIdxPtr);
// Check if we have enough space to allocate a page.
if (pagesBase + (lastIdx + 1) * sysPageSize > limit)
return INVALID_REL_PTR;
if (GridUnsafe.compareAndSwapLong(null, lastAllocatedIdxPtr, lastIdx, lastIdx + 1)) {
long absPtr = pagesBase + lastIdx * sysPageSize;
assert lastIdx <= PageIdUtils.MAX_PAGE_NUM : lastIdx;
long pageIdx = fromSegmentIndex(idx, lastIdx);
assert pageIdx != INVALID_REL_PTR;
writePageId(absPtr, pageIdx);
GridUnsafe.putLong(absPtr, PAGE_MARKER);
rwLock.init(absPtr + LOCK_OFFSET, tag);
allocatedPages.incrementAndGet();
totalAllocatedPagesMetric.increment();
return pageIdx;
}
}
}
/**
* @param seqNo Page sequence number.
* @return {@code 0} if this segment contains the page with the given sequence number,
* {@code -1} if one of the previous segments contains the page with the given sequence number,
* {@code 1} if one of the next segments contains the page with the given sequence number.
*/
public int containsPageBySequence(int seqNo) {
if (seqNo < pagesInPrevSegments)
return -1;
else if (seqNo < pagesInPrevSegments + maxPages)
return 0;
else
return 1;
}
/**
* @param seqNo Page sequence number.
* @return Page index
*/
public int pageIndex(int seqNo) {
return PageIdUtils.pageIndex(fromSegmentIndex(idx, seqNo - pagesInPrevSegments));
}
}
/** {@inheritDoc} */
@Override public int checkpointBufferPagesCount() {
    // This page memory implementation maintains no checkpoint buffer, hence always zero.
    return 0;
}
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.server.services.jbpm;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.jbpm.services.api.TaskNotFoundException;
import org.jbpm.services.api.UserTaskService;
import org.kie.api.task.model.Attachment;
import org.kie.api.task.model.Comment;
import org.kie.api.task.model.OrganizationalEntity;
import org.kie.api.task.model.Task;
import org.kie.internal.identity.IdentityProvider;
import org.kie.internal.task.api.TaskModelProvider;
import org.kie.internal.task.api.model.InternalPeopleAssignments;
import org.kie.internal.task.api.model.InternalTask;
import org.kie.server.api.KieServerConstants;
import org.kie.server.api.model.instance.TaskAttachment;
import org.kie.server.api.model.instance.TaskAttachmentList;
import org.kie.server.api.model.instance.TaskComment;
import org.kie.server.api.model.instance.TaskCommentList;
import org.kie.server.api.model.instance.TaskInstance;
import org.kie.server.services.api.KieServerRegistry;
import org.kie.server.services.impl.marshal.MarshallerHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Server-side facade over the jBPM {@link UserTaskService}. Resolves the effective user
 * for each operation (authenticated identity, or the query-parameter user when auth bypass
 * is configured), (un)marshals request/response payloads via {@link MarshallerHelper}, and
 * delegates task lifecycle, content, comment and attachment operations.
 */
public class UserTaskServiceBase {

    public static final Logger logger = LoggerFactory.getLogger(UserTaskServiceBase.class);

    private IdentityProvider identityProvider;
    private UserTaskService userTaskService;
    private MarshallerHelper marshallerHelper;

    /** When true, the user id passed by the caller is used instead of the authenticated identity. */
    private boolean bypassAuthUser = false;

    public UserTaskServiceBase(UserTaskService userTaskService, KieServerRegistry context) {
        this.userTaskService = userTaskService;
        this.identityProvider = context.getIdentityProvider();
        this.marshallerHelper = new MarshallerHelper(context);
        this.bypassAuthUser = Boolean.parseBoolean(context.getConfig().getConfigItemValue(KieServerConstants.CFG_BYPASS_AUTH_USER, "false"));
    }

    /**
     * Resolves the user the operation should run as.
     *
     * @param queryParamUser user id supplied by the caller; honored only when auth bypass is enabled
     * @return the effective user id
     */
    protected String getUser(String queryParamUser) {
        if (bypassAuthUser) {
            return queryParamUser;
        }
        return identityProvider.getName();
    }

    public void activate(String containerId, Number taskId, String userId) {
        userId = getUser(userId);
        logger.debug("About to activate task with id '{}' as user '{}'", taskId, userId);
        userTaskService.activate(taskId.longValue(), userId);
    }

    public void claim(String containerId, Number taskId, String userId) {
        userId = getUser(userId);
        logger.debug("About to claim task with id '{}' as user '{}'", taskId, userId);
        userTaskService.claim(taskId.longValue(), userId);
    }

    public void complete(String containerId, Number taskId, String userId, String payload, String marshallerType) {
        userId = getUser(userId);
        logger.debug("About to unmarshal task outcome parameters from payload: '{}'", payload);
        Map<String, Object> parameters = marshallerHelper.unmarshal(containerId, payload, marshallerType, Map.class);

        logger.debug("About to complete task with id '{}' as user '{}' with data {}", taskId, userId, parameters);
        userTaskService.complete(taskId.longValue(), userId, parameters);
    }

    public void completeAutoProgress(String containerId, Number taskId, String userId, String payload, String marshallerType) {
        userId = getUser(userId);
        logger.debug("About to unmarshal task outcome parameters from payload: '{}'", payload);
        Map<String, Object> parameters = marshallerHelper.unmarshal(containerId, payload, marshallerType, Map.class);

        logger.debug("About to complete task with id '{}' as user '{}' with data {}", taskId, userId, parameters);
        userTaskService.completeAutoProgress(taskId.longValue(), userId, parameters);
    }

    public void delegate(String containerId, Number taskId, String userId, String targetUserId) {
        userId = getUser(userId);
        logger.debug("About to delegate task with id '{}' as user '{}' to user '{}'", taskId, userId, targetUserId);
        userTaskService.delegate(taskId.longValue(), userId, targetUserId);
    }

    public void exit(String containerId, Number taskId, String userId) {
        userId = getUser(userId);
        logger.debug("About to exit task with id '{}' as user '{}'", taskId, userId);
        userTaskService.exit(taskId.longValue(), userId);
    }

    public void fail(String containerId, Number taskId, String userId, String payload, String marshallerType) {
        userId = getUser(userId);
        logger.debug("About to unmarshal task failure data from payload: '{}'", payload);
        Map<String, Object> parameters = marshallerHelper.unmarshal(containerId, payload, marshallerType, Map.class);

        logger.debug("About to fail task with id '{}' as user '{}' with data {}", taskId, userId, parameters);
        userTaskService.fail(taskId.longValue(), userId, parameters);
    }

    public void forward(String containerId, Number taskId, String userId, String targetUserId) {
        userId = getUser(userId);
        logger.debug("About to forward task with id '{}' as user '{}' to user '{}'", taskId, userId, targetUserId);
        userTaskService.forward(taskId.longValue(), userId, targetUserId);
    }

    public void release(String containerId, Number taskId, String userId) {
        userId = getUser(userId);
        logger.debug("About to release task with id '{}' as user '{}'", taskId, userId);
        userTaskService.release(taskId.longValue(), userId);
    }

    public void resume(String containerId, Number taskId, String userId) {
        userId = getUser(userId);
        logger.debug("About to resume task with id '{}' as user '{}'", taskId, userId);
        userTaskService.resume(taskId.longValue(), userId);
    }

    public void skip(String containerId, Number taskId, String userId) {
        userId = getUser(userId);
        logger.debug("About to skip task with id '{}' as user '{}'", taskId, userId);
        userTaskService.skip(taskId.longValue(), userId);
    }

    public void start(String containerId, Number taskId, String userId) {
        userId = getUser(userId);
        logger.debug("About to start task with id '{}' as user '{}'", taskId, userId);
        userTaskService.start(taskId.longValue(), userId);
    }

    public void stop(String containerId, Number taskId, String userId) {
        userId = getUser(userId);
        logger.debug("About to stop task with id '{}' as user '{}'", taskId, userId);
        userTaskService.stop(taskId.longValue(), userId);
    }

    public void suspend(String containerId, Number taskId, String userId) {
        userId = getUser(userId);
        logger.debug("About to suspend task with id '{}' as user '{}'", taskId, userId);
        userTaskService.suspend(taskId.longValue(), userId);
    }

    public void nominate(String containerId, Number taskId, String userId, List<String> potentialOwners) {
        userId = getUser(userId);
        // Fix: message previously had two placeholders for three arguments, dropping the owners list.
        logger.debug("About to nominate task with id '{}' as user '{}' to potential owners {}", taskId, userId, potentialOwners);
        List<OrganizationalEntity> potOwnerEntities = new ArrayList<OrganizationalEntity>();
        for (String potOwnerId : potentialOwners) {
            potOwnerEntities.add(TaskModelProvider.getFactory().newUser(potOwnerId));
        }
        userTaskService.nominate(taskId.longValue(), userId, potOwnerEntities);
    }

    public void setPriority(String containerId, Number taskId, String priorityPayload, String marshallingType) {
        logger.debug("About to unmarshal task priority from payload: '{}'", priorityPayload);
        Integer priority = marshallerHelper.unmarshal(containerId, priorityPayload, marshallingType, Integer.class);

        logger.debug("About to set priority for a task with id '{}' with value '{}'", taskId, priority);
        userTaskService.setPriority(taskId.longValue(), priority);
    }

    public void setExpirationDate(String containerId, Number taskId, String datePayload, String marshallingType) {
        // Fix: message previously said "task priority" (copy-paste from setPriority).
        logger.debug("About to unmarshal task expiration date from payload: '{}'", datePayload);
        Date expirationDate = marshallerHelper.unmarshal(containerId, datePayload, marshallingType, Date.class);

        logger.debug("About to set expiration date for a task with id '{}' with value '{}'", taskId, expirationDate);
        userTaskService.setExpirationDate(taskId.longValue(), expirationDate);
    }

    public void setSkipable(String containerId, Number taskId, String skipablePayload, String marshallingType) {
        logger.debug("About to unmarshal task skipable from payload: '{}'", skipablePayload);
        Boolean skipable = marshallerHelper.unmarshal(containerId, skipablePayload, marshallingType, Boolean.class);

        logger.debug("About to set skipable attribute for a task with id '{}' with value '{}'", taskId, skipable);
        userTaskService.setSkipable(taskId.longValue(), skipable);
    }

    public void setName(String containerId, Number taskId, String namePayload, String marshallingType) {
        logger.debug("About to unmarshal task name from payload: '{}'", namePayload);
        String name = marshallerHelper.unmarshal(containerId, namePayload, marshallingType, String.class);

        logger.debug("About to set name for a task with id '{}' with value '{}'", taskId, name);
        userTaskService.setName(taskId.longValue(), name);
    }

    public void setDescription(String containerId, Number taskId, String descriptionPayload, String marshallingType) {
        logger.debug("About to unmarshal task description from payload: '{}'", descriptionPayload);
        String description = marshallerHelper.unmarshal(containerId, descriptionPayload, marshallingType, String.class);

        // Fix: message previously said "set name" (copy-paste from setName).
        logger.debug("About to set description for a task with id '{}' with value '{}'", taskId, description);
        userTaskService.setDescription(taskId.longValue(), description);
    }

    public String saveContent(String containerId, Number taskId, String payload, String marshallingType) {
        logger.debug("About to unmarshal task content parameters from payload: '{}'", payload);
        Map<String, Object> parameters = marshallerHelper.unmarshal(containerId, payload, marshallingType, Map.class);

        logger.debug("About to set content of a task with id '{}' with data {}", taskId, parameters);
        Long contentId = userTaskService.saveContent(taskId.longValue(), parameters);

        String response = marshallerHelper.marshal(containerId, marshallingType, contentId);
        return response;
    }

    public String getTaskOutputContentByTaskId(String containerId, Number taskId, String marshallingType) {
        Map<String, Object> variables = userTaskService.getTaskOutputContentByTaskId(taskId.longValue());

        logger.debug("About to marshal task '{}' output variables {}", taskId, variables);
        String response = marshallerHelper.marshal(containerId, marshallingType, variables);
        return response;
    }

    public String getTaskInputContentByTaskId(String containerId, Number taskId, String marshallingType) {
        Map<String, Object> variables = userTaskService.getTaskInputContentByTaskId(taskId.longValue());

        logger.debug("About to marshal task '{}' input variables {}", taskId, variables);
        String response = marshallerHelper.marshal(containerId, marshallingType, variables);
        return response;
    }

    public void deleteContent(String containerId, Number taskId, Number contentId) {
        userTaskService.deleteContent(taskId.longValue(), contentId.longValue());
    }

    public String addComment(String containerId, Number taskId, String payload, String marshallingType) {
        logger.debug("About to unmarshal task comment from payload: '{}'", payload);
        TaskComment comment = marshallerHelper.unmarshal(containerId, payload, marshallingType, TaskComment.class);

        logger.debug("About to set comment on a task with id '{}' with data {}", taskId, comment);
        Long commentId = userTaskService.addComment(taskId.longValue(), comment.getText(), comment.getAddedBy(), comment.getAddedAt());

        String response = marshallerHelper.marshal(containerId, marshallingType, commentId);
        return response;
    }

    public void deleteComment(String containerId, Number taskId, Number commentId) {
        userTaskService.deleteComment(taskId.longValue(), commentId.longValue());
    }

    public String getCommentsByTaskId(String containerId, Number taskId, String marshallingType) {
        List<Comment> comments = userTaskService.getCommentsByTaskId(taskId.longValue());

        TaskComment[] taskComments = new TaskComment[comments.size()];
        int counter = 0;
        for (Comment comment : comments) {
            TaskComment taskComment = TaskComment.builder()
                    .id(comment.getId())
                    .text(comment.getText())
                    .addedBy(comment.getAddedBy().getId())
                    .addedAt(comment.getAddedAt())
                    .build();
            taskComments[counter] = taskComment;
            counter++;
        }
        TaskCommentList result = new TaskCommentList(taskComments);

        logger.debug("About to marshal task '{}' comments {}", taskId, result);
        String response = marshallerHelper.marshal(containerId, marshallingType, result);
        return response;
    }

    public String getCommentById(String containerId, Number taskId, Number commentId, String marshallingType) {
        Comment comment = userTaskService.getCommentById(taskId.longValue(), commentId.longValue());
        if (comment == null) {
            throw new IllegalStateException("No comment found with id " + commentId + " on task " + taskId);
        }
        TaskComment taskComment = TaskComment.builder()
                .id(comment.getId())
                .text(comment.getText())
                .addedBy(comment.getAddedBy().getId())
                .addedAt(comment.getAddedAt())
                .build();

        logger.debug("About to marshal task '{}' comment {}", taskId, taskComment);
        String response = marshallerHelper.marshal(containerId, marshallingType, taskComment);
        return response;
    }

    public String addAttachment(String containerId, Number taskId, String userId, String name, String attachmentPayload, String marshallingType) {
        logger.debug("About to unmarshal task attachment from payload: '{}'", attachmentPayload);
        Object attachment = marshallerHelper.unmarshal(containerId, attachmentPayload, marshallingType, Object.class);

        logger.debug("About to add attachment on a task with id '{}' with data {}", taskId, attachment);
        Long attachmentId = userTaskService.addAttachment(taskId.longValue(), getUser(userId), name, attachment);

        String response = marshallerHelper.marshal(containerId, marshallingType, attachmentId);
        return response;
    }

    public void deleteAttachment(String containerId, Number taskId, Number attachmentId) {
        userTaskService.deleteAttachment(taskId.longValue(), attachmentId.longValue());
    }

    public String getAttachmentById(String containerId, Number taskId, Number attachmentId, String marshallingType) {
        Attachment attachment = userTaskService.getAttachmentById(taskId.longValue(), attachmentId.longValue());
        // Consistent with getCommentById/getAttachmentContentById: fail with a clear message
        // instead of an NPE from the builder chain below.
        if (attachment == null) {
            throw new IllegalStateException("No attachment found for id " + attachmentId + " for task " + taskId);
        }
        TaskAttachment taskAttachment = TaskAttachment.builder()
                .id(attachment.getId())
                .name(attachment.getName())
                .addedBy(attachment.getAttachedBy().getId())
                .addedAt(attachment.getAttachedAt())
                .attachmentContentId(attachment.getAttachmentContentId())
                .contentType(attachment.getContentType())
                .size(attachment.getSize())
                .build();

        logger.debug("About to marshal task '{}' attachment {} with content {}", taskId, attachmentId, taskAttachment);
        String response = marshallerHelper.marshal(containerId, marshallingType, taskAttachment);
        return response;
    }

    public String getAttachmentContentById(String containerId, Number taskId, Number attachmentId, String marshallingType) {
        Object attachment = userTaskService.getAttachmentContentById(taskId.longValue(), attachmentId.longValue());
        if (attachment == null) {
            throw new IllegalStateException("No attachment found for id " + attachmentId + " for task " + taskId);
        }

        logger.debug("About to marshal task attachment with id '{}' {}", attachmentId, attachment);
        String response = marshallerHelper.marshal(containerId, marshallingType, attachment);
        return response;
    }

    public String getAttachmentsByTaskId(String containerId, Number taskId, String marshallingType) {
        List<Attachment> attachments = userTaskService.getAttachmentsByTaskId(taskId.longValue());

        // Renamed locals (were "taskComments"/"taskComment") to match what they actually hold.
        TaskAttachment[] taskAttachments = new TaskAttachment[attachments.size()];
        int counter = 0;
        for (Attachment attachment : attachments) {
            TaskAttachment taskAttachment = TaskAttachment.builder()
                    .id(attachment.getId())
                    .name(attachment.getName())
                    .addedBy(attachment.getAttachedBy().getId())
                    .addedAt(attachment.getAttachedAt())
                    .contentType(attachment.getContentType())
                    .attachmentContentId(attachment.getAttachmentContentId())
                    .size(attachment.getSize())
                    .build();
            taskAttachments[counter] = taskAttachment;
            counter++;
        }
        TaskAttachmentList result = new TaskAttachmentList(taskAttachments);

        logger.debug("About to marshal task '{}' attachments {}", taskId, result);
        String response = marshallerHelper.marshal(containerId, marshallingType, result);
        return response;
    }

    /**
     * Builds a marshalled {@code TaskInstance} view of a task, optionally including input
     * variables, output variables and people assignments.
     *
     * @throws TaskNotFoundException if no task exists for the given id
     */
    public String getTask(String containerId, Number taskId, boolean withInput, boolean withOutput, boolean withAssignments, String marshallingType) {
        Task task = userTaskService.getTask(taskId.longValue());
        if (task == null) {
            throw new TaskNotFoundException("No task found with id " + taskId);
        }

        TaskInstance.Builder builder = TaskInstance.builder();
        builder
                .id(task.getId())
                .name(task.getName())
                .subject(task.getSubject())
                .description(task.getDescription())
                .priority(task.getPriority())
                .taskType(task.getTaskType())
                .formName(((InternalTask) task).getFormName())
                .status(task.getTaskData().getStatus().name())
                .actualOwner(getOrgEntityIfNotNull(task.getTaskData().getActualOwner()))
                .createdBy(getOrgEntityIfNotNull(task.getTaskData().getCreatedBy()))
                .createdOn(task.getTaskData().getCreatedOn())
                .activationTime(task.getTaskData().getActivationTime())
                .expirationTime(task.getTaskData().getExpirationTime())
                .skippable(task.getTaskData().isSkipable())
                .workItemId(task.getTaskData().getWorkItemId())
                .processInstanceId(task.getTaskData().getProcessInstanceId())
                .parentId(task.getTaskData().getParentId())
                .processId(task.getTaskData().getProcessId())
                .containerId(task.getTaskData().getDeploymentId());

        // Parameters are primitive booleans; the original Boolean.TRUE.equals(...) boxing test
        // was equivalent but needlessly indirect.
        if (withInput) {
            Map<String, Object> variables = userTaskService.getTaskInputContentByTaskId(taskId.longValue());
            builder.inputData(variables);
        }

        if (withOutput) {
            Map<String, Object> variables = userTaskService.getTaskOutputContentByTaskId(taskId.longValue());
            builder.outputData(variables);
        }

        if (withAssignments) {
            builder.potentialOwners(orgEntityAsList(task.getPeopleAssignments().getPotentialOwners()));
            builder.excludedOwners(orgEntityAsList(((InternalPeopleAssignments) task.getPeopleAssignments()).getExcludedOwners()));
            builder.businessAdmins(orgEntityAsList(task.getPeopleAssignments().getBusinessAdministrators()));
        }

        TaskInstance taskInstance = builder.build();

        logger.debug("About to marshal task '{}' representation {}", taskId, taskInstance);
        String response = marshallerHelper.marshal(containerId, marshallingType, taskInstance);
        return response;
    }

    /** Returns the entity's id, or an empty string when the entity is null. */
    private String getOrgEntityIfNotNull(OrganizationalEntity organizationalEntity) {
        if (organizationalEntity == null) {
            return "";
        }
        return organizationalEntity.getId();
    }

    /** Maps organizational entities to their ids; a null input yields an empty list. */
    private List<String> orgEntityAsList(List<OrganizationalEntity> organizationalEntities) {
        ArrayList<String> entities = new ArrayList<String>();
        if (organizationalEntities == null) {
            return entities;
        }
        for (OrganizationalEntity entity : organizationalEntities) {
            entities.add(entity.getId());
        }
        return entities;
    }
}
| |
/*
* Copyright 2009 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kfs.module.endow.document.validation.impl;
import java.sql.Date;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.kuali.kfs.module.endow.EndowConstants;
import org.kuali.kfs.module.endow.EndowKeyConstants;
import org.kuali.kfs.module.endow.EndowParameterKeyConstants;
import org.kuali.kfs.module.endow.EndowPropertyConstants;
import org.kuali.kfs.module.endow.businessobject.CloseCode;
import org.kuali.kfs.module.endow.businessobject.KEMID;
import org.kuali.kfs.module.endow.businessobject.KemidAgreement;
import org.kuali.kfs.module.endow.businessobject.KemidAuthorizations;
import org.kuali.kfs.module.endow.businessobject.KemidBenefittingOrganization;
import org.kuali.kfs.module.endow.businessobject.KemidCombineDonorStatement;
import org.kuali.kfs.module.endow.businessobject.KemidDonorStatement;
import org.kuali.kfs.module.endow.businessobject.KemidFee;
import org.kuali.kfs.module.endow.businessobject.KemidGeneralLedgerAccount;
import org.kuali.kfs.module.endow.businessobject.KemidPayoutInstruction;
import org.kuali.kfs.module.endow.businessobject.KemidReportGroup;
import org.kuali.kfs.module.endow.businessobject.KemidSourceOfFunds;
import org.kuali.kfs.module.endow.businessobject.KemidSpecialInstruction;
import org.kuali.kfs.module.endow.businessobject.KemidUseCriteria;
import org.kuali.kfs.module.endow.document.service.KemidCurrentCashService;
import org.kuali.kfs.module.endow.document.service.KemidHoldingTaxLotOpenRecordsService;
import org.kuali.kfs.module.endow.document.service.ValidateDateBasedOnFrequencyCodeService;
import org.kuali.kfs.sys.KFSConstants;
import org.kuali.kfs.sys.KFSKeyConstants;
import org.kuali.kfs.sys.context.SpringContext;
import org.kuali.rice.core.api.datetime.DateTimeService;
import org.kuali.rice.core.api.util.type.KualiDecimal;
import org.kuali.rice.coreservice.framework.parameter.ParameterService;
import org.kuali.rice.kns.document.MaintenanceDocument;
import org.kuali.rice.kns.maintenance.rules.MaintenanceDocumentRuleBase;
import org.kuali.rice.krad.bo.PersistableBusinessObject;
import org.kuali.rice.krad.document.Document;
import org.kuali.rice.krad.service.BusinessObjectService;
import org.kuali.rice.krad.util.GlobalVariables;
import org.kuali.rice.krad.util.MessageMap;
import org.kuali.rice.krad.util.ObjectUtils;
/**
* This KEMIDRule class implements the Business rules associated with the KEMID.
*/
public class KEMIDRule extends MaintenanceDocumentRuleBase {
protected static Logger LOG = org.apache.log4j.Logger.getLogger(KEMIDRule.class);
private KEMID newKemid;
private KEMID oldKemid;
/**
 * Lazily caches the old and new KEMID business objects from the maintenance document.
 * References that are already populated are left untouched.
 *
 * @param document the KEMID maintenance document
 */
private void initializeAttributes(MaintenanceDocument document) {
    if (null == newKemid) {
        Object newBo = document.getNewMaintainableObject().getBusinessObject();
        newKemid = (KEMID) newBo;
    }

    if (null == oldKemid) {
        Object oldBo = document.getOldMaintainableObject().getBusinessObject();
        oldKemid = (KEMID) oldBo;
    }
}
/**
 * @see org.kuali.rice.kns.maintenance.rules.MaintenanceDocumentRuleBase#processCustomRouteDocumentBusinessRules(org.kuali.rice.kns.document.MaintenanceDocument)
 */
@Override
protected boolean processCustomRouteDocumentBusinessRules(MaintenanceDocument document) {
    boolean isValid = super.processCustomRouteDocumentBusinessRules(document);

    MessageMap errorMap = GlobalVariables.getMessageMap();
    isValid &= errorMap.hasNoErrors();

    if (isValid) {
        initializeAttributes(document);

        // Run every KEMID validation unconditionally (array initializers evaluate left to
        // right) so that each rule can report its own errors even after an earlier failure.
        boolean[] ruleResults = {
            checkCloseCode(),
            checkIfKemidHasCurrentCashOpenRecordsIfClosed(),
            checkIfKemidHasHoldingTaxLotOpenRecordsIfClosed(),
            validateIncomeRestrictionCode(document),
            validateAgreements(),
            validateUseTransactionRestrictionFromAgreement(),
            validateSourceOfFunds(),
            validateBenefittingOrgs(),
            validateGeneralLedgerAccounts(),
            validateKemidAuthorizations(),
            validatePayoutInstructions(),
            validateKemidDonorStatements(),
            validateFees()
        };

        for (boolean passed : ruleResults) {
            isValid &= passed;
        }
    }

    return isValid;
}
/**
 * Validates a collection line being added to the KEMID maintenance document. Dispatches on
 * the concrete business-object type; each branch delegates to a type-specific check.
 *
 * @see org.kuali.rice.kns.maintenance.rules.MaintenanceDocumentRuleBase#processCustomAddCollectionLineBusinessRules(org.kuali.rice.kns.document.MaintenanceDocument,
 *      java.lang.String, org.kuali.rice.krad.bo.PersistableBusinessObject)
 */
@Override
public boolean processCustomAddCollectionLineBusinessRules(MaintenanceDocument document, String collectionName, PersistableBusinessObject bo) {
    boolean success = true;
    success &= super.processCustomAddCollectionLineBusinessRules(document, collectionName, bo);
    MessageMap errorMap = GlobalVariables.getMessageMap();
    success &= errorMap.hasNoErrors();

    // Type-specific checks run only if the framework-level validation produced no errors.
    if (success) {
        initializeAttributes(document);

        // Donor-statement lines get an extra combine-with check keyed off the collection name;
        // note the same line is validated again below via the instanceof KemidDonorStatement branch.
        if (collectionName.equalsIgnoreCase(EndowPropertyConstants.KEMID_DONOR_STATEMENTS_TAB)) {
            KemidDonorStatement donorStatement = (KemidDonorStatement) bo;

            if (!validCombineWithDonorId(donorStatement)) {
                success &= false;
            }
        }

        if (bo instanceof KemidAgreement) {
            KemidAgreement agreement = (KemidAgreement) bo;
            success &= checkAgreement(agreement);
        }

        if (bo instanceof KemidSourceOfFunds) {
            KemidSourceOfFunds sourceOfFunds = (KemidSourceOfFunds) bo;
            success &= checkSourceOfFunds(sourceOfFunds);
        }

        if (bo instanceof KemidBenefittingOrganization) {
            KemidBenefittingOrganization benefittingOrg = (KemidBenefittingOrganization) bo;
            success &= checkBenefittingOrg(benefittingOrg);
        }

        if (bo instanceof KemidGeneralLedgerAccount) {
            KemidGeneralLedgerAccount generalLedgerAccount = (KemidGeneralLedgerAccount) bo;
            success &= checkGeneralLedgerAccount(generalLedgerAccount);

            // Validate the income/principal mix over the existing GL accounts PLUS the new line.
            List<KemidGeneralLedgerAccount> generalLedgerAccounts = new ArrayList<KemidGeneralLedgerAccount>();
            generalLedgerAccounts.addAll(newKemid.getKemidGeneralLedgerAccounts());
            generalLedgerAccounts.add(generalLedgerAccount);

            success &= validateIncomePrincipalGLAccounts(generalLedgerAccounts);
        }

        if (bo instanceof KemidAuthorizations) {
            KemidAuthorizations authorization = (KemidAuthorizations) bo;
            // -1 index: the line is being added, so it has no position in the collection yet.
            success &= checkAuthorization(authorization, -1);
        }

        if (bo instanceof KemidPayoutInstruction) {
            KemidPayoutInstruction payoutInstruction = (KemidPayoutInstruction) bo;
            success &= checkPayoutInstruction(payoutInstruction, -1);
        }

        if (bo instanceof KemidUseCriteria) {
            KemidUseCriteria useCriteria = (KemidUseCriteria) bo;
            success &= checkUseCriteria(useCriteria);
        }

        if (bo instanceof KemidSpecialInstruction) {
            KemidSpecialInstruction specialInstruction = (KemidSpecialInstruction) bo;
            success &= checkSpecialInstruction(specialInstruction);
        }

        if (bo instanceof KemidReportGroup) {
            KemidReportGroup reportGroup = (KemidReportGroup) bo;
            success &= checkReportGroup(reportGroup);
        }

        if (bo instanceof KemidDonorStatement) {
            KemidDonorStatement donorStatement = (KemidDonorStatement) bo;
            success &= checkDonorStatement(donorStatement);
        }

        if (bo instanceof KemidCombineDonorStatement) {
            KemidCombineDonorStatement combineDonorStatement = (KemidCombineDonorStatement) bo;
            success &= checkCombineDonorStatement(combineDonorStatement);
        }

        if (bo instanceof KemidFee) {
            KemidFee fee = (KemidFee) bo;
            success &= checkFee(fee);
            success &= validateFeePercentageTotal(fee, -1);
            success &= validatePercentageOfFeeChargedToPrincipal(fee, -1);
            success &= validateKemidFeeStartDate(fee, -1);
        }
    }

    return success;
}
/**
 * This method will validate if income restriction code is "P" (Permanently Restricted) Rule: Type_inc_restr_cd cannot be P
 * (Permanently Restricted).
 *
 * @param document
 * @return true if Income Restriction code is not "P" else return false
 */
private boolean validateIncomeRestrictionCode(Document document) {
    MaintenanceDocument maintenanceDocument = (MaintenanceDocument) document;
    KEMID kemid = (KEMID) maintenanceDocument.getNewMaintainableObject().getBusinessObject();

    boolean permanentlyRestricted = EndowConstants.TypeRestrictionPresetValueCodes.PERMANENT_TYPE_RESTRICTION_CODE
            .equalsIgnoreCase(kemid.getIncomeRestrictionCode());

    if (permanentlyRestricted) {
        GlobalVariables.getMessageMap().putError(EndowPropertyConstants.TYPE_INC_RESTR_CD, EndowKeyConstants.TypeRestrictionCodeConstants.ERROR_PERMANENT_INDICATOR_CANNOT_BE_USED_FOR_TYPE_RESTRICTION_CODE);
        return false;
    }

    return true;
}
/**
 * Checks that the referenced agreement type and agreement status both exist.
 * Success is determined by comparing the global error count before and after the checks.
 *
 * @param agreement the KEMID agreement line being validated
 * @return true if both references exist (no new errors were added), false otherwise
 */
private boolean checkAgreement(KemidAgreement agreement) {
    boolean success = true;
    int originalErrorCount = GlobalVariables.getMessageMap().getErrorCount();
    String errorPathPrefix = KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_AGREEMENTS_TAB + ".";

    // check that the agreement type exists
    if (StringUtils.isNotBlank(agreement.getAgreementTypeCode())) {
        agreement.refreshReferenceObject(EndowPropertyConstants.KEMID_AGRMNT_TYPE);

        if (ObjectUtils.isNull(agreement.getAgreementType())) {
            String label = this.getDataDictionaryService().getAttributeLabel(KemidAgreement.class, EndowPropertyConstants.KEMID_AGRMNT_TYP_CD);
            String message = label + "(" + agreement.getAgreementTypeCode() + ")";

            putFieldError(errorPathPrefix + EndowPropertyConstants.KEMID_AGRMNT_TYP_CD, KFSKeyConstants.ERROR_EXISTENCE, message);
        }
    }

    // check that the agreement status exists
    if (StringUtils.isNotBlank(agreement.getAgreementStatusCode())) {
        agreement.refreshReferenceObject(EndowPropertyConstants.KEMID_AGRMNT_STATUS);

        // Bug fix: this branch previously re-tested getAgreementType() (copy-paste from the
        // type check above), so a missing agreement status was never reported.
        if (ObjectUtils.isNull(agreement.getAgreementStatus())) {
            String label = this.getDataDictionaryService().getAttributeLabel(KemidAgreement.class, EndowPropertyConstants.KEMID_AGRMNT_STAT_CD);
            String message = label + "(" + agreement.getAgreementStatusCode() + ")";

            putFieldError(errorPathPrefix + EndowPropertyConstants.KEMID_AGRMNT_STAT_CD, KFSKeyConstants.ERROR_EXISTENCE, message);
        }
    }

    success &= GlobalVariables.getMessageMap().getErrorCount() == originalErrorCount;

    return success;
}
/**
 * Checks that the fund source and the opened-from KEMID exist as reference objects.
 *
 * @param sourceOfFunds the KEMID source-of-funds line being added
 * @return true if both references exist (no new errors were added), false otherwise
 */
private boolean checkSourceOfFunds(KemidSourceOfFunds sourceOfFunds) {
    boolean success = true;
    // Success is determined by whether this method added any new errors to the message map.
    int originalErrorCount = GlobalVariables.getMessageMap().getErrorCount();
    String errorPathPrefix = KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_SOURCE_OF_FUNDS_TAB + ".";
    // check that the fund source exists
    if (StringUtils.isNotBlank(sourceOfFunds.getFundSourceCode())) {
        sourceOfFunds.refreshReferenceObject(EndowPropertyConstants.KEMID_FND_SRC);
        if (ObjectUtils.isNull(sourceOfFunds.getFundSource())) {
            // BUG FIX: the attribute label was looked up on KemidAgreement.class (copy-paste);
            // the fund source code attribute belongs to KemidSourceOfFunds.
            String label = this.getDataDictionaryService().getAttributeLabel(KemidSourceOfFunds.class, EndowPropertyConstants.KEMID_FND_SRC_CD);
            String message = label + "(" + sourceOfFunds.getFundSourceCode() + ")";
            putFieldError(errorPathPrefix + EndowPropertyConstants.KEMID_FND_SRC_CD, KFSKeyConstants.ERROR_EXISTENCE, message);
        }
    }
    // check that the opened from kemid exists
    if (StringUtils.isNotBlank(sourceOfFunds.getOpenedFromKemid())) {
        sourceOfFunds.refreshReferenceObject(EndowPropertyConstants.KEMID_FND_SRC_OPND_FROM_KEMID_OBJ_REF);
        if (ObjectUtils.isNull(sourceOfFunds.getOpenedFromKemidObjRef())) {
            String label = this.getDataDictionaryService().getAttributeLabel(KemidSourceOfFunds.class, EndowPropertyConstants.KEMID_FND_SRC_OPND_FROM_KEMID);
            String message = label + "(" + sourceOfFunds.getOpenedFromKemid() + ")";
            putFieldError(errorPathPrefix + EndowPropertyConstants.KEMID_FND_SRC_OPND_FROM_KEMID, KFSKeyConstants.ERROR_EXISTENCE, message);
        }
    }
    success &= GlobalVariables.getMessageMap().getErrorCount() == originalErrorCount;
    return success;
}
/**
 * Checks that the benefitting organization and its chart exist as reference objects.
 * (Javadoc corrected: it previously described the pay-income-to-kemid check by copy-paste.)
 *
 * @param benefittingOrg the KEMID benefitting organization line being added
 * @return true if both references exist (no new errors were added), false otherwise
 */
private boolean checkBenefittingOrg(KemidBenefittingOrganization benefittingOrg) {
boolean success = true;
// Success is judged by comparing the message-map error count before and after the checks.
int originalErrorCount = GlobalVariables.getMessageMap().getErrorCount();
String errorPathPrefix = KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_BENEFITTING_ORGS_TAB + ".";
// check that the organization exists (refresh the reference first so the check sees current data)
if (StringUtils.isNotBlank(benefittingOrg.getBenefittingOrgCode())) {
benefittingOrg.refreshReferenceObject(EndowPropertyConstants.KEMID_BENE_ORG);
if (ObjectUtils.isNull(benefittingOrg.getOrganization())) {
String label = this.getDataDictionaryService().getAttributeLabel(KemidBenefittingOrganization.class, EndowPropertyConstants.KEMID_BENE_ORG_CD);
String message = label + "(" + benefittingOrg.getBenefittingOrgCode() + ")";
putFieldError(errorPathPrefix + EndowPropertyConstants.KEMID_BENE_ORG_CD, KFSKeyConstants.ERROR_EXISTENCE, message);
}
}
// check that the chart exists
if (StringUtils.isNotBlank(benefittingOrg.getBenefittingChartCode())) {
benefittingOrg.refreshReferenceObject(EndowPropertyConstants.KEMID_BENE_CHRT);
if (ObjectUtils.isNull(benefittingOrg.getChart())) {
String label = this.getDataDictionaryService().getAttributeLabel(KemidBenefittingOrganization.class, EndowPropertyConstants.KEMID_BENE_CHRT_CD);
String message = label + "(" + benefittingOrg.getBenefittingChartCode() + ")";
putFieldError(errorPathPrefix + EndowPropertyConstants.KEMID_BENE_CHRT_CD, KFSKeyConstants.ERROR_EXISTENCE, message);
}
}
success &= GlobalVariables.getMessageMap().getErrorCount() == originalErrorCount;
return success;
}
/**
 * Checks that the general ledger account's chart and account number exist as reference objects.
 *
 * @param generalLedgerAccount the KEMID general ledger account line being added
 * @return true if both references exist (no new errors were added), false otherwise
 */
private boolean checkGeneralLedgerAccount(KemidGeneralLedgerAccount generalLedgerAccount) {
    // Success is judged by whether this method adds any new errors to the message map.
    int errorCountBefore = GlobalVariables.getMessageMap().getErrorCount();
    String prefix = KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_GENERAL_LEDGER_ACCOUNTS_TAB + ".";
    // Verify the chart reference resolves.
    if (StringUtils.isNotBlank(generalLedgerAccount.getChartCode())) {
        generalLedgerAccount.refreshReferenceObject(EndowPropertyConstants.KEMID_GL_ACCOUNT_CHART);
        if (ObjectUtils.isNull(generalLedgerAccount.getChart())) {
            String chartLabel = this.getDataDictionaryService().getAttributeLabel(KemidGeneralLedgerAccount.class, EndowPropertyConstants.KEMID_GL_ACCOUNT_CHART_CD);
            putFieldError(prefix + EndowPropertyConstants.KEMID_GL_ACCOUNT_CHART_CD, KFSKeyConstants.ERROR_EXISTENCE, chartLabel + "(" + generalLedgerAccount.getChartCode() + ")");
        }
    }
    // Verify the account reference resolves.
    if (StringUtils.isNotBlank(generalLedgerAccount.getAccountNumber())) {
        generalLedgerAccount.refreshReferenceObject(EndowPropertyConstants.KEMID_GL_ACCOUNT);
        if (ObjectUtils.isNull(generalLedgerAccount.getAccount())) {
            String accountLabel = this.getDataDictionaryService().getAttributeLabel(KemidGeneralLedgerAccount.class, EndowPropertyConstants.KEMID_GL_ACCOUNT_NBR);
            putFieldError(prefix + EndowPropertyConstants.KEMID_GL_ACCOUNT_NBR, KFSKeyConstants.ERROR_EXISTENCE, accountLabel + "(" + generalLedgerAccount.getAccountNumber() + ")");
        }
    }
    return GlobalVariables.getMessageMap().getErrorCount() == errorCountBefore;
}
/**
 * Checks that the given authorization is valid; inactive authorizations are not validated.
 *
 * @param authorization the authorization to validate
 * @param index -1 when checking the add line, otherwise the position in the authorizations list
 * @return true if valid (or inactive), false otherwise
 */
private boolean checkAuthorization(KemidAuthorizations authorization, int index) {
    // Inactive entries are exempt from the namespace rule.
    if (!authorization.isActive()) {
        return true;
    }
    return validateRoleInKFSEndowNamespace(authorization, index);
}
/**
 * Checks that the pay-income-to KEMID exists and that the start date is prior to the end date.
 *
 * @param payoutInstruction the payout instruction to be validated
 * @param index -1 when checking the add line, otherwise the position of the payout instruction
 *        in the list of added payout instructions
 * @return true if valid (no new errors were added), false otherwise
 */
private boolean checkPayoutInstruction(KemidPayoutInstruction payoutInstruction, int index) {
    // Success is judged by whether this method adds any new errors to the message map.
    int errorCountBefore = GlobalVariables.getMessageMap().getErrorCount();
    // Error fields are addressed differently for the add line (-1) vs. an existing list entry.
    String errorPath = (index == -1)
            ? KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_PAY_INSTRUCTIONS_TAB + "."
            : EndowPropertyConstants.KEMID_PAY_INSTRUCTIONS_TAB + "[" + index + "]" + ".";
    // Verify the pay-income-to KEMID exists; a self-reference is exempt from the lookup.
    String payIncomeToKemid = payoutInstruction.getPayIncomeToKemid();
    if (StringUtils.isNotBlank(payIncomeToKemid) && !payIncomeToKemid.equalsIgnoreCase(newKemid.getKemid())) {
        payoutInstruction.refreshReferenceObject(EndowPropertyConstants.KEMID_PAY_INC_TO_KEMID_OBJ_REF);
        if (ObjectUtils.isNull(payoutInstruction.getPayIncomeToKemidObjRef())) {
            String label = this.getDataDictionaryService().getAttributeLabel(KemidPayoutInstruction.class, EndowPropertyConstants.KEMID_PAY_INC_TO_KEMID);
            putFieldError(errorPath + EndowPropertyConstants.KEMID_PAY_INC_TO_KEMID, KFSKeyConstants.ERROR_EXISTENCE, label + "(" + payIncomeToKemid + ")");
        }
    }
    // Verify the start date is not after the end date (both must be present to compare).
    Date startDate = payoutInstruction.getStartDate();
    Date endDate = payoutInstruction.getEndDate();
    if (startDate != null && endDate != null && startDate.after(endDate)) {
        putFieldError(errorPath + EndowPropertyConstants.KEMID_PAY_INC_START_DATE, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_PAYOUT_INSTRUCTION_START_DATE_SHOULD_BE_PRIOR_TO_END_DATE);
    }
    return GlobalVariables.getMessageMap().getErrorCount() == errorCountBefore;
}
/**
 * Checks that the use criteria exists as a reference object.
 *
 * @param useCriteria the KEMID use criteria line being added
 * @return true if it exists (no new errors were added), false otherwise
 */
private boolean checkUseCriteria(KemidUseCriteria useCriteria) {
    // Success is judged by whether this method adds any new errors to the message map.
    int errorCountBefore = GlobalVariables.getMessageMap().getErrorCount();
    String prefix = KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_USE_CRITERIA_TAB + ".";
    // Verify the use criteria reference resolves.
    if (StringUtils.isNotBlank(useCriteria.getUseCriteriaCode())) {
        useCriteria.refreshReferenceObject(EndowPropertyConstants.KEMID_USE_CRIT);
        if (ObjectUtils.isNull(useCriteria.getUseCriteria())) {
            String label = this.getDataDictionaryService().getAttributeLabel(KemidUseCriteria.class, EndowPropertyConstants.KEMID_USE_CRIT_CD);
            putFieldError(prefix + EndowPropertyConstants.KEMID_USE_CRIT_CD, KFSKeyConstants.ERROR_EXISTENCE, label + "(" + useCriteria.getUseCriteriaCode() + ")");
        }
    }
    return GlobalVariables.getMessageMap().getErrorCount() == errorCountBefore;
}
/**
 * Checks that the agreement special instruction exists as a reference object.
 *
 * @param specialInstruction the KEMID special instruction line being added
 * @return true if it exists (no new errors were added), false otherwise
 */
private boolean checkSpecialInstruction(KemidSpecialInstruction specialInstruction) {
    // Success is judged by whether this method adds any new errors to the message map.
    int errorCountBefore = GlobalVariables.getMessageMap().getErrorCount();
    String prefix = KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_SPECIAL_INSTRUCTIONS_TAB + ".";
    // Verify the special instruction reference resolves.
    if (StringUtils.isNotBlank(specialInstruction.getAgreementSpecialInstructionCode())) {
        specialInstruction.refreshReferenceObject(EndowPropertyConstants.KEMID_SPEC_INSTR);
        if (ObjectUtils.isNull(specialInstruction.getAgreementSpecialInstruction())) {
            String label = this.getDataDictionaryService().getAttributeLabel(KemidSpecialInstruction.class, EndowPropertyConstants.KEMID_SPEC_INSTR_CD);
            putFieldError(prefix + EndowPropertyConstants.KEMID_SPEC_INSTR_CD, KFSKeyConstants.ERROR_EXISTENCE, label + "(" + specialInstruction.getAgreementSpecialInstructionCode() + ")");
        }
    }
    return GlobalVariables.getMessageMap().getErrorCount() == errorCountBefore;
}
/**
 * Checks that the fee method and the charge-fee-to KEMID exist as reference objects.
 *
 * @param fee the KEMID fee line being added
 * @return true if both references exist (no new errors were added), false otherwise
 */
private boolean checkFee(KemidFee fee) {
    // Success is judged by whether this method adds any new errors to the message map.
    int errorCountBefore = GlobalVariables.getMessageMap().getErrorCount();
    String prefix = KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_FEES_TAB + ".";
    // Verify the fee method reference resolves.
    if (StringUtils.isNotBlank(fee.getFeeMethodCode())) {
        fee.refreshReferenceObject(EndowPropertyConstants.KEMID_FEE_MTHD);
        if (ObjectUtils.isNull(fee.getFeeMethod())) {
            String feeMethodLabel = this.getDataDictionaryService().getAttributeLabel(KemidFee.class, EndowPropertyConstants.KEMID_FEE_MTHD_CD);
            putFieldError(prefix + EndowPropertyConstants.KEMID_FEE_MTHD_CD, KFSKeyConstants.ERROR_EXISTENCE, feeMethodLabel + "(" + fee.getFeeMethodCode() + ")");
        }
    }
    // Verify the charge-fee-to KEMID reference resolves.
    if (StringUtils.isNotBlank(fee.getChargeFeeToKemid())) {
        fee.refreshReferenceObject(EndowPropertyConstants.KEMID_FEE_CHARGE_FEE_TO_KEMID_OBJ_REF);
        if (ObjectUtils.isNull(fee.getChargeFeeToKemidObjRef())) {
            String chargeToLabel = this.getDataDictionaryService().getAttributeLabel(KemidFee.class, EndowPropertyConstants.KEMID_FEE_CHARGE_FEE_TO_KEMID);
            putFieldError(prefix + EndowPropertyConstants.KEMID_FEE_CHARGE_FEE_TO_KEMID, KFSKeyConstants.ERROR_EXISTENCE, chargeToLabel + "(" + fee.getChargeFeeToKemid() + ")");
        }
    }
    return GlobalVariables.getMessageMap().getErrorCount() == errorCountBefore;
}
/**
 * Checks that the combine group exists as a reference object.
 *
 * @param reportGroup the KEMID report group line being added
 * @return true if it exists (no new errors were added), false otherwise
 */
private boolean checkReportGroup(KemidReportGroup reportGroup) {
    // Success is judged by whether this method adds any new errors to the message map.
    int errorCountBefore = GlobalVariables.getMessageMap().getErrorCount();
    String prefix = KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_REPORT_GROUP_TAB + ".";
    // Verify the combine group reference resolves.
    if (StringUtils.isNotBlank(reportGroup.getCombineGroupCode())) {
        reportGroup.refreshReferenceObject(EndowPropertyConstants.KEMID_REPORT_GRP);
        if (ObjectUtils.isNull(reportGroup.getCombineGroup())) {
            String label = this.getDataDictionaryService().getAttributeLabel(KemidReportGroup.class, EndowPropertyConstants.KEMID_REPORT_GRP_CD);
            putFieldError(prefix + EndowPropertyConstants.KEMID_REPORT_GRP_CD, KFSKeyConstants.ERROR_EXISTENCE, label + "(" + reportGroup.getCombineGroupCode() + ")");
        }
    }
    return GlobalVariables.getMessageMap().getErrorCount() == errorCountBefore;
}
/**
 * Checks that the donor, donor statement, combine-with donor and donor label exist
 * as reference objects.
 *
 * @param donorStatement the KEMID donor statement line being added
 * @return true if they exist (no new errors were added), false otherwise
 */
private boolean checkDonorStatement(KemidDonorStatement donorStatement) {
    boolean success = true;
    // Success is judged by whether this method adds any new errors to the message map.
    int originalErrorCount = GlobalVariables.getMessageMap().getErrorCount();
    String errorPathPrefix = KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_DONOR_STATEMENTS_TAB + ".";
    // check that the donor exists
    if (StringUtils.isNotBlank(donorStatement.getDonorId())) {
        donorStatement.refreshReferenceObject(EndowPropertyConstants.KEMID_DONOR_STATEMENT_DONOR);
        if (ObjectUtils.isNull(donorStatement.getDonor())) {
            String label = this.getDataDictionaryService().getAttributeLabel(KemidDonorStatement.class, EndowPropertyConstants.KEMID_DONOR_STATEMENT_ID);
            String message = label + "(" + donorStatement.getDonorId() + ")";
            putFieldError(errorPathPrefix + EndowPropertyConstants.KEMID_DONOR_STATEMENT_ID, KFSKeyConstants.ERROR_EXISTENCE, message);
        }
    }
    // check that the donor statement exists
    if (StringUtils.isNotBlank(donorStatement.getDonorStatementCode())) {
        donorStatement.refreshReferenceObject(EndowPropertyConstants.KEMID_DONOR_STATEMENT);
        // BUG FIX: previously this re-checked getDonor() (copy-paste), so a missing donor
        // STATEMENT was never reported; check the refreshed donor statement reference instead.
        if (ObjectUtils.isNull(donorStatement.getDonorStatement())) {
            String label = this.getDataDictionaryService().getAttributeLabel(KemidDonorStatement.class, EndowPropertyConstants.KEMID_DONOR_STATEMENT_CD);
            String message = label + "(" + donorStatement.getDonorStatementCode() + ")";
            putFieldError(errorPathPrefix + EndowPropertyConstants.KEMID_DONOR_STATEMENT_CD, KFSKeyConstants.ERROR_EXISTENCE, message);
        }
    }
    // check that the combine with donor exists
    if (StringUtils.isNotBlank(donorStatement.getCombineWithDonorId())) {
        donorStatement.refreshReferenceObject(EndowPropertyConstants.KEMID_DONOR_STATEMENT_COMBINE_WITH_DONOR);
        if (ObjectUtils.isNull(donorStatement.getCombineWithDonor())) {
            String label = this.getDataDictionaryService().getAttributeLabel(KemidDonorStatement.class, EndowPropertyConstants.KEMID_DONOR_STATEMENT_COMBINE_WITH_DONOR_ID);
            String message = label + "(" + donorStatement.getCombineWithDonorId() + ")";
            putFieldError(errorPathPrefix + EndowPropertyConstants.KEMID_DONOR_STATEMENT_COMBINE_WITH_DONOR_ID, KFSKeyConstants.ERROR_EXISTENCE, message);
        }
    }
    // check that the donor label exists
    if (StringUtils.isNotBlank(donorStatement.getDonorLabel())) {
        donorStatement.refreshReferenceObject(EndowPropertyConstants.KEMID_DONOR_STATEMENT_DONOR_LABEL_OBJ_REF);
        if (ObjectUtils.isNull(donorStatement.getDonorLabelObjRef())) {
            String label = this.getDataDictionaryService().getAttributeLabel(KemidDonorStatement.class, EndowPropertyConstants.KEMID_DONOR_STATEMENT_DONOR_LABEL);
            String message = label + "(" + donorStatement.getDonorLabel() + ")";
            putFieldError(errorPathPrefix + EndowPropertyConstants.KEMID_DONOR_STATEMENT_DONOR_LABEL, KFSKeyConstants.ERROR_EXISTENCE, message);
        }
    }
    success &= GlobalVariables.getMessageMap().getErrorCount() == originalErrorCount;
    return success;
}
/**
 * Checks that the combine-with KEMID exists as a reference object.
 *
 * @param combineDonorStatement the KEMID combine donor statement line being added
 * @return true if it exists (no new errors were added), false otherwise
 */
private boolean checkCombineDonorStatement(KemidCombineDonorStatement combineDonorStatement) {
    // Success is judged by whether this method adds any new errors to the message map.
    int errorCountBefore = GlobalVariables.getMessageMap().getErrorCount();
    String prefix = KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_COMBINE_DONOR_STATEMENTS_TAB + ".";
    // Verify the combine-with KEMID reference resolves.
    if (StringUtils.isNotBlank(combineDonorStatement.getCombineWithKemid())) {
        combineDonorStatement.refreshReferenceObject(EndowPropertyConstants.KEMID_COMBINE_DONOR_STATEMENT_WITH_KEMID_OBJ_REF);
        if (ObjectUtils.isNull(combineDonorStatement.getCombineWithKemidObjRef())) {
            String label = this.getDataDictionaryService().getAttributeLabel(KemidCombineDonorStatement.class, EndowPropertyConstants.KEMID_COMBINE_DONOR_STATEMENT_WITH_KEMID);
            putFieldError(prefix + EndowPropertyConstants.KEMID_COMBINE_DONOR_STATEMENT_WITH_KEMID, KFSKeyConstants.ERROR_EXISTENCE, label + "(" + combineDonorStatement.getCombineWithKemid() + ")");
        }
    }
    return GlobalVariables.getMessageMap().getErrorCount() == errorCountBefore;
}
/**
 * Checks that a valid Reason Closed (close code) is entered when the Closed indicator is "Yes".
 * The code is validated by a primary-key lookup against the CloseCode table.
 *
 * @return true if valid (or the KEMID is not closed), false otherwise
 */
private boolean checkCloseCode() {
    // Only closed KEMIDs require a valid close code.
    if (!newKemid.isClose()) {
        return true;
    }
    // IDIOM FIX: raw Map/HashMap replaced with typed collections; keys and values unchanged.
    Map<String, String> primaryKeys = new HashMap<String, String>();
    primaryKeys.put(EndowPropertyConstants.ENDOWCODEBASE_CODE, newKemid.getCloseCode());
    CloseCode reasonClosed = (CloseCode) SpringContext.getBean(BusinessObjectService.class).findByPrimaryKey(CloseCode.class, primaryKeys);
    if (ObjectUtils.isNull(reasonClosed)) {
        putFieldError(EndowPropertyConstants.KEMID_CLOSE_CODE, EndowKeyConstants.KEMIDConstants.ERROR_INVALID_CLOSED_CODE);
        return false;
    }
    return true;
}
/**
 * Checks that the KEMID has no open current-cash records when the Closed indicator is "Yes".
 *
 * @return true if it does not have open records (or is not closed), false otherwise
 */
private boolean checkIfKemidHasCurrentCashOpenRecordsIfClosed() {
    // The check only applies when the KEMID is being closed.
    if (!newKemid.isClose()) {
        return true;
    }
    if (SpringContext.getBean(KemidCurrentCashService.class).hasKemidOpenRecordsInCurrentCash(newKemid.getKemid())) {
        putFieldError(EndowPropertyConstants.KEMID_CLOSED_IND, EndowKeyConstants.KEMIDConstants.ERROR_HAS_OPEN_RECORDS_IN_CURRENT_CASH);
        return false;
    }
    return true;
}
/**
 * Checks that the KEMID has no open holding tax lot records when the Closed indicator is "Yes".
 *
 * @return true if it does not have open records (or is not closed), false otherwise
 */
private boolean checkIfKemidHasHoldingTaxLotOpenRecordsIfClosed() {
    // The check only applies when the KEMID is being closed.
    if (!newKemid.isClose()) {
        return true;
    }
    if (SpringContext.getBean(KemidHoldingTaxLotOpenRecordsService.class).hasKemidHoldingTaxLotOpenRecords(newKemid.getKemid())) {
        putFieldError(EndowPropertyConstants.KEMID_CLOSED_IND, EndowKeyConstants.KEMIDConstants.ERROR_HAS_OPEN_RECORDS_IN_HOLDING_TAX_LOT);
        return false;
    }
    return true;
}
/**
 * Checks that the KEMID has at least one ACTIVE Agreement set up.
 *
 * @return true if it has at least one active agreement, false otherwise
 */
private boolean validateAgreements() {
    // BUG FIX: an empty/null agreement list previously posted the same field error twice
    // (once for the empty list, once again because no active record was found).
    if (newKemid.getKemidAgreements() == null || newKemid.getKemidAgreements().isEmpty()) {
        putFieldError(EndowPropertyConstants.KEMID_AGREEMENTS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_MUST_HAVE_AT_LEAST_ONE_ACTIVE_AGREEMENT);
        return false;
    }
    // Make sure that the KEMID has at least one ACTIVE Agreement.
    for (KemidAgreement kemidAgreement : newKemid.getKemidAgreements()) {
        if (kemidAgreement.isActive()) {
            return true;
        }
    }
    putFieldError(EndowPropertyConstants.KEMID_AGREEMENTS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_MUST_HAVE_AT_LEAST_ONE_ACTIVE_AGREEMENT);
    return false;
}
/**
 * Checks that at most one Agreement has the Use Transaction Restriction From Agreement
 * indicator checked.
 *
 * @return true if valid, false otherwise
 */
private boolean validateUseTransactionRestrictionFromAgreement() {
    int markedCount = 0;
    for (KemidAgreement kemidAgreement : newKemid.getKemidAgreements()) {
        if (kemidAgreement.isUseTransactionRestrictionFromAgreement()) {
            markedCount++;
            // A second marked agreement violates the rule; report once and stop scanning.
            if (markedCount > 1) {
                putFieldError(EndowPropertyConstants.KEMID_AGREEMENTS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_ONLY_ONE_AGREEMENT_CAN_BR_MARKED_FOR_TRANSACTION_RESTR_USE);
                return false;
            }
        }
    }
    return true;
}
/**
 * Validates that the KEMID has at least one ACTIVE Source of Funds defined.
 *
 * @return true if valid, false otherwise
 */
private boolean validateSourceOfFunds() {
    // BUG FIX: an empty/null source-of-funds list previously posted the same field error twice
    // (once for the empty list, once again because no active record was found).
    if (newKemid.getKemidSourcesOfFunds() == null || newKemid.getKemidSourcesOfFunds().isEmpty()) {
        putFieldError(EndowPropertyConstants.KEMID_SOURCE_OF_FUNDS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_MUST_HAVE_AT_LEAST_ONE_ACTIVE_SOURCE_OF_FUNDS);
        return false;
    }
    // Make sure that the KEMID has at least one ACTIVE Source of Funds.
    for (KemidSourceOfFunds kemidSourceOfFunds : newKemid.getKemidSourcesOfFunds()) {
        if (kemidSourceOfFunds.isActive()) {
            return true;
        }
    }
    putFieldError(EndowPropertyConstants.KEMID_SOURCE_OF_FUNDS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_MUST_HAVE_AT_LEAST_ONE_ACTIVE_SOURCE_OF_FUNDS);
    return false;
}
/**
 * Validates that the KEMID has at least one ACTIVE Benefitting Org defined and that the
 * benefit percentages of all active records total exactly 1 (100%).
 *
 * @return true if valid, false otherwise
 */
private boolean validateBenefittingOrgs() {
    List<KemidBenefittingOrganization> benefittingOrgs = newKemid.getKemidBenefittingOrganizations();
    if (benefittingOrgs == null || benefittingOrgs.size() == 0) {
        putFieldError(EndowPropertyConstants.KEMID_BENEFITTING_ORGS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_MUST_HAVE_AT_LEAST_ONE_ACTIVE_BENEFITTING_ORG);
        return false;
    }
    // Single pass: track whether any active record exists and accumulate its benefit percent.
    boolean foundActive = false;
    KualiDecimal activePercentTotal = KualiDecimal.ZERO;
    for (KemidBenefittingOrganization benefittingOrganization : benefittingOrgs) {
        if (benefittingOrganization.isActive()) {
            foundActive = true;
            activePercentTotal = activePercentTotal.add(benefittingOrganization.getBenefitPrecent());
        }
    }
    if (!foundActive) {
        putFieldError(EndowPropertyConstants.KEMID_BENEFITTING_ORGS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_MUST_HAVE_AT_LEAST_ONE_ACTIVE_BENEFITTING_ORG);
        return false;
    }
    // The total of BENE_PCT for all active records must be exactly 1 (100%).
    if (activePercentTotal.compareTo(new KualiDecimal(1)) != 0) {
        putFieldError(EndowPropertyConstants.KEMID_BENEFITTING_ORGS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_ACTIVE_BENE_ORGS_PCT_SUM_MUST_BE_ONE);
        return false;
    }
    return true;
}
/**
 * Validates the GeneralLedgerAccounts tab. Per KEMID spec section 6.5.1.1: 1. One and ONLY ONE
 * ACTIVE END_KEMID_GL_LNK_T record with IP_IND_CD equal to I must exist for each END_KEMID_T
 * record. 2. One and ONLY ONE ACTIVE record with IP_IND_CD equal to P must exist when the
 * associated TYP_PRIN_RESTR_CD is NOT equal to NA. 3. When TYP_PRIN_RESTR_CD is NA, each
 * END_KEMID_T record can have either zero or one INACTIVE record with IP_IND_CD equal to P.
 * The detailed income/principal checks are delegated to validateIncomePrincipalGLAccounts.
 *
 * @return true if valid, false otherwise
 */
private boolean validateGeneralLedgerAccounts() {
    // CLEANUP: removed four unused local flags (hasIncomeGL etc.) that duplicated state
    // tracked inside validateIncomePrincipalGLAccounts.
    if (newKemid.getKemidGeneralLedgerAccounts() == null || newKemid.getKemidGeneralLedgerAccounts().isEmpty()) {
        putFieldError(EndowPropertyConstants.KEMID_GENERAL_LEDGER_ACCOUNTS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_MUST_HAVE_AT_LEAST_ONE_INCOME_GL_ACC);
        return false;
    }
    return validateIncomePrincipalGLAccounts(newKemid.getKemidGeneralLedgerAccounts());
}
/**
 * Validates that there is no more than one entry with IP indicator I or P, that there is an
 * active income GL account, that no active principal GL account exists when the principal
 * restriction code is NA, and that an active principal GL account exists when the principal
 * restriction code is not NA.
 *
 * @param generalLedgerAccounts the KEMID's general ledger account links
 * @return true if valid, false otherwise
 */
private boolean validateIncomePrincipalGLAccounts(List<KemidGeneralLedgerAccount> generalLedgerAccounts) {
    // Nothing to validate for an empty list (the caller reports that case separately).
    if (generalLedgerAccounts == null || generalLedgerAccounts.isEmpty()) {
        return true;
    }
    boolean hasIncomeGL = false;
    boolean hasPrincipalGL = false;
    boolean hasActiveIncomeGL = false;
    boolean hasActivePrincipalGL = false;
    for (KemidGeneralLedgerAccount kemidGeneralLedgerAccount : generalLedgerAccounts) {
        String ipIndicator = kemidGeneralLedgerAccount.getIncomePrincipalIndicatorCode();
        if (ipIndicator.equalsIgnoreCase(EndowConstants.IncomePrincipalIndicator.INCOME)) {
            // Only one record with IP_IND_CD = I may exist.
            if (hasIncomeGL) {
                putFieldError(EndowPropertyConstants.KEMID_GENERAL_LEDGER_ACCOUNTS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_CAN_ONLY_HAVE_ONE_INCOME_GL_ACC);
                return false;
            }
            hasIncomeGL = true;
            hasActiveIncomeGL = kemidGeneralLedgerAccount.isActive();
        }
        else if (ipIndicator.equalsIgnoreCase(EndowConstants.IncomePrincipalIndicator.PRINCIPAL)) {
            // Only one record with IP_IND_CD = P may exist.
            if (hasPrincipalGL) {
                putFieldError(EndowPropertyConstants.KEMID_GENERAL_LEDGER_ACCOUNTS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_CAN_ONLY_HAVE_ONE_PRINCIPAL_GL_ACC);
                return false;
            }
            hasPrincipalGL = true;
            hasActivePrincipalGL = kemidGeneralLedgerAccount.isActive();
        }
        // BUG FIX: removed the unconditional "hasActivePrincipalGL = ...isActive()" that ran
        // for EVERY record (including income records) and clobbered the principal-active flag,
        // making the NA / non-NA principal restriction checks below report incorrect results.
    }
    // An active income GL account is mandatory.
    if (!hasIncomeGL || !hasActiveIncomeGL) {
        putFieldError(EndowPropertyConstants.KEMID_GENERAL_LEDGER_ACCOUNTS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_MUST_HAVE_AT_LEAST_ONE_INCOME_GL_ACC);
        return false;
    }
    String principalRestrictionCode = newKemid.getPrincipalRestrictionCode();
    if (principalRestrictionCode != null) {
        boolean isNotApplicable = principalRestrictionCode.equalsIgnoreCase(EndowConstants.TypeRestrictionPresetValueCodes.NOT_APPLICABLE_TYPE_RESTRICTION_CODE);
        // NA principal restriction forbids an active principal GL account.
        if (isNotApplicable && hasActivePrincipalGL) {
            putFieldError(EndowPropertyConstants.KEMID_GENERAL_LEDGER_ACCOUNTS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_CAN_NOT_HAVE_A_PRINCIPAL_GL_ACC_IF_PRINCIPAL_RESTR_CD_IS_NA);
            return false;
        }
        // A non-NA principal restriction requires an active principal GL account.
        if (!isNotApplicable && !hasActivePrincipalGL) {
            putFieldError(EndowPropertyConstants.KEMID_GENERAL_LEDGER_ACCOUNTS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_MUST_HAVE_AT_LEAST_ONE_ACTIVE_PRINCIPAL_GL_ACC_IF_PRINCIPAL_CD_NOT_NA);
            return false;
        }
    }
    return true;
}
/**
 * Validates the KEMID Authorizations: when the ROLE_REQUIRED_IND system parameter is "Y" the
 * KEMID must have at least one active authorization, and every authorization must be valid.
 *
 * @return true if valid, false otherwise
 */
private boolean validateKemidAuthorizations() {
    boolean isValid = true;
    List<KemidAuthorizations> authorizations = newKemid.getKemidAuthorizations();
    // If sys param END_KEMID_ROLE_T_RECORD_REQUIRED_IND is yes, the KEMID must have at least
    // one active entry in the authorizations tab.
    String authorizationReqParamVal = SpringContext.getBean(ParameterService.class).getParameterValueAsString(KEMID.class, EndowParameterKeyConstants.ROLE_REQUIRED_IND);
    if (KFSConstants.ParameterValues.YES.equalsIgnoreCase(authorizationReqParamVal)) {
        if (authorizations == null || authorizations.isEmpty()) {
            putFieldError(EndowPropertyConstants.KEMID_AUTHORIZATIONS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_MUST_HAVE_AT_LEAST_ONE_ACTIVE_AUTHORIZATION);
            return false;
        }
        isValid &= validateKemidAuthorizationsHaveOneActiveEntry();
    }
    // BUG FIX: guard against a null list — previously this loop threw a NullPointerException
    // whenever the required-indicator parameter was not "Y" and no authorizations existed.
    if (authorizations != null) {
        for (int i = 0; i < authorizations.size(); i++) {
            // Redundant cast removed: the list is already typed as KemidAuthorizations.
            isValid &= checkAuthorization(authorizations.get(i), i);
        }
    }
    return isValid;
}
/**
 * Checks if the Authorizations tab has at least one active entry.
 *
 * @return true if at least one active authorization exists, false otherwise
 */
private boolean validateKemidAuthorizationsHaveOneActiveEntry() {
    for (KemidAuthorizations authorization : newKemid.getKemidAuthorizations()) {
        // The first active entry is enough to satisfy the rule.
        if (authorization.isActive()) {
            return true;
        }
    }
    return false;
}
/**
 * Validates that the authorization's role namespace is KFS-ENDOW.
 *
 * @param authorization the authorization whose role is checked
 * @param index -1 when checking the add line, otherwise the position in the authorizations list
 * @return true if valid, false otherwise
 */
private boolean validateRoleInKFSEndowNamespace(KemidAuthorizations authorization, int index) {
    if (authorization.getRole().getNamespaceCode().equalsIgnoreCase(EndowConstants.KFS_ENDOW_ROLE_NAMESPACE)) {
        return true;
    }
    // Error fields are addressed differently for the add line (-1) vs. an existing list entry.
    String fieldPath = (index == -1)
            ? KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_AUTHORIZATIONS_TAB + "." + EndowPropertyConstants.KEMID_AUTHORIZATIONS_ROLE_ID
            : EndowPropertyConstants.KEMID_AUTHORIZATIONS_TAB + "[" + index + "]" + "." + EndowPropertyConstants.KEMID_AUTHORIZATIONS_ROLE_ID;
    putFieldError(fieldPath, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_AUTHORIZATION_ROLE_NAMESPACE_ENDOW);
    return false;
}
/**
 * Validates the KEMID Donor Statements: the combine-with donor must differ from the donor, and
 * a termination reason must be entered whenever a termination date is present.
 *
 * @return true if valid, false otherwise
 */
private boolean validateKemidDonorStatements() {
    boolean isValid = true;
    String errorPathPrefix = KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_DONOR_STATEMENTS_TAB + ".";
    for (KemidDonorStatement donorStatement : newKemid.getKemidDonorStatements()) {
        isValid &= validCombineWithDonorId(donorStatement);
        // A termination date without a termination reason is invalid.
        boolean missingTerminationReason = donorStatement.getTerminationDate() != null && StringUtils.isEmpty(donorStatement.getTerminationReason());
        if (missingTerminationReason) {
            putFieldError(errorPathPrefix + EndowPropertyConstants.KEMID_DONOR_STATEMENT_TERMINATION_REASON, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_DONOR_STMNT_TERM_RSN_CANT_BE_EMPTY_IS_TERM_DATE_ENTERED);
            isValid = false;
        }
    }
    return isValid;
}
/**
 * Checks that the combine-with donor is different from the donor on the same statement.
 *
 * @param donorStatement the donor statement to validate
 * @return true if valid, false otherwise
 */
private boolean validCombineWithDonorId(KemidDonorStatement donorStatement) {
    String combineWithDonorId = donorStatement.getCombineWithDonorId();
    // A blank combine-with donor is always acceptable.
    if (StringUtils.isEmpty(combineWithDonorId)) {
        return true;
    }
    if (combineWithDonorId.equalsIgnoreCase(donorStatement.getDonorId())) {
        String errorPathPrefix = KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_DONOR_STATEMENTS_TAB + ".";
        putFieldError(errorPathPrefix + EndowPropertyConstants.KEMID_DONOR_STATEMENT_COMBINE_WITH_DONOR_ID, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_DONOR_STMNT_COMBINE_WITH_DONR_MUST_BE_DIFF_FROM_DONOR);
        return false;
    }
    return true;
}
/**
 * Validates that the KEMID has at least one payout instruction defined and,
 * when it does, checks each instruction and the overall payout percent total.
 *
 * @return true if valid, false otherwise
 */
private boolean validatePayoutInstructions() {
    boolean valid = true;
    if (newKemid.getKemidPayoutInstructions() == null || newKemid.getKemidPayoutInstructions().size() == 0) {
        putFieldError(EndowPropertyConstants.KEMID_PAY_INSTRUCTIONS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_MUST_HAVE_AT_LEAST_ONE_PAYOUT_INSTRUCTION);
        valid = false;
    }
    if (valid) {
        int index = 0;
        for (KemidPayoutInstruction payoutInstruction : newKemid.getKemidPayoutInstructions()) {
            checkPayoutInstruction(payoutInstruction, index);
            index++;
        }
        // Bug fix: the percent-total result used to be discarded, so this
        // method reported success even though a percent-total field error
        // had just been recorded.  Propagate it into the return value.
        valid = validatePayoutInstructionsPercentTotal();
    }
    return valid;
}
/**
 * Validates that the payout percentages of all non-terminated payout records
 * (end date null or strictly after the current date) add up to exactly
 * 1 (i.e. 100%).
 *
 * @return true if valid, false otherwise
 */
private boolean validatePayoutInstructionsPercentTotal() {
    boolean isValid = true;
    DateTimeService dateTimeService = SpringContext.getBean(DateTimeService.class);
    Date currentDate = dateTimeService.getCurrentSqlDate();
    KualiDecimal total = KualiDecimal.ZERO;
    // Only records still in effect contribute to the total.
    for (KemidPayoutInstruction payoutInstruction : newKemid.getKemidPayoutInstructions()) {
        if (payoutInstruction.getEndDate() == null || payoutInstruction.getEndDate().after(currentDate)) {
            total = total.add(payoutInstruction.getPercentOfIncomeToPayToKemid());
        }
    }
    // The active percentages must sum to exactly one; both under- and
    // over-allocation are errors.
    KualiDecimal one = new KualiDecimal(1);
    if (one.compareTo(total) != 0) {
        putFieldError(EndowPropertyConstants.KEMID_PAY_INSTRUCTIONS_TAB, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_TOTAL_OFF_ALL_PAYOUT_RECORDS_MUST_BE_ONE);
        isValid = false;
    }
    return isValid;
}
/**
 * Validates the KEMID fees: for every fee, the charged-percentage total, the
 * principal-charge percentage, and the fee start date are each checked.
 * A missing or empty fee list is valid.
 *
 * @return true if all fees pass every check, false otherwise
 */
private boolean validateFees() {
    List<KemidFee> fees = newKemid.getKemidFees();
    if (fees == null || fees.isEmpty()) {
        return true;
    }
    boolean valid = true;
    // All three checks run for every fee so each problem is reported.
    for (int index = 0; index < fees.size(); index++) {
        KemidFee fee = fees.get(index);
        valid &= validateFeePercentageTotal(fee, index);
        valid &= validatePercentageOfFeeChargedToPrincipal(fee, index);
        valid &= validateKemidFeeStartDate(fee, index);
    }
    return valid;
}
/**
 * Validates that the percentage of fee charged to income plus the percentage
 * of fee charged to principal does not exceed 1 (100%).
 *
 * @param fee the KEMID fee to check
 * @param index position of the fee in the collection, or -1 for the non-indexed case
 * @return true if valid, false otherwise
 */
private boolean validateFeePercentageTotal(KemidFee fee, int index) {
    KualiDecimal combined = fee.getPercentOfFeeChargedToIncome().add(fee.getPercentOfFeeChargedToPrincipal());
    if (!combined.isGreaterThan(new KualiDecimal(1))) {
        return true;
    }
    String errorPath = (index != -1)
            ? EndowPropertyConstants.KEMID_FEES_TAB + "[" + index + "]"
            : EndowPropertyConstants.KEMID_FEES_TAB;
    putFieldError(errorPath, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_FEE_PCT_CHRG_FEE_SUM_MUST_NOT_BE_GREATER_THAN_ONE);
    return false;
}
/**
 * Validates that the percentage of fee charged to principal does not exceed
 * zero when the restriction code is NA (Not Applicable).
 *
 * @param fee the KEMID fee to check
 * @param index position of the fee in the collection, or a negative value for the add line
 * @return true if valid, false otherwise
 */
private boolean validatePercentageOfFeeChargedToPrincipal(KemidFee fee, int index) {
    boolean valid = true;
    // NOTE(review): the guard null-checks the KEMID type but then compares the
    // principal restriction code against the NA type-restriction constant —
    // confirm this pairing is intended (the javadoc above mentions the type
    // restriction code).
    if (ObjectUtils.isNotNull(newKemid.getType()) && EndowConstants.TypeRestrictionPresetValueCodes.NOT_APPLICABLE_TYPE_RESTRICTION_CODE.equalsIgnoreCase(newKemid.getPrincipalRestrictionCode())) {
        if (fee.getPercentOfFeeChargedToPrincipal().isGreaterThan(KualiDecimal.ZERO)) {
            valid = false;
            // index >= 0 targets an existing collection line; otherwise the
            // error attaches to the bare field path.
            if (index >= 0) {
                putFieldError(EndowPropertyConstants.KEMID_FEES_TAB + "[" + index + "]" + "." + EndowPropertyConstants.KEMID_FEE_PERCENT_OF_FEE_CHARGED_TO_PRINCIPAL, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_FEE_PCT_CHRG_TO_PRIN_CANNOT_EXCEED_ZERO_IF_TYPE_RESTR_CD_NA);
            }
            else {
                putFieldError(EndowPropertyConstants.KEMID_FEE_PERCENT_OF_FEE_CHARGED_TO_PRINCIPAL, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_FEE_PCT_CHRG_TO_PRIN_CANNOT_EXCEED_ZERO_IF_TYPE_RESTR_CD_NA);
            }
        }
    }
    return valid;
}
/**
 * Validates that the KEMID fee start date is an acceptable value for the fee
 * method's fee frequency code.
 *
 * @param fee the KEMID fee to check
 * @param index position of the fee in the collection, or -1 for the add line
 * @return true if valid, false otherwise
 */
private boolean validateKemidFeeStartDate(KemidFee fee, int index) {
    boolean isValid = true;
    ValidateDateBasedOnFrequencyCodeService validateService = SpringContext.getBean(ValidateDateBasedOnFrequencyCodeService.class);
    Date feeStartDate = fee.getFeeStartDate();
    // Refresh the fee-method reference so the frequency code read below is current.
    fee.refreshReferenceObject(EndowPropertyConstants.FEE_METHOD);
    String frequencyCode = fee.getFeeMethod() != null ? fee.getFeeMethod().getFeeFrequencyCode() : null;
    // A missing start date or frequency code is not flagged here — this rule
    // only checks the date/frequency combination when both are present.
    if (feeStartDate != null && frequencyCode != null) {
        isValid = validateService.validateDateBasedOnFrequencyCode(feeStartDate, frequencyCode);
    }
    if (!isValid) {
        // -1 means the maintenance add line; otherwise the indexed collection line.
        if (index == -1) {
            putFieldError(KFSConstants.MAINTENANCE_ADD_PREFIX + EndowPropertyConstants.KEMID_FEES_TAB + "." + EndowPropertyConstants.KEMID_FEE_START_DATE, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_FEE_START_DATE_NOT_VALID);
        }
        else {
            putFieldError(EndowPropertyConstants.KEMID_FEES_TAB + "[" + index + "]" + "." + EndowPropertyConstants.KEMID_FEE_START_DATE, EndowKeyConstants.KEMIDConstants.ERROR_KEMID_FEE_START_DATE_NOT_VALID);
        }
    }
    return isValid;
}
}
| |
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.client.change;
import com.google.gerrit.client.Gerrit;
import com.google.gerrit.client.account.AccountApi;
import com.google.gerrit.client.changes.ChangeApi;
import com.google.gerrit.client.changes.ChangeInfo;
import com.google.gerrit.client.changes.ChangeInfo.EditInfo;
import com.google.gerrit.client.changes.ChangeInfo.FetchInfo;
import com.google.gerrit.client.changes.ChangeList;
import com.google.gerrit.client.rpc.NativeMap;
import com.google.gerrit.client.rpc.Natives;
import com.google.gerrit.client.rpc.RestApi;
import com.google.gerrit.extensions.client.ListChangesOption;
import com.google.gerrit.reviewdb.client.AccountGeneralPreferences;
import com.google.gerrit.reviewdb.client.AccountGeneralPreferences.DownloadScheme;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.Anchor;
import com.google.gwt.user.client.ui.FlexTable;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.InlineLabel;
import com.google.gwt.user.client.ui.ListBox;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwt.user.client.ui.Widget;
import com.google.gwtexpui.clippy.client.CopyableLabel;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.List;
/**
 * Widget on the change screen listing the download (fetch) commands for one
 * revision of a change, grouped by download scheme (selected via a drop-down
 * list), plus links to download the patch as a file or the contents as an
 * archive.
 */
class DownloadBox extends VerticalPanel {
  private final ChangeInfo change;
  private final String revision;
  private final PatchSet.Id psId;
  private final FlexTable commandTable;
  private final ListBox scheme;
  // Fetch commands keyed by scheme name; loaded lazily on first attach.
  private NativeMap<FetchInfo> fetch;

  DownloadBox(ChangeInfo change, String revision, PatchSet.Id psId) {
    this.change = change;
    this.revision = revision;
    this.psId = psId;
    this.commandTable = new FlexTable();
    this.scheme = new ListBox();
    this.scheme.addChangeHandler(new ChangeHandler() {
      @Override
      public void onChange(ChangeEvent event) {
        renderCommands();
        // Persist the selection as the signed-in user's preferred scheme.
        if (Gerrit.isSignedIn()) {
          saveScheme();
        }
      }
    });
    setStyleName(Gerrit.RESOURCES.css().downloadBox());
    commandTable.setStyleName(Gerrit.RESOURCES.css().downloadBoxTable());
    scheme.setStyleName(Gerrit.RESOURCES.css().downloadBoxScheme());
    add(commandTable);
  }

  /**
   * Loads the fetch info the first time the widget is attached.  A patch set
   * id of 0 denotes a change edit, which is fetched through its own endpoint.
   * RPC failures are silently ignored, leaving the box empty.
   */
  @Override
  protected void onLoad() {
    if (fetch == null) {
      if (psId.get() == 0) {
        ChangeApi.editWithCommands(change.legacyId().get()).get(
            new AsyncCallback<EditInfo>() {
              @Override
              public void onSuccess(EditInfo result) {
                fetch = result.fetch();
                renderScheme();
              }
              @Override
              public void onFailure(Throwable caught) {
              }
            });
      } else {
        RestApi call = ChangeApi.detail(change.legacyId().get());
        // Ask only for the current revision when this box shows it, to keep
        // the response small; DOWNLOAD_COMMANDS supplies the fetch info.
        ChangeList.addOptions(call, EnumSet.of(
            revision.equals(change.currentRevision())
                ? ListChangesOption.CURRENT_REVISION
                : ListChangesOption.ALL_REVISIONS,
            ListChangesOption.DOWNLOAD_COMMANDS));
        call.get(new AsyncCallback<ChangeInfo>() {
          @Override
          public void onSuccess(ChangeInfo result) {
            fetch = result.revision(revision).fetch();
            renderScheme();
          }
          @Override
          public void onFailure(Throwable caught) {
          }
        });
      }
    }
  }

  /** Rebuilds the command table for the currently selected scheme. */
  private void renderCommands() {
    commandTable.removeAllRows();
    if (scheme.getItemCount() > 0) {
      FetchInfo fetchInfo =
          fetch.get(scheme.getValue(scheme.getSelectedIndex()));
      for (String commandName : Natives.keys(fetchInfo.commands())) {
        CopyableLabel copyLabel =
            new CopyableLabel(fetchInfo.command(commandName));
        copyLabel.setStyleName(Gerrit.RESOURCES.css().downloadBoxCopyLabel());
        insertCommand(commandName, copyLabel);
      }
    }
    // Patch downloads are only offered for commits with exactly one parent.
    if (change.revision(revision).commit().parents().length() == 1) {
      insertPatch();
    }
    insertArchive();
    insertCommand(null, scheme);
  }

  /** Adds the "<sha>.diff.base64 | <sha>.diff.zip" patch download links. */
  private void insertPatch() {
    String id = revision.substring(0, 7);
    Anchor patchBase64 = new Anchor(id + ".diff.base64");
    patchBase64.setHref(new RestApi("/changes/")
        .id(psId.getParentKey().get())
        .view("revisions")
        .id(revision)
        .view("patch")
        .addParameterTrue("download")
        .url());
    Anchor patchZip = new Anchor(id + ".diff.zip");
    patchZip.setHref(new RestApi("/changes/")
        .id(psId.getParentKey().get())
        .view("revisions")
        .id(revision)
        .view("patch")
        .addParameterTrue("zip")
        .url());
    HorizontalPanel p = new HorizontalPanel();
    p.add(patchBase64);
    InlineLabel spacer = new InlineLabel("|");
    spacer.setStyleName(Gerrit.RESOURCES.css().downloadBoxSpacer());
    p.add(spacer);
    p.add(patchZip);
    insertCommand("Patch-File", p);
  }

  /** Adds one archive download link per server-enabled archive format. */
  private void insertArchive() {
    List<String> activated = Gerrit.getConfig().getArchiveFormats();
    if (activated.isEmpty()) {
      return;
    }
    List<Anchor> anchors = new ArrayList<>(activated.size());
    for (String f : activated) {
      Anchor archive = new Anchor(f);
      archive.setHref(new RestApi("/changes/")
          .id(psId.getParentKey().get())
          .view("revisions")
          .id(revision)
          .view("archive")
          .addParameter("format", f)
          .url());
      anchors.add(archive);
    }
    HorizontalPanel p = new HorizontalPanel();
    Iterator<Anchor> it = anchors.iterator();
    while (it.hasNext()) {
      Anchor a = it.next();
      p.add(a);
      // Separate adjacent links with a "|" spacer.
      if (it.hasNext()) {
        InlineLabel spacer = new InlineLabel("|");
        spacer.setStyleName(Gerrit.RESOURCES.css().downloadBoxSpacer());
        p.add(spacer);
      }
    }
    insertCommand("Archive", p);
  }

  /**
   * Appends a row to the command table: commandName (may be null) labels
   * column 0, and w (may be null) becomes the widget in column 1.
   */
  private void insertCommand(String commandName, Widget w) {
    int row = commandTable.getRowCount();
    commandTable.insertRow(row);
    commandTable.getCellFormatter().addStyleName(row, 0,
        Gerrit.RESOURCES.css().downloadBoxTableCommandColumn());
    if (commandName != null) {
      commandTable.setText(row, 0, commandName);
    }
    if (w != null) {
      commandTable.setWidget(row, 1, w);
    }
  }

  /**
   * Populates the scheme list box from the fetched commands, hiding it when
   * there is at most one scheme and preselecting the user's preferred scheme
   * when one matches, then renders the commands.
   */
  private void renderScheme() {
    for (String id : fetch.keySet()) {
      scheme.addItem(id);
    }
    if (scheme.getItemCount() == 0) {
      scheme.setVisible(false);
    } else {
      if (scheme.getItemCount() == 1) {
        scheme.setSelectedIndex(0);
        scheme.setVisible(false);
      } else {
        int select = 0;
        String find = getUserPreference();
        if (find != null) {
          for (int i = 0; i < scheme.getItemCount(); i++) {
            if (find.equals(scheme.getValue(i))) {
              select = i;
              break;
            }
          }
        }
        scheme.setSelectedIndex(select);
      }
    }
    renderCommands();
  }

  /**
   * Maps the signed-in user's preferred DownloadScheme to the corresponding
   * list box value; returns null when signed out or no usable preference is set.
   */
  private static String getUserPreference() {
    if (Gerrit.isSignedIn()) {
      DownloadScheme pref =
          Gerrit.getUserAccount().getGeneralPreferences().getDownloadUrl();
      if (pref != null) {
        switch (pref) {
          case ANON_GIT:
            return "git";
          case ANON_HTTP:
            return "anonymous http";
          case HTTP:
            return "http";
          case SSH:
            return "ssh";
          case REPO_DOWNLOAD:
            return "repo";
          default:
            return null;
        }
      }
    }
    return null;
  }

  /**
   * Saves the currently selected scheme as the user's download preference —
   * in the local account cache and on the server (fire-and-forget; the RPC
   * outcome is ignored).  No-op when the selection is unknown or unchanged.
   */
  private void saveScheme() {
    DownloadScheme scheme = getSelectedScheme();
    AccountGeneralPreferences pref =
        Gerrit.getUserAccount().getGeneralPreferences();
    if (scheme != null && scheme != pref.getDownloadUrl()) {
      pref.setDownloadUrl(scheme);
      PreferenceInput in = PreferenceInput.create();
      in.downloadScheme(scheme);
      AccountApi.self().view("preferences")
          .put(in, new AsyncCallback<JavaScriptObject>() {
            @Override
            public void onSuccess(JavaScriptObject result) {
            }
            @Override
            public void onFailure(Throwable caught) {
            }
          });
    }
  }

  /** Converts the selected list box value back to a DownloadScheme; null if unrecognized. */
  private DownloadScheme getSelectedScheme() {
    String id = scheme.getValue(scheme.getSelectedIndex());
    if ("git".equals(id)) {
      return DownloadScheme.ANON_GIT;
    } else if ("anonymous http".equals(id)) {
      return DownloadScheme.ANON_HTTP;
    } else if ("http".equals(id)) {
      return DownloadScheme.HTTP;
    } else if ("ssh".equals(id)) {
      return DownloadScheme.SSH;
    } else if ("repo".equals(id)) {
      return DownloadScheme.REPO_DOWNLOAD;
    }
    return null;
  }

  /** JSON payload for PUT /accounts/self/preferences. */
  private static class PreferenceInput extends JavaScriptObject {
    static PreferenceInput create() {
      return createObject().cast();
    }

    final void downloadScheme(DownloadScheme s) {
      downloadScheme0(s.name());
    }

    private final native void downloadScheme0(String n) /*-{
      this.download_scheme = n;
    }-*/;

    protected PreferenceInput() {
    }
  }
}
| |
package com.ycsoft.business.dto.core.fee;
import java.util.Date;
import java.util.List;
import com.ycsoft.beans.core.bill.BillDto;
/**
 * Data transfer object carrying the printable bill summary for one customer
 * in one billing cycle, together with the detailed bill records.
 */
public class BBillPrintDto {

    private String billing_cycle_id;
    private String cust_id;
    private Integer month_begin_fee;
    private Integer month_end_fee;
    private Integer month_sum_fee;
    private Integer zs_fee;
    private Integer xj_fee;
    private Integer tk_fee;
    private Integer other_fee;
    private Integer bk_fee;
    private Integer prod_bill_fee;
    private Integer fee_judge;
    private String remark;
    private Integer str1;
    private Integer str2;
    private Integer str3;
    private Integer str4;
    private Integer str5;
    private String str6;
    private String str7;
    private String str8;
    private String str9;
    private String str10;
    private Date create_time;
    private List<BillDto> billList;

    /** Default empty constructor. */
    public BBillPrintDto() {
    }

    public String getBilling_cycle_id() {
        return billing_cycle_id;
    }

    public void setBilling_cycle_id(String billing_cycle_id) {
        this.billing_cycle_id = billing_cycle_id;
    }

    public String getCust_id() {
        return cust_id;
    }

    public void setCust_id(String cust_id) {
        this.cust_id = cust_id;
    }

    public Integer getMonth_begin_fee() {
        return month_begin_fee;
    }

    public void setMonth_begin_fee(Integer month_begin_fee) {
        this.month_begin_fee = month_begin_fee;
    }

    public Integer getMonth_end_fee() {
        return month_end_fee;
    }

    public void setMonth_end_fee(Integer month_end_fee) {
        this.month_end_fee = month_end_fee;
    }

    public Integer getMonth_sum_fee() {
        return month_sum_fee;
    }

    public void setMonth_sum_fee(Integer month_sum_fee) {
        this.month_sum_fee = month_sum_fee;
    }

    public Integer getZs_fee() {
        return zs_fee;
    }

    public void setZs_fee(Integer zs_fee) {
        this.zs_fee = zs_fee;
    }

    public Integer getXj_fee() {
        return xj_fee;
    }

    public void setXj_fee(Integer xj_fee) {
        this.xj_fee = xj_fee;
    }

    public Integer getTk_fee() {
        return tk_fee;
    }

    public void setTk_fee(Integer tk_fee) {
        this.tk_fee = tk_fee;
    }

    public Integer getOther_fee() {
        return other_fee;
    }

    public void setOther_fee(Integer other_fee) {
        this.other_fee = other_fee;
    }

    public Integer getBk_fee() {
        return bk_fee;
    }

    public void setBk_fee(Integer bk_fee) {
        this.bk_fee = bk_fee;
    }

    public String getRemark() {
        return remark;
    }

    public void setRemark(String remark) {
        this.remark = remark;
    }

    public Integer getStr1() {
        return str1;
    }

    public void setStr1(Integer str1) {
        this.str1 = str1;
    }

    public Integer getStr2() {
        return str2;
    }

    public void setStr2(Integer str2) {
        this.str2 = str2;
    }

    public Integer getStr3() {
        return str3;
    }

    public void setStr3(Integer str3) {
        this.str3 = str3;
    }

    public Integer getStr4() {
        return str4;
    }

    public void setStr4(Integer str4) {
        this.str4 = str4;
    }

    public Integer getStr5() {
        return str5;
    }

    public void setStr5(Integer str5) {
        this.str5 = str5;
    }

    public String getStr6() {
        return str6;
    }

    public void setStr6(String str6) {
        this.str6 = str6;
    }

    public String getStr7() {
        return str7;
    }

    public void setStr7(String str7) {
        this.str7 = str7;
    }

    public String getStr8() {
        return str8;
    }

    public void setStr8(String str8) {
        this.str8 = str8;
    }

    public String getStr9() {
        return str9;
    }

    public void setStr9(String str9) {
        this.str9 = str9;
    }

    public String getStr10() {
        return str10;
    }

    public void setStr10(String str10) {
        this.str10 = str10;
    }

    public Date getCreate_time() {
        return create_time;
    }

    public void setCreate_time(Date create_time) {
        this.create_time = create_time;
    }

    public List<BillDto> getBillList() {
        return billList;
    }

    public void setBillList(List<BillDto> billList) {
        this.billList = billList;
    }

    public Integer getProd_bill_fee() {
        return prod_bill_fee;
    }

    public void setProd_bill_fee(Integer prod_bill_fee) {
        this.prod_bill_fee = prod_bill_fee;
    }

    public Integer getFee_judge() {
        return fee_judge;
    }

    public void setFee_judge(Integer fee_judge) {
        this.fee_judge = fee_judge;
    }
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aurora.scheduler.events;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicLong;
import javax.inject.Inject;
import javax.inject.Qualifier;
import javax.inject.Singleton;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.eventbus.AsyncEventBus;
import com.google.common.eventbus.DeadEvent;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import com.google.common.eventbus.SubscriberExceptionHandler;
import com.google.common.util.concurrent.AbstractIdleService;
import com.google.inject.AbstractModule;
import com.google.inject.Binder;
import com.google.inject.Provides;
import com.google.inject.multibindings.Multibinder;
import org.apache.aurora.common.stats.StatsProvider;
import org.apache.aurora.scheduler.SchedulerServicesModule;
import org.apache.aurora.scheduler.async.AsyncModule.AsyncExecutor;
import org.apache.aurora.scheduler.base.AsyncUtil;
import org.apache.aurora.scheduler.events.PubsubEvent.EventSubscriber;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import static java.util.Objects.requireNonNull;
/**
 * Binding module for plumbing event notifications.  Wires two async event
 * buses: the main bus (backed by the shared async executor) and a dedicated
 * bus for Mesos "registered" events (single-threaded executor), each with a
 * dead-event logger and an exception-counting subscriber handler.
 */
public final class PubsubEventModule extends AbstractModule {
  // Qualifier for the internal dead-event logging subscriber object.
  @Qualifier
  @Target({ FIELD, PARAMETER, METHOD }) @Retention(RUNTIME)
  private @interface DeadEventHandler { }

  // Qualifier for the bus/sink/subscribers carrying Mesos registered events.
  @Qualifier
  @Target({ FIELD, PARAMETER, METHOD }) @Retention(RUNTIME)
  public @interface RegisteredEvents { }

  private final Logger log;
  private final Executor registeredExecutor;

  @VisibleForTesting
  static final String EXCEPTIONS_STAT = "event_bus_exceptions";

  @VisibleForTesting
  static final String EVENT_BUS_DEAD_EVENTS = "event_bus_dead_events";

  public PubsubEventModule() {
    this(LoggerFactory.getLogger(PubsubEventModule.class));
  }

  private PubsubEventModule(Logger log) {
    this.log = requireNonNull(log);
    // Registered events get their own single-threaded executor so their
    // delivery order is independent of the shared async executor.
    this.registeredExecutor = AsyncUtil.singleThreadLoggingScheduledExecutor("RegisteredEventSink",
        log);
  }

  @VisibleForTesting
  PubsubEventModule(Logger log, Executor registeredExecutor) {
    this.log = requireNonNull(log);
    this.registeredExecutor = requireNonNull(registeredExecutor);
  }

  @VisibleForTesting
  static final String DEAD_EVENT_MESSAGE = "Captured dead event %s";

  @Override
  protected void configure() {
    // Ensure at least an empty binding is present.
    Multibinder.newSetBinder(binder(), EventSubscriber.class);
    Multibinder.newSetBinder(binder(), EventSubscriber.class, RegisteredEvents.class);
    // TODO(ksweeney): Would this be better as a scheduler active service?
    SchedulerServicesModule.addAppStartupServiceBinding(binder()).to(RegisterSubscribers.class);
  }

  // Handler that counts and logs exceptions thrown by event subscribers.
  @Provides
  @Singleton
  SubscriberExceptionHandler provideSubscriberExceptionHandler(StatsProvider statsProvider) {
    final AtomicLong subscriberExceptions = statsProvider.makeCounter(EXCEPTIONS_STAT);
    return (exception, context) -> {
      subscriberExceptions.incrementAndGet();
      log.error(
          "Failed to dispatch event to " + context.getSubscriberMethod() + ": " + exception,
          exception);
    };
  }

  // Subscriber that counts and logs events delivered with no other subscriber.
  @Provides
  @DeadEventHandler
  @Singleton
  Object provideDeadEventHandler(StatsProvider statsProvider) {
    final AtomicLong deadEventCounter = statsProvider.makeCounter(EVENT_BUS_DEAD_EVENTS);
    return new Object() {
      @Subscribe
      public void logDeadEvent(DeadEvent event) {
        deadEventCounter.incrementAndGet();
        log.warn(String.format(DEAD_EVENT_MESSAGE, event.getEvent()));
      }
    };
  }

  @Provides
  @Singleton
  EventBus provideEventBus(@AsyncExecutor Executor executor,
      SubscriberExceptionHandler subscriberExceptionHandler,
      @DeadEventHandler Object deadEventHandler) {
    EventBus eventBus = new AsyncEventBus(executor, subscriberExceptionHandler);
    eventBus.register(deadEventHandler);
    return eventBus;
  }

  @Provides
  @Singleton
  EventSink provideEventSink(EventBus eventBus) {
    return eventBus::post;
  }

  @Provides
  @RegisteredEvents
  @Singleton
  EventBus provideRegisteredEventBus(SubscriberExceptionHandler subscriberExceptionHandler,
      @DeadEventHandler Object deadEventHandler) {
    EventBus eventBus = new AsyncEventBus(registeredExecutor, subscriberExceptionHandler);
    eventBus.register(deadEventHandler);
    return eventBus;
  }

  @Provides
  @RegisteredEvents
  @Singleton
  EventSink provideRegisteredEventSink(@RegisteredEvents EventBus eventBus) {
    return eventBus::post;
  }

  // App-startup service that registers all bound subscribers with their buses.
  static class RegisterSubscribers extends AbstractIdleService {
    private final EventBus eventBus;
    private final EventBus registeredEventBus;
    private final Set<EventSubscriber> subscribers;
    private final Set<EventSubscriber> registeredSubscribers;

    @Inject
    RegisterSubscribers(EventBus eventBus,
        @RegisteredEvents EventBus registeredEventBus,
        Set<EventSubscriber> subscribers,
        @RegisteredEvents Set<EventSubscriber> registeredSubscribers) {
      this.eventBus = requireNonNull(eventBus);
      this.registeredEventBus = requireNonNull(registeredEventBus);
      this.subscribers = requireNonNull(subscribers);
      this.registeredSubscribers = requireNonNull(registeredSubscribers);
    }

    @Override
    protected void startUp() {
      subscribers.forEach(eventBus::register);
      registeredSubscribers.forEach(registeredEventBus::register);
    }

    @Override
    protected void shutDown() {
      // Nothing to do - await VM shutdown.
    }
  }

  /**
   * Binds a task event module.
   *
   * @param binder Binder to bind against.
   */
  public static void bind(Binder binder) {
    binder.install(new PubsubEventModule());
  }

  /**
   * Binds a subscriber to receive task events.
   *
   * @param binder Binder to bind the subscriber with.
   * @param subscriber Subscriber implementation class to register for events.
   */
  public static void bindSubscriber(Binder binder, Class<? extends EventSubscriber> subscriber) {
    Multibinder.newSetBinder(binder, EventSubscriber.class).addBinding().to(subscriber);
  }

  /**
   * Binds a subscriber to receive Mesos registered events.
   *
   * @param binder Binder to bind the subscriber with.
   * @param subscriber Subscriber implementation class to register for events.
   */
  public static void bindRegisteredSubscriber(Binder binder,
      Class<? extends EventSubscriber> subscriber) {
    Multibinder.newSetBinder(binder, EventSubscriber.class, RegisteredEvents.class)
        .addBinding()
        .to(subscriber);
  }
}
| |
/*
* Copyright Notice
*
* This is a work of the U.S. Government and is not subject to copyright
* protection in the United States. Foreign copyrights may apply.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gov.va.isaac.workflow.persistence;
import gov.va.isaac.AppContext;
import gov.va.isaac.interfaces.workflow.ProcessInstanceCreationRequestI;
import gov.va.isaac.util.Utility;
import gov.va.isaac.workflow.ProcessInstanceCreationRequest;
import gov.va.isaac.workflow.ProcessInstanceServiceBI;
import gov.va.isaac.workflow.exceptions.DatastoreException;
import java.beans.XMLDecoder;
import java.beans.XMLEncoder;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.inject.Singleton;
import javax.sql.DataSource;
import org.jfree.util.Log;
import org.jvnet.hk2.annotations.Service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* {@link ProcessInstanceCreationRequestsAPI}
*
* @author alo
* @author <a href="mailto:daniel.armbrust.list@gmail.com">Dan Armbrust</a>
*/
@Service
@Singleton
public class ProcessInstanceCreationRequestsAPI implements ProcessInstanceServiceBI {
private final Logger log = LoggerFactory.getLogger(ProcessInstanceCreationRequestsAPI.class);
private DataSource dataSource;
// Private constructor: instances are created by HK2 (see @Service/@Singleton).
private ProcessInstanceCreationRequestsAPI() {
    //For HK2 to construct
    //start the init process: warm up the datasource/schema on a background
    //thread so the first API caller does not pay the startup cost
    Utility.execute(() -> getDataSource());
}
/**
 * Lazily initializes and returns the shared {@link DataSource}, creating the
 * backing schema on first use.  Synchronized because the constructor kicks
 * off initialization on a background thread while API methods may call this
 * concurrently; without the lock two threads could both observe a null
 * dataSource and run schema creation twice.
 *
 * @return the datastore's DataSource
 */
private synchronized DataSource getDataSource()
{
    if (dataSource == null)
    {
        dataSource = AppContext.getService(DatastoreManager.class).getDataSource();
        try
        {
            createSchema();
        }
        catch (DatastoreException e)
        {
            // Log and continue: the datasource itself is usable; schema
            // problems will surface as SQL errors on first use.
            log.error("Create schema failed during init", e);
        }
    }
    return dataSource;
}
/**
 * Persists a new process instance creation request with status REQUESTED and
 * returns it populated with its generated id.
 *
 * @param processName name of the workflow process to instantiate
 * @param componentId id of the component the process is about (logged as an error if null)
 * @param componentName display name of the component
 * @param author user id recorded as the requester
 * @param variables process variables, serialized into the row
 * @return the stored request, including its generated id
 * @throws DatastoreException on any SQL failure or if no generated key is returned
 */
@Override
public ProcessInstanceCreationRequestI createRequest(String processName, UUID componentId, String componentName, String author, Map<String, String> variables)
        throws DatastoreException {
    try (Connection conn = getDataSource().getConnection();
            PreparedStatement psInsert = conn.prepareStatement("insert into PINST_REQUESTS(component_id, component_name, process_name, user_id, status, sync_message,"
                    + " request_time, sync_time, wf_id, variables) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", PreparedStatement.RETURN_GENERATED_KEYS)) {
        if (componentId == null) {
            log.error("Setting null componentId for ProcessInstanceCreationRequestsAPI.createRequest(): user_id={}, process_name={}, component={}, map={}", author, processName, componentName, variables);
        }
        // Capture the timestamp once so the stored row and the returned object
        // agree (previously the result was populated with Long.MIN_VALUE while
        // the row stored the current time).
        long requestTime = System.currentTimeMillis();
        psInsert.setString(1, componentId != null ? componentId.toString() : null);
        psInsert.setString(2, componentName);
        psInsert.setString(3, processName);
        psInsert.setString(4, author);
        psInsert.setString(5, ProcessInstanceCreationRequestI.RequestStatus.REQUESTED.name());
        psInsert.setString(6, "");
        psInsert.setLong(7, requestTime);
        psInsert.setLong(8, 0L);
        psInsert.setLong(9, 0L);
        psInsert.setString(10, serializeMap(variables));
        psInsert.executeUpdate();

        ProcessInstanceCreationRequestI result = new ProcessInstanceCreationRequest();
        result.setComponentId(componentId != null ? componentId.toString() : null);
        result.setComponentName(componentName);
        try (ResultSet generatedKeys = psInsert.getGeneratedKeys()) {
            if (generatedKeys.next()) {
                result.setId(generatedKeys.getInt(1));
            } else {
                throw new SQLException("Creating instance failed, no generated key obtained.");
            }
        }
        result.setRequestTime(requestTime);
        result.setProcessName(processName);
        result.setStatus(ProcessInstanceCreationRequestI.RequestStatus.REQUESTED);
        result.setUserId(author);
        result.setVariables(variables);
        conn.commit();
        return result;
    } catch (SQLException ex) {
        throw new DatastoreException(ex);
    }
}
/**
 * Updates the status, sync message and workflow id of an existing request row.
 *
 * @param id generated key of the request row to update
 * @param status new request status
 * @param syncMessage message describing the sync outcome
 * @param wfId workflow instance id; null is stored as 0
 * @throws DatastoreException if exactly one row was not updated, or on any SQL failure
 */
@Override
public void updateRequestStatus(int id, ProcessInstanceCreationRequestI.RequestStatus status, String syncMessage, Long wfId) throws DatastoreException {
    // try-with-resources on the statement as well, so it is released promptly
    // rather than only when the connection closes.
    try (Connection conn = getDataSource().getConnection();
            PreparedStatement psUpdateRequest = conn.prepareStatement("update PINST_REQUESTS set sync_message = ?, status = ?, wf_id = ? where id = ?")) {
        psUpdateRequest.setString(1, syncMessage);
        psUpdateRequest.setString(2, status.name());
        psUpdateRequest.setLong(3, (wfId == null ? 0 : wfId));
        psUpdateRequest.setInt(4, id);
        int rowCount = psUpdateRequest.executeUpdate();
        if (rowCount != 1)
        {
            throw new DatastoreException("updateRequestStatus failed to update any rows!");
        }
        conn.commit();
    } catch (SQLException ex) {
        throw new DatastoreException(ex);
    }
}
/**
 * Returns all requests owned by the given user that are still in status
 * REQUESTED.
 *
 * @param owner user id of the request owner
 * @return the matching requests (possibly empty)
 * @throws DatastoreException on any SQL failure
 */
@Override
public List<ProcessInstanceCreationRequestI> getOpenOwnedRequests(String owner) throws DatastoreException {
    List<ProcessInstanceCreationRequestI> requests = new ArrayList<>();
    // Parameterized query: the previous string-concatenated SQL was open to
    // SQL injection through the owner value.
    try (Connection conn = getDataSource().getConnection();
            PreparedStatement ps = conn.prepareStatement("SELECT * FROM PINST_REQUESTS where user_id = ? and status = ?")) {
        ps.setString(1, owner);
        ps.setString(2, ProcessInstanceCreationRequestI.RequestStatus.REQUESTED.name());
        try (ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                requests.add(readRequest(rs));
            }
        }
    } catch (SQLException ex) {
        throw new DatastoreException(ex);
    }
    return requests;
}
/**
 * Returns all requests owned by the given user that are in the given status.
 *
 * @param owner user id of the request owner
 * @param status status to filter on
 * @return the matching requests (possibly empty)
 * @throws DatastoreException on any SQL failure
 */
@Override
public List<ProcessInstanceCreationRequestI> getOwnedRequestsByStatus(String owner, ProcessInstanceCreationRequestI.RequestStatus status) throws DatastoreException {
    List<ProcessInstanceCreationRequestI> requests = new ArrayList<>();
    // Parameterized query: the previous string-concatenated SQL was open to
    // SQL injection through the owner value.
    try (Connection conn = getDataSource().getConnection();
            PreparedStatement ps = conn.prepareStatement("SELECT * FROM PINST_REQUESTS where user_id = ? and status = ?")) {
        ps.setString(1, owner);
        ps.setString(2, status.name());
        try (ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                requests.add(readRequest(rs));
            }
        }
    } catch (SQLException ex) {
        throw new DatastoreException(ex);
    }
    return requests;
}
/**
 * Returns all REQUESTED-status requests owned by {@code owner} for the given component.
 *
 * @param owner       user id whose open requests are wanted
 * @param componentId component the requests were created for
 * @return list of matching requests, empty if none
 * @throws DatastoreException on any SQL failure
 */
@Override
public List<ProcessInstanceCreationRequestI> getOpenOwnedRequestsByComponentId(String owner, UUID componentId) throws DatastoreException {
    List<ProcessInstanceCreationRequestI> requests = new ArrayList<>();
    // Parameterized query replaces injectable string concatenation and ensures
    // the statement and result set are closed.
    try (Connection conn = getDataSource().getConnection();
         PreparedStatement ps = conn.prepareStatement(
             "SELECT * FROM PINST_REQUESTS where user_id = ? and component_id = ? and status = ?")) {
        ps.setString(1, owner);
        ps.setString(2, componentId == null ? null : componentId.toString());
        ps.setString(3, ProcessInstanceCreationRequestI.RequestStatus.REQUESTED.name());
        try (ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                requests.add(readRequest(rs));
            }
        }
    } catch (SQLException ex) {
        throw new DatastoreException(ex);
    }
    return requests;
}
/**
 * Returns all requests (any owner, any status) for the given component.
 *
 * @param componentId component the requests were created for
 * @return list of matching requests, empty if none
 * @throws DatastoreException on any SQL failure
 */
@Override
public List<ProcessInstanceCreationRequestI> getRequestsByComponentId(UUID componentId) throws DatastoreException {
    List<ProcessInstanceCreationRequestI> requests = new ArrayList<>();
    // Parameterized query replaces string concatenation and ensures the
    // statement and result set are closed.
    try (Connection conn = getDataSource().getConnection();
         PreparedStatement ps = conn.prepareStatement(
             "SELECT * FROM PINST_REQUESTS where component_id = ?")) {
        ps.setString(1, componentId == null ? null : componentId.toString());
        try (ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                requests.add(readRequest(rs));
            }
        }
    } catch (SQLException ex) {
        throw new DatastoreException(ex);
    }
    return requests;
}
/**
 * Returns the request associated with the given workflow id, or null if none exists.
 *
 * @param wfId workflow id to look up; null returns null (a null id cannot match a row)
 * @return the matching request, or null
 * @throws DatastoreException on any SQL failure
 */
@Override
public ProcessInstanceCreationRequestI getRequestByWfId(Long wfId) throws DatastoreException {
    if (wfId == null) {
        // Guard against unboxing NPE; the old concatenated query matched no rows
        // for a null id, so returning null preserves the effective behavior.
        return null;
    }
    try (Connection conn = getDataSource().getConnection();
         PreparedStatement ps = conn.prepareStatement(
             "SELECT * FROM PINST_REQUESTS where wf_id = ?")) {
        ps.setLong(1, wfId);
        try (ResultSet rs = ps.executeQuery()) {
            return rs.next() ? readRequest(rs) : null;
        }
    } catch (SQLException ex) {
        throw new DatastoreException(ex);
    }
}
/**
 * Returns every request in the PINST_REQUESTS table.
 *
 * @return all requests, empty list if the table is empty
 * @throws DatastoreException on any SQL failure
 */
@Override
public List<ProcessInstanceCreationRequestI> getRequests() throws DatastoreException {
    List<ProcessInstanceCreationRequestI> requests = new ArrayList<>();
    // Statement and ResultSet are now closed deterministically instead of
    // lingering until the connection is closed.
    try (Connection conn = getDataSource().getConnection();
         Statement s = conn.createStatement();
         ResultSet rs = s.executeQuery("SELECT * FROM PINST_REQUESTS")) {
        while (rs.next()) {
            requests.add(readRequest(rs));
        }
    } catch (SQLException ex) {
        throw new DatastoreException(ex);
    }
    return requests;
}
/**
 * Returns the request with the given primary key, or null if it does not exist.
 *
 * @param id primary key of the PINST_REQUESTS row
 * @return the matching request, or null
 * @throws DatastoreException on any SQL failure
 */
@Override
public ProcessInstanceCreationRequestI getRequest(int id) throws DatastoreException {
    // Parameterized for consistency with the other lookups; resources are
    // closed via try-with-resources, and the empty-branch if/else is gone.
    try (Connection conn = getDataSource().getConnection();
         PreparedStatement ps = conn.prepareStatement(
             "SELECT * FROM PINST_REQUESTS where id = ?")) {
        ps.setInt(1, id);
        try (ResultSet rs = ps.executeQuery()) {
            return rs.next() ? readRequest(rs) : null;
        }
    } catch (SQLException ex) {
        throw new DatastoreException(ex);
    }
}
/**
 * Materializes a ProcessInstanceCreationRequest from the current row of {@code rs}.
 * Column order must match the PINST_REQUESTS schema created in createSchema():
 * id, wf_id, component_id, component_name, process_name, user_id, status,
 * sync_message, request_time, sync_time, variables.
 *
 * @param rs result set positioned on the row to read
 * @return the populated request
 * @throws SQLException       on JDBC access errors
 * @throws DatastoreException if the stored status is not a known enum name
 */
private ProcessInstanceCreationRequestI readRequest(ResultSet rs) throws SQLException, DatastoreException {
    try {
        ProcessInstanceCreationRequestI request = new ProcessInstanceCreationRequest();
        request.setId(rs.getInt(1));
        request.setWfId(rs.getLong(2));
        request.setComponentId(rs.getString(3));
        request.setComponentName(rs.getString(4));
        request.setProcessName(rs.getString(5));
        request.setUserId(rs.getString(6));
        String status = rs.getString(7);
        // Explicit switch (rather than Enum.valueOf) so an unknown value — even
        // one added to the enum later — surfaces as a DatastoreException.
        // NOTE(review): a SQL NULL status would NPE here, as before; the insert
        // path always writes a status, but confirm no legacy rows lack one.
        switch (status) {
            case "CREATED":
                request.setStatus(ProcessInstanceCreationRequestI.RequestStatus.CREATED);
                break;
            case "REQUESTED":
                request.setStatus(ProcessInstanceCreationRequestI.RequestStatus.REQUESTED);
                break;
            case "REJECTED":
                request.setStatus(ProcessInstanceCreationRequestI.RequestStatus.REJECTED);
                break;
            default:
                throw new DatastoreException("Unexpected 'status' found in DB: " + status);
        }
        request.setSyncMessage(rs.getString(8));
        request.setRequestTime(rs.getLong(9));
        request.setSyncTime(rs.getLong(10));
        // Read the column once; a SQL NULL previously caused an NPE via
        // rs.getString(11).isEmpty().
        String serializedVariables = rs.getString(11);
        request.setVariables(serializedVariables == null || serializedVariables.isEmpty()
            ? new HashMap<String, String>()
            : deserializeMap(serializedVariables));
        return request;
    } catch (NumberFormatException e) {
        throw new DatastoreException("Encountered an unparseable number in a field that should have held a number: " + e.getMessage());
    }
}
/**
 * Creates the PINST_REQUESTS table and its indexes if they do not already exist.
 * Idempotent: checks the database metadata (schema WORKFLOW) first.
 *
 * @throws DatastoreException on any SQL failure
 */
@Override
public void createSchema() throws DatastoreException {
    try (Connection conn = getDataSource().getConnection()) {
        log.info("Creating Workflow Process Instance Schema");
        DatabaseMetaData dbmd = conn.getMetaData();
        // Metadata ResultSet and DDL Statement are closed deterministically.
        try (ResultSet rs = dbmd.getTables(null, "WORKFLOW", "PINST_REQUESTS", null)) {
            if (!rs.next()) {
                try (Statement s = conn.createStatement()) {
                    s.execute("create table PINST_REQUESTS "
                        + "(id INTEGER NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1) PRIMARY KEY, "
                        + "wf_id bigint, "
                        + "component_id varchar(40), "
                        + "component_name varchar(255), "
                        + "process_name varchar(255), "
                        + "user_id varchar(40), "
                        + "status varchar(40), "
                        + "sync_message varchar(255), "
                        + "request_time bigint, "
                        + "sync_time bigint, "
                        + "variables long varchar)");
                    s.execute("create index WORKFLOW_status_idx on PINST_REQUESTS(status)");
                    s.execute("create index WORKFLOW_component_id_idx on PINST_REQUESTS(component_id)");
                    s.execute("create index WORKFLOW_wf_id_idx on PINST_REQUESTS(wf_id)");
                }
                log.debug("Created table PINST_REQUESTS");
            } else {
                log.debug("PINST_REQUESTS already exists!");
            }
        }
        conn.commit();
    } catch (SQLException ex) {
        throw new DatastoreException(ex);
    }
}
/**
 * Drops the PINST_REQUESTS table. A "does not exist" failure is logged and
 * swallowed so the call is safe when the schema was never created.
 *
 * @throws DatastoreException on any other SQL failure
 */
@Override
public void dropSchema() throws DatastoreException {
    try (Connection conn = getDataSource().getConnection();
         Statement s = conn.createStatement()) {
        log.info("Dropping PINST_REQUESTS");
        s.execute("drop table PINST_REQUESTS");
    } catch (SQLException ex) {
        // Null-safe message check: getMessage() may be null, which previously
        // would have thrown an NPE instead of the intended DatastoreException.
        if (ex.getMessage() != null && ex.getMessage().contains("does not exist")) {
            log.info("Table did not exist");
        } else {
            throw new DatastoreException(ex);
        }
    }
}
/**
 * Serializes a variables map to its java.beans XML representation.
 * A null map serializes to the empty string, mirroring deserializeMap().
 *
 * @param map variables to serialize, may be null
 * @return XML text, or "" for a null map
 */
private String serializeMap(Map<String, String> map) {
    if (map == null) {
        return "";
    }
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    final XMLEncoder encoder = new XMLEncoder(buffer);
    try {
        encoder.writeObject(map);
    } finally {
        // close() flushes the encoder so the buffer holds the full document.
        encoder.close();
    }
    return buffer.toString();
}
/**
 * Reconstructs a variables map from its java.beans XML representation.
 * Null or empty input yields an empty, mutable map.
 *
 * @param serializedMap XML produced by serializeMap(), may be null or empty
 * @return the decoded map, never null
 */
@SuppressWarnings("unchecked")
private Map<String, String> deserializeMap(String serializedMap) {
    if (serializedMap == null || serializedMap.isEmpty()) {
        return new HashMap<String, String>();
    }
    try (XMLDecoder xmlDecoder = new XMLDecoder(new ByteArrayInputStream(serializedMap.getBytes()))) {
        return (Map<String, String>) xmlDecoder.readObject();
    } catch (Exception e) {
        // Log for diagnostics, then propagate — callers treat this as fatal.
        log.error("Unexpected error while deserializing map", e);
        throw e;
    }
}
}
| |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.graphics.g2d;
import static com.badlogic.gdx.graphics.Texture.TextureWrap.*;
import com.badlogic.gdx.Files.FileType;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.Pixmap.Format;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.Texture.TextureFilter;
import com.badlogic.gdx.graphics.Texture.TextureWrap;
import com.badlogic.gdx.graphics.g2d.TextureAtlas.TextureAtlasData.Page;
import com.badlogic.gdx.graphics.g2d.TextureAtlas.TextureAtlasData.Region;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.Disposable;
import com.badlogic.gdx.utils.GdxRuntimeException;
import com.badlogic.gdx.utils.ObjectMap;
import com.badlogic.gdx.utils.Sort;
import com.badlogic.gdx.utils.StreamUtils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;
/** Loads images from texture atlases created by TexturePacker.<br>
* <br>
* A TextureAtlas must be disposed to free up the resources consumed by the backing textures.
* @author Nathan Sweet */
public class TextureAtlas implements Disposable {
    // Scratch buffer reused by readTuple(); not thread-safe, matching upstream usage.
    static final String[] tuple = new String[4];

    private final HashSet<Texture> textures = new HashSet<Texture>(4);
    private final Array<AtlasRegion> regions = new Array<AtlasRegion>();

    /** Parsed form of a TexturePacker pack file: pages (backing images) and the regions packed into them. */
    public static class TextureAtlasData {
        public static class Page {
            public final FileHandle textureFile;
            /** May be pre-assigned by the caller to reuse an existing texture; otherwise created during load(). */
            public Texture texture;
            public final boolean useMipMaps;
            public final Format format;
            public final TextureFilter minFilter;
            public final TextureFilter magFilter;
            public final TextureWrap uWrap;
            public final TextureWrap vWrap;

            public Page (FileHandle handle, boolean useMipMaps, Format format, TextureFilter minFilter, TextureFilter magFilter,
                TextureWrap uWrap, TextureWrap vWrap) {
                this.textureFile = handle;
                this.useMipMaps = useMipMaps;
                this.format = format;
                this.minFilter = minFilter;
                this.magFilter = magFilter;
                this.uWrap = uWrap;
                this.vWrap = vWrap;
            }
        }

        public static class Region {
            public Page page;
            public int index;
            public String name;
            public float offsetX;
            public float offsetY;
            public int originalWidth;
            public int originalHeight;
            public boolean rotate;
            public int left;
            public int top;
            public int width;
            public int height;
            public boolean flip;
            public int[] splits;
            public int[] pads;
        }

        final Array<Page> pages = new Array<Page>();
        final Array<Region> regions = new Array<Region>();

        public TextureAtlasData (FileHandle packFile, FileHandle imagesDir, boolean flip) {
            BufferedReader reader = new BufferedReader(new InputStreamReader(packFile.read()), 64);
            try {
                Page pageImage = null;
                while (true) {
                    String line = reader.readLine();
                    if (line == null) break;
                    if (line.trim().length() == 0)
                        pageImage = null; // A blank line ends the current page.
                    else if (pageImage == null) {
                        // Page header: image file name, then format, filters and wrap direction.
                        FileHandle file = imagesDir.child(line);
                        Format format = Format.valueOf(readValue(reader));
                        readTuple(reader);
                        TextureFilter min = TextureFilter.valueOf(tuple[0]);
                        TextureFilter max = TextureFilter.valueOf(tuple[1]);
                        String direction = readValue(reader);
                        TextureWrap repeatX = ClampToEdge;
                        TextureWrap repeatY = ClampToEdge;
                        if (direction.equals("x"))
                            repeatX = Repeat;
                        else if (direction.equals("y"))
                            repeatY = Repeat;
                        else if (direction.equals("xy")) {
                            repeatX = Repeat;
                            repeatY = Repeat;
                        }
                        pageImage = new Page(file, min.isMipMap(), format, min, max, repeatX, repeatY);
                        pages.add(pageImage);
                    } else {
                        // Region entry belonging to the current page.
                        boolean rotate = Boolean.valueOf(readValue(reader));
                        readTuple(reader);
                        int left = Integer.parseInt(tuple[0]);
                        int top = Integer.parseInt(tuple[1]);
                        readTuple(reader);
                        int width = Integer.parseInt(tuple[0]);
                        int height = Integer.parseInt(tuple[1]);
                        Region region = new Region();
                        region.page = pageImage;
                        region.left = left;
                        region.top = top;
                        region.width = width;
                        region.height = height;
                        region.name = line;
                        region.rotate = rotate;
                        if (readTuple(reader) == 4) { // split is optional
                            region.splits = new int[] {Integer.parseInt(tuple[0]), Integer.parseInt(tuple[1]),
                                Integer.parseInt(tuple[2]), Integer.parseInt(tuple[3])};
                            if (readTuple(reader) == 4) { // pad is optional, but only present with splits
                                region.pads = new int[] {Integer.parseInt(tuple[0]), Integer.parseInt(tuple[1]),
                                    Integer.parseInt(tuple[2]), Integer.parseInt(tuple[3])};
                                readTuple(reader);
                            }
                        }
                        region.originalWidth = Integer.parseInt(tuple[0]);
                        region.originalHeight = Integer.parseInt(tuple[1]);
                        readTuple(reader);
                        region.offsetX = Integer.parseInt(tuple[0]);
                        region.offsetY = Integer.parseInt(tuple[1]);
                        region.index = Integer.parseInt(readValue(reader));
                        if (flip) region.flip = true;
                        regions.add(region);
                    }
                }
            } catch (Exception ex) {
                throw new GdxRuntimeException("Error reading pack file: " + packFile, ex);
            } finally {
                StreamUtils.closeQuietly(reader);
            }

            new Sort().sort((Object[])regions.items, (Comparator)indexComparator, 0, regions.size);
        }

        public Array<Page> getPages () {
            return pages;
        }

        public Array<Region> getRegions () {
            return regions;
        }
    }

    /** Creates an empty atlas to which regions can be added. */
    public TextureAtlas () {
    }

    /** Loads the specified pack file using {@link FileType#Internal}, using the parent directory of the pack file to find the page
     * images. */
    public TextureAtlas (String internalPackFile) {
        this(Gdx.files.internal(internalPackFile));
    }

    /** Loads the specified pack file, using the parent directory of the pack file to find the page images. */
    public TextureAtlas (FileHandle packFile) {
        this(packFile, packFile.parent());
    }

    /** @param flip If true, all regions loaded will be flipped for use with a perspective where 0,0 is the upper left corner.
     * @see #TextureAtlas(FileHandle) */
    public TextureAtlas (FileHandle packFile, boolean flip) {
        this(packFile, packFile.parent(), flip);
    }

    public TextureAtlas (FileHandle packFile, FileHandle imagesDir) {
        this(packFile, imagesDir, false);
    }

    /** @param flip If true, all regions loaded will be flipped for use with a perspective where 0,0 is the upper left corner. */
    public TextureAtlas (FileHandle packFile, FileHandle imagesDir, boolean flip) {
        this(new TextureAtlasData(packFile, imagesDir, flip));
    }

    /** @param data May be null. */
    public TextureAtlas (TextureAtlasData data) {
        if (data != null) load(data);
    }

    private void load (TextureAtlasData data) {
        ObjectMap<Page, Texture> pageToTexture = new ObjectMap<Page, Texture>();
        for (Page page : data.pages) {
            // Reuse the page's pre-assigned texture if present, else create one;
            // filter/wrap setup is identical either way (was duplicated per branch).
            Texture texture = page.texture;
            if (texture == null) texture = new Texture(page.textureFile, page.format, page.useMipMaps);
            texture.setFilter(page.minFilter, page.magFilter);
            texture.setWrap(page.uWrap, page.vWrap);
            textures.add(texture);
            pageToTexture.put(page, texture);
        }
        for (Region region : data.regions) {
            int width = region.width;
            int height = region.height;
            AtlasRegion atlasRegion = new AtlasRegion(pageToTexture.get(region.page), region.left, region.top,
                region.rotate ? height : width, region.rotate ? width : height);
            atlasRegion.index = region.index;
            atlasRegion.name = region.name;
            atlasRegion.offsetX = region.offsetX;
            atlasRegion.offsetY = region.offsetY;
            atlasRegion.originalHeight = region.originalHeight;
            atlasRegion.originalWidth = region.originalWidth;
            atlasRegion.rotate = region.rotate;
            atlasRegion.splits = region.splits;
            atlasRegion.pads = region.pads;
            if (region.flip) atlasRegion.flip(false, true);
            regions.add(atlasRegion);
        }
    }

    /** Adds a region to the atlas. The specified texture will be disposed when the atlas is disposed. */
    public AtlasRegion addRegion (String name, Texture texture, int x, int y, int width, int height) {
        textures.add(texture);
        AtlasRegion region = new AtlasRegion(texture, x, y, width, height);
        region.name = name;
        region.originalWidth = width;
        region.originalHeight = height;
        region.index = -1;
        regions.add(region);
        return region;
    }

    /** Adds a region to the atlas. The texture for the specified region will be disposed when the atlas is disposed. */
    public AtlasRegion addRegion (String name, TextureRegion textureRegion) {
        return addRegion(name, textureRegion.texture, textureRegion.getRegionX(), textureRegion.getRegionY(),
            textureRegion.getRegionWidth(), textureRegion.getRegionHeight());
    }

    /** Returns all regions in the atlas. */
    public Array<AtlasRegion> getRegions () {
        return regions;
    }

    /** Returns the first region found with the specified name. This method uses string comparison to find the region, so the result
     * should be cached rather than calling this method multiple times.
     * @return The region, or null. */
    public AtlasRegion findRegion (String name) {
        for (int i = 0, n = regions.size; i < n; i++)
            if (regions.get(i).name.equals(name)) return regions.get(i);
        return null;
    }

    /** Returns the first region found with the specified name and index. This method uses string comparison to find the region, so
     * the result should be cached rather than calling this method multiple times.
     * @return The region, or null. */
    public AtlasRegion findRegion (String name, int index) {
        for (int i = 0, n = regions.size; i < n; i++) {
            AtlasRegion region = regions.get(i);
            if (!region.name.equals(name)) continue;
            if (region.index != index) continue;
            return region;
        }
        return null;
    }

    /** Returns all regions with the specified name, ordered by smallest to largest {@link AtlasRegion#index index}. This method
     * uses string comparison to find the regions, so the result should be cached rather than calling this method multiple times. */
    public Array<AtlasRegion> findRegions (String name) {
        Array<AtlasRegion> matched = new Array<AtlasRegion>();
        for (int i = 0, n = regions.size; i < n; i++) {
            AtlasRegion region = regions.get(i);
            if (region.name.equals(name)) matched.add(new AtlasRegion(region));
        }
        return matched;
    }

    /** Returns all regions in the atlas as sprites. This method creates a new sprite for each region, so the result should be
     * stored rather than calling this method multiple times.
     * @see #createSprite(String) */
    public Array<Sprite> createSprites () {
        Array<Sprite> sprites = new Array<Sprite>(regions.size);
        for (int i = 0, n = regions.size; i < n; i++)
            sprites.add(newSprite(regions.get(i)));
        return sprites;
    }

    /** Returns the first region found with the specified name as a sprite. If whitespace was stripped from the region when it was
     * packed, the sprite is automatically positioned as if whitespace had not been stripped. This method uses string comparison to
     * find the region and constructs a new sprite, so the result should be cached rather than calling this method multiple times.
     * @return The sprite, or null. */
    public Sprite createSprite (String name) {
        for (int i = 0, n = regions.size; i < n; i++)
            if (regions.get(i).name.equals(name)) return newSprite(regions.get(i));
        return null;
    }

    /** Returns the first region found with the specified name and index as a sprite. This method uses string comparison to find the
     * region and constructs a new sprite, so the result should be cached rather than calling this method multiple times.
     * @return The sprite, or null.
     * @see #createSprite(String) */
    public Sprite createSprite (String name, int index) {
        for (int i = 0, n = regions.size; i < n; i++) {
            AtlasRegion region = regions.get(i);
            if (!region.name.equals(name)) continue;
            if (region.index != index) continue;
            return newSprite(region);
        }
        return null;
    }

    /** Returns all regions with the specified name as sprites, ordered by smallest to largest {@link AtlasRegion#index index}. This
     * method uses string comparison to find the regions and constructs new sprites, so the result should be cached rather than
     * calling this method multiple times.
     * @see #createSprite(String) */
    public Array<Sprite> createSprites (String name) {
        Array<Sprite> matched = new Array<Sprite>();
        for (int i = 0, n = regions.size; i < n; i++) {
            AtlasRegion region = regions.get(i);
            if (region.name.equals(name)) matched.add(newSprite(region));
        }
        return matched;
    }

    private Sprite newSprite (AtlasRegion region) {
        if (region.packedWidth == region.originalWidth && region.packedHeight == region.originalHeight) {
            // No whitespace was stripped; a plain Sprite suffices (rotated back upright if packed rotated).
            if (region.rotate) {
                Sprite sprite = new Sprite(region);
                sprite.setBounds(0, 0, region.getRegionHeight(), region.getRegionWidth());
                sprite.rotate90(true);
                return sprite;
            }
            return new Sprite(region);
        }
        return new AtlasSprite(region);
    }

    /** Returns the first region found with the specified name as a {@link NinePatch}. The region must have been packed with
     * ninepatch splits. This method uses string comparison to find the region and constructs a new ninepatch, so the result should
     * be cached rather than calling this method multiple times.
     * @return The ninepatch, or null. */
    public NinePatch createPatch (String name) {
        for (int i = 0, n = regions.size; i < n; i++) {
            AtlasRegion region = regions.get(i);
            if (region.name.equals(name)) {
                int[] splits = region.splits;
                if (splits == null) throw new IllegalArgumentException("Region does not have ninepatch splits: " + name);
                NinePatch patch = new NinePatch(region, splits[0], splits[1], splits[2], splits[3]);
                if (region.pads != null) patch.setPadding(region.pads[0], region.pads[1], region.pads[2], region.pads[3]);
                return patch;
            }
        }
        return null;
    }

    /** @return the textures of the pages, unordered */
    public Set<Texture> getTextures () {
        return textures;
    }

    /** Releases all resources associated with this TextureAtlas instance. This releases all the textures backing all TextureRegions
     * and Sprites, which should no longer be used after calling dispose. */
    public void dispose () {
        for (Texture texture : textures)
            texture.dispose();
        textures.clear();
    }

    // Orders regions by index, with index -1 ("no index") sorted last.
    static final Comparator<Region> indexComparator = new Comparator<Region>() {
        public int compare (Region region1, Region region2) {
            int i1 = region1.index;
            if (i1 == -1) i1 = Integer.MAX_VALUE;
            int i2 = region2.index;
            if (i2 == -1) i2 = Integer.MAX_VALUE;
            return i1 - i2;
        }
    };

    /** Reads a "key: value" line and returns the trimmed value. */
    static String readValue (BufferedReader reader) throws IOException {
        String line = reader.readLine();
        int colon = line.indexOf(':');
        if (colon == -1) throw new GdxRuntimeException("Invalid line: " + line);
        return line.substring(colon + 1).trim();
    }

    /** Returns the number of tuple values read (2 or 4). */
    static int readTuple (BufferedReader reader) throws IOException {
        String line = reader.readLine();
        int colon = line.indexOf(':');
        if (colon == -1) throw new GdxRuntimeException("Invalid line: " + line);
        int i = 0, lastMatch = colon + 1;
        for (i = 0; i < 3; i++) {
            int comma = line.indexOf(',', lastMatch);
            if (comma == -1) {
                if (i == 0) throw new GdxRuntimeException("Invalid line: " + line);
                break;
            }
            tuple[i] = line.substring(lastMatch, comma).trim();
            lastMatch = comma + 1;
        }
        tuple[i] = line.substring(lastMatch).trim();
        return i + 1;
    }

    /** Describes the region of a packed image and provides information about the original image before it was packed. */
    static public class AtlasRegion extends TextureRegion {
        /** The number at the end of the original image file name, or -1 if none.<br>
         * <br>
         * When sprites are packed, if the original file name ends with a number, it is stored as the index and is not considered as
         * part of the sprite's name. This is useful for keeping animation frames in order.
         * @see TextureAtlas#findRegions(String) */
        public int index;

        /** The name of the original image file, up to the first underscore. Underscores denote special instructions to the texture
         * packer. */
        public String name;

        /** The offset from the left of the original image to the left of the packed image, after whitespace was removed for packing. */
        public float offsetX;

        /** The offset from the bottom of the original image to the bottom of the packed image, after whitespace was removed for
         * packing. */
        public float offsetY;

        /** The width of the image, after whitespace was removed for packing. */
        public int packedWidth;

        /** The height of the image, after whitespace was removed for packing. */
        public int packedHeight;

        /** The width of the image, before whitespace was removed and rotation was applied for packing. */
        public int originalWidth;

        /** The height of the image, before whitespace was removed for packing. */
        public int originalHeight;

        /** If true, the region has been rotated 90 degrees counter clockwise. */
        public boolean rotate;

        /** The ninepatch splits, or null if not a ninepatch. Has 4 elements: left, right, top, bottom. */
        public int[] splits;

        /** The ninepatch pads, or null if not a ninepatch or the has no padding. Has 4 elements: left, right, top, bottom. */
        public int[] pads;

        public AtlasRegion (Texture texture, int x, int y, int width, int height) {
            super(texture, x, y, width, height);
            originalWidth = width;
            originalHeight = height;
            packedWidth = width;
            packedHeight = height;
        }

        public AtlasRegion (AtlasRegion region) {
            setRegion(region);
            index = region.index;
            name = region.name;
            offsetX = region.offsetX;
            offsetY = region.offsetY;
            packedWidth = region.packedWidth;
            packedHeight = region.packedHeight;
            originalWidth = region.originalWidth;
            originalHeight = region.originalHeight;
            rotate = region.rotate;
            splits = region.splits;
            // Fix: pads was not copied, so ninepatch padding was silently lost on
            // copied regions (e.g. those returned by findRegions()).
            pads = region.pads;
        }

        /** Flips the region, adjusting the offset so the image appears to be flip as if no whitespace has been removed for packing. */
        public void flip (boolean x, boolean y) {
            super.flip(x, y);
            if (x) offsetX = originalWidth - offsetX - getRotatedPackedWidth();
            if (y) offsetY = originalHeight - offsetY - getRotatedPackedHeight();
        }

        /** Returns the packed width considering the rotate value, if it is true then it returns the packedHeight, otherwise it
         * returns the packedWidth. */
        public float getRotatedPackedWidth () {
            return rotate ? packedHeight : packedWidth;
        }

        /** Returns the packed height considering the rotate value, if it is true then it returns the packedWidth, otherwise it
         * returns the packedHeight. */
        public float getRotatedPackedHeight () {
            return rotate ? packedWidth : packedHeight;
        }
    }

    /** A sprite that, if whitespace was stripped from the region when it was packed, is automatically positioned as if whitespace
     * had not been stripped. */
    static public class AtlasSprite extends Sprite {
        final AtlasRegion region;
        float originalOffsetX, originalOffsetY;

        public AtlasSprite (AtlasRegion region) {
            this.region = new AtlasRegion(region);
            originalOffsetX = region.offsetX;
            originalOffsetY = region.offsetY;
            setRegion(region);
            setOrigin(region.originalWidth / 2f, region.originalHeight / 2f);
            int width = region.getRegionWidth();
            int height = region.getRegionHeight();
            if (region.rotate) {
                super.rotate90(true);
                super.setBounds(region.offsetX, region.offsetY, height, width);
            } else
                super.setBounds(region.offsetX, region.offsetY, width, height);
            setColor(1, 1, 1, 1);
        }

        public AtlasSprite (AtlasSprite sprite) {
            region = sprite.region;
            this.originalOffsetX = sprite.originalOffsetX;
            this.originalOffsetY = sprite.originalOffsetY;
            set(sprite);
        }

        public void setPosition (float x, float y) {
            super.setPosition(x + region.offsetX, y + region.offsetY);
        }

        public void setBounds (float x, float y, float width, float height) {
            float widthRatio = width / region.originalWidth;
            float heightRatio = height / region.originalHeight;
            region.offsetX = originalOffsetX * widthRatio;
            region.offsetY = originalOffsetY * heightRatio;
            int packedWidth = region.rotate ? region.packedHeight : region.packedWidth;
            int packedHeight = region.rotate ? region.packedWidth : region.packedHeight;
            super.setBounds(x + region.offsetX, y + region.offsetY, packedWidth * widthRatio, packedHeight * heightRatio);
        }

        public void setSize (float width, float height) {
            setBounds(getX(), getY(), width, height);
        }

        public void setOrigin (float originX, float originY) {
            super.setOrigin(originX - region.offsetX, originY - region.offsetY);
        }

        public void flip (boolean x, boolean y) {
            // Flip texture.
            super.flip(x, y);

            float oldOriginX = getOriginX();
            float oldOriginY = getOriginY();
            float oldOffsetX = region.offsetX;
            float oldOffsetY = region.offsetY;

            float widthRatio = getWidthRatio();
            float heightRatio = getHeightRatio();

            region.offsetX = originalOffsetX;
            region.offsetY = originalOffsetY;
            region.flip(x, y); // Updates x and y offsets.
            originalOffsetX = region.offsetX;
            originalOffsetY = region.offsetY;
            region.offsetX *= widthRatio;
            region.offsetY *= heightRatio;

            // Update position and origin with new offsets.
            translate(region.offsetX - oldOffsetX, region.offsetY - oldOffsetY);
            setOrigin(oldOriginX, oldOriginY);
        }

        public void rotate90 (boolean clockwise) {
            // Rotate texture.
            super.rotate90(clockwise);

            float oldOriginX = getOriginX();
            float oldOriginY = getOriginY();
            float oldOffsetX = region.offsetX;
            float oldOffsetY = region.offsetY;

            float widthRatio = getWidthRatio();
            float heightRatio = getHeightRatio();

            if (clockwise) {
                region.offsetX = oldOffsetY;
                region.offsetY = region.originalHeight * heightRatio - oldOffsetX - region.packedWidth * widthRatio;
            } else {
                region.offsetX = region.originalWidth * widthRatio - oldOffsetY - region.packedHeight * heightRatio;
                region.offsetY = oldOffsetX;
            }

            // Update position and origin with new offsets.
            translate(region.offsetX - oldOffsetX, region.offsetY - oldOffsetY);
            setOrigin(oldOriginX, oldOriginY);
        }

        public float getX () {
            return super.getX() - region.offsetX;
        }

        public float getY () {
            return super.getY() - region.offsetY;
        }

        public float getOriginX () {
            return super.getOriginX() + region.offsetX;
        }

        public float getOriginY () {
            return super.getOriginY() + region.offsetY;
        }

        public float getWidth () {
            return super.getWidth() / region.getRotatedPackedWidth() * region.originalWidth;
        }

        public float getHeight () {
            return super.getHeight() / region.getRotatedPackedHeight() * region.originalHeight;
        }

        public float getWidthRatio () {
            return super.getWidth() / region.getRotatedPackedWidth();
        }

        public float getHeightRatio () {
            return super.getHeight() / region.getRotatedPackedHeight();
        }

        public AtlasRegion getAtlasRegion () {
            return region;
        }
    }
}
| |
package com.outbrain.ob1k.server;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.outbrain.ob1k.HttpRequestMethodType;
import com.outbrain.ob1k.client.ClientBuilder;
import com.outbrain.ob1k.client.Clients;
import com.outbrain.ob1k.client.targets.SimpleTargetProvider;
import com.outbrain.ob1k.concurrent.ComposableFuture;
import com.outbrain.ob1k.http.common.ContentType;
import com.outbrain.ob1k.server.builder.ConfigureBuilder;
import com.outbrain.ob1k.server.builder.ConfigureBuilder.ConfigureBuilderSection;
import com.outbrain.ob1k.server.builder.ServerBuilder;
import com.outbrain.ob1k.server.builder.ServiceBindBuilder;
import com.outbrain.ob1k.server.builder.ServiceBindBuilder.ServiceBindBuilderSection;
import com.outbrain.ob1k.server.builder.ServiceRegisterBuilder;
import com.outbrain.ob1k.server.builder.ServiceRegisterBuilder.ServiceRegisterBuilderSection;
import com.outbrain.ob1k.server.entities.OtherEntity;
import com.outbrain.ob1k.server.entities.TestEntity;
import com.outbrain.ob1k.server.services.RequestsTestService;
import com.outbrain.ob1k.server.services.RequestsTestServiceImpl;
import com.outbrain.ob1k.server.services.SimpleTestService;
import com.outbrain.ob1k.server.services.SimpleTestServiceImpl;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
/**
* @author aronen
*/
public class BasicServerRpcTest {
// Builds a test server on a random port with a 50ms request timeout:
// SimpleTestService is registered under /simple and RequestsTestService under
// /users with explicit per-HTTP-method endpoint bindings. The context path is
// /test, so full URLs look like http://host:port/test/users/...
// The listener may be null; when non-null it is registered during configuration.
private static Server buildServer(final Listener listener) {
return ServerBuilder.newBuilder().
contextPath("/test").
configure(new ConfigureBuilderSection() {
@Override
public void apply(final ConfigureBuilder builder) {
builder.useRandomPort().requestTimeout(50, TimeUnit.MILLISECONDS);
if (listener != null) {
builder.addListener(listener);
}
}
}).
service(new ServiceRegisterBuilderSection() {
@Override
public void apply(final ServiceRegisterBuilder builder) {
builder.register(new SimpleTestServiceImpl(), "/simple").
register(new RequestsTestServiceImpl(), "/users", new ServiceBindBuilderSection() {
@Override
public void apply(final ServiceBindBuilder builder) {
// REST-style bindings; {id} etc. are path parameters.
// printDetails has no explicit method, so it accepts the default binding.
builder.endpoint(HttpRequestMethodType.GET, "getAll", "/").
endpoint(HttpRequestMethodType.GET, "fetchUser", "/{id}").
endpoint(HttpRequestMethodType.POST, "updateUser", "/{id}").
endpoint(HttpRequestMethodType.DELETE, "deleteUser", "/{id}").
endpoint(HttpRequestMethodType.PUT, "createUser", "/").
endpoint("printDetails", "/print/{firstName}/{lastName}");
}
});
}
}).build();
}
// Verifies that Server.Listener.serverStarted() fires once per registration:
// the listener is registered twice on purpose (once inside buildServer() and
// again via addListener() below), so a callback count of 2 is expected.
@Test
public void testServerListener() throws Exception {
final Listener listener = new Listener();
final Server server = buildServer(listener);
server.addListener(listener);
server.start();
Assert.assertEquals("serverStarted() wasn't called", 2, listener.serverStartedCallCount);
server.stop();
}
@Test
public void testPathParamsViaGET_Json() throws Exception {
createPathParamsWithBodyTest(HttpRequestMethodType.GET, ContentType.JSON);
}
@Test
public void testPathParamsViaGET_MsgPack() throws Exception {
createPathParamsWithBodyTest(HttpRequestMethodType.GET, ContentType.MESSAGE_PACK);
}
@Test
public void testPathParamsViaPOST_Json() throws Exception {
createPathParamsWithBodyTest(HttpRequestMethodType.POST, ContentType.JSON);
}
@Test
public void testPathParamsViaPOST_MsgPack() throws Exception {
createPathParamsWithBodyTest(HttpRequestMethodType.POST, ContentType.MESSAGE_PACK);
}
private void createPathParamsWithBodyTest(final HttpRequestMethodType methodType, final ContentType contentType) throws Exception {
Server server = null;
RequestsTestService client = null;
try {
server = buildServer(null);
final int port = server.start().getPort();
client = new ClientBuilder<>(RequestsTestService.class).
setTargetProvider(new SimpleTargetProvider("http://localhost:" + port + "/test/users")).
bindEndpoint("printDetails", methodType, "/print/{firstName}/{lastName}").
setRequestTimeout(100000).
setProtocol(contentType).
build();
final String details = client.printDetails("flo", "resent", 20).get();
Assert.assertTrue("details should be flo resent (20)", Objects.equals(details, "flo resent (20)"));
} finally {
if (client != null)
Clients.close(client);
if (server != null)
server.stop();
}
}
@Test
public void testMethodSpecificRequestsWithMsgPack() throws Exception {
testMethodSpecificRequest(ContentType.MESSAGE_PACK);
}
@Test
public void testMethodSpecificRequestsWithJson() throws Exception {
testMethodSpecificRequest(ContentType.JSON);
}
private void testMethodSpecificRequest(final ContentType contentType) throws Exception {
Server server = null;
RequestsTestService client = null;
try {
server = buildServer(null);
final int port = server.start().getPort();
client = new ClientBuilder<>(RequestsTestService.class).
setTargetProvider(new SimpleTargetProvider("http://localhost:" + port + "/test/users")).
bindEndpoint("getAll", HttpRequestMethodType.GET, "/").
bindEndpoint("createUser", HttpRequestMethodType.PUT, "/").
bindEndpoint("fetchUser", HttpRequestMethodType.GET, "/{id}").
bindEndpoint("deleteUser", HttpRequestMethodType.DELETE, "/{id}").
bindEndpoint("updateUser", HttpRequestMethodType.POST, "/{id}").
setRequestTimeout(100000).
setProtocol(contentType).
build();
try {
final List<RequestsTestServiceImpl.Person> persons = client.getAll().get();
Assert.assertTrue("person name should be yossuf", Objects.equals(persons.get(0).name, "Yossuf"));
} catch (final Exception e) {
Assert.fail("Shouldn't have fail: " + e.getMessage());
}
try {
final String updateUser = client.updateUser(1, "Eng. Yossuf", "Java Expert").get();
Assert.assertTrue("response should be great success",
Objects.equals(updateUser, RequestsTestServiceImpl.GREAT_SUCCESS));
} catch (final Exception e) {
Assert.fail("Shouldn't have fail: " + e.getMessage());
}
try {
final RequestsTestService.Person createUser = client.createUser("Julia", "Android Developer").get();
Assert.assertTrue("returned user name should be Julia", Objects.equals(createUser.name, "Julia"));
} catch (final Exception e) {
Assert.fail("Shouldn't have fail: " + e.getMessage());
}
try {
final RequestsTestServiceImpl.Person user = client.fetchUser(1).get();
Assert.assertTrue("person name should be yossuf", Objects.equals(user.name, "Yossuf"));
} catch (final Exception e) {
Assert.fail("Shouldn't have fail: " + e.getMessage());
}
try {
final String deleteUser = client.deleteUser(1).get();
Assert.assertTrue("response should be great success",
Objects.equals(deleteUser, RequestsTestServiceImpl.GREAT_SUCCESS));
} catch (final Exception e) {
Assert.fail("Shouldn't have fail: " + e.getMessage());
}
} finally {
if (client != null)
Clients.close(client);
if (server != null)
server.stop();
}
}
@Test
public void testMaxConnections() throws Exception {
Server server = null;
SimpleTestService client = null;
try {
server = buildServer(null);
final int port = server.start().getPort();
client = buildClientForSimpleTestWithMaxConnections(port, 2);
final ComposableFuture<String> resp1 = client.waitForever();
final ComposableFuture<String> resp2 = client.waitForever();
final ComposableFuture<String> resp3 = client.waitForever();
try {
resp3.get();
Assert.fail("should never return.");
} catch (final ExecutionException e) {
Assert.assertTrue(e.getCause().getMessage().contains("Too many connections"));
}
try {
resp1.get();
Assert.fail("should never return.");
} catch (final ExecutionException e) {
Assert.assertEquals(e.getCause().getClass(), IOException.class);
}
try {
resp2.get();
Assert.fail("should never return.");
} catch (final ExecutionException e) {
Assert.assertEquals(e.getCause().getClass(), IOException.class);
}
} finally {
if (client != null)
Clients.close(client);
if (server != null)
server.stop();
}
}
@Test
public void testServiceCreation() throws Exception {
Server server = null;
SimpleTestService client = null;
try {
server = buildServer(null);
final int port = server.start().getPort();
client = buildClientForSimpleTest(port);
final ComposableFuture<String> res1 =
client.method1(3, "4", new TestEntity(Sets.newHashSet(1L, 2L, 3L), "moshe", null, Lists.<OtherEntity>newArrayList()));
try {
final String response1 = res1.get();
Assert.assertTrue(response1.endsWith("moshe"));
} catch (final ExecutionException e) {
e.printStackTrace();
}
final ComposableFuture<TestEntity> res2 = client.method2(3, "4");
try {
final TestEntity response2 = res2.get();
Assert.assertEquals(response2.getOthers().get(0).getValue1(), 1);
Assert.assertEquals(response2.getOthers().get(0).getValue2(), "2");
} catch (final ExecutionException e) {
e.printStackTrace();
}
} finally {
if (client != null)
Clients.close(client);
if (server != null)
server.stop();
}
}
private SimpleTestService buildClientForSimpleTest(final int port) {
return new ClientBuilder<>(SimpleTestService.class).
setTargetProvider(new SimpleTargetProvider("http://localhost:" + port + "/test/simple")).
build();
}
private SimpleTestService buildClientForSimpleTestWithMaxConnections(final int port, final int maxConnections) {
return new ClientBuilder<>(SimpleTestService.class).
setMaxConnectionsPerHost(maxConnections).
setTargetProvider(new SimpleTargetProvider("http://localhost:" + port + "/test/simple")).
build();
}
@Test
public void testSlowService() throws Exception {
Server server = null;
SimpleTestService client = null;
try {
server = buildServer(null);
final int port = server.start().getPort();
client = buildClientForSimpleTest(port);
final ComposableFuture<Boolean> res = client.slowMethod(100);
try {
res.get();
Assert.fail("should get timeout exception");
} catch (final ExecutionException e) {
Assert.assertTrue(e.getCause().getMessage().contains("status code: 500"));
}
} finally {
if (client != null)
Clients.close(client);
if (server != null)
server.stop();
}
}
@Test
public void testNoParamMethod() throws Exception {
Server server = null;
SimpleTestService client = null;
try {
server = buildServer(null);
final int port = server.start().getPort();
client = buildClientForSimpleTest(port);
try {
final Integer nextNum = client.nextRandom().get();
Assert.assertTrue(nextNum != null);
} catch (final Exception e) {
Assert.fail("no params method failed. error: " + e.getMessage());
}
} finally {
if (client != null)
Clients.close(client);
if (server != null)
server.stop();
}
}
private static class Listener implements Server.Listener {
private int serverStartedCallCount = 0;
@Override
public void serverStarted(final Server server) {
serverStartedCallCount++;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.pdf;
import java.awt.geom.AffineTransform;
/**
* Utility class for generating PDF text objects. It needs to be subclassed to add writing
* functionality (see {@link #write(String)}).
*/
public abstract class PDFTextUtil {
/** The number of decimal places. */
private static final int DEC = 8;
/** PDF text rendering mode: Fill text */
public static final int TR_FILL = 0;
/** PDF text rendering mode: Stroke text */
public static final int TR_STROKE = 1;
/** PDF text rendering mode: Fill, then stroke text */
public static final int TR_FILL_STROKE = 2;
/** PDF text rendering mode: Neither fill nor stroke text (invisible) */
public static final int TR_INVISIBLE = 3;
/** PDF text rendering mode: Fill text and add to path for clipping */
public static final int TR_FILL_CLIP = 4;
/** PDF text rendering mode: Stroke text and add to path for clipping */
public static final int TR_STROKE_CLIP = 5;
/** PDF text rendering mode: Fill, then stroke text and add to path for clipping */
public static final int TR_FILL_STROKE_CLIP = 6;
/** PDF text rendering mode: Add text to path for clipping */
public static final int TR_CLIP = 7;
// true between beginTextObject() (BT) and endTextObject() (ET)
private boolean inTextObject;
// string delimiters for the current font: "<"/">" for multi-byte, "("/")" otherwise
private String startText;
private String endText;
private boolean useMultiByte;
// accumulates the TJ array (text plus glyph adjustments) until writeTJ() flushes it
private StringBuffer bufTJ;
private int textRenderingMode = TR_FILL;
private String currentFontName;
private double currentFontSize;
/**
* Main constructor.
*/
public PDFTextUtil() {
//nop
}
/**
* Writes PDF code.
* @param code the PDF code to write
*/
protected abstract void write(String code);
/**
* Writes PDF code.
* @param code the PDF code to write
*/
protected abstract void write(StringBuffer code);
// Appends the six matrix coefficients [a b c d e f], space-separated,
// each rounded to DEC decimal places.
private void writeAffineTransform(AffineTransform at, StringBuffer sb) {
double[] lt = new double[6];
at.getMatrix(lt);
PDFNumber.doubleOut(lt[0], DEC, sb);
sb.append(' ');
PDFNumber.doubleOut(lt[1], DEC, sb);
sb.append(' ');
PDFNumber.doubleOut(lt[2], DEC, sb);
sb.append(' ');
PDFNumber.doubleOut(lt[3], DEC, sb);
sb.append(' ');
PDFNumber.doubleOut(lt[4], DEC, sb);
sb.append(' ');
PDFNumber.doubleOut(lt[5], DEC, sb);
}
// Appends one character to a PDF string literal. Single-byte: characters
// outside 32..127 are written as octal escapes, and the string delimiters
// '(' ')' plus '\' are backslash-escaped. Multi-byte: hex form via PDFText.
private static void writeChar(char ch, StringBuffer sb, boolean multibyte) {
if (!multibyte) {
if (ch < 32 || ch > 127) {
sb.append("\\").append(Integer.toOctalString(ch));
} else {
switch (ch) {
case '(':
case ')':
case '\\':
sb.append('\\');
break;
default:
}
sb.append(ch);
}
} else {
PDFText.toUnicodeHex(ch, sb);
}
}
private void writeChar(char ch, StringBuffer sb) {
writeChar(ch, sb, useMultiByte);
}
private void checkInTextObject() {
if (!inTextObject) {
throw new IllegalStateException("Not in text object");
}
}
/**
* Indicates whether we are in a text object or not.
* @return true if we are in a text object
*/
public boolean isInTextObject() {
return inTextObject;
}
/**
* Called when a new text object should be started. Be sure to call setFont() before
* issuing any text painting commands.
*/
public void beginTextObject() {
if (inTextObject) {
throw new IllegalStateException("Already in text object");
}
write("BT\n");
this.inTextObject = true;
}
/**
* Called when a text object should be ended.
*/
public void endTextObject() {
checkInTextObject();
write("ET\n");
this.inTextObject = false;
initValues();
}
/**
* Resets the state fields.
*/
protected void initValues() {
this.currentFontName = null;
this.currentFontSize = 0.0;
this.textRenderingMode = TR_FILL;
}
/**
* Creates a "cm" command.
* @param at the transformation matrix
*/
public void concatMatrix(AffineTransform at) {
if (!at.isIdentity()) {
// flush pending text first so it is rendered under the old matrix
writeTJ();
StringBuffer sb = new StringBuffer();
writeAffineTransform(at, sb);
sb.append(" cm\n");
write(sb);
}
}
/**
* Writes a "Tf" command, setting a new current font.
* @param fontName the name of the font to select
* @param fontSize the font size (in points)
*/
public void writeTf(String fontName, double fontSize) {
checkInTextObject();
StringBuffer sb = new StringBuffer();
sb.append('/');
sb.append(fontName);
sb.append(' ');
PDFNumber.doubleOut(fontSize, 6, sb);
sb.append(" Tf\n");
write(sb);
// choose string delimiters matching the font's byte width
this.startText = useMultiByte ? "<" : "(";
this.endText = useMultiByte ? ">" : ")";
}
/**
* Updates the current font. This method only writes a "Tf" if the current font changes.
* @param fontName the name of the font to select
* @param fontSize the font size (in points)
* @param multiByte true indicates the font is a multi-byte font, false means single-byte
*/
public void updateTf(String fontName, double fontSize, boolean multiByte) {
checkInTextObject();
if (!fontName.equals(this.currentFontName) || (fontSize != this.currentFontSize)) {
// flush text buffered under the previous font before switching
writeTJ();
this.currentFontName = fontName;
this.currentFontSize = fontSize;
this.useMultiByte = multiByte;
writeTf(fontName, fontSize);
}
}
/**
* Sets the text rendering mode.
* @param mode the rendering mode (value 0 to 7, see PDF Spec, constants: TR_*)
*/
public void setTextRenderingMode(int mode) {
if (mode < 0 || mode > 7) {
throw new IllegalArgumentException(
"Illegal value for text rendering mode. Expected: 0-7");
}
if (mode != this.textRenderingMode) {
// flush so buffered text keeps the previous rendering mode
writeTJ();
this.textRenderingMode = mode;
write(this.textRenderingMode + " Tr\n");
}
}
/**
* Sets the text rendering mode.
* @param fill true if the text should be filled
* @param stroke true if the text should be stroked
* @param addToClip true if the path should be added for clipping
*/
public void setTextRenderingMode(boolean fill, boolean stroke, boolean addToClip) {
int mode;
if (fill) {
mode = (stroke ? 2 : 0);
} else {
mode = (stroke ? 1 : 3);
}
if (addToClip) {
mode += 4;
}
setTextRenderingMode(mode);
}
/**
* Writes a "Tm" command, setting a new text transformation matrix.
* @param localTransform the new text transformation matrix
*/
public void writeTextMatrix(AffineTransform localTransform) {
StringBuffer sb = new StringBuffer();
writeAffineTransform(localTransform, sb);
sb.append(" Tm ");
write(sb);
}
/**
* Writes a char to the "TJ-Buffer".
* @param codepoint the mapped character (code point/character code)
*/
public void writeTJMappedChar(char codepoint) {
if (bufTJ == null) {
bufTJ = new StringBuffer();
}
if (bufTJ.length() == 0) {
// lazily open the TJ array and the first string element
bufTJ.append('[');
bufTJ.append(startText);
}
writeChar(codepoint, bufTJ);
}
/**
* Writes a glyph adjust value to the "TJ-Buffer".
* <p>Assumes the following:</p>
* <ol>
* <li>if buffer is currently empty, then this is the start of the array object
* that encodes the adjustment and character values, and, therfore, a LEFT
* SQUARE BRACKET '[' must be prepended; and
* </li>
* <li>otherwise (the buffer is
* not empty), then the last element written to the buffer was a mapped
* character, and, therefore, a terminating '>' or ')' followed by a space
* must be appended to the buffer prior to appending the adjustment value.
* </li>
* </ol>
* @param adjust the glyph adjust value in thousands of text unit space.
*/
public void adjustGlyphTJ(double adjust) {
if (bufTJ == null) {
bufTJ = new StringBuffer();
}
if (bufTJ.length() == 0) {
bufTJ.append('[');
} else {
bufTJ.append(endText);
bufTJ.append(' ');
}
// adjustments use fewer decimal places than coordinates (DEC - 4)
PDFNumber.doubleOut(adjust, DEC - 4, bufTJ);
bufTJ.append(' ');
bufTJ.append(startText);
}
/**
* Writes a "TJ" command, writing out the accumulated buffer with the characters and glyph
* positioning values. The buffer is reset afterwards.
*/
public void writeTJ() {
if (isInString()) {
bufTJ.append(endText);
bufTJ.append("] TJ\n");
write(bufTJ);
bufTJ.setLength(0);
}
}
// true when the TJ buffer holds unflushed content
private boolean isInString() {
return bufTJ != null && bufTJ.length() > 0;
}
/**
* Writes a "Td" command with specified x and y coordinates.
* @param x coordinate
* @param y coordinate
*/
public void writeTd(double x, double y) {
StringBuffer sb = new StringBuffer();
PDFNumber.doubleOut(x, DEC, sb);
sb.append(' ');
PDFNumber.doubleOut(y, DEC, sb);
sb.append(" Td\n");
write(sb);
}
/**
* Writes a "Tj" command with specified character code.
* @param ch character code to write
*/
public void writeTj(char ch) {
// always emits the hex (multi-byte) string form, regardless of useMultiByte
StringBuffer sb = new StringBuffer();
sb.append('<');
writeChar(ch, sb, true);
sb.append('>');
sb.append(" Tj\n");
write(sb);
}
}
| |
package com.laytonsmith.core.functions;
import com.laytonsmith.abstraction.MCLocation;
import com.laytonsmith.abstraction.MCPlayer;
import com.laytonsmith.abstraction.MCWorld;
import com.laytonsmith.abstraction.StaticLayer;
import com.laytonsmith.annotations.api;
import com.laytonsmith.core.CHVersion;
import com.laytonsmith.core.ObjectGenerator;
import com.laytonsmith.core.Static;
import com.laytonsmith.core.constructs.CArray;
import com.laytonsmith.core.constructs.CBoolean;
import com.laytonsmith.core.constructs.CInt;
import com.laytonsmith.core.constructs.CVoid;
import com.laytonsmith.core.constructs.Construct;
import com.laytonsmith.core.constructs.Target;
import com.laytonsmith.core.environments.CommandHelperEnvironment;
import com.laytonsmith.core.environments.Environment;
import com.laytonsmith.core.exceptions.CancelCommandException;
import com.laytonsmith.core.exceptions.ConfigRuntimeException;
import com.laytonsmith.core.functions.Exceptions.ExceptionType;
/**
*
*/
public class Weather {

    public static String docs() {
        return "Provides functions to control the weather";
    }

    @api(environments=CommandHelperEnvironment.class)
    public static class lightning extends AbstractFunction {

        @Override
        public String getName() {
            return "lightning";
        }

        @Override
        public Integer[] numArgs() {
            return new Integer[]{1, 2, 3, 4};
        }

        @Override
        public ExceptionType[] thrown() {
            return new ExceptionType[]{ExceptionType.CastException, ExceptionType.LengthException, ExceptionType.InvalidWorldException, ExceptionType.FormatException};
        }

        @Override
        public Construct exec(Target t, Environment env, Construct... args) throws CancelCommandException, ConfigRuntimeException {
            int x, y, z, ent;
            MCWorld w = null;
            boolean safe = false;
            int safeIndex = 1;
            if (args[0] instanceof CArray) {
                // Location-array form: lightning(locArray, [safe]).
                // The array resolves its own world (defaulting to the player's).
                CArray a = (CArray) args[0];
                MCLocation l = ObjectGenerator.GetGenerator().location(a, (env.getEnv(CommandHelperEnvironment.class).GetCommandSender() instanceof MCPlayer ? env.getEnv(CommandHelperEnvironment.class).GetPlayer().getWorld() : null), t);
                x = (int) java.lang.Math.floor(l.getX());
                y = (int) java.lang.Math.floor(l.getY());
                z = (int) java.lang.Math.floor(l.getZ());
                w = l.getWorld();
            } else {
                // Coordinate form: lightning(x, y, z, [safe]).
                x = (int) java.lang.Math.floor(Static.getNumber(args[0], t));
                y = (int) java.lang.Math.floor(Static.getNumber(args[1], t));
                z = (int) java.lang.Math.floor(Static.getNumber(args[2], t));
                safeIndex = 3;
                // BUGFIX: w was never assigned on this branch, so the x/y/z form
                // unconditionally threw InvalidWorldException below. Default to
                // the calling player's world, matching storm()/set_thunder().
                if (env.getEnv(CommandHelperEnvironment.class).GetCommandSender() instanceof MCPlayer) {
                    w = env.getEnv(CommandHelperEnvironment.class).GetPlayer().getWorld();
                }
            }
            if (args.length >= safeIndex + 1) {
                safe = Static.getBoolean(args[safeIndex]);
            }
            if (w != null) {
                if (!safe) {
                    // Real strike (can damage entities); strike one block above the target.
                    ent = w.strikeLightning(StaticLayer.GetLocation(w, x, y + 1, z)).getEntityId();
                } else {
                    // Visual/audible effect only.
                    ent = w.strikeLightningEffect(StaticLayer.GetLocation(w, x, y + 1, z)).getEntityId();
                }
            } else {
                throw new ConfigRuntimeException("World was not specified", ExceptionType.InvalidWorldException, t);
            }
            return new CInt(ent, t);
        }

        @Override
        public String docs() {
            return "int {strikeLocArray, [safe] | x, y, z, [safe]} Makes"
                    + " lightning strike at the x y z coordinates specified"
                    + " in the array(x, y, z). Safe defaults to false, but"
                    + " if true, lightning striking a player will not hurt"
                    + " them. Returns the entityID of the lightning bolt.";
        }

        @Override
        public boolean isRestricted() {
            return true;
        }

        @Override
        public CHVersion since() {
            return CHVersion.V3_0_1;
        }

        @Override
        public Boolean runAsync() {
            return false;
        }
    }

    @api(environments=CommandHelperEnvironment.class)
    public static class storm extends AbstractFunction {

        @Override
        public String getName() {
            return "storm";
        }

        @Override
        public Integer[] numArgs() {
            return new Integer[]{1, 2, 3};
        }

        @Override
        public Construct exec(Target t, Environment env, Construct... args) throws CancelCommandException, ConfigRuntimeException {
            boolean b = Static.getBoolean(args[0]);
            MCWorld w = null;
            // Default to the calling player's world, if any.
            if (env.getEnv(CommandHelperEnvironment.class).GetCommandSender() instanceof MCPlayer) {
                w = env.getEnv(CommandHelperEnvironment.class).GetPlayer().getWorld();
            }
            // BUGFIX: was `args.length == 2`, which silently ignored the explicit
            // world argument whenever a duration (third argument) was also given.
            if (args.length >= 2) {
                w = Static.getServer().getWorld(args[1].val());
            }
            if (w != null) {
                w.setStorm(b);
            } else {
                throw new ConfigRuntimeException("World was not specified", ExceptionType.InvalidWorldException, t);
            }
            if (args.length == 3) {
                w.setWeatherDuration(Static.getInt32(args[2], t));
            }
            return CVoid.VOID;
        }

        @Override
        public String docs() {
            return "void {isStorming, [world], [int]} Creates a (rain) storm if isStorming is true, stops a storm if"
                    + " isStorming is false. The third argument allows setting how long this weather setting will last.";
        }

        @Override
        public ExceptionType[] thrown() {
            return new ExceptionType[]{ExceptionType.CastException, ExceptionType.InvalidWorldException};
        }

        @Override
        public boolean isRestricted() {
            return true;
        }

        @Override
        public CHVersion since() {
            return CHVersion.V3_0_1;
        }

        @Override
        public Boolean runAsync() {
            return false;
        }
    }

    @api(environments=CommandHelperEnvironment.class)
    public static class set_thunder extends AbstractFunction {

        @Override
        public ExceptionType[] thrown() {
            return new ExceptionType[]{ExceptionType.InvalidWorldException, ExceptionType.CastException};
        }

        @Override
        public boolean isRestricted() {
            return true;
        }

        @Override
        public Boolean runAsync() {
            return false;
        }

        @Override
        public Construct exec(Target t, Environment environment,
                Construct... args) throws ConfigRuntimeException {
            MCWorld w = null;
            if (args.length == 1) {
                // No world given: use the calling player's world, if any.
                if (environment.getEnv(CommandHelperEnvironment.class).GetCommandSender() instanceof MCPlayer) {
                    w = environment.getEnv(CommandHelperEnvironment.class).GetPlayer().getWorld();
                }
            } else {
                w = Static.getServer().getWorld(args[1].val());
            }
            if (w != null) {
                w.setThundering(Static.getBoolean(args[0]));
            } else {
                throw new ConfigRuntimeException("No existing world specified!", ExceptionType.InvalidWorldException, t);
            }
            if (args.length == 3) {
                w.setThunderDuration(Static.getInt32(args[2], t));
            }
            return CVoid.VOID;
        }

        @Override
        public String getName() {
            return "set_thunder";
        }

        @Override
        public Integer[] numArgs() {
            return new Integer[]{1, 2, 3};
        }

        @Override
        public String docs() {
            return "void {boolean, [world], [int]} Sets whether or not the weather can have thunder. The third argument"
                    + " can specify how long the thunder should last.";
        }

        @Override
        public CHVersion since() {
            return CHVersion.V3_3_1;
        }
    }

    @api(environments=CommandHelperEnvironment.class)
    public static class has_storm extends AbstractFunction {

        @Override
        public ExceptionType[] thrown() {
            return new ExceptionType[]{ExceptionType.InvalidWorldException};
        }

        @Override
        public boolean isRestricted() {
            return false;
        }

        @Override
        public Boolean runAsync() {
            return false;
        }

        @Override
        public Construct exec(Target t, Environment environment,
                Construct... args) throws ConfigRuntimeException {
            MCWorld w = null;
            if (args.length == 1) {
                w = Static.getServer().getWorld(args[0].val());
            } else {
                // No world given: use the calling player's world, if any.
                if (environment.getEnv(CommandHelperEnvironment.class).GetCommandSender() instanceof MCPlayer) {
                    w = environment.getEnv(CommandHelperEnvironment.class).GetPlayer().getWorld();
                }
            }
            if (w != null) {
                return CBoolean.get(w.isStorming());
            } else {
                throw new ConfigRuntimeException("No existing world specified!", ExceptionType.InvalidWorldException, t);
            }
        }

        @Override
        public String getName() {
            return "has_storm";
        }

        @Override
        public Integer[] numArgs() {
            return new Integer[]{0, 1};
        }

        @Override
        public String docs() {
            return "boolean {[world]} Returns whether the world (defaults to player's world) has a storm.";
        }

        @Override
        public CHVersion since() {
            return CHVersion.V3_3_1;
        }
    }

    @api(environments=CommandHelperEnvironment.class)
    public static class has_thunder extends AbstractFunction {

        @Override
        public ExceptionType[] thrown() {
            return new ExceptionType[]{ExceptionType.InvalidWorldException};
        }

        @Override
        public boolean isRestricted() {
            return false;
        }

        @Override
        public Boolean runAsync() {
            return false;
        }

        @Override
        public Construct exec(Target t, Environment environment,
                Construct... args) throws ConfigRuntimeException {
            MCWorld w = null;
            if (args.length == 1) {
                w = Static.getServer().getWorld(args[0].val());
            } else {
                // No world given: use the calling player's world, if any.
                if (environment.getEnv(CommandHelperEnvironment.class).GetCommandSender() instanceof MCPlayer) {
                    w = environment.getEnv(CommandHelperEnvironment.class).GetPlayer().getWorld();
                }
            }
            if (w != null) {
                return CBoolean.get(w.isThundering());
            } else {
                throw new ConfigRuntimeException("No existing world specified!", ExceptionType.InvalidWorldException, t);
            }
        }

        @Override
        public String getName() {
            return "has_thunder";
        }

        @Override
        public Integer[] numArgs() {
            return new Integer[]{0, 1};
        }

        @Override
        public String docs() {
            return "boolean {[world]} Returns whether the world (defaults to player's world) has thunder.";
        }

        @Override
        public CHVersion since() {
            return CHVersion.V3_3_1;
        }
    }
}
| |
/*
* Copyright 1999-2006 University of Chicago
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.globus.gsi;
import java.security.PrivateKey;
import java.security.GeneralSecurityException;
import java.security.cert.X509Certificate;
import java.security.cert.CertificateExpiredException;
import java.security.cert.CertificateEncodingException;
import java.security.cert.CertificateException;
import java.security.interfaces.RSAPrivateKey;
import java.io.IOException;
import java.io.FileNotFoundException;
import java.io.EOFException;
import java.io.ByteArrayInputStream;
import java.io.InputStreamReader;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.FileInputStream;
import java.io.BufferedReader;
import java.io.Serializable;
import java.io.ObjectOutputStream;
import java.io.ObjectInputStream;
import java.io.File;
import java.util.Vector;
import java.util.Date;
import org.globus.common.ChainedIOException;
import org.globus.common.CoGProperties;
import org.globus.util.Base64;
import org.globus.util.I18n;
import org.globus.gsi.bc.BouncyCastleOpenSSLKey;
import org.globus.gsi.bc.BouncyCastleUtil;
import org.globus.gsi.gssapi.SSLUtil;
import org.globus.gsi.proxy.ProxyPathValidator;
import org.globus.gsi.proxy.ProxyPathValidatorException;
import org.globus.gsi.TrustedCertificates;
import org.globus.gsi.CertificateRevocationLists;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Provides a Java object representation of Globus credential
* which can include the proxy file or host certificates.
*/
public class GlobusCredential implements Serializable {
private static I18n i18n =
I18n.getI18n("org.globus.gsi.errors",
CertUtil.class.getClassLoader());
private static Log logger =
LogFactory.getLog(GlobusCredential.class.getName());
private static transient GlobusCredential defaultCred = null;
// indicates if default credential was explicitely set
// and if so - if the credential expired it try
// to load the proxy from a file.
private static transient boolean credentialSet = false;
private static transient File credentialFile = null;
private static transient long credentialLastModified = -1;
/** holds the private key - this key is time limited */
private PrivateKey key;
/** holds both the certificate chain */
private X509Certificate [] certs;
/** Creates a GlobusCredential from a private key and
* a certificate chain.
*
* @param key the private key
* @param certs the certificate chain
*/
public GlobusCredential(PrivateKey key, X509Certificate [] certs) {
// NOTE(review): neither argument is null-checked or defensively copied;
// the caller shares the certificate array with this credential.
this.key = key;
this.certs = certs;
}
/**
* Creates a GlobusCredential from a proxy file.
*
* @param proxyFile the file to load the credential from.
* @exception GlobusCredentialException if the credential failed to
* load.
*/
public GlobusCredential(String proxyFile)
throws GlobusCredentialException {
if (proxyFile == null) {
throw new IllegalArgumentException(i18n
.getMessage("proxyFileNull"));
}
logger.debug("Loading proxy file: " + proxyFile);
try {
// load() wraps the stream in a BufferedReader and closes it in its
// finally block, so the FileInputStream is released on this path.
InputStream in = new FileInputStream(proxyFile);
load(in);
} catch(FileNotFoundException f) {
// map a missing proxy file to a dedicated, i18n-keyed error
throw new GlobusCredentialException(
GlobusCredentialException.IO_ERROR,
"proxyNotFound",
new Object[] {proxyFile});
}
}
/**
* Creates a GlobusCredential from certificate file and a
* unencrypted key file.
*
* @param certFile the file containing the certificate
* @param unencryptedKeyFile the file containing the private key. The key
* must be unencrypted.
* @exception GlobusCredentialException if something goes wrong.
*/
public GlobusCredential(String certFile,
String unencryptedKeyFile)
throws GlobusCredentialException {
if (certFile == null || unencryptedKeyFile == null) {
throw new IllegalArgumentException();
}
try {
this.certs = CertUtil.loadCertificates(certFile);
OpenSSLKey k = new BouncyCastleOpenSSLKey(unencryptedKeyFile);
// encrypted keys are rejected: this constructor takes no passphrase
if (k.isEncrypted()) {
throw new GlobusCredentialException(
GlobusCredentialException.DEFECTIVE,
"encPrivKey",
new Object [] {unencryptedKeyFile});
}
this.key = k.getPrivateKey();
} catch (IOException e) {
throw new GlobusCredentialException(
GlobusCredentialException.IO_ERROR,
"ioError00",
e);
} catch (GeneralSecurityException e) {
throw new GlobusCredentialException(
GlobusCredentialException.SEC_ERROR,
"secError00",
e);
} catch (Exception e) {
// catch-all keeps the original cause attached to the wrapper
throw new GlobusCredentialException(
GlobusCredentialException.FAILURE,
"error00",
e);
}
}
/**
* Creates a GlobusCredential from an input stream.
*
* @param input the stream to load the credential from.
* @exception GlobusCredentialException if the credential failed to
* load.
*/
public GlobusCredential(InputStream input)
throws GlobusCredentialException {
// NOTE(review): load() closes the reader it wraps around this stream,
// so the caller's stream is consumed and closed by this constructor.
load(input);
}
protected void load(InputStream input)
throws GlobusCredentialException {
// Parses a PEM-formatted credential from the stream: any number of
// CERTIFICATE blocks (kept in order of appearance) plus an RSA PRIVATE KEY
// block, which must be unencrypted. Requires at least one certificate and
// exactly one key to succeed; fields are only assigned after full parsing.
if (input == null) {
String err = i18n.getMessage("credInpStreamNull");
throw new IllegalArgumentException(err);
}
PrivateKey key = null;
X509Certificate cert = null;
Vector chain = new Vector(3);
String line;
BufferedReader reader = null;
try {
reader = new BufferedReader(new InputStreamReader(input));
while( (line = reader.readLine()) != null ) {
if (line.indexOf("BEGIN CERTIFICATE") != -1) {
// decode the base64 body up to the matching END marker
byte [] data = getDecodedPEMObject(reader);
cert = CertUtil.loadCertificate(new ByteArrayInputStream(data));
chain.addElement(cert);
} else if (line.indexOf("BEGIN RSA PRIVATE KEY") != -1) {
byte [] data = getDecodedPEMObject(reader);
OpenSSLKey k = new BouncyCastleOpenSSLKey("RSA", data);
key = k.getPrivateKey();
}
}
} catch (IOException e) {
throw new GlobusCredentialException(
GlobusCredentialException.IO_ERROR,
"ioError00",
e);
} catch (GeneralSecurityException e) {
throw new GlobusCredentialException(
GlobusCredentialException.SEC_ERROR,
"secError00",
e);
} catch (Exception e) {
throw new GlobusCredentialException(
GlobusCredentialException.FAILURE,
"error00",
e);
} finally {
// closing the reader also closes the caller-supplied input stream
if (reader != null) {
try { reader.close(); } catch(IOException e) {}
}
}
int size = chain.size();
if (size == 0) {
throw new GlobusCredentialException(
GlobusCredentialException.SEC_ERROR,
"noCerts00",
(Exception)null);
}
if (key == null) {
throw new GlobusCredentialException(
GlobusCredentialException.SEC_ERROR,
"noKey00",
(Exception)null);
}
// set chain
this.certs = new X509Certificate[size];
chain.copyInto(certs);
// set key
this.key = key;
}
/**
* Reads Base64 encoded data from the stream and returns
* its decoded value. The reading continues until the "END"
* string is found in the data. Otherwise, returns null.
*/
private static final byte[] getDecodedPEMObject(BufferedReader reader)
throws IOException {
String line;
StringBuffer buf = new StringBuffer();
while( (line = reader.readLine()) != null ) {
if (line.indexOf("--END") != -1) { // found end
return Base64.decode(buf.toString().getBytes());
} else {
buf.append(line);
}
}
throw new EOFException(i18n.getMessage("pemFooter"));
}
/**
* Saves the credential into a specified output stream.
* The self-signed certificates in the certificate chain will not be saved.
* The output stream should always be closed after calling this function.
*
* @param out the output stream to write the credential to.
* @exception IOException if any error occurred during saving.
*/
public void save(OutputStream out)
throws IOException {
try {
CertUtil.writeCertificate(out, this.certs[0]);
OpenSSLKey k = new BouncyCastleOpenSSLKey(key);
k.writeTo(out);
for (int i=1;i<this.certs.length;i++) {
// this will skip the self-signed certificates
if (this.certs[i].getSubjectDN().equals(certs[i].getIssuerDN())) continue;
CertUtil.writeCertificate(out, this.certs[i]);
}
} catch (CertificateEncodingException e) {
throw new ChainedIOException(e.getMessage(), e);
}
out.flush();
}
/**
* Verifies the validity of the credentials. All certificate path
* validation is performed using trusted certificates in default locations.
*
* @exception GlobusCredentialException if one of the certificates in
* the chain expired or if path validiation fails.
*/
public void verify()
throws GlobusCredentialException {
ProxyPathValidator validator = new ProxyPathValidator();
try {
TrustedCertificates trustedCerts = TrustedCertificates.getDefault();
validator.validate(getCertificateChain(),
trustedCerts.getCertificates(),
CertificateRevocationLists.getDefault(),
trustedCerts.getSigningPolicies());
}
catch (ProxyPathValidatorException e) {
if (e.getMessage().startsWith("[JGLOBUS-96]")) {
throw new GlobusCredentialException(
GlobusCredentialException.EXPIRED,
"expired00",
e);
}
else {
throw new GlobusCredentialException(
GlobusCredentialException.SEC_ERROR,
"certVerifyError",
e);
}
}
}
    /**
     * Returns the identity certificate of this credential. The identity
     * certificate is the first certificate in the chain that is not
     * an impersonation proxy certificate (as determined by
     * {@link BouncyCastleUtil#getIdentityCertificate}).
     *
     * @return <code>X509Certificate</code> the identity cert. Null,
     *         if unable to get the identity certificate (an error
     *         occurred; the error is logged at debug level).
     */
    public X509Certificate getIdentityCertificate() {
        try {
            return BouncyCastleUtil.getIdentityCertificate(this.certs);
        } catch (CertificateException e) {
            // swallow by design: callers test for null instead
            logger.debug("Error getting certificate identity", e);
            return null;
        }
    }
/**
* Returns the path length constraint. The shortest length in the chain of
* certificates is returned as the credential's path length.
*
* @return The path length constraint of the credential. -1 is any error
* occurs.
*/
public int getPathConstraint() {
int pathLength = Integer.MAX_VALUE;
try {
for (int i=0; i<this.certs.length; i++) {
int length =
BouncyCastleUtil.getProxyPathConstraint(this.certs[i]);
// if length is one, then no proxy cert extension exists, so
// path length is -1
if (length == -1) {
length = Integer.MAX_VALUE;
}
if (length < pathLength) {
pathLength = length;
}
}
} catch (IOException e) {
logger.error("Error retrieving path length", e);
pathLength = -1;
} catch (CertificateEncodingException e) {
logger.error("Error retrieving path length", e);
pathLength = -1;
}
return pathLength;
}
    /**
     * Returns the identity of this credential.
     * @see #getIdentityCertificate()
     *
     * @return The identity cert in Globus format (e.g. /C=US/..). Null,
     *         if unable to get the identity (an error occurred; it is
     *         logged at debug level).
     */
    public String getIdentity() {
        try {
            return BouncyCastleUtil.getIdentity(this.certs);
        } catch (CertificateException e) {
            // swallow by design: callers test for null instead
            logger.debug("Error getting certificate identity", e);
            return null;
        }
    }
    /**
     * Returns the private key of this credential.
     *
     * @return <code>PrivateKey</code> the private key
     */
    public PrivateKey getPrivateKey() {
        return key;
    }
    /**
     * Returns the certificate chain of this credential.
     * Note: the internal array is returned directly (no defensive copy),
     * so mutating it mutates this credential.
     *
     * @return <code>X509Certificate []</code> the certificate chain
     */
    public X509Certificate [] getCertificateChain() {
        return this.certs;
    }
/**
* Returns the number of certificates in the credential without the
* self-signed certificates.
*
* @return number of certificates without counting self-signed certificates
*/
public int getCertNum() {
for (int i=this.certs.length-1;i>=0;i--) {
if (!this.certs[i].getSubjectDN().equals(this.certs[i].getIssuerDN())) {
return i+1;
}
}
return this.certs.length;
}
/**
* Returns strength of the private/public key in bits.
*
* @return strength of the key in bits. Returns -1
* if unable to determine it.
*/
public int getStrength() {
if (key == null) return -1;
return ((RSAPrivateKey)key).getModulus().bitLength();
}
    /**
     * Returns the subject DN of the first certificate in the chain
     * (the end-entity certificate).
     *
     * @return subject DN.
     */
    public String getSubject() {
        return this.certs[0].getSubjectDN().getName();
    }
    /**
     * Returns the issuer DN of the first certificate in the chain
     * (the end-entity certificate).
     *
     * @return issuer DN.
     */
    public String getIssuer() {
        return this.certs[0].getIssuerDN().getName();
    }
    /**
     * Returns the certificate type of the first certificate in
     * the chain. Returns -1 if unable to determine the certificate
     * type (an error occurred; it is logged at debug level)
     * @see BouncyCastleUtil#getCertificateType(X509Certificate)
     *
     * @return the type of first certificate in the chain. -1 if unable
     *         to determine the certificate type.
     */
    public int getProxyType() {
        try {
            return BouncyCastleUtil.getCertificateType(this.certs[0]);
        } catch (CertificateException e) {
            // swallow by design: callers test for -1 instead
            logger.debug("Error getting certificate type", e);
            return -1;
        }
    }
/**
* Returns time left of this credential. The time left of the credential
* is based on the certificate with the shortest validity time.
*
* @return time left in seconds. Returns 0 if the
* certificate has expired.
*/
public long getTimeLeft() {
Date earliestTime = null;
for (int i=0;i<this.certs.length;i++) {
Date time = this.certs[i].getNotAfter();
if (earliestTime == null ||
time.before(earliestTime)) {
earliestTime = time;
}
}
long diff = (earliestTime.getTime() - System.currentTimeMillis())/1000;
return (diff < 0) ? 0 : diff;
}
    // Custom serialization. Wire format: [int keyLen][key bytes in
    // OpenSSL PEM encoding] followed by [int certCount] and, for each
    // certificate, [int len][DER bytes]. readObject must mirror this
    // exactly, so do not reorder these writes.
    private void writeObject(ObjectOutputStream oos) throws IOException {
        byte [] encoded;
        // write key
        OpenSSLKey encodedKey = new BouncyCastleOpenSSLKey(this.key);
        encoded = encodedKey.getEncoded();
        oos.writeInt(encoded.length);
        oos.write(encoded);
        // write certs
        oos.writeInt(this.certs.length);
        try {
            for (int i=0;i<this.certs.length;i++) {
                encoded = this.certs[i].getEncoded();
                oos.writeInt(encoded.length);
                oos.write(encoded);
            }
        } catch (Exception e) {
            // encoding failures are surfaced as IOExceptions with cause
            throw new ChainedIOException("", e);
        }
    }
    // Reads one length-prefixed byte block written by writeObject.
    private static byte[] readData(ObjectInputStream ois)
        throws IOException {
        int len = ois.readInt();
        byte [] encoded = new byte[len];
        // readFully loops until 'len' bytes are consumed
        SSLUtil.readFully(ois, encoded, 0, len);
        return encoded;
    }
    // Custom deserialization; mirrors writeObject's wire format:
    // key first, then the certificate count and each DER certificate.
    private void readObject(ObjectInputStream ois)
        throws IOException, ClassNotFoundException {
        // read key
        try {
            byte [] encoded = readData(ois);
            OpenSSLKey encodedKey = new BouncyCastleOpenSSLKey("RSA", encoded);
            this.key = encodedKey.getPrivateKey();
        } catch (IOException e) {
            // propagate I/O errors untouched
            throw e;
        } catch (Exception e) {
            throw new ChainedIOException("", e);
        }
        // read certs
        int certs = ois.readInt();
        this.certs = new X509Certificate[certs];
        try {
            for (int i=0;i<certs;i++) {
                InputStream in = new ByteArrayInputStream(readData(ois));
                this.certs[i] = CertUtil.loadCertificate(in);
            }
        } catch (IOException e) {
            // propagate I/O errors untouched
            throw e;
        } catch (Exception e) {
            throw new ChainedIOException("", e);
        }
    }
    /**
     * Returns the default credential. The default credential is usually
     * the user proxy certificate. <BR>
     * The credential will be loaded on the initial call. It must not
     * be expired. All subsequent calls to this function return
     * cached credential object. Once the credential is cached, and
     * the underlying file changes, the credential will be reloaded.
     * Note: if a credential was installed via
     * {@link #setDefaultCredential}, it is returned as-is without
     * re-verification or file checks.
     *
     * @return the default credential.
     * @exception GlobusCredentialException if the credential expired or
     *            some other error with the credential.
     */
    public synchronized static GlobusCredential getDefaultCredential()
        throws GlobusCredentialException {
        if (defaultCred == null) {
            // first call: load from the configured proxy file
            reloadDefaultCredential();
        } else if (!credentialSet) {
            // cached file-based credential: re-verify if the file is
            // unchanged, otherwise reload it from disk
            if (credentialFile.lastModified() == credentialLastModified) {
                defaultCred.verify();
            } else {
                defaultCred = null;
                reloadDefaultCredential();
            }
        }
        return defaultCred;
    }
    // Loads (and verifies) the credential from the configured proxy file
    // location, recording the file's timestamp for cache invalidation.
    private static void reloadDefaultCredential()
        throws GlobusCredentialException {
        String proxyLocation = CoGProperties.getDefault().getProxyFile();
        defaultCred = new GlobusCredential(proxyLocation);
        credentialFile = new File(proxyLocation);
        credentialLastModified = credentialFile.lastModified();
        defaultCred.verify();
    }
    /**
     * Sets default credential. Passing null reverts to file-based
     * default-credential loading on the next getDefaultCredential call.
     *
     * @param cred the credential to set a default.
     */
    public synchronized static void setDefaultCredential(GlobusCredential cred) {
        defaultCred = cred;
        credentialSet = (cred != null);
    }
public String toString() {
String lineSep = System.getProperty("line.separator");
StringBuffer buf = new StringBuffer();
buf.append("subject : ").append(getSubject()).append(lineSep);
buf.append("issuer : ").append(getIssuer()).append(lineSep);
buf.append("strength : ").append(getStrength() + " bits").append(lineSep);
buf.append("timeleft : ").append(getTimeLeft() + " sec").append(lineSep);
buf.append("proxy type : ").append(CertUtil.getProxyTypeAsString(getProxyType()));
return buf.toString();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Vera Y. Petrashkova
*/
package org.apache.harmony.security.tests.java.security;
import java.security.*;
import java.security.cert.Certificate;
import org.apache.harmony.security.tests.support.cert.MyCertificate;
import junit.framework.TestCase;
import junit.framework.Test;
import junit.framework.TestSuite;
/**
* Tests for <code>KeyStore.PrivateKeyEntry</code> class constructor and methods
*
*/
public class KSPrivateKeyEntryTest extends TestCase {
    /**
     * Constructor for KSPrivateKeyEntryTest.
     * @param arg0 test name, forwarded to TestCase.
     */
    public KSPrivateKeyEntryTest(String arg0) {
        super(arg0);
    }
    // Fixtures populated by createParams() before each positive/negative case.
    private PrivateKey testPrivateKey;
    private Certificate [] testChain;
    // Builds a 5-element certificate chain and a matching (or deliberately
    // mismatched) private key.
    //   diffCerts - when true, each certificate gets a distinct type name,
    //               which makes PrivateKeyEntry reject the chain;
    //   diffKeys  - when true, the key's algorithm does not match the
    //               end certificate's public-key algorithm.
    private void createParams(boolean diffCerts, boolean diffKeys) {
        byte[] encoded = {(byte)0, (byte)1, (byte)2, (byte)3};
        testChain = new Certificate[5];
        for (int i = 0; i < testChain.length; i++) {
            String s = (diffCerts ? Integer.toString(i) : "NEW");
            testChain[i] = new MyCertificate("MY_TEST_CERTIFICATE_"
                    .concat(s), encoded);
        }
        testPrivateKey = (diffKeys ? (PrivateKey)new tmpPrivateKey() :
            (PrivateKey)new tmpPrivateKey(testChain[0].getPublicKey().getAlgorithm()));
    }
    /**
     * Test for <code>PrivateKeyEntry(PrivateKey privateKey, Certificate[] chain)</code>
     * constructor
     * Assertion: throws NullPointerException when privateKey is null
     */
    public void testPrivateKeyEntry01() {
        Certificate[] certs = new MyCertificate[1];//new Certificate[1];
        PrivateKey pk = null;
        try {
            new KeyStore.PrivateKeyEntry(pk, certs);
            fail("NullPointerException must be thrown when privateKey is null");
        } catch (NullPointerException e) {
            // expected
        }
    }
    /**
     * Test for <code>PrivateKeyEntry(PrivateKey privateKey, Certificate[] chain)</code>
     * constructor
     * Assertion: throws NullPointerException when chain is null
     * and throws IllegalArgumentException when chain length is 0
     */
    public void testPrivateKeyEntry02() {
        Certificate[] chain = null;
        PrivateKey pk = new tmpPrivateKey();
        try {
            new KeyStore.PrivateKeyEntry(pk, chain);
            fail("NullPointerException must be thrown when chain is null");
        } catch (NullPointerException e) {
            // expected
        }
        try {
            chain = new Certificate[0];
            new KeyStore.PrivateKeyEntry(pk, chain);
            fail("IllegalArgumentException must be thrown when chain length is 0");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }
    /**
     * Test for <code>PrivateKeyEntry(PrivateKey privateKey, Certificate[] chain)</code>
     * constructor
     * Assertion: throws IllegalArgumentException when chain contains certificates
     * of different types
     */
    public void testPrivateKeyEntry03() {
        createParams(true, false);
        try {
            new KeyStore.PrivateKeyEntry(testPrivateKey, testChain);
            fail("IllegalArgumentException must be thrown when chain contains certificates of different types");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }
    /**
     * Test for <code>PrivateKeyEntry(PrivateKey privateKey, Certificate[] chain)</code>
     * constructor
     * Assertion: throws IllegalArgumentException when algorithm of privateKey
     * does not match the algorithm of PublicKey in the end certificate (with 0 index)
     */
    public void testPrivateKeyEntry04() {
        createParams(false, true);
        try {
            new KeyStore.PrivateKeyEntry(testPrivateKey, testChain);
            fail("IllegalArgumentException must be thrown when key algorithms do not match");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }
    /**
     * Test for <code>getPrivateKey()</code> method
     * Assertion: returns PrivateKey object
     */
    public void testGetPrivateKey() {
        createParams(false, false);
        KeyStore.PrivateKeyEntry ksPKE = new KeyStore.PrivateKeyEntry(
                testPrivateKey, testChain);
        assertEquals("Incorrect PrivateKey", testPrivateKey, ksPKE
                .getPrivateKey());
    }
    /**
     * Test for <code>getCertificateChain()</code> method Assertion: returns
     * array of the Certificates corresponding to chain
     */
    public void testGetCertificateChain() {
        createParams(false, false);
        KeyStore.PrivateKeyEntry ksPKE = new KeyStore.PrivateKeyEntry(
                testPrivateKey, testChain);
        Certificate[] res = ksPKE.getCertificateChain();
        assertEquals("Incorrect chain length", testChain.length, res.length);
        for (int i = 0; i < res.length; i++) {
            assertEquals("Incorrect chain element: "
                    .concat(Integer.toString(i)), testChain[i], res[i]);
        }
    }
    /**
     * Test for <code>getCertificate()</code> method
     * Assertion: returns end Certificate (with 0 index in chain)
     */
    public void testGetCertificate() {
        createParams(false, false);
        KeyStore.PrivateKeyEntry ksPKE = new KeyStore.PrivateKeyEntry(
                testPrivateKey, testChain);
        Certificate res = ksPKE.getCertificate();
        assertEquals("Incorrect end certificate (number 0)", testChain[0], res);
    }
    /**
     * Test for <code>toString()</code> method
     * Assertion: returns non null String
     */
    public void testToString() {
        createParams(false, false);
        KeyStore.PrivateKeyEntry ksPKE = new KeyStore.PrivateKeyEntry(
                testPrivateKey, testChain);
        String res = ksPKE.toString();
        assertNotNull("toString() returns null", res);
    }
    public static Test suite() {
        return new TestSuite(KSPrivateKeyEntryTest.class);
    }
    public static void main(String args[]) {
        junit.textui.TestRunner.run(suite());
    }
    // Minimal PrivateKey stub; the one-arg constructor lets tests pick the
    // algorithm name so it can be made to match (or mismatch) the chain.
    private static class tmpPrivateKey implements PrivateKey {
        private String alg = "My algorithm";
        public String getAlgorithm() {
            return alg;
        }
        public String getFormat() {
            return "My Format";
        }
        public byte[] getEncoded() {
            return new byte[1];
        }
        public tmpPrivateKey() {
        }
        public tmpPrivateKey(String algorithm) {
            super();
            alg = algorithm;
        }
    }
}
| |
/*
* Copyright (c) 2004, 2011, Oracle and/or its affiliates. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* - Neither the name of Oracle nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* This source code is provided to illustrate the usage of a given feature
* or technique and has been deliberately simplified. Additional steps
* required for a production-quality application, such as security checks,
* input validation and proper error handling, might not be present in
* this sample code.
*/
/*
*/
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.io.IOException;
/**
* This Deadlock class demonstrates the capability of performing
* deadlock detection programmatically within the application using
* the java.lang.management API.
*
* See ThreadMonitor.java for the use of java.lang.management.ThreadMXBean
* API.
*/
public class Deadlock {
    public static void main(String[] argv) {
        // Construct the six threads that will deadlock among themselves.
        new Deadlock();
        // Now find deadlock
        ThreadMonitor monitor = new ThreadMonitor();
        boolean found = false;
        while (!found) {
            found = monitor.findDeadlock();
            try {
                // poll every half second until the deadlock is detected
                Thread.sleep(500);
            } catch (InterruptedException e) {
                System.exit(1);
            }
        }
        System.out.println("\nPress <Enter> to exit this Deadlock program.\n");
        waitForEnterPressed();
    }
    // All six worker threads rendezvous on this barrier (twice) so that
    // each holds its first lock before any attempts its second lock,
    // guaranteeing the deadlock forms.
    private CyclicBarrier barrier = new CyclicBarrier(6);
    public Deadlock() {
        DeadlockThread[] dThreads = new DeadlockThread[6];
        // Cycle 1: a -> b -> c -> a using built-in object monitors.
        Monitor a = new Monitor("a");
        Monitor b = new Monitor("b");
        Monitor c = new Monitor("c");
        dThreads[0] = new DeadlockThread("MThread-1", a, b);
        dThreads[1] = new DeadlockThread("MThread-2", b, c);
        dThreads[2] = new DeadlockThread("MThread-3", c, a);
        // Cycle 2: d -> e -> f -> d using java.util.concurrent locks.
        Lock d = new ReentrantLock();
        Lock e = new ReentrantLock();
        Lock f = new ReentrantLock();
        dThreads[3] = new DeadlockThread("SThread-4", d, e);
        dThreads[4] = new DeadlockThread("SThread-5", e, f);
        dThreads[5] = new DeadlockThread("SThread-6", f, d);
        // make them daemon threads so that the test will exit
        for (int i = 0; i < 6; i++) {
            dThreads[i].setDaemon(true);
            dThreads[i].start();
        }
    }
    // A thread that acquires lock1/mon1, waits at the barrier, then blocks
    // forever trying to acquire lock2/mon2 held by its neighbor.
    class DeadlockThread extends Thread {
        private Lock lock1 = null;
        private Lock lock2 = null;
        private Monitor mon1 = null;
        private Monitor mon2 = null;
        // true -> use Lock objects; false -> use synchronized monitors
        private boolean useSync;
        DeadlockThread(String name, Lock lock1, Lock lock2) {
            super(name);
            this.lock1 = lock1;
            this.lock2 = lock2;
            this.useSync = true;
        }
        DeadlockThread(String name, Monitor mon1, Monitor mon2) {
            super(name);
            this.mon1 = mon1;
            this.mon2 = mon2;
            this.useSync = false;
        }
        @Override
        public void run() {
            if (useSync) {
                syncLock();
            } else {
                monitorLock();
            }
        }
        private void syncLock() {
            lock1.lock();
            try {
                try {
                    // first rendezvous: everyone holds its first lock
                    barrier.await();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                    System.exit(1);
                } catch (BrokenBarrierException e) {
                    e.printStackTrace();
                    System.exit(1);
                }
                goSyncDeadlock();
            } finally {
                lock1.unlock();
            }
        }
        private void goSyncDeadlock() {
            try {
                // second rendezvous, then attempt the neighbor's lock
                barrier.await();
            } catch (InterruptedException e) {
                e.printStackTrace();
                System.exit(1);
            } catch (BrokenBarrierException e) {
                e.printStackTrace();
                System.exit(1);
            }
            // blocks forever: lock2 is held by the next thread in the cycle
            lock2.lock();
            throw new RuntimeException("should not reach here.");
        }
        private void monitorLock() {
            synchronized (mon1) {
                try {
                    // first rendezvous: everyone holds its first monitor
                    barrier.await();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                    System.exit(1);
                } catch (BrokenBarrierException e) {
                    e.printStackTrace();
                    System.exit(1);
                }
                goMonitorDeadlock();
            }
        }
        private void goMonitorDeadlock() {
            try {
                // second rendezvous, then attempt the neighbor's monitor
                barrier.await();
            } catch (InterruptedException e) {
                e.printStackTrace();
                System.exit(1);
            } catch (BrokenBarrierException e) {
                e.printStackTrace();
                System.exit(1);
            }
            // blocks forever: mon2 is held by the next thread in the cycle
            synchronized (mon2) {
                throw new RuntimeException(getName() + " should not reach here.");
            }
        }
    }
    // Named object used purely as a synchronization monitor.
    class Monitor {
        String name;
        Monitor(String name) {
            this.name = name;
        }
    }
    // Blocks until the user presses Enter (or the stream ends).
    private static void waitForEnterPressed() {
        try {
            boolean done = false;
            while (!done) {
                char ch = (char) System.in.read();
                if (ch<0||ch=='\n') {
                    done = true;
                }
            }
        }
        catch (IOException e) {
            e.printStackTrace();
            System.exit(0);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.model;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.XmlType;
import javax.xml.namespace.QName;
import org.apache.camel.CamelContext;
import org.apache.camel.spi.DataFormat;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.RouteContext;
import org.apache.camel.util.IntrospectionSupport;
import org.apache.camel.util.ObjectHelper;
import static org.apache.camel.util.EndpointHelper.isReferenceParameter;
/**
* Represents a Camel data format
*/
@Metadata(label = "dataformat,transformation")
@XmlType(name = "dataFormat")
@XmlAccessorType(XmlAccessType.FIELD)
public class DataFormatDefinition extends IdentifiedType implements OtherAttributesAware {
    @XmlTransient
    private DataFormat dataFormat;
    @XmlTransient
    private String dataFormatName;
    // use xs:any to support optional property placeholders
    @XmlAnyAttribute
    private Map<QName, Object> otherAttributes;
    public DataFormatDefinition() {
    }
    public DataFormatDefinition(DataFormat dataFormat) {
        this.dataFormat = dataFormat;
    }
    protected DataFormatDefinition(String dataFormatName) {
        this.dataFormatName = dataFormatName;
    }
    /**
     * Factory method to create the data format
     *
     * @param routeContext route context
     * @param type the data format type
     * @param ref reference to lookup for a data format
     * @return the data format or null if not possible to create
     */
    public static DataFormat getDataFormat(RouteContext routeContext, DataFormatDefinition type, String ref) {
        if (type == null) {
            ObjectHelper.notNull(ref, "ref or type");
            // try to let resolver see if it can resolve it, its not always possible
            type = routeContext.getCamelContext().resolveDataFormatDefinition(ref);
            if (type != null) {
                return type.getDataFormat(routeContext);
            }
            DataFormat dataFormat = routeContext.getCamelContext().resolveDataFormat(ref);
            if (dataFormat == null) {
                throw new IllegalArgumentException("Cannot find data format in registry with ref: " + ref);
            }
            return dataFormat;
        } else {
            return type.getDataFormat(routeContext);
        }
    }
    /**
     * Returns the data format, lazily creating and configuring it on the
     * first call. Property placeholders in this definition are resolved
     * before creation and reverted afterwards.
     */
    public DataFormat getDataFormat(RouteContext routeContext) {
        if (dataFormat == null) {
            Runnable propertyPlaceholdersChangeReverter = ProcessorDefinitionHelper.createPropertyPlaceholdersChangeReverter();
            // resolve properties before we create the data format
            try {
                ProcessorDefinitionHelper.resolvePropertyPlaceholders(routeContext.getCamelContext(), this);
            } catch (Exception e) {
                throw new IllegalArgumentException("Error resolving property placeholders on data format: " + this, e);
            }
            try {
                dataFormat = createDataFormat(routeContext);
                if (dataFormat != null) {
                    configureDataFormat(dataFormat, routeContext.getCamelContext());
                } else {
                    throw new IllegalArgumentException(
                        "Data format '" + (dataFormatName != null ? dataFormatName : "<null>") + "' could not be created. "
                            + "Ensure that the data format is valid and the associated Camel component is present on the classpath");
                }
            } finally {
                // restore any temporarily-changed property placeholders
                propertyPlaceholdersChangeReverter.run();
            }
        }
        return dataFormat;
    }
    /**
     * Factory method to create the data format instance
     */
    protected DataFormat createDataFormat(RouteContext routeContext) {
        // must use getDataFormatName() as we need special logic in json dataformat
        if (getDataFormatName() != null) {
            return routeContext.getCamelContext().resolveDataFormat(getDataFormatName());
        }
        return null;
    }
    /**
     * Allows derived classes to customize the data format
     *
     * @deprecated use {@link #configureDataFormat(org.apache.camel.spi.DataFormat, org.apache.camel.CamelContext)}
     */
    @Deprecated
    protected void configureDataFormat(DataFormat dataFormat) {
    }
    /**
     * Allows derived classes to customize the data format
     */
    protected void configureDataFormat(DataFormat dataFormat, CamelContext camelContext) {
    }
    /**
     * Sets a named property on the data format instance using introspection
     *
     * @deprecated use {@link #setProperty(org.apache.camel.CamelContext, Object, String, Object)}
     */
    @Deprecated
    protected void setProperty(Object bean, String name, Object value) {
        setProperty(null, bean, name, value);
    }
    /**
     * Sets a named property on the data format instance using introspection.
     * String values of the form of a reference parameter (e.g. "#beanId")
     * are looked up in the registry when a CamelContext is available.
     */
    protected void setProperty(CamelContext camelContext, Object bean, String name, Object value) {
        try {
            String ref = value instanceof String ? value.toString() : null;
            if (isReferenceParameter(ref) && camelContext != null) {
                IntrospectionSupport.setProperty(camelContext, camelContext.getTypeConverter(), bean, name, null, ref, true);
            } else {
                IntrospectionSupport.setProperty(bean, name, value);
            }
        } catch (Exception e) {
            throw new IllegalArgumentException("Failed to set property: " + name + " on: " + bean + ". Reason: " + e, e);
        }
    }
    public String getDataFormatName() {
        return dataFormatName;
    }
    public void setDataFormatName(String dataFormatName) {
        this.dataFormatName = dataFormatName;
    }
    public DataFormat getDataFormat() {
        return dataFormat;
    }
    public void setDataFormat(DataFormat dataFormat) {
        this.dataFormat = dataFormat;
    }
    public Map<QName, Object> getOtherAttributes() {
        return otherAttributes;
    }
    /**
     * Adds an optional attribute
     */
    public void setOtherAttributes(Map<QName, Object> otherAttributes) {
        this.otherAttributes = otherAttributes;
    }
    /**
     * Returns the simple class name with the trailing "DataFormat" suffix
     * removed (e.g. JsonDataFormat -> Json).
     */
    public String getShortName() {
        String name = getClass().getSimpleName();
        if (name.endsWith("DataFormat")) {
            // strip exactly the trailing suffix. The previous indexOf-based
            // substring truncated at the FIRST occurrence of "DataFormat",
            // over-truncating names that also contain the token earlier.
            name = name.substring(0, name.length() - "DataFormat".length());
        }
        return name;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.security.PrivilegedAction;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.LogAggregationContext;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
import org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus;
import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.AMLauncherEvent;
import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.ApplicationMasterLauncher;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.NullRMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptLaunchFailedEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeStartedEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNode;
import org.apache.hadoop.yarn.server.resourcemanager.security.ClientToAMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.NMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.util.Records;
import org.apache.hadoop.yarn.util.YarnVersionInfo;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.junit.Assert;
/**
 * Mock {@code ResourceManager} used by YARN unit tests. It starts a real RM
 * service stack but overrides the RPC-facing services (client, AM, resource
 * tracker, AM launcher, admin) so that no RPC handlers are actually started;
 * tests drive the RM directly through in-JVM calls. Also provides helpers to
 * submit applications, register MockNMs, and busy-wait for app/attempt/
 * container/node state transitions.
 */
@SuppressWarnings("unchecked")
public class MockRM extends ResourceManager {
static final Logger LOG = Logger.getLogger(MockRM.class);
// Config key: set to true to start the real web app (disabled by default in tests).
static final String ENABLE_WEBAPP = "mockrm.webapp.enabled";
// When true (the default) a NullRMNodeLabelsManager is used instead of the real one.
final private boolean useNullRMNodeLabelsManager;
/** Creates a MockRM with a fresh default {@link YarnConfiguration}. */
public MockRM() {
this(new YarnConfiguration());
}
/** Creates a MockRM with the given configuration and no state store. */
public MockRM(Configuration conf) {
this(conf, null);
}
/** Creates a MockRM with the given configuration and state store. */
public MockRM(Configuration conf, RMStateStore store) {
this(conf, store, true);
}
/**
 * Full constructor: initializes the RM services, optionally installs the
 * given state store, and forces root logging to DEBUG for test diagnostics.
 *
 * @param useNullRMNodeLabelsManager if true, node labels are backed by the
 *        in-memory NullRMNodeLabelsManager rather than the real manager
 */
public MockRM(Configuration conf, RMStateStore store,
boolean useNullRMNodeLabelsManager) {
super();
this.useNullRMNodeLabelsManager = useNullRMNodeLabelsManager;
// Wrap a plain Configuration so init always sees a YarnConfiguration.
init(conf instanceof YarnConfiguration ? conf : new YarnConfiguration(conf));
if(store != null) {
setRMStateStore(store);
}
Logger rootLogger = LogManager.getRootLogger();
rootLogger.setLevel(Level.DEBUG);
}
/** Returns either the null (test) labels manager or the real one, per the flag. */
@Override
protected RMNodeLabelsManager createNodeLabelManager()
throws InstantiationException, IllegalAccessException {
if (useNullRMNodeLabelsManager) {
RMNodeLabelsManager mgr = new NullRMNodeLabelsManager();
mgr.init(getConfig());
return mgr;
} else {
return super.createNodeLabelManager();
}
}
/**
 * Busy-waits (up to 80s, polling every 500ms) until the application reaches
 * {@code finalState}; fails the test on timeout.
 */
public void waitForState(ApplicationId appId, RMAppState finalState)
throws Exception {
RMApp app = getRMContext().getRMApps().get(appId);
Assert.assertNotNull("app shouldn't be null", app);
final int timeoutMsecs = 80000;
final int waitMsPerLoop = 500;
int loop = 0;
while (!finalState.equals(app.getState()) &&
((waitMsPerLoop * loop) < timeoutMsecs)) {
LOG.info("App : " + appId + " State is : " + app.getState() +
" Waiting for state : " + finalState);
Thread.yield();
Thread.sleep(waitMsPerLoop);
loop++;
}
int waitedMsecs = waitMsPerLoop * loop;
LOG.info("App State is : " + app.getState());
if (waitedMsecs >= timeoutMsecs) {
Assert.fail("App state is not correct (timedout): expected: " +
finalState + " actual: " + app.getState());
}
}
/**
 * Busy-waits (up to 40s, polling every 10ms) until the attempt reaches
 * {@code finalState}; always sleeps at least 1s total so async dispatcher
 * events have a chance to drain. Fails the test on timeout.
 */
public void waitForState(ApplicationAttemptId attemptId,
RMAppAttemptState finalState)
throws Exception {
RMApp app = getRMContext().getRMApps().get(attemptId.getApplicationId());
Assert.assertNotNull("app shouldn't be null", app);
RMAppAttempt attempt = app.getRMAppAttempt(attemptId);
final int timeoutMsecs = 40000;
final int minWaitMsecs = 1000;
final int waitMsPerLoop = 10;
int loop = 0;
while (!finalState.equals(attempt.getAppAttemptState())
&& waitMsPerLoop * loop < timeoutMsecs) {
LOG.info("AppAttempt : " + attemptId + " State is : " +
attempt.getAppAttemptState() + " Waiting for state : " + finalState);
Thread.yield();
Thread.sleep(waitMsPerLoop);
loop++;
}
int waitedMsecs = waitMsPerLoop * loop;
// Pad up to the minimum wait even when the state was reached immediately.
if (minWaitMsecs > waitedMsecs) {
Thread.sleep(minWaitMsecs - waitedMsecs);
}
LOG.info("Attempt State is : " + attempt.getAppAttemptState());
if (waitedMsecs >= timeoutMsecs) {
Assert.fail("Attempt state is not correct (timedout): expected: "
+ finalState + " actual: " + attempt.getAppAttemptState());
}
}
/**
 * Heartbeats the given NM (up to ~40 iterations of 200ms) until the
 * scheduler reports the container allocated; fails the test otherwise.
 */
public void waitForContainerAllocated(MockNM nm, ContainerId containerId)
throws Exception {
int timeoutSecs = 0;
while (getResourceScheduler().getRMContainer(containerId) == null
&& timeoutSecs++ < 40) {
System.out.println("Waiting for" + containerId + " to be allocated.");
nm.nodeHeartbeat(true);
Thread.sleep(200);
}
Assert.assertNotNull("Failed in waiting for " + containerId + " " +
"allocation.", getResourceScheduler().getRMContainer(containerId));
}
/**
 * Polls until the given completed container shows up in the attempt's
 * just-finished containers list.
 * NOTE(review): loops forever if the container never completes — relies on
 * the surrounding test's own timeout; confirm that is intentional.
 */
public void waitForContainerToComplete(RMAppAttempt attempt,
NMContainerStatus completedContainer) throws InterruptedException {
while (true) {
List<ContainerStatus> containers = attempt.getJustFinishedContainers();
System.out.println("Received completed containers " + containers);
for (ContainerStatus container : containers) {
if (container.getContainerId().equals(
completedContainer.getContainerId())) {
return;
}
}
Thread.sleep(200);
}
}
/**
 * Waits until the app has {@code attemptSize} attempts (i.e. a new AM
 * attempt has been created), then launches and registers that AM on the
 * given NM. NOTE(review): no timeout on the attempt-count loop.
 */
public MockAM waitForNewAMToLaunchAndRegister(ApplicationId appId, int attemptSize,
MockNM nm) throws Exception {
RMApp app = getRMContext().getRMApps().get(appId);
Assert.assertNotNull(app);
while (app.getAppAttempts().size() != attemptSize) {
System.out.println("Application " + appId
+ " is waiting for AM to restart. Current has "
+ app.getAppAttempts().size() + " attempts.");
Thread.sleep(200);
}
return launchAndRegisterAM(app, this, nm);
}
/** Waits (default 30s) for the container to reach the given state. */
public boolean waitForState(MockNM nm, ContainerId containerId,
RMContainerState containerState) throws Exception {
// default is wait for 30,000 ms
return waitForState(nm, containerId, containerState, 30 * 1000);
}
/** Single-NM convenience overload of the collection-based wait below. */
public boolean waitForState(MockNM nm, ContainerId containerId,
RMContainerState containerState, int timeoutMillisecs) throws Exception {
return waitForState(Arrays.asList(nm), containerId, containerState,
timeoutMillisecs);
}
/**
 * Heartbeats all given NMs every 100ms until the container exists and
 * reaches {@code containerState}.
 *
 * @return true if the state was reached; false on timeout while waiting
 *         for allocation or for the state transition
 */
public boolean waitForState(Collection<MockNM> nms, ContainerId containerId,
RMContainerState containerState, int timeoutMillisecs) throws Exception {
RMContainer container = getResourceScheduler().getRMContainer(containerId);
int timeoutSecs = 0;
// Phase 1: wait for the container to be allocated at all.
while(container == null && timeoutSecs++ < timeoutMillisecs / 100) {
for (MockNM nm : nms) {
nm.nodeHeartbeat(true);
}
container = getResourceScheduler().getRMContainer(containerId);
System.out.println("Waiting for container " + containerId + " to be allocated.");
Thread.sleep(100);
if (timeoutMillisecs <= timeoutSecs * 100) {
return false;
}
}
Assert.assertNotNull("Container shouldn't be null", container);
// Phase 2: wait for the desired container state (shares the same counter).
while (!containerState.equals(container.getState())
&& timeoutSecs++ < timeoutMillisecs / 100) {
System.out.println("Container : " + containerId + " State is : "
+ container.getState() + " Waiting for state : " + containerState);
for (MockNM nm : nms) {
nm.nodeHeartbeat(true);
}
Thread.sleep(100);
if (timeoutMillisecs <= timeoutSecs * 100) {
return false;
}
}
System.out.println("Container State is : " + container.getState());
Assert.assertEquals("Container state is not correct (timedout)",
containerState, container.getState());
return true;
}
// get new application id
/** Asks the (in-JVM) client service for a fresh application id. */
public GetNewApplicationResponse getNewAppId() throws Exception {
ApplicationClientProtocol client = getClientRMService();
return client.getNewApplication(Records
.newRecord(GetNewApplicationRequest.class));
}
/** Submits a managed app with the given AM memory and all defaults. */
public RMApp submitApp(int masterMemory) throws Exception {
return submitApp(masterMemory, false);
}
/** Submits an app with an explicit scheduling priority. */
public RMApp submitApp(int masterMemory, Priority priority) throws Exception {
Resource resource = Resource.newInstance(masterMemory, 0);
return submitApp(resource, "", UserGroupInformation.getCurrentUser()
.getShortUserName(), null, false, null,
super.getConfig().getInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS), null, null, true,
false, false, null, 0, null, true, priority);
}
/** Submits an app, optionally with an unmanaged AM. */
public RMApp submitApp(int masterMemory, boolean unmanaged)
throws Exception {
return submitApp(masterMemory, "", UserGroupInformation.getCurrentUser()
.getShortUserName(), unmanaged);
}
// client
/** Submits a managed app with an explicit name and user. */
public RMApp submitApp(int masterMemory, String name, String user) throws Exception {
return submitApp(masterMemory, name, user, false);
}
/** Submits an app with name/user and managed/unmanaged choice. */
public RMApp submitApp(int masterMemory, String name, String user,
boolean unmanaged)
throws Exception {
return submitApp(masterMemory, name, user, null, unmanaged, null,
super.getConfig().getInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS), null);
}
/** Submits an app with application ACLs. */
public RMApp submitApp(int masterMemory, String name, String user,
Map<ApplicationAccessType, String> acls) throws Exception {
return submitApp(masterMemory, name, user, acls, false, null,
super.getConfig().getInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS), null);
}
/** Submits an app with ACLs into a specific queue. */
public RMApp submitApp(int masterMemory, String name, String user,
Map<ApplicationAccessType, String> acls, String queue) throws Exception {
return submitApp(masterMemory, name, user, acls, false, queue,
super.getConfig().getInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS), null);
}
/** Resource-typed variant: submits with an explicit AM Resource and queue. */
public RMApp submitApp(Resource resource, String name, String user,
Map<ApplicationAccessType, String> acls, String queue) throws Exception {
return submitApp(resource, name, user, acls, false, queue,
super.getConfig().getInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS), null, null,
true, false, false, null, 0, null, true, null);
}
/** Submits an app, optionally skipping the wait for the ACCEPTED state. */
public RMApp submitApp(int masterMemory, String name, String user,
Map<ApplicationAccessType, String> acls, String queue,
boolean waitForAccepted) throws Exception {
return submitApp(masterMemory, name, user, acls, false, queue,
super.getConfig().getInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS), null, null,
waitForAccepted);
}
/** Submits an app with explicit max attempts and credentials. */
public RMApp submitApp(int masterMemory, String name, String user,
Map<ApplicationAccessType, String> acls, boolean unmanaged, String queue,
int maxAppAttempts, Credentials ts) throws Exception {
return submitApp(masterMemory, name, user, acls, unmanaged, queue,
maxAppAttempts, ts, null);
}
/** Adds an application type to the previous overload. */
public RMApp submitApp(int masterMemory, String name, String user,
Map<ApplicationAccessType, String> acls, boolean unmanaged, String queue,
int maxAppAttempts, Credentials ts, String appType) throws Exception {
return submitApp(masterMemory, name, user, acls, unmanaged, queue,
maxAppAttempts, ts, appType, true);
}
/** Adds the waitForAccepted flag to the previous overload. */
public RMApp submitApp(int masterMemory, String name, String user,
Map<ApplicationAccessType, String> acls, boolean unmanaged, String queue,
int maxAppAttempts, Credentials ts, String appType,
boolean waitForAccepted)
throws Exception {
return submitApp(masterMemory, name, user, acls, unmanaged, queue,
maxAppAttempts, ts, appType, waitForAccepted, false);
}
/** Adds keep-containers-across-attempts to the previous overload. */
public RMApp submitApp(int masterMemory, String name, String user,
Map<ApplicationAccessType, String> acls, boolean unmanaged, String queue,
int maxAppAttempts, Credentials ts, String appType,
boolean waitForAccepted, boolean keepContainers) throws Exception {
Resource resource = Records.newRecord(Resource.class);
resource.setMemory(masterMemory);
return submitApp(resource, name, user, acls, unmanaged, queue,
maxAppAttempts, ts, appType, waitForAccepted, keepContainers,
false, null, 0, null, true, Priority.newInstance(0));
}
/** Submits an app with a custom attempt-failures validity interval. */
public RMApp submitApp(int masterMemory, long attemptFailuresValidityInterval)
throws Exception {
Resource resource = Records.newRecord(Resource.class);
resource.setMemory(masterMemory);
Priority priority = Priority.newInstance(0);
return submitApp(resource, "", UserGroupInformation.getCurrentUser()
.getShortUserName(), null, false, null,
super.getConfig().getInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS), null, null, true, false,
false, null, attemptFailuresValidityInterval, null, true, priority);
}
/** Submits an app with a caller-provided ApplicationId (recovery tests). */
public RMApp submitApp(int masterMemory, String name, String user,
Map<ApplicationAccessType, String> acls, boolean unmanaged, String queue,
int maxAppAttempts, Credentials ts, String appType,
boolean waitForAccepted, boolean keepContainers, boolean isAppIdProvided,
ApplicationId applicationId) throws Exception {
Resource resource = Records.newRecord(Resource.class);
resource.setMemory(masterMemory);
Priority priority = Priority.newInstance(0);
return submitApp(resource, name, user, acls, unmanaged, queue,
maxAppAttempts, ts, appType, waitForAccepted, keepContainers,
isAppIdProvided, applicationId, 0, null, true, priority);
}
/** Submits an app with a log-aggregation context. */
public RMApp submitApp(int masterMemory,
LogAggregationContext logAggregationContext) throws Exception {
Resource resource = Records.newRecord(Resource.class);
resource.setMemory(masterMemory);
Priority priority = Priority.newInstance(0);
return submitApp(resource, "", UserGroupInformation.getCurrentUser()
.getShortUserName(), null, false, null,
super.getConfig().getInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS), null, null, true, false,
false, null, 0, logAggregationContext, true, priority);
}
/**
 * Master submit method that all other submitApp overloads funnel into.
 * Builds the submission context, submits it as a test user via doAs, and
 * (optionally) waits for the app to reach ACCEPTED and — for managed AMs —
 * for the first attempt to reach SCHEDULED.
 *
 * @param isAppIdProvided when false a fresh id is obtained from the client
 *        service and {@code applicationId} is ignored
 * @return the submitted RMApp as seen in the RM context
 */
public RMApp submitApp(Resource capability, String name, String user,
Map<ApplicationAccessType, String> acls, boolean unmanaged, String queue,
int maxAppAttempts, Credentials ts, String appType,
boolean waitForAccepted, boolean keepContainers, boolean isAppIdProvided,
ApplicationId applicationId, long attemptFailuresValidityInterval,
LogAggregationContext logAggregationContext,
boolean cancelTokensWhenComplete, Priority priority)
throws Exception {
ApplicationId appId = isAppIdProvided ? applicationId : null;
ApplicationClientProtocol client = getClientRMService();
if (! isAppIdProvided) {
GetNewApplicationResponse resp = client.getNewApplication(Records
.newRecord(GetNewApplicationRequest.class));
appId = resp.getApplicationId();
}
SubmitApplicationRequest req = Records
.newRecord(SubmitApplicationRequest.class);
ApplicationSubmissionContext sub = Records
.newRecord(ApplicationSubmissionContext.class);
sub.setKeepContainersAcrossApplicationAttempts(keepContainers);
sub.setApplicationId(appId);
sub.setApplicationName(name);
sub.setMaxAppAttempts(maxAppAttempts);
if (unmanaged) {
sub.setUnmanagedAM(true);
}
if (queue != null) {
sub.setQueue(queue);
}
if (priority != null) {
sub.setPriority(priority);
}
sub.setApplicationType(appType);
ContainerLaunchContext clc = Records
.newRecord(ContainerLaunchContext.class);
sub.setResource(capability);
clc.setApplicationACLs(acls);
// Only attach tokens when security is on; otherwise they would be rejected.
if (ts != null && UserGroupInformation.isSecurityEnabled()) {
DataOutputBuffer dob = new DataOutputBuffer();
ts.writeTokenStorageToStream(dob);
ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
clc.setTokens(securityTokens);
}
sub.setAMContainerSpec(clc);
sub.setAttemptFailuresValidityInterval(attemptFailuresValidityInterval);
if (logAggregationContext != null) {
sub.setLogAggregationContext(logAggregationContext);
}
sub.setCancelTokensWhenComplete(cancelTokensWhenComplete);
req.setApplicationSubmissionContext(sub);
// Submit as the requested user so queue ACLs are exercised.
UserGroupInformation fakeUser =
UserGroupInformation.createUserForTesting(user, new String[] {"someGroup"});
PrivilegedAction<SubmitApplicationResponse> action =
new PrivilegedAction<SubmitApplicationResponse>() {
ApplicationClientProtocol client;
SubmitApplicationRequest req;
@Override
public SubmitApplicationResponse run() {
try {
return client.submitApplication(req);
} catch (YarnException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
// Late-binds the client and request into the anonymous action.
PrivilegedAction<SubmitApplicationResponse> setClientReq(
ApplicationClientProtocol client, SubmitApplicationRequest req) {
this.client = client;
this.req = req;
return this;
}
}.setClientReq(client, req);
fakeUser.doAs(action);
// make sure app is immediately available after submit
if (waitForAccepted) {
waitForState(appId, RMAppState.ACCEPTED);
}
RMApp rmApp = getRMContext().getRMApps().get(appId);
// unmanaged AM won't go to RMAppAttemptState.SCHEDULED.
if (waitForAccepted && !unmanaged) {
waitForState(rmApp.getCurrentAppAttempt().getAppAttemptId(),
RMAppAttemptState.SCHEDULED);
}
return rmApp;
}
/** Creates and registers a MockNM with the given memory. */
public MockNM registerNode(String nodeIdStr, int memory) throws Exception {
MockNM nm = new MockNM(nodeIdStr, memory, getResourceTrackerService());
nm.registerNode();
return nm;
}
/** Creates and registers a MockNM with memory and vcores. */
public MockNM registerNode(String nodeIdStr, int memory, int vCores)
throws Exception {
MockNM nm =
new MockNM(nodeIdStr, memory, vCores, getResourceTrackerService());
nm.registerNode();
return nm;
}
/** Registers a MockNM that reports the given already-running applications. */
public MockNM registerNode(String nodeIdStr, int memory, int vCores,
List<ApplicationId> runningApplications) throws Exception {
MockNM nm =
new MockNM(nodeIdStr, memory, vCores, getResourceTrackerService(),
YarnVersionInfo.getVersion());
nm.registerNode(runningApplications);
return nm;
}
/** Injects a node-started event directly into the RMNode state machine. */
public void sendNodeStarted(MockNM nm) throws Exception {
RMNodeImpl node = (RMNodeImpl) getRMContext().getRMNodes().get(
nm.getNodeId());
node.handle(new RMNodeStartedEvent(nm.getNodeId(), null, null));
}
/** Simulates node loss by firing an EXPIRE event at the RMNode. */
public void sendNodeLost(MockNM nm) throws Exception {
RMNodeImpl node = (RMNodeImpl) getRMContext().getRMNodes().get(
nm.getNodeId());
node.handle(new RMNodeEvent(nm.getNodeId(), RMNodeEventType.EXPIRE));
}
/** Busy-waits (up to ~10s) for the node to reach the given state. */
public void NMwaitForState(NodeId nodeid, NodeState finalState)
throws Exception {
RMNode node = getRMContext().getRMNodes().get(nodeid);
Assert.assertNotNull("node shouldn't be null", node);
int timeoutSecs = 0;
while (!finalState.equals(node.getState()) && timeoutSecs++ < 20) {
System.out.println("Node State is : " + node.getState()
+ " Waiting for state : " + finalState);
Thread.sleep(500);
}
System.out.println("Node State is : " + node.getState());
Assert.assertEquals("Node state is not correct (timedout)", finalState,
node.getState());
}
/** Kills the application through the client service. */
public KillApplicationResponse killApp(ApplicationId appId) throws Exception {
ApplicationClientProtocol client = getClientRMService();
KillApplicationRequest req = KillApplicationRequest.newInstance(appId);
return client.forceKillApplication(req);
}
// from AMLauncher
/**
 * Simulates the AM launcher: waits for the attempt to be ALLOCATED, wires
 * an AMRM token onto the attempt, and fires the LAUNCHED event.
 *
 * @return a MockAM bound to the attempt
 */
public MockAM sendAMLaunched(ApplicationAttemptId appAttemptId)
throws Exception {
MockAM am = new MockAM(getRMContext(), masterService, appAttemptId);
am.waitForState(RMAppAttemptState.ALLOCATED);
//create and set AMRMToken
Token<AMRMTokenIdentifier> amrmToken =
this.rmContext.getAMRMTokenSecretManager().createAndGetAMRMToken(
appAttemptId);
((RMAppAttemptImpl) this.rmContext.getRMApps()
.get(appAttemptId.getApplicationId()).getRMAppAttempt(appAttemptId))
.setAMRMToken(amrmToken);
getRMContext()
.getDispatcher()
.getEventHandler()
.handle(
new RMAppAttemptEvent(appAttemptId, RMAppAttemptEventType.LAUNCHED));
return am;
}
/** Simulates an AM launch failure for the given attempt. */
public void sendAMLaunchFailed(ApplicationAttemptId appAttemptId)
throws Exception {
MockAM am = new MockAM(getRMContext(), masterService, appAttemptId);
am.waitForState(RMAppAttemptState.ALLOCATED);
getRMContext().getDispatcher().getEventHandler()
.handle(new RMAppAttemptLaunchFailedEvent(appAttemptId, "Failed"));
}
/** Client service whose RPC server is never started (in-JVM calls only). */
@Override
protected ClientRMService createClientRMService() {
return new ClientRMService(getRMContext(), getResourceScheduler(),
rmAppManager, applicationACLsManager, queueACLsManager,
getRMContext().getRMDelegationTokenSecretManager()) {
@Override
protected void serviceStart() {
// override to not start rpc handler
}
@Override
protected void serviceStop() {
// don't do anything
}
};
}
/** Resource tracker with secret-manager keys rolled but no RPC server. */
@Override
protected ResourceTrackerService createResourceTrackerService() {
RMContainerTokenSecretManager containerTokenSecretManager =
getRMContext().getContainerTokenSecretManager();
containerTokenSecretManager.rollMasterKey();
NMTokenSecretManagerInRM nmTokenSecretManager =
getRMContext().getNMTokenSecretManager();
nmTokenSecretManager.rollMasterKey();
return new ResourceTrackerService(getRMContext(), nodesListManager,
this.nmLivelinessMonitor, containerTokenSecretManager,
nmTokenSecretManager) {
@Override
protected void serviceStart() {
// override to not start rpc handler
}
@Override
protected void serviceStop() {
// don't do anything
}
};
}
/** AM service whose RPC server is never started. */
@Override
protected ApplicationMasterService createApplicationMasterService() {
return new ApplicationMasterService(getRMContext(), scheduler) {
@Override
protected void serviceStart() {
// override to not start rpc handler
}
@Override
protected void serviceStop() {
// don't do anything
}
};
}
/** AM launcher that ignores launch events; tests drive AMs explicitly. */
@Override
protected ApplicationMasterLauncher createAMLauncher() {
return new ApplicationMasterLauncher(getRMContext()) {
@Override
protected void serviceStart() {
// override to not start rpc handler
}
@Override
public void handle(AMLauncherEvent appEvent) {
// don't do anything
}
@Override
protected void serviceStop() {
// don't do anything
}
};
}
/** Admin service with no RPC server and no embedded elector. */
@Override
protected AdminService createAdminService() {
return new AdminService(this, getRMContext()) {
@Override
protected void startServer() {
// override to not start rpc handler
}
@Override
protected void stopServer() {
// don't do anything
}
@Override
protected EmbeddedElectorService createEmbeddedElectorService() {
return null;
}
};
}
/** Exposes the nodes-list manager for tests. */
public NodesListManager getNodesListManager() {
return this.nodesListManager;
}
/** Exposes the client-to-AM token secret manager for tests. */
public ClientToAMTokenSecretManagerInRM getClientToAMTokenSecretManager() {
return this.getRMContext().getClientToAMTokenSecretManager();
}
/** Exposes the RMAppManager for tests. */
public RMAppManager getRMAppManager() {
return this.rmAppManager;
}
/** Exposes the admin service for tests. */
public AdminService getAdminService() {
return this.adminService;
}
/** Starts the web app only when explicitly enabled via {@link #ENABLE_WEBAPP}. */
@Override
protected void startWepApp() {
if (getConfig().getBoolean(ENABLE_WEBAPP, false)) {
super.startWepApp();
return;
}
// Disable webapp
}
/**
 * Drives an AM through a clean SUCCEEDED finish and verifies the attempt
 * and app both reach their FINISHED states.
 */
public static void finishAMAndVerifyAppState(RMApp rmApp, MockRM rm, MockNM nm,
MockAM am) throws Exception {
FinishApplicationMasterRequest req =
FinishApplicationMasterRequest.newInstance(
FinalApplicationStatus.SUCCEEDED, "", "");
am.unregisterAppAttempt(req,true);
am.waitForState(RMAppAttemptState.FINISHING);
nm.nodeHeartbeat(am.getApplicationAttemptId(), 1, ContainerState.COMPLETE);
am.waitForState(RMAppAttemptState.FINISHED);
rm.waitForState(rmApp.getApplicationId(), RMAppState.FINISHED);
}
/** Waits up to 5s for the scheduler to register the app attempt. */
@SuppressWarnings("rawtypes")
private static void waitForSchedulerAppAttemptAdded(
ApplicationAttemptId attemptId, MockRM rm) throws InterruptedException {
int tick = 0;
// Wait for at most 5 sec
while (null == ((AbstractYarnScheduler) rm.getResourceScheduler())
.getApplicationAttempt(attemptId) && tick < 50) {
Thread.sleep(100);
if (tick % 10 == 0) {
System.out.println("waiting for SchedulerApplicationAttempt="
+ attemptId + " added.");
}
tick++;
}
Assert.assertNotNull("Timed out waiting for SchedulerApplicationAttempt=" +
attemptId + " to be added.", ((AbstractYarnScheduler)
rm.getResourceScheduler()).getApplicationAttempt(attemptId));
}
/**
 * Drives an app's current attempt from ACCEPTED through SCHEDULED to
 * LAUNCHED using one NM heartbeat, returning the launched MockAM.
 */
public static MockAM launchAM(RMApp app, MockRM rm, MockNM nm)
throws Exception {
rm.waitForState(app.getApplicationId(), RMAppState.ACCEPTED);
RMAppAttempt attempt = app.getCurrentAppAttempt();
waitForSchedulerAppAttemptAdded(attempt.getAppAttemptId(), rm);
rm.waitForState(attempt.getAppAttemptId(), RMAppAttemptState.SCHEDULED);
System.out.println("Launch AM " + attempt.getAppAttemptId());
nm.nodeHeartbeat(true);
MockAM am = rm.sendAMLaunched(attempt.getAppAttemptId());
rm.waitForState(attempt.getAppAttemptId(), RMAppAttemptState.LAUNCHED);
return am;
}
/** Launches and registers an AM, waiting for the app to be RUNNING. */
public static MockAM launchAndRegisterAM(RMApp app, MockRM rm, MockNM nm)
throws Exception {
MockAM am = launchAM(app, rm, nm);
am.registerAppAttempt();
rm.waitForState(app.getApplicationId(), RMAppState.RUNNING);
return am;
}
/** Fetches an application report through the client service. */
public ApplicationReport getApplicationReport(ApplicationId appId)
throws YarnException, IOException {
ApplicationClientProtocol client = getClientRMService();
GetApplicationReportResponse response =
client.getApplicationReport(GetApplicationReportRequest
.newInstance(appId));
return response.getApplicationReport();
}
// Explicitly reset queue metrics for testing.
@SuppressWarnings("static-access")
public void clearQueueMetrics(RMApp app) {
((AbstractYarnScheduler<SchedulerApplicationAttempt, SchedulerNode>) getResourceScheduler())
.getSchedulerApplications().get(app.getApplicationId()).getQueue()
.getMetrics().clearQueueMetrics();
}
/** Exposes the active-services bundle for tests. */
public RMActiveServices getRMActiveService() {
return activeServices;
}
}
| |
package org.jabref.cli;
import java.util.List;
import org.jabref.Globals;
import org.jabref.logic.l10n.Localization;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Parses JabRef's command-line arguments with Apache Commons CLI and exposes
 * each recognized option through a typed accessor. Arguments that are not
 * options (e.g. library files to open) are collected in {@link #getLeftOver()}.
 */
public class JabRefCLI {

    private static final int WIDTH = 100; // Number of characters per line in help output
    private static final Logger LOGGER = LoggerFactory.getLogger(JabRefCLI.class);
    private final CommandLine cl;
    // Positional (non-option) arguments remaining after parsing; assigned once.
    private final List<String> leftOver;

    /**
     * Parses {@code args} against the JabRef option set.
     *
     * @param args raw command-line arguments
     * @throws ParseException if the arguments violate the option definitions
     */
    public JabRefCLI(String[] args) throws ParseException {
        Options options = getOptions();
        // stopAtNonOption=true: the first non-option token ends option parsing.
        this.cl = new DefaultParser().parse(options, args, true);
        this.leftOver = cl.getArgList();
    }

    /** Returns the localized syntax description for {@code --exportMatches}. */
    public static String getExportMatchesSyntax() {
        return String.format("[%s]searchTerm,outputFile:%s[,%s]",
                Localization.lang("field"),
                Localization.lang("file"),
                Localization.lang("exportFormat"));
    }

    public boolean isHelp() {
        return cl.hasOption("help");
    }

    public boolean isShowVersion() {
        return cl.hasOption("version");
    }

    public boolean isBlank() {
        return cl.hasOption("blank");
    }

    // NOTE(review): "loads" is never registered in getOptions(), so this can
    // never return true — confirm whether the option was intentionally removed
    // or whether it should be re-added to the option set.
    public boolean isLoadSession() {
        return cl.hasOption("loads");
    }

    public boolean isDisableGui() {
        return cl.hasOption("nogui");
    }

    public boolean isPreferencesExport() {
        return cl.hasOption("prexp");
    }

    /** Target file for preference export; defaults to {@code jabref_prefs.xml}. */
    public String getPreferencesExport() {
        return cl.getOptionValue("prexp", "jabref_prefs.xml");
    }

    public boolean isPreferencesImport() {
        return cl.hasOption("primp");
    }

    /** Source file for preference import; defaults to {@code jabref_prefs.xml}. */
    public String getPreferencesImport() {
        return cl.getOptionValue("primp", "jabref_prefs.xml");
    }

    public boolean isPreferencesReset() {
        return cl.hasOption("prdef");
    }

    public String getPreferencesReset() {
        return cl.getOptionValue("prdef");
    }

    public boolean isFileExport() {
        return cl.hasOption("output");
    }

    public String getFileExport() {
        return cl.getOptionValue("output");
    }

    public boolean isBibtexImport() {
        return cl.hasOption("importBibtex");
    }

    public String getBibtexImport() {
        return cl.getOptionValue("importBibtex");
    }

    public boolean isFileImport() {
        return cl.hasOption("import");
    }

    public String getFileImport() {
        return cl.getOptionValue("import");
    }

    public boolean isAuxImport() {
        return cl.hasOption("aux");
    }

    public String getAuxImport() {
        return cl.getOptionValue("aux");
    }

    public boolean isImportToOpenBase() {
        return cl.hasOption("importToOpen");
    }

    public String getImportToOpenBase() {
        return cl.getOptionValue("importToOpen");
    }

    public boolean isDebugLogging() {
        return cl.hasOption("debug");
    }

    public boolean isFetcherEngine() {
        return cl.hasOption("fetch");
    }

    public String getFetcherEngine() {
        return cl.getOptionValue("fetch");
    }

    public boolean isExportMatches() {
        return cl.hasOption("exportMatches");
    }

    public String getExportMatches() {
        return cl.getOptionValue("exportMatches");
    }

    public boolean isGenerateBibtexKeys() {
        return cl.hasOption("generateBibtexKeys");
    }

    public boolean isAutomaticallySetFileLinks() {
        return cl.hasOption("automaticallySetFileLinks");
    }

    /**
     * Builds the full Commons CLI option set understood by JabRef.
     * Boolean flags are declared first, followed by argument-taking options.
     */
    private static Options getOptions() {
        Options options = new Options();
        // boolean options
        options.addOption("h", "help", false, Localization.lang("Display help on command line options"));
        options.addOption("n", "nogui", false, Localization.lang("No GUI. Only process command line options"));
        options.addOption("asfl", "automaticallySetFileLinks", false, Localization.lang("Automatically set file links"));
        options.addOption("g", "generateBibtexKeys", false, Localization.lang("Regenerate all keys for the entries in a BibTeX file"));
        options.addOption("b", "blank", false, Localization.lang("Do not open any files at startup"));
        options.addOption("v", "version", false, Localization.lang("Display version"));
        options.addOption(null, "debug", false, Localization.lang("Show debug level messages"));
        // The "-console" option is handled by the install4j launcher
        options.addOption(null, "console", false, Localization.lang("Show console output (only when the launcher is used)"));
        options.addOption(Option.builder("i").
                longOpt("import").
                desc(String.format("%s: '%s'", Localization.lang("Import file"), "-i library.bib")).
                hasArg().
                argName("FILE[,FORMAT]").
                build());
        options.addOption(Option.builder().
                longOpt("importToOpen").
                desc(Localization.lang("Same as --import, but will be imported to the opened tab")).
                hasArg().
                argName("FILE[,FORMAT]").
                build());
        options.addOption(Option.builder("ib").
                longOpt("importBibtex").
                desc(String.format("%s: '%s'", Localization.lang("Import BibTeX"), "-ib @article{entry}")).
                hasArg().
                argName("BIBTEXT_STRING").
                build());
        options.addOption(Option.builder("o").
                longOpt("output").
                desc(String.format("%s: '%s'", Localization.lang("Export an input to a file"), "-i db.bib -o db.htm,html")).
                hasArg().
                argName("FILE[,FORMAT]").
                build());
        options.addOption(Option.builder("m").
                longOpt("exportMatches").
                desc(String.format("%s: '%s'", Localization.lang("Matching"), "-i db.bib -m author=Newton,search.htm,html")).
                hasArg().
                argName("QUERY,FILE[,FORMAT]").
                build());
        options.addOption(Option.builder("f").
                longOpt("fetch").
                desc(String.format("%s: '%s'", Localization.lang("Run fetcher"), "-f Medline/PubMed:cancer")).
                hasArg().
                argName("FETCHER:QUERY").
                build());
        options.addOption(Option.builder("a").
                longOpt("aux").
                desc(String.format("%s: '%s'", Localization.lang("Sublibrary from AUX to BibTeX"), "-a thesis.aux,new.bib")).
                hasArg().
                argName("FILE[.aux],FILE[.bib] FILE").
                build());
        options.addOption(Option.builder("x").
                longOpt("prexp").
                desc(String.format("%s: '%s'", Localization.lang("Export preferences to a file"), "-x prefs.xml")).
                hasArg().
                argName("[FILE]").
                build());
        options.addOption(Option.builder("p").
                longOpt("primp").
                desc(String.format("%s: '%s'", Localization.lang("Import preferences from a file"), "-p prefs.xml")).
                hasArg().
                argName("[FILE]").
                build());
        options.addOption(Option.builder("d").
                longOpt("prdef").
                desc(String.format("%s: '%s'", Localization.lang("Reset preferences"), "-d mainFontSize,newline' or '-d all")).
                hasArg().
                argName("KEY1[,KEY2][,KEYn] | all").
                build());
        return options;
    }

    /** Prints the JabRef version line to stdout. */
    public void displayVersion() {
        System.out.println(getVersionInfo());
    }

    /** Prints the full usage/help text, including import and export formats. */
    public static void printUsage() {
        String header = "";
        String importFormats = Globals.IMPORT_FORMAT_READER.getImportFormatList();
        String importFormatsList = String.format("%s:%n%s%n", Localization.lang("Available import formats"), importFormats);
        String outFormats = Globals.exportFactory.getExportersAsString(70, 20, "");
        String outFormatsList = String.format("%s: %s%n", Localization.lang("Available export formats"), outFormats);
        String footer = '\n' + importFormatsList + outFormatsList + "\nPlease report issues at https://github.com/JabRef/jabref/issues.";
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(WIDTH, "jabref [OPTIONS] [BIBTEX_FILE]\n\nOptions:", header, getOptions(), footer, true);
    }

    private String getVersionInfo() {
        return String.format("JabRef %s", Globals.BUILD_INFO.getVersion());
    }

    /** Returns the non-option arguments left over after parsing. */
    public List<String> getLeftOver() {
        return leftOver;
    }
}
| |
/*
* GeoTools - The Open Source Java GIS Toolkit
* http://geotools.org
*
* (C) 2002-2008, Open Source Geospatial Foundation (OSGeo)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation;
* version 2.1 of the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*/
package org.geotools.gml;
// Java Topology Suite dependencies
import java.util.ArrayList;
import java.util.logging.Logger;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.Polygon;
import com.vividsolutions.jts.geom.TopologyException;
/**
* Creates a Polygon geometry.
*
* @author Ian Turton, CCG
* @author Rob Hranac, Vision for New York
*
*
* @source $URL$
* @version $Id$
*/
public class SubHandlerPolygon extends SubHandler {
    /** The logger for the GML module. */
    private static final Logger LOGGER = org.geotools.util.logging.Logging.getLogger("org.geotools.gml");

    /** Robust computational-geometry algorithms, used for ring orientation tests. */
    protected static com.vividsolutions.jts.algorithm.CGAlgorithms cga = new com.vividsolutions.jts.algorithm.RobustCGAlgorithms();

    /** Indicates that we are inside the inner boundary of the Polygon. */
    private static final int INNER_BOUNDARY = 1;

    /** Indicates that we are inside the outer boundary of the Polygon. */
    private static final int OUTER_BOUNDARY = 2;

    /** Factory for creating the Polygon geometry. */
    private GeometryFactory geometryFactory = new GeometryFactory();

    /** Handler for the LinearRings that comprise the Polygon. */
    private SubHandlerLinearRing currentHandler = new SubHandlerLinearRing();

    /** Stores Polygon's outer boundary (shell). */
    private LinearRing outerBoundary = null;

    /** Stores Polygon's inner boundaries (holes). */
    private ArrayList innerBoundaries = new ArrayList();

    /**
     * Remembers the current location in the parsing stream (inner or outer
     * boundary).
     */
    private int location = 0;

    /**
     * Creates a new instance of GMLPolygonHandler.
     */
    public SubHandlerPolygon() {
    }

    /**
     * Catches inner and outer LinearRings messages and handles them
     * appropriately: finished rings are stored as the shell or as a hole
     * (reversed first when their orientation is wrong), geometry starts reset
     * the ring handler, and boundary markers switch the parse location.
     *
     * @param message Name of sub geometry located.
     * @param type Type of sub geometry located.
     */
    public void subGeometry(String message, int type) {
        if (message.equals("LinearRing")) {
            if (type == GEOMETRY_END) {
                if (location == INNER_BOUNDARY) {
                    LinearRing ring = (LinearRing) currentHandler.create(geometryFactory);
                    Coordinate[] points = ring.getCoordinates();

                    /* it is important later that internal rings (holes) are
                     * anticlockwise (counter clockwise) - so we reverse the
                     * points if necessary
                     */
                    if (cga.isCCW(points)) {
                        LOGGER.finer("good hole found");
                        innerBoundaries.add(ring);
                    } else {
                        LOGGER.finer("bad hole found - fixing");
                        try {
                            // Rebuild the ring from the reversed coordinates;
                            // on failure the hole is simply dropped.
                            innerBoundaries.add(geometryFactory.createLinearRing(reverse(points)));
                        } catch (TopologyException e) {
                            LOGGER.warning("Caught Topology exception in GMLPolygonHandler");
                        }
                    }
                } else if (location == OUTER_BOUNDARY) {
                    /* it is important later that the outerboundary is
                     * clockwise - so we reverse the
                     * points if necessary
                     */
                    outerBoundary = (LinearRing) currentHandler.create(geometryFactory);

                    Coordinate[] points = outerBoundary.getCoordinates();
                    if (cga.isCCW(points)) {
                        LOGGER.finer("bad outer ring - rebuilding");
                        try {
                            outerBoundary = geometryFactory.createLinearRing(reverse(points));
                        } catch (TopologyException e) {
                            LOGGER.warning("Caught Topology exception in GMLPolygonHandler");
                            outerBoundary = null;
                        }
                    }
                }
            } else if (type == GEOMETRY_START) {
                currentHandler = new SubHandlerLinearRing();
            }
        } else if (message.equals("outerBoundaryIs")) {
            // Notice of an inner/outer boundary marker: set current location.
            LOGGER.finer("new outer Boundary");
            location = OUTER_BOUNDARY;
        } else if (message.equals("innerBoundaryIs")) {
            LOGGER.finer("new InnerBoundary");
            location = INNER_BOUNDARY;
        }
    }

    /** Returns a new array holding {@code points} in reverse order. */
    private static Coordinate[] reverse(Coordinate[] points) {
        Coordinate[] reversed = new Coordinate[points.length];
        for (int i = 0, j = points.length - 1; i < points.length; i++, j--) {
            reversed[i] = points[j];
        }
        return reversed;
    }

    /**
     * Adds a coordinate to the current LinearRing.
     *
     * @param coordinate Coordinate to append to the ring under construction.
     */
    public void addCoordinate(Coordinate coordinate) {
        currentHandler.addCoordinate(coordinate);
    }

    /**
     * Determines whether or not the geometry is ready to be returned.
     *
     * @param message Name of GML element that prompted this check.
     *
     * @return Flag indicating whether or not the geometry is ready to be
     *         returned: true only for a Polygon end element once an outer
     *         boundary has been captured.
     */
    public boolean isComplete(String message) {
        return message.equals("Polygon") && (outerBoundary != null);
    }

    /**
     * Returns the completed OGC Polygon.
     *
     * @param geometryFactory Geometry factory to be used in Polygon creation.
     *
     * @return Completed OGC Polygon, or null when a hole crosses the shell
     *         (topology error).
     */
    public Geometry create(GeometryFactory geometryFactory) {
        for (int i = 0; i < innerBoundaries.size(); i++) {
            LinearRing hole = (LinearRing) innerBoundaries.get(i);
            if (hole.crosses(outerBoundary)) {
                LOGGER.warning("Topology Error building polygon");
                return null;
            }
        }

        LinearRing[] rings =
            (LinearRing[]) innerBoundaries.toArray(new LinearRing[innerBoundaries.size()]);
        Polygon polygon = geometryFactory.createPolygon(outerBoundary, rings);
        polygon.setUserData( getSRS() );
        polygon.setSRID( getSRID() );
        return polygon;
    }
}
| |
package info.openmods.calc;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import info.openmods.calc.utils.Stack;
import info.openmods.calc.utils.StackUnderflowException;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit tests for {@link Stack}: push/pop/peek behaviour, the substack view
 * (which shares storage with its parent stack), drop and clear operations.
 */
public class StackTest {
    // Shared stack under test; a fresh instance is created for every test method.
    public Stack<Integer> stack = Stack.create();

    // Asserts that the shared test stack is empty by both isEmpty() and size().
    private void checkStackEmpty() {
        Assert.assertTrue(stack.isEmpty());
        Assert.assertEquals(0, stack.size());
    }

    // Asserts the stack holds exactly `values` in iteration order.
    private static void assertValuesOnStack(Stack<Integer> stack, Integer... values) {
        Assert.assertEquals("length", values.length, stack.size());
        Assert.assertEquals("values", ImmutableList.copyOf(stack), ImmutableList.copyOf(values));
    }

    // Asserts two stacks have identical size and contents.
    private static void assertEquals(final Stack<Integer> expected, Stack<Integer> actual) {
        Assert.assertEquals(expected.size(), actual.size());
        Assert.assertEquals(ImmutableList.copyOf(expected), ImmutableList.copyOf(actual));
    }

    @Test
    public void testInitialEmpty() {
        checkStackEmpty();
    }

    @Test
    public void testPushPopSingleValue() {
        final Integer value = 24323;
        stack.push(value);
        Assert.assertFalse(stack.isEmpty());
        Assert.assertEquals(1, stack.size());
        // peek(0) addresses the top element.
        Assert.assertEquals(stack.peek(0), value);

        final Integer retValue = stack.pop();
        Assert.assertEquals(value, retValue);
        checkStackEmpty();
    }

    @Test
    public void testPushPopTwoValues() {
        final Integer value1 = 24323;
        stack.push(value1);
        final Integer value2 = 54354;
        stack.push(value2);
        Assert.assertEquals(2, stack.size());
        // peek depth counts down from the top: 0 is the newest element.
        Assert.assertEquals(value2, stack.peek(0));
        Assert.assertEquals(value1, stack.peek(1));

        final Integer retValue2 = stack.pop();
        Assert.assertEquals(value2, retValue2);
        final Integer retValue1 = stack.pop();
        Assert.assertEquals(value1, retValue1);
        checkStackEmpty();
    }

    @Test(expected = StackUnderflowException.class)
    public void testPeekEmptyStack() {
        stack.peek(0);
    }

    @Test(expected = StackUnderflowException.class)
    public void testPeekUnderTopStack() {
        stack.push(15);
        stack.peek(1);
    }

    // A zero-length substack starts empty but shares storage with its parent:
    // pushes on the substack appear on top of the parent and vice versa.
    @Test
    public void zeroLengthSubstack() {
        stack.push(1);
        stack.push(2);
        stack.push(3);

        final Stack<Integer> substack = stack.substack(0);
        Assert.assertEquals(0, substack.size());
        Assert.assertTrue(substack.isEmpty());
        Assert.assertTrue(Iterables.isEmpty(substack));

        final Integer testValue = 32432;
        substack.push(testValue);
        Assert.assertEquals(1, substack.size());
        Assert.assertFalse(substack.isEmpty());
        Assert.assertEquals(4, stack.size());

        Assert.assertEquals(testValue, substack.peek(0));
        Assert.assertEquals(testValue, stack.peek(0));
        assertValuesOnStack(substack, testValue);

        // Popping from the parent also removes the element from the substack view.
        Assert.assertEquals(testValue, stack.pop());
        Assert.assertEquals(0, substack.size());
        Assert.assertTrue(substack.isEmpty());
    }

    @Test
    public void singleElementSubstack() {
        stack.push(1);
        stack.push(2);
        stack.push(3);

        final Integer testValue = 32432;
        stack.push(testValue);

        // substack(1) is a view of just the topmost element.
        final Stack<Integer> substack = stack.substack(1);
        Assert.assertEquals(1, substack.size());
        Assert.assertFalse(substack.isEmpty());
        Assert.assertTrue(Iterables.elementsEqual(substack, ImmutableList.of(testValue)));

        Assert.assertEquals(testValue, substack.peek(0));
        Assert.assertEquals(testValue, substack.pop());

        Assert.assertTrue(Iterables.isEmpty(substack));
        Assert.assertEquals(0, substack.size());
        Assert.assertTrue(substack.isEmpty());

        // The pop through the substack shrank the parent stack too.
        Assert.assertEquals(3, stack.size());
    }

    @Test
    public void twoElementSubstack() {
        stack.push(1);
        stack.push(2);
        stack.push(3);

        final Stack<Integer> substack = stack.substack(2);
        assertValuesOnStack(substack, 2, 3);
    }

    @Test
    public void testStackAsSubstackNonZeroLength() {
        stack.push(1);
        stack.push(2);
        stack.push(3);

        // A substack covering every element is equivalent to the stack itself.
        final Stack<Integer> substack = stack.substack(3);
        Assert.assertEquals(3, substack.size());
        assertEquals(stack, substack);
    }

    @Test
    public void testStackAsSubstackZeroLength() {
        final Stack<Integer> substack = stack.substack(0);
        Assert.assertEquals(0, substack.size());
        Assert.assertTrue(substack.isEmpty());

        substack.push(1);
        substack.push(2);
        substack.push(3);

        Assert.assertEquals(3, substack.size());
        Assert.assertEquals(3, stack.size());
        assertEquals(stack, substack);
    }

    @Test(expected = StackUnderflowException.class)
    public void zeroLengthSubstackUnderflow() {
        stack.substack(1);
    }

    @Test(expected = StackUnderflowException.class)
    public void nonZeroLengthSubstackUnderflow() {
        stack.push(1);
        stack.substack(2);
    }

    // Substacks can be taken of substacks; each is a window onto the same storage.
    @Test
    public void twoMultipleSubstackOperations() {
        stack.push(1);
        stack.push(2);
        stack.push(3);
        stack.push(4);

        final Stack<Integer> substack = stack.substack(3);
        assertValuesOnStack(substack, 2, 3, 4);

        {
            final Stack<Integer> subsubstack = substack.substack(3);
            assertValuesOnStack(subsubstack, 2, 3, 4);
        }

        {
            final Stack<Integer> subsubstack = substack.substack(2);
            assertValuesOnStack(subsubstack, 3, 4);
        }

        {
            final Stack<Integer> subsubstack = substack.substack(1);
            assertValuesOnStack(subsubstack, 4);
        }

        {
            final Stack<Integer> subsubstack = substack.substack(0);
            assertValuesOnStack(subsubstack);
        }
    }

    @Test
    public void twoSubstackUnderflow() {
        stack.push(1);
        stack.push(2);
        stack.push(3);

        final Stack<Integer> substack = stack.substack(1);
        // Requesting more elements than the substack holds must underflow.
        try {
            substack.substack(2);
            Assert.fail();
        } catch (StackUnderflowException e) {}
    }

    // drop(n) removes and returns the element n positions below the top.
    @Test
    public void testDropFromTop() {
        stack.push(1);
        stack.push(2);
        stack.push(3);

        Assert.assertEquals(Integer.valueOf(3), stack.drop(0));
        assertValuesOnStack(stack, 1, 2);
    }

    @Test
    public void testDropFromMiddle() {
        stack.push(1);
        stack.push(2);
        stack.push(3);

        Assert.assertEquals(Integer.valueOf(2), stack.drop(1));
        assertValuesOnStack(stack, 1, 3);
    }

    @Test
    public void testDropFromBottom() {
        stack.push(1);
        stack.push(2);
        stack.push(3);

        Assert.assertEquals(Integer.valueOf(1), stack.drop(2));
        assertValuesOnStack(stack, 2, 3);
    }

    @Test(expected = StackUnderflowException.class)
    public void testInvalidDrop() {
        stack.drop(0);
    }

    @Test
    public void testClearAll() {
        stack.push(1);
        stack.push(2);
        stack.push(3);

        stack.clear();
        Assert.assertTrue(stack.isEmpty());
    }

    // Clearing a substack removes only the elements inside the view.
    @Test
    public void testClearSubstack() {
        stack.push(1);
        stack.push(2);
        stack.push(3);
        stack.push(4);

        final Stack<Integer> substack = stack.substack(2);
        substack.clear();
        Assert.assertTrue(substack.isEmpty());

        assertValuesOnStack(stack, 1, 2);
    }

    @Test
    public void testClearEmptySubstack() {
        stack.push(1);
        stack.push(2);
        stack.push(3);

        final Stack<Integer> substack = stack.substack(0);
        substack.clear();
        Assert.assertTrue(substack.isEmpty());

        assertValuesOnStack(stack, 1, 2, 3);
    }

    @Test
    public void testClearFullSubstack() {
        stack.push(1);
        stack.push(2);
        stack.push(3);

        final Stack<Integer> substack = stack.substack(3);
        substack.clear();
        Assert.assertTrue(substack.isEmpty());
        Assert.assertTrue(stack.isEmpty());
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.util;
import com.intellij.CommonBundle;
import com.intellij.history.LocalHistory;
import com.intellij.history.LocalHistoryAction;
import com.intellij.ide.DataManager;
import com.intellij.ide.DeleteProvider;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.LangDataKeys;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.fileEditor.impl.NonProjectFileWritingAccessProvider;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.ex.MessagesEx;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vfs.VFileProperty;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.WritingAccessProvider;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtilBase;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.safeDelete.SafeDeleteDialog;
import com.intellij.refactoring.safeDelete.SafeDeleteProcessor;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.refactoring.util.RefactoringUIUtil;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.ReadOnlyAttributeUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
/**
 * Shared implementation of the "Delete" action for PSI elements: prefers the
 * Safe Delete refactoring (with usage search) when every element supports it
 * and indices are available, otherwise falls back to a confirmed plain delete.
 */
public class DeleteHandler {
  // Utility class; all entry points are static.
  private DeleteHandler() {
  }

  /** {@link DeleteProvider} that deletes the PSI elements found in a {@link DataContext}. */
  public static class DefaultDeleteProvider implements DeleteProvider {
    @Override
    public boolean canDeleteElement(@NotNull DataContext dataContext) {
      // A project is required both for the dialogs and for Local History.
      if (CommonDataKeys.PROJECT.getData(dataContext) == null) {
        return false;
      }
      final PsiElement[] elements = getPsiElements(dataContext);
      return shouldEnableDeleteAction(elements);
    }

    /**
     * Resolves the deletion targets from the context: the PSI element array if
     * present, else the single PSI element, else the PSI file.
     */
    @Nullable
    private static PsiElement[] getPsiElements(DataContext dataContext) {
      PsiElement[] elements = LangDataKeys.PSI_ELEMENT_ARRAY.getData(dataContext);
      if (elements == null) {
        final PsiElement data = CommonDataKeys.PSI_ELEMENT.getData(dataContext);
        if (data != null) {
          elements = new PsiElement[]{data};
        }
        else {
          final PsiFile data1 = CommonDataKeys.PSI_FILE.getData(dataContext);
          if (data1 != null) {
            elements = new PsiElement[]{data1};
          }
        }
      }
      return elements;
    }

    @Override
    public void deleteElement(@NotNull DataContext dataContext) {
      PsiElement[] elements = getPsiElements(dataContext);
      if (elements == null) return;
      Project project = CommonDataKeys.PROJECT.getData(dataContext);
      if (project == null) return;
      // Wrap the whole operation in a Local History action so it can be reverted.
      LocalHistoryAction a = LocalHistory.getInstance().startAction(IdeBundle.message("progress.deleting"));
      try {
        deletePsiElement(elements, project);
      }
      finally {
        a.finish();
      }
    }
  }

  /** Deletes the elements with user confirmation enabled. */
  public static void deletePsiElement(final PsiElement[] elementsToDelete, final Project project) {
    deletePsiElement(elementsToDelete, project, true);
  }

  /**
   * Deletes the given elements. When every element is a valid Safe Delete
   * target and the project is not in dumb mode, shows the Safe Delete dialog
   * (whose callback may run the refactoring itself); otherwise shows a plain
   * warning dialog and performs a direct delete.
   *
   * @param needConfirmation when {@code false}, skips all confirmation dialogs
   */
  public static void deletePsiElement(final PsiElement[] elementsToDelete, final Project project, boolean needConfirmation) {
    if (elementsToDelete == null || elementsToDelete.length == 0) return;

    // Drop elements whose ancestor is also selected; deleting the ancestor suffices.
    final PsiElement[] elements = PsiTreeUtil.filterAncestors(elementsToDelete);

    boolean safeDeleteApplicable = Arrays.stream(elements).allMatch(SafeDeleteProcessor::validElement);

    final boolean dumb = DumbService.getInstance(project).isDumb();
    if (safeDeleteApplicable && !dumb) {
      // `exit` is set when the Safe Delete processor already ran from the dialog callback.
      final Ref<Boolean> exit = Ref.create(false);
      final SafeDeleteDialog dialog = new SafeDeleteDialog(project, elements, new SafeDeleteDialog.Callback() {
        @Override
        public void run(final SafeDeleteDialog dialog) {
          if (!CommonRefactoringUtil.checkReadOnlyStatusRecursively(project, Arrays.asList(elements), true)) return;

          SafeDeleteProcessor processor = SafeDeleteProcessor.createInstance(project, () -> {
            exit.set(true);
            dialog.close(DialogWrapper.OK_EXIT_CODE);
          }, elements, dialog.isSearchInComments(), dialog.isSearchForTextOccurences(), true);

          processor.run();
        }
      }) {
        @Override
        protected boolean isDelete() {
          return true;
        }
      };
      if (needConfirmation) {
        dialog.setTitle(RefactoringBundle.message("delete.title"));
        // If Safe Delete already handled the deletion (exit == true), nothing is left to do.
        if (!dialog.showAndGet() || exit.get()) {
          return;
        }
      }
    }
    else {
      @SuppressWarnings({"UnresolvedPropertyKey"})
      String warningMessage = DeleteUtil.generateWarningMessage(IdeBundle.message("prompt.delete.elements"), elements);

      // Warn specifically when a (non-symlink) directory is among the targets.
      boolean anyDirectories = false;
      String directoryName = null;
      for (PsiElement psiElement : elementsToDelete) {
        if (psiElement instanceof PsiDirectory && !PsiUtilBase.isSymLink((PsiDirectory)psiElement)) {
          anyDirectories = true;
          directoryName = ((PsiDirectory)psiElement).getName();
          break;
        }
      }
      if (anyDirectories) {
        if (elements.length == 1) {
          warningMessage += IdeBundle.message("warning.delete.all.files.and.subdirectories", directoryName);
        }
        else {
          warningMessage += IdeBundle.message("warning.delete.all.files.and.subdirectories.in.the.selected.directory");
        }
      }

      if (safeDeleteApplicable) {
        // Safe Delete would apply, but dumb mode prevents the usage search.
        warningMessage += "\n\nWarning:\n Safe delete is not available while " +
                          ApplicationNamesInfo.getInstance().getFullProductName() +
                          " updates indices,\n no usages will be checked.";
      }

      if (needConfirmation) {
        int result = Messages.showOkCancelDialog(project, warningMessage, IdeBundle.message("title.delete"),
                                                 ApplicationBundle.message("button.delete"), CommonBundle.getCancelButtonText(),
                                                 Messages.getQuestionIcon());
        if (result != Messages.OK) return;
      }
    }

    deleteInCommand(project, elements);
  }

  // Checks out / makes writable the elements and their parent directories.
  private static boolean makeWritable(Project project, PsiElement[] elements) {
    Collection<PsiElement> directories = ContainerUtil.newSmartList();
    for (PsiElement e : elements) {
      if (e instanceof PsiFileSystemItem && e.getParent() != null) {
        directories.add(e.getParent());
      }
    }
    return CommonRefactoringUtil.checkReadOnlyStatus(project, Arrays.asList(elements), directories, false);
  }

  /**
   * Runs the actual deletion inside a single undoable command, tracking
   * elements with smart pointers so that elements invalidated by earlier
   * deletions in the loop are skipped.
   */
  private static void deleteInCommand(Project project, PsiElement[] elements) {
    CommandProcessor.getInstance().executeCommand(project, () -> NonProjectFileWritingAccessProvider.disableChecksDuring(() -> {
      SmartPointerManager smartPointerManager = SmartPointerManager.getInstance(project);
      List<SmartPsiElementPointer> pointers = ContainerUtil.map(elements, smartPointerManager::createSmartPsiElementPointer);
      if (!makeWritable(project, elements)) return;

      // deleted from project view or something like that.
      if (CommonDataKeys.EDITOR.getData(DataManager.getInstance().getDataContext()) == null) {
        CommandProcessor.getInstance().markCurrentCommandAsGlobal(project);
      }

      for (SmartPsiElementPointer pointer : pointers) {
        PsiElement elementToDelete = pointer.getElement();
        if (elementToDelete == null) continue; //was already deleted
        doDelete(project, elementToDelete);
      }
    }), RefactoringBundle.message("safe.delete.command", RefactoringUIUtil.calculatePsiElementDescriptionList(elements)), null);
  }

  /**
   * Prompts to clear read-only flags on the element (or on the read-only files
   * inside a directory) before deletion. Returns {@code false} if the user
   * declined or clearing failed.
   */
  private static boolean clearFileReadOnlyFlags(Project project, PsiElement elementToDelete) {
    if (elementToDelete instanceof PsiDirectory) {
      VirtualFile virtualFile = ((PsiDirectory)elementToDelete).getVirtualFile();
      if (virtualFile.isInLocalFileSystem() && !virtualFile.is(VFileProperty.SYMLINK)) {
        ArrayList<VirtualFile> readOnlyFiles = new ArrayList<>();
        CommonRefactoringUtil.collectReadOnlyFiles(virtualFile, readOnlyFiles);
        if (!readOnlyFiles.isEmpty()) {
          String message = IdeBundle.message("prompt.directory.contains.read.only.files", virtualFile.getPresentableUrl());
          int _result = Messages.showYesNoDialog(project, message, IdeBundle.message("title.delete"), Messages.getQuestionIcon());
          if (_result != Messages.YES) return false;

          boolean success = true;
          for (VirtualFile file : readOnlyFiles) {
            success = clearReadOnlyFlag(file, project);
            if (!success) break;
          }
          if (!success) return false;
        }
      }
    }
    else if (!elementToDelete.isWritable() &&
             !(elementToDelete instanceof PsiFileSystemItem && PsiUtilBase.isSymLink((PsiFileSystemItem)elementToDelete))) {
      final PsiFile file = elementToDelete.getContainingFile();
      if (file != null) {
        final VirtualFile virtualFile = file.getVirtualFile();
        if (virtualFile.isInLocalFileSystem()) {
          int _result = MessagesEx.fileIsReadOnly(project, virtualFile)
            .setTitle(IdeBundle.message("title.delete"))
            .appendMessage(" " + IdeBundle.message("prompt.delete.it.anyway"))
            .askYesNo();
          if (_result != Messages.YES) return false;

          boolean success = clearReadOnlyFlag(virtualFile, project);
          if (!success) return false;
        }
      }
    }
    return true;
  }

  /**
   * Deletes a single element: checks deletability first (reporting failures
   * with an error dialog), then performs the delete inside a write action.
   */
  private static void doDelete(Project project, PsiElement element) {
    if (!clearFileReadOnlyFlags(project, element)) return;
    try {
      element.checkDelete();
    }
    catch (IncorrectOperationException ex) {
      Messages.showMessageDialog(project, ex.getMessage(), CommonBundle.getErrorTitle(), Messages.getErrorIcon());
      return;
    }

    ApplicationManager.getApplication().runWriteAction(() -> {
      try {
        element.delete();
      }
      catch (final IncorrectOperationException ex) {
        // Delivered later because the dialog cannot be shown from inside the write action.
        ApplicationManager.getApplication().invokeLater(
          () -> Messages.showMessageDialog(project, ex.getMessage(), CommonBundle.getErrorTitle(), Messages.getErrorIcon()));
      }
    });
  }

  // Clears the OS read-only attribute on a file inside a write-action command.
  private static boolean clearReadOnlyFlag(final VirtualFile virtualFile, final Project project) {
    final boolean[] success = new boolean[1];
    CommandProcessor.getInstance().executeCommand(project, () -> {
      Runnable action = () -> {
        try {
          ReadOnlyAttributeUtil.setReadOnlyAttribute(virtualFile, false);
          success[0] = true;
        }
        catch (IOException e1) {
          Messages.showMessageDialog(project, e1.getMessage(), CommonBundle.getErrorTitle(), Messages.getErrorIcon());
        }
      };
      ApplicationManager.getApplication().runWriteAction(action);
    }, "", null);
    return success[0];
  }

  /**
   * Returns whether the delete action should be enabled for the selection:
   * every element must have a real (non-light) virtual file that is
   * potentially writable.
   */
  public static boolean shouldEnableDeleteAction(PsiElement[] elements) {
    if (elements == null || elements.length == 0) return false;
    for (PsiElement element : elements) {
      VirtualFile virtualFile = PsiUtilCore.getVirtualFile(element);
      if (virtualFile == null || virtualFile instanceof LightVirtualFile) {
        return false;
      }
      if (!WritingAccessProvider.isPotentiallyWritable(virtualFile, element.getProject())) {
        return false;
      }
    }
    return true;
  }
}
| |
package org.apache.hadoop.hdfs.server.namenode;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.util.Random;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
import org.apache.hadoop.hdfs.server.blockmanagement.ProvidedStorageMap.BlockProvider;
import org.apache.hadoop.hdfs.server.common.TextFileRegionFormat;
import org.apache.hadoop.hdfs.server.common.TextFileRegionFormat.ReaderOptions;
import org.apache.hadoop.hdfs.server.mover.Mover;
import org.apache.hadoop.hdfs.server.namenode.TreeWalk.TreeIterator;
import org.apache.hadoop.util.ToolRunner;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.mortbay.log.Log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
public class TestNNLoad {
@Rule public TestName name = new TestName();
public static final Logger LOG = LoggerFactory.getLogger(TestNNLoad.class);
final Random r = new Random();
// Scratch directory for the mini-cluster; wiped before each test.
final File fBASE = new File(MiniDFSCluster.getBaseDirectory());
final Path BASE = new Path(fBASE.toURI().toString());
// CSV file that the text block format writes/reads block regions to/from.
final Path BLOCKFILE = new Path(BASE, "blocks.csv");
// NOTE(review): hard-coded developer-local path — this test cannot run on
// other machines as-is; TODO parameterize.
final Path NAMEPATH = new Path("file:///home/virajith/Desktop/protobuf/benchmarks");
final Path AZURE_NAMEPATH = new Path("wasb://x@y.blob.core.windows.net");
// Owner and group that SingleUGIResolver reports for every file.
final String SINGLEUSER = "dingo";
final String SINGLEGROUP = "yak";
Configuration conf;
MiniDFSCluster cluster;
// Configuration keys for the provided-block provider class and storage id.
public static final String PROVIDER = "hdfs.namenode.block.provider.class";
public static final String STORAGE_ID = "hdfs.namenode.block.provider.id";
/**
 * Per-test setup: wipes the cluster base directory, logs the random seed so a
 * failing run can be reproduced, and builds a configuration wired for
 * provided storage (text block format file, provider class, storage id,
 * reader options).
 */
@Before
public void setSeed() throws Exception {
  if (fBASE.exists() && !FileUtil.fullyDelete(fBASE)) {
    throw new IOException("Could not fully delete " + fBASE);
  }
  long seed = r.nextLong();
  r.setSeed(seed);
  System.out.println(name.getMethodName() + " seed: " + seed);
  conf = new HdfsConfiguration();
  conf.set(SingleUGIResolver.USER, SINGLEUSER);
  conf.set(SingleUGIResolver.GROUP, SINGLEGROUP);
  conf.set(TextFileRegionFormat.WriterOptions.FILEPATH, BLOCKFILE.toString());
  conf.setClass(PROVIDER, BlockFormatProvider.class, BlockProvider.class);
  conf.set(STORAGE_ID, DFSConfigKeys.DFS_NAMENODE_PROVIDED_STORAGEUUID);
  conf.setBoolean(DFSConfigKeys.DFS_DATANODE_PROVIDED, true);
  conf.set(ReaderOptions.FILEPATH, BLOCKFILE.toString());
  conf.set(ReaderOptions.DELIMITER, ",");
  // NOTE(review): account key for the wasb:// path above — presumably a
  // test-only placeholder; confirm it is not a real secret.
  conf.set("fs.azure.account.key.y.blob.core.windows.net",
      "azkey");
}
/**
 * Tears down the mini-cluster after each test. The field is cleared
 * unconditionally so a failed shutdown cannot leak into the next test.
 */
@After
public void shutdown() throws Exception {
  final MiniDFSCluster running = cluster;
  cluster = null;
  if (running != null) {
    running.shutdown(true, true);
  }
}
/**
 * Writes an FSImage of the namespace produced by {@code t} to {@code out},
 * using the text block format and the given block-id resolver class.
 */
void createImage(TreeWalk t, Path out, Class<? extends BlockResolver> blockIdsClass) throws Exception {
  ImageWriter.Options opts = ImageWriter.defaults();
  opts.setConf(conf);
  opts.output(out.toString())
      .blocks(TextFileRegionFormat.class)
      .ugi(FsUGIResolver.class)
      .blockIds(blockIdsClass);
  // ImageWriter is AutoCloseable; close() finalizes the image.
  try (ImageWriter w = new ImageWriter(opts)) {
    for (TreePath e : t) {
      w.accept(e);
    }
  }
}
/**
 * Starts a MiniDFSCluster whose NameNode loads the previously written image
 * from {@code nspath} (no reformat, name dirs managed by the caller) and
 * waits until it is active.
 */
void startCluster(Path nspath, int numDatanodes) throws IOException {
  conf.set(DFS_NAMENODE_NAME_DIR_KEY, nspath.toString());
  cluster = new MiniDFSCluster.Builder(conf)
      .format(false)
      .manageNameDfsDirs(false)
      .numDataNodes(numDatanodes)
      .build();
  cluster.waitActive();
}
/**
 * Generates a random namespace, writes it as an image, loads it into the
 * NameNode, and verifies that path, permission, length, owner/group and
 * timestamps all round-trip.
 */
@Test(timeout = 20000)
public void testLoadImage() throws Exception {
  final long seed = r.nextLong();
  createImage(new RandomTreeWalk(seed), NAMEPATH, FixedBlockResolver.class);
  startCluster(NAMEPATH, 0);
  // Re-walk the same random tree (same seed) and compare with NN metadata.
  FileSystem fs = cluster.getFileSystem();
  for (TreePath e : new RandomTreeWalk(seed)) {
    FileStatus rs = e.getFileStatus();
    Path hp = new Path(rs.getPath().toUri().getPath());
    assertTrue(fs.exists(hp));
    FileStatus hs = fs.getFileStatus(hp);
    assertEquals(rs.getPath().toUri().getPath(),
        hs.getPath().toUri().getPath());
    assertEquals(rs.getPermission(), hs.getPermission());
    // TODO: loaded later? Not reflected, yet.
    //assertEquals(rs.getReplication(), hs.getReplication());
    //assertEquals(rs.getBlockSize(), hs.getBlockSize());
    assertEquals(rs.getLen(), hs.getLen());
    // Ownership comes from SingleUGIResolver, not from the source tree.
    assertEquals(SINGLEUSER, hs.getOwner());
    assertEquals(SINGLEGROUP, hs.getGroup());
    assertEquals(rs.getAccessTime(), hs.getAccessTime());
    assertEquals(rs.getModificationTime(), hs.getModificationTime());
  }
}
/**
 * Smoke test: a cluster with one DataNode starts successfully against an
 * image of the NAMEPATH directory.
 */
@Test(timeout=20000)
public void testBlockLoad() throws Exception {
  createImage(new FSTreeWalk(NAMEPATH, conf), NAMEPATH, FixedBlockResolver.class);
  startCluster(NAMEPATH, 1);
}
/**
 * Loads an image of NAMEPATH with a target replication of 2 on a 3-DataNode
 * cluster and checks the reported replica count for every block of every
 * file.
 */
@Test //(timeout=500000)
public void testDefaultLoadReplication() throws Exception {
  int targetReplication = 2;
  conf.setInt(FixedBlockMultiReplicaResolver.REPLICATION, targetReplication);
  createImage(new FSTreeWalk(NAMEPATH, conf), NAMEPATH, FixedBlockMultiReplicaResolver.class);
  startCluster(NAMEPATH, 3);
  // NOTE(review): fixed 100-second sleep waiting for replication to settle —
  // consider polling block locations instead.
  Thread.sleep(100000);
  FileSystem fs = cluster.getFileSystem();
  int count = 0;
  for (TreePath e : new FSTreeWalk(NAMEPATH, conf)) {
    FileStatus rs = e.getFileStatus();
    Path hp = removePrefix(NAMEPATH, rs.getPath());
    LOG.info("hp " + hp.toUri().getPath());
    //skip HDFS specific files, which may have been created later on.
    if(hp.toString().contains("in_use.lock") || hp.toString().contains("current"))
      continue;
    e.accept(count++);
    assertTrue(fs.exists(hp));
    FileStatus hs = fs.getFileStatus(hp);
    if (rs.isFile()) {
      BlockLocation[] bl = fs.getFileBlockLocations(hs.getPath(), 0, hs.getLen());
      int i = 0;
      for(; i < bl.length; i++) {
        int currentRep = bl[i].getHosts().length;
        //+1 is due to caching kicking in when we read! -- TODO have to do this more intelligently!
        assertEquals(targetReplication + 1, currentRep);
      }
    }
  }
}
/**
 * Rewrites {@code walk} as an absolute path relative to {@code base},
 * e.g. base=/a, walk=/a/b/c yields /b/c.
 *
 * Throws IllegalArgumentException if {@code base} is not a prefix of
 * {@code walk}.
 */
static Path removePrefix(Path base, Path walk) {
  Path wpath = new Path(walk.toUri().getPath());
  Path bpath = new Path(base.toUri().getPath());
  Path ret = new Path("/");
  // Climb from the leaf towards the root, prepending each name component to
  // `ret`, until either `base` or the filesystem root is reached.
  while (!(bpath.equals(wpath) || "".equals(wpath.getName()))) {
    ret = "".equals(ret.getName())
        ? new Path("/", wpath.getName())
        : new Path(new Path("/", wpath.getName()),
            new Path(ret.toString().substring(1))); // substring(1) strips the leading '/'
    wpath = wpath.getParent();
  }
  if (!bpath.equals(wpath)) {
    throw new IllegalArgumentException(base + " not a prefix of " + walk);
  }
  return ret;
}
@Test //(timeout=30000)
public void testBlockRead() throws Exception {
  conf.setClass(ImageWriter.Options.UGI_CLASS, FsUGIResolver.class, UGIResolver.class);
  createImage(new FSTreeWalk(NAMEPATH, conf), NAMEPATH, FixedBlockResolver.class);
  startCluster(NAMEPATH, 3);
  FileSystem fs = cluster.getFileSystem();
  // Give the freshly started cluster a moment to settle before reading.
  Thread.sleep(2000);
  int count = 0;
  // read NN metadata, verify contents match
  // TODO NN could write, should find something else to validate
  for (TreePath e : new FSTreeWalk(NAMEPATH, conf)) {
    FileStatus rs = e.getFileStatus();
    Path hp = removePrefix(NAMEPATH, rs.getPath());
    LOG.info("hp " + hp.toUri().getPath());
    // Skip HDFS-specific files, which may have been created later on.
    if (hp.toString().contains("in_use.lock") || hp.toString().contains("current")) {
      continue;
    }
    e.accept(count++);
    assertTrue(fs.exists(hp));
    FileStatus hs = fs.getFileStatus(hp);
    // Metadata of the materialized file must match the local source file.
    assertEquals(hp.toUri().getPath(), hs.getPath().toUri().getPath());
    assertEquals(rs.getPermission(), hs.getPermission());
    assertEquals(rs.getOwner(), hs.getOwner());
    assertEquals(rs.getGroup(), hs.getGroup());
    LOG.info("File " + hp.toUri().getPath() + " User: " + hs.getOwner() + " group: " + hs.getGroup());
    if (rs.isFile()) {
      BlockLocation[] bl = fs.getFileBlockLocations(hs.getPath(), 0, hs.getLen());
      LOG.info("File " + hp.toUri().getPath() + " locations " + bl.length);
      assertEquals(rs.getLen(), hs.getLen());
      assertFileContentsEqual(fs, rs, hs);
    }
  }
  Thread.sleep(10000);
  // checking a 2nd time access
  for (TreePath e : new FSTreeWalk(NAMEPATH, conf)) {
    FileStatus rs = e.getFileStatus();
    Path hp = removePrefix(NAMEPATH, rs.getPath());
    LOG.info("hp " + hp.toUri().getPath());
    // Skip HDFS-specific files, which may have been created later on.
    if (hp.toString().contains("in_use.lock") || hp.toString().contains("current")) {
      continue;
    }
    e.accept(count++);
    assertTrue(fs.exists(hp));
    FileStatus hs = fs.getFileStatus(hp);
    assertEquals(hp.toUri().getPath(), hs.getPath().toUri().getPath());
    assertEquals(rs.getPermission(), hs.getPermission());
    assertEquals(rs.getOwner(), hs.getOwner());
    assertEquals(rs.getGroup(), hs.getGroup());
    if (rs.isFile()) {
      // Log where each block replica lives (topology path + storage type).
      BlockLocation[] bl = fs.getFileBlockLocations(hs.getPath(), 0, hs.getLen());
      StringBuilder sb = new StringBuilder();
      for (int i = 0; i < bl.length; i++) {
        String[] hosts = bl[i].getTopologyPaths();
        StorageType[] storageTypes = bl[i].getStorageTypes();
        for (int j = 0; j < hosts.length; j++) {
          sb.append(hosts[j] + ":" + storageTypes[j] + ",");
        }
        sb.append(" ");
      }
      LOG.info("File " + hp.toUri().getPath() + " " + sb.toString());
      assertFileContentsEqual(fs, rs, hs);
    }
  }
  Thread.sleep(20000);
}

/**
 * Asserts, byte for byte, that the file materialized in HDFS ({@code hs})
 * has exactly the same contents as the local source file ({@code rs}).
 * Both streams are read in 4 KiB chunks; the buffers are compacted after
 * each comparison so partial reads of differing sizes are handled.
 */
private static void assertFileContentsEqual(FileSystem fs, FileStatus rs, FileStatus hs)
    throws Exception {
  try (ReadableByteChannel i = Channels.newChannel(
           new FileInputStream(new File(rs.getPath().toUri())));
       ReadableByteChannel j = Channels.newChannel(fs.open(hs.getPath()))) {
    ByteBuffer ib = ByteBuffer.allocate(4096);
    ByteBuffer jb = ByteBuffer.allocate(4096);
    while (true) {
      int il = i.read(ib);
      int jl = j.read(jb);
      if (il < 0 || jl < 0) {
        // Both channels must reach EOF together.
        assertEquals(il, jl);
        break;
      }
      ib.flip();
      jb.flip();
      // Compare only the region both buffers have filled so far.
      int cmp = Math.min(ib.remaining(), jb.remaining());
      for (int k = 0; k < cmp; ++k) {
        assertEquals(ib.get(), jb.get());
      }
      // Keep any unconsumed bytes for the next round.
      ib.compact();
      jb.compact();
    }
  }
}
/**
 * Verifies that every file materialized from the provided image accepts a
 * storage-policy change to HOT, and that the Mover tool reports success
 * (exit code 0) when asked to enforce that policy.
 */
@Test //(timeout=30000)
public void testSetStoragePolicy() throws Exception {
  createImage(new FSTreeWalk(NAMEPATH, conf), NAMEPATH, FixedBlockResolver.class);
  startCluster(NAMEPATH, 3);
  FileSystem fs = cluster.getFileSystem();
  // Give the freshly started cluster a moment to settle before reading.
  Thread.sleep(2000);
  int count = 0;
  // read NN metadata, verify contents match
  // TODO NN could write, should find something else to validate
  for (TreePath e : new FSTreeWalk(NAMEPATH, conf)) {
    FileStatus rs = e.getFileStatus();
    Path hp = removePrefix(NAMEPATH, rs.getPath());
    LOG.info("hp " + hp.toUri().getPath());
    // Skip HDFS-specific files, which may have been created later on.
    if (hp.toString().contains("in_use.lock") || hp.toString().contains("current")) {
      continue;
    }
    e.accept(count++);
    assertTrue(fs.exists(hp));
    FileStatus hs = fs.getFileStatus(hp);
    if (rs.isFile()) {
      LOG.info("Setting policy of file: " + hp.toUri().getPath() + " to HOT");
      fs.setStoragePolicy(hs.getPath(), "HOT");
      // Run the Mover on the file; exit code 0 means the migration
      // succeeded (or nothing needed to move).
      int rc = ToolRunner.run(conf, new Mover.Cli(),
          new String[] { "-p", hs.getPath().toString() });
      assertEquals("Movement to HOT should be successful", 0, rc);
    }
  }
}
}
| |
/**
* Node.java
*
* Copyright 2014-2014 Michael Hoffer <info@michaelhoffer.de>. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY Michael Hoffer <info@michaelhoffer.de> "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL Michael Hoffer <info@michaelhoffer.de> OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Michael Hoffer
* <info@michaelhoffer.de>.
*/
package eu.mihosoft.vrl.v3d;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Holds a node in a BSP tree. A BSP tree is built from a collection of polygons
* by picking a polygon to split along. That polygon (and all other coplanar
* polygons) are added directly to that node and the other polygons are added to
* the front and/or back subtrees. This is not a leafy BSP tree since there is
* no distinction between internal and leaf nodes.
*/
final class Node {

    /**
     * Above this many polygons, stream operations switch to parallel
     * execution; below it the forking overhead outweighs the gain.
     */
    private static final int PARALLEL_THRESHOLD = 200;

    /**
     * Polygons stored directly in this node (coplanar with {@link #plane}).
     */
    private List<Polygon> polygons;
    /**
     * Plane used for BSP.
     */
    private Plane plane;
    /**
     * Polygons in front of the plane.
     */
    private Node front;
    /**
     * Polygons in back of the plane.
     */
    private Node back;

    /**
     * Constructor.
     *
     * Creates a BSP node consisting of the specified polygons.
     *
     * @param polygons polygons
     */
    public Node(List<Polygon> polygons) {
        this.polygons = new ArrayList<>();
        if (polygons != null) {
            this.build(polygons);
        }
    }

    /**
     * Constructor. Creates a node without polygons.
     */
    public Node() {
        this(null);
    }

    /**
     * Returns a stream over {@code list}, parallel when the list is large
     * enough ({@link #PARALLEL_THRESHOLD}) for parallelism to pay off.
     * Centralizes the threshold logic previously duplicated in
     * {@code clone()} and {@code invert()}.
     */
    private static Stream<Polygon> polygonStream(List<Polygon> list) {
        return list.size() > PARALLEL_THRESHOLD
                ? list.parallelStream()
                : list.stream();
    }

    @Override
    public Node clone() {
        Node node = new Node();
        node.plane = this.plane == null ? null : this.plane.clone();
        node.front = this.front == null ? null : this.front.clone();
        node.back = this.back == null ? null : this.back.clone();
        node.polygons = polygonStream(polygons).map(p -> p.clone()).collect(Collectors.toList());
        return node;
    }

    /**
     * Converts solid space to empty space and vice versa.
     */
    public void invert() {
        polygons = polygonStream(polygons)
                .map(Polygon::flip)
                .collect(Collectors.toList());
        if (this.plane == null && !polygons.isEmpty()) {
            // Lazily derive the split plane from the first polygon.
            this.plane = polygons.get(0).plane.clone();
        } else if (this.plane == null && polygons.isEmpty()) {
            throw new RuntimeException("Please fix me! I don't know what to do?");
        }
        this.plane = plane.flip();
        if (this.front != null) {
            this.front.invert();
        }
        if (this.back != null) {
            this.back.invert();
        }
        // Front and back swap roles once the plane is flipped.
        Node temp = this.front;
        this.front = this.back;
        this.back = temp;
    }

    /**
     * Recursively removes all polygons in the polygons list that are contained
     * within this BSP tree.
     *
     * <b>Note:</b> polygons are split if necessary.
     *
     * @param polygons the polygons to clip
     *
     * @return the clipped list of polygons
     */
    private List<Polygon> clipPolygons(List<Polygon> polygons) {
        if (this.plane == null) {
            // No split plane: nothing to clip against at this node.
            return new ArrayList<>(polygons);
        }
        List<Polygon> frontP = new ArrayList<>();
        List<Polygon> backP = new ArrayList<>();
        for (Polygon polygon : polygons) {
            // Coplanar polygons are routed into front/back here as well.
            this.plane.splitPolygon(polygon, frontP, backP, frontP, backP);
        }
        if (this.front != null) {
            frontP = this.front.clipPolygons(frontP);
        }
        if (this.back != null) {
            backP = this.back.clipPolygons(backP);
        } else {
            // No back subtree: back-side polygons are inside the solid and
            // are discarded.
            backP = new ArrayList<>(0);
        }
        frontP.addAll(backP);
        return frontP;
    }

    /**
     * Removes all polygons in this BSP tree that are inside the specified BSP
     * tree ({@code bsp}).
     *
     * <b>Note:</b> polygons are split if necessary.
     *
     * @param bsp bsp that shall be used for clipping
     */
    public void clipTo(Node bsp) {
        this.polygons = bsp.clipPolygons(this.polygons);
        if (this.front != null) {
            this.front.clipTo(bsp);
        }
        if (this.back != null) {
            this.back.clipTo(bsp);
        }
    }

    /**
     * Returns a list of all polygons in this BSP tree.
     *
     * @return a list of all polygons in this BSP tree
     */
    public List<Polygon> allPolygons() {
        List<Polygon> localPolygons = new ArrayList<>(this.polygons);
        if (this.front != null) {
            localPolygons.addAll(this.front.allPolygons());
        }
        if (this.back != null) {
            localPolygons.addAll(this.back.allPolygons());
        }
        return localPolygons;
    }

    /**
     * Build a BSP tree out of {@code polygons}. When called on an existing
     * tree, the new polygons are filtered down to the bottom of the tree and
     * become new nodes there. Each set of polygons is partitioned using the
     * first polygon (no heuristic is used to pick a good split).
     *
     * @param polygons polygons used to build the BSP
     */
    public final void build(List<Polygon> polygons) {
        if (polygons.isEmpty()) {
            return;
        }
        if (this.plane == null) {
            this.plane = polygons.get(0).plane.clone();
        }
        List<Polygon> frontP = new ArrayList<>();
        List<Polygon> backP = new ArrayList<>();
        // Parallel version does not work here: splitPolygon appends to the
        // shared this.polygons / frontP / backP lists.
        polygons.forEach((polygon) -> {
            this.plane.splitPolygon(
                    polygon, this.polygons, this.polygons, frontP, backP);
        });
        if (frontP.size() > 0) {
            if (this.front == null) {
                this.front = new Node();
            }
            this.front.build(frontP);
        }
        if (backP.size() > 0) {
            if (this.back == null) {
                this.back = new Node();
            }
            this.back.build(backP);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.empire.jsf2.utils;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.text.SimpleDateFormat;
import java.util.Hashtable;
import java.util.Locale;
import org.apache.empire.commons.DateUtils;
import org.apache.empire.commons.StringUtils;
import org.apache.empire.db.DBRowSet;
import org.apache.empire.exceptions.InternalException;
import org.apache.empire.exceptions.InvalidArgumentException;
import org.apache.empire.exceptions.UnexpectedReturnValueException;
import org.apache.empire.jsf2.pages.PageDefinition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class manages request parameters in a way that they cannot be analyzed and modified by the user
* @author doebele
*
*/
public class ParameterMap // *Deprecated* implements Serializable
{
    // *Deprecated* private static final long serialVersionUID = 1L;

    private static final Logger log = LoggerFactory.getLogger(ParameterMap.class);

    // NOTE(review): SimpleDateFormat is not thread-safe; this shared instance
    // is only used from the constructor, but concurrent construction could
    // still corrupt the formatted salt. Also "hh" is the 12-hour field --
    // "HH" was probably intended. Only the salt entropy is affected; confirm
    // before changing, since it would alter generated codes.
    private static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy.MM.dd hh:mm:ss", Locale.GERMAN);

    // Shared digest instance; all access must be guarded (see encodeString).
    private static MessageDigest md5 = null;
    // FIX: this was an *instance* initializer assigning a static field, so the
    // digest was null until the first ParameterMap was constructed and was
    // needlessly re-created for every further instance. A static initializer
    // runs exactly once at class-load time.
    static
    {
        try
        {
            md5 = MessageDigest.getInstance("MD5");
        }
        catch (NoSuchAlgorithmException e)
        {
            log.error("MessageDigest NoSuchAlgorithmException.", e);
            throw new InternalException(e);
        }
    }

    /** Per-instance salt mixed into every hash, derived from construction time. */
    private final byte[] salt;

    /** Cache of already-computed codes: plain value -&gt; hex hash. */
    protected Hashtable<String, String> codeMap = new Hashtable<String, String>();

    /** Maps a type name to the table of (encoded id -&gt; stored object) for that type. */
    protected final Hashtable<String, Hashtable<String, Object>> typeMap = new Hashtable<String, Hashtable<String, Object>>();

    public ParameterMap()
    {
        String dateTime = dateFormat.format(DateUtils.getTimeNow());
        salt = dateTime.getBytes();
    }

    /**
     * Computes the salted MD5 hash of the given string and returns it as a
     * 32-character lower-case hex string.
     *
     * @param valueAsString the value to encode; must not be null
     * @return the hex-encoded, salted MD5 digest
     * @throws InvalidArgumentException if valueAsString is null
     */
    public synchronized String encodeString(String valueAsString)
    {
        if (valueAsString==null)
            throw new InvalidArgumentException("valueAsString", valueAsString);
        // log
        if (log.isTraceEnabled())
            log.trace("Generating code for value {}.", valueAsString);
        // FIX: the digest is a *static* shared instance, but this method is
        // only synchronized on the ParameterMap instance -- two different
        // instances could interleave reset/update/digest and corrupt each
        // other's hash. Guard the shared digest explicitly.
        byte[] s;
        synchronized (ParameterMap.class)
        {
            md5.reset();
            if (salt!=null)
                md5.update(salt);
            md5.update(valueAsString.getBytes());
            s = md5.digest();
        }
        StringBuilder hash = new StringBuilder(32);
        for (int i = 0; i < s.length; i++)
        {   // add the hash part
            String part = Integer.toHexString(0x000000ff & s[i]);
            switch(part.length())
            {
                case 1: hash.append('0');
                        // fall through: pad a single hex digit with a leading zero
                case 2: hash.append(part);
                        break;
                default:
                        // Integer.toHexString of a masked byte is always 1-2 chars
                        throw new UnexpectedReturnValueException(part, "Integer.toHexString");
            }
        }
        return hash.toString();
    }

    /**
     * Like {@link #encodeString(String)}, but caches the result so repeated
     * encodings of the same value skip the digest computation.
     *
     * @param valueAsString the value to encode
     * @return the hex-encoded, salted MD5 digest
     */
    public String encodeStringWithCache(String valueAsString)
    {
        String code = codeMap.get(valueAsString);
        if (code==null)
        {   // generate code
            code = encodeString(valueAsString);
            codeMap.put(valueAsString, code);
        }
        return code;
    }

    /**
     * gets a unique name for a given rowset
     * @param rowset the rowset
     * @return a unique name for the given rowset
     */
    protected String getRowSetTypeName(DBRowSet rowset)
    {
        /*
         * alternatively use:
         * rowset.getName();
         * or
         * rowset.getFullName();
         */
        return rowset.getClass().getName();
    }

    /**
     * puts an object into the parameter map
     * @param typeName the type under which the item is grouped
     * @param encodedId the already-encoded id; must not be null
     * @param item the object to store
     * @throws InvalidArgumentException if encodedId is null
     */
    protected void putValue(String typeName, String encodedId, Object item)
    {   // put in Table
        if (encodedId==null)
        {
            throw new InvalidArgumentException("encodedId", encodedId);
        }
        Hashtable<String, Object> map = typeMap.get(typeName);
        if (map==null)
        {   map = new Hashtable<String, Object>(1);
            typeMap.put(typeName, map);
        }
        map.put(encodedId, item);
    }

    /**
     * encodes the objectKey and stores the item in the parameter map
     * @param typeName the type under which the item is grouped
     * @param objectKey the plain key to encode
     * @param item the object to store
     * @param useCache whether to use the code cache for encoding
     * @return the encoded id under which the item was stored
     */
    protected String encodeAndStore(String typeName, String objectKey, Object item, boolean useCache)
    {   // Generate the id
        String encodedId = (useCache ? encodeStringWithCache(objectKey) : encodeString(objectKey));
        // store
        putValue(typeName, encodedId, item);
        // return id
        return encodedId;
    }

    public String put(String type, String key, boolean useCache)
    {
        // Generate id and put in map
        return encodeAndStore(type, key, key, useCache);
    }

    /**
     * Puts an object into the parameter map that implements the ParameterObject interface
     * @param paramObject the object to store; must be non-null with a non-empty object key
     * @return the encoded id under which the object was stored
     */
    public String put(ParameterObject paramObject)
    {
        String objectKey;
        // check param
        if (paramObject==null || StringUtils.isEmpty((objectKey=paramObject.getObjectKey())))
            throw new InvalidArgumentException("paramObject", paramObject);
        // Generate id and put in map
        String type = paramObject.getClass().getName();
        return encodeAndStore(type, objectKey, paramObject, false);
    }

    public String put(DBRowSet rowset, Object[] key)
    {
        // Generate id and put in map
        String rowKey = StringUtils.valueOf(key);
        String type = getRowSetTypeName(rowset);
        return encodeAndStore(type, rowKey, key, false);
    }

    /**
     * Generates an idParam which is only valid for the given page.
     * @param targetPage the page the id is scoped to
     * @param rowset the rowset the key belongs to
     * @param key the record key
     * @return the encoded id
     */
    public String put(PageDefinition targetPage, DBRowSet rowset, Object[] key) {
        // Generate id and put in map
        String ref = StringUtils.valueOf(key);
        // Prefix with the page bean name so ids are page-scoped.
        String type = targetPage.getPageBeanName() + "$" + getRowSetTypeName(rowset);
        return encodeAndStore(type, ref, key, false);
    }

    /**
     * Gets an object from the parameter map for a given type and id
     * @param type the object type (typically the class name)
     * @param id the encoded idParam
     * @return the object, or null if not found
     */
    public Object get(String type, String id)
    {
        Hashtable<String, Object> map = typeMap.get(type);
        return (map!=null ? map.get(id) : null);
    }

    public void clear(String type)
    {
        Hashtable<String, Object> map = typeMap.get(type);
        if (map!=null)
            map.clear();
    }

    /**
     * Gets a ParameterObject from the parameter map for a given class and id
     * @param paramType the parameter object class
     * @param id the encoded idParam
     * @return the object, or null if not found
     */
    @SuppressWarnings("unchecked")
    public <T extends ParameterObject> T get(Class<T> paramType, String id)
    {
        String type = paramType.getName();
        Hashtable<String, Object> map = typeMap.get(type);
        return (T)(map!=null ? map.get(id) : null);
    }

    public void clear(Class<? extends ParameterObject> paramType)
    {
        String type = paramType.getName();
        clear(type);
    }

    public Object[] getKey(DBRowSet rowset, String id)
    {
        String type = getRowSetTypeName(rowset);
        Hashtable<String, Object> map = typeMap.get(type);
        return (map!=null ? ((Object[])map.get(id)) : null);
    }

    public void clear(DBRowSet rowset)
    {
        String type = getRowSetTypeName(rowset);
        clear(type);
    }

    /**
     * returns a record key for a given page
     * @param page the page the id is scoped to
     * @param rowset the rowset the key belongs to
     * @param id the encoded idParam
     * @return the record key, or null if not found
     */
    public Object[] getKey(PageDefinition page, DBRowSet rowset, String id)
    {
        String type = page.getPageBeanName() + "$" + getRowSetTypeName(rowset);
        Hashtable<String, Object> map = typeMap.get(type);
        return (map!=null ? ((Object[])map.get(id)) : null);
    }
}
| |
/*
* Copyright (c) 2000-2005 Regents of the University of California.
* All rights reserved.
*
* This software was developed at the University of California, Irvine.
*
* Redistribution and use in source and binary forms are permitted
* provided that the above copyright notice and this paragraph are
* duplicated in all such forms and that any documentation,
* advertising materials, and other materials related to such
* distribution and use acknowledge that the software was developed
* by the University of California, Irvine. The name of the
* University may not be used to endorse or promote products derived
* from this software without specific prior written permission.
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
* WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*/
package edu.uci.isr.xarch.pladiff;
import org.w3c.dom.*;
import edu.uci.isr.xarch.*;
import java.util.*;
/**
* DOM-Based implementation of the IInterfaceEndPoint interface.
*
* @author Automatically generated by xArch apigen.
*/
public class InterfaceEndPointImpl implements IInterfaceEndPoint, DOMBased{

    // XML Schema identity (namespace URI + type name) of the wrapped xArch type.
    public static final String XSD_TYPE_NSURI = PladiffConstants.NS_URI;
    public static final String XSD_TYPE_NAME = "InterfaceEndPoint";

    // Root xArch document; used for event dispatch and wrapper caching. May be null.
    protected IXArch xArch;

    /** Tag name for interfaceDescriptions in this object. */
    public static final String INTERFACE_DESCRIPTION_ELT_NAME = "interfaceDescription";

    /** Tag name for connectingElementDescriptions in this object. */
    public static final String CONNECTING_ELEMENT_DESCRIPTION_ELT_NAME = "connectingElementDescription";

    // Backing DOM element; all element state lives in the DOM, not in fields.
    protected Element elt;

    // Schema-mandated order of child elements, re-applied after every insert.
    private static SequenceOrder seqOrd = new SequenceOrder(
        new QName[]{
            new QName(PladiffConstants.NS_URI, INTERFACE_DESCRIPTION_ELT_NAME),
            new QName(PladiffConstants.NS_URI, CONNECTING_ELEMENT_DESCRIPTION_ELT_NAME)
        }
    );

    /**
     * Wraps the given DOM element as an InterfaceEndPoint.
     *
     * @param elt backing DOM element; must not be null
     */
    public InterfaceEndPointImpl(Element elt){
        if(elt == null){
            throw new IllegalArgumentException("Element cannot be null.");
        }
        this.elt = elt;
    }

    /** Returns the backing DOM element. */
    public Node getDOMNode(){
        return elt;
    }

    /**
     * Replaces the backing DOM node; the node must be an Element.
     */
    public void setDOMNode(Node node){
        if(node.getNodeType() != Node.ELEMENT_NODE){
            throw new IllegalArgumentException("Base DOM node of this type must be an Element.");
        }
        elt = (Element)node;
    }

    /** Returns the schema-mandated child ordering for this type. */
    protected static SequenceOrder getSequenceOrder(){
        return seqOrd;
    }

    /** Sets the root xArch document used for events and wrapper caching. */
    public void setXArch(IXArch xArch){
        this.xArch = xArch;
    }

    /** Returns the root xArch document, or null if not attached. */
    public IXArch getXArch(){
        return this.xArch;
    }

    /**
     * Clones this element at the given depth:
     * 0 = element only, 1 = element plus direct children,
     * any other value = deep clone of the whole subtree.
     * All DOM work is done under the document's DOM lock.
     */
    public IXArchElement cloneElement(int depth){
        synchronized(DOMUtils.getDOMLock(elt)){
            Document doc = elt.getOwnerDocument();
            if(depth == 0){
                // Shallow: clone the element node only, no children.
                Element cloneElt = (Element)elt.cloneNode(false);
                cloneElt = (Element)doc.importNode(cloneElt, true);
                InterfaceEndPointImpl cloneImpl = new InterfaceEndPointImpl(cloneElt);
                cloneImpl.setXArch(getXArch());
                return cloneImpl;
            }
            else if(depth == 1){
                // Clone the element, then shallow-copy each direct child.
                Element cloneElt = (Element)elt.cloneNode(false);
                cloneElt = (Element)doc.importNode(cloneElt, true);
                InterfaceEndPointImpl cloneImpl = new InterfaceEndPointImpl(cloneElt);
                cloneImpl.setXArch(getXArch());
                NodeList nl = elt.getChildNodes();
                int size = nl.getLength();
                for(int i = 0; i < size; i++){
                    Node n = nl.item(i);
                    Node cloneNode = (Node)n.cloneNode(false);
                    cloneNode = doc.importNode(cloneNode, true);
                    cloneElt.appendChild(cloneNode);
                }
                return cloneImpl;
            }
            else /* depth = infinity */{
                Element cloneElt = (Element)elt.cloneNode(true);
                cloneElt = (Element)doc.importNode(cloneElt, true);
                InterfaceEndPointImpl cloneImpl = new InterfaceEndPointImpl(cloneElt);
                cloneImpl.setXArch(getXArch());
                return cloneImpl;
            }
        }
    }

    //Override 'equals' to be DOM-based...
    // Two wrappers are equal iff they wrap the same DOM node.
    public boolean equals(Object o){
        if(o == null){
            return false;
        }
        if(!(o instanceof DOMBased)){
            return super.equals(o);
        }
        DOMBased db = (DOMBased)o;
        Node dbNode = db.getDOMNode();
        return dbNode.equals(getDOMNode());
    }

    //Override 'hashCode' to be based on the underlying node
    public int hashCode(){
        return getDOMNode().hashCode();
    }

    /**
     * For internal use only.
     * If the element carries an xsi:type other than the given base type,
     * reflectively instantiates the matching generated wrapper class for
     * that derived type. Returns null when the element has no xsi:type,
     * is exactly the base type, or the wrapper cannot be constructed
     * (best-effort: all reflection failures are deliberately swallowed).
     */
    private static Object makeDerivedWrapper(Element elt, String baseTypeName){
        synchronized(DOMUtils.getDOMLock(elt)){
            QName typeName = XArchUtils.getXSIType(elt);
            if(typeName == null){
                return null;
            }
            else{
                if(!DOMUtils.hasXSIType(elt, "http://www.ics.uci.edu/pub/arch/xArch/pladiff.xsd", baseTypeName)){
                    try{
                        // Map the xsi:type's namespace to the generated impl class name.
                        String packageTitle = XArchUtils.getPackageTitle(typeName.getNamespaceURI());
                        String packageName = XArchUtils.getPackageName(packageTitle);
                        String implName = XArchUtils.getImplName(packageName, typeName.getName());
                        Class c = Class.forName(implName);
                        java.lang.reflect.Constructor con = c.getConstructor(new Class[]{Element.class});
                        Object o = con.newInstance(new Object[]{elt});
                        return o;
                    }
                    catch(Exception e){
                        //Lots of bad things could happen, but this
                        //is OK, because this is best-effort anyway.
                    }
                }
                return null;
            }
        }
    }

    /** Returns static type metadata for the InterfaceEndPoint xArch type. */
    public XArchTypeMetadata getTypeMetadata(){
        return IInterfaceEndPoint.TYPE_METADATA;
    }

    /** Returns instance metadata derived from this element's namespace. */
    public XArchInstanceMetadata getInstanceMetadata(){
        return new XArchInstanceMetadata(XArchUtils.getPackageTitle(elt.getNamespaceURI()));
    }

    /**
     * Sets the single interfaceDescription child, replacing any existing one.
     * Fires a CLEAR_EVENT for the removed value and a SET_EVENT for the new
     * one. The value's DOM node is cloned/renamed and re-bound to the copy.
     */
    public void setInterfaceDescription(edu.uci.isr.xarch.instance.IDescription value){
        if(!(value instanceof DOMBased)){
            throw new IllegalArgumentException("Cannot handle non-DOM-based xArch entities.");
        }
        {
            // Remove (and announce) the previous value, if any.
            edu.uci.isr.xarch.instance.IDescription oldElt = getInterfaceDescription();
            DOMUtils.removeChildren(elt, PladiffConstants.NS_URI, INTERFACE_DESCRIPTION_ELT_NAME);
            IXArch context = getXArch();
            if(context != null){
                context.fireXArchEvent(
                    new XArchEvent(this,
                    XArchEvent.CLEAR_EVENT,
                    XArchEvent.ELEMENT_CHANGED,
                    "interfaceDescription", oldElt,
                    XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this), true)
                );
            }
        }
        // Clone the incoming node under the expected tag name and adopt it.
        Element newChildElt = (Element)(((DOMBased)value).getDOMNode());
        newChildElt = DOMUtils.cloneAndRename(newChildElt, PladiffConstants.NS_URI, INTERFACE_DESCRIPTION_ELT_NAME);
        ((DOMBased)value).setDOMNode(newChildElt);
        synchronized(DOMUtils.getDOMLock(elt)){
            elt.appendChild(newChildElt);
            // Restore schema-valid child ordering after the append.
            DOMUtils.order(elt, getSequenceOrder());
        }
        IXArch context = getXArch();
        if(context != null){
            context.fireXArchEvent(
                new XArchEvent(this,
                XArchEvent.SET_EVENT,
                XArchEvent.ELEMENT_CHANGED,
                "interfaceDescription", value,
                XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
            );
        }
    }

    /**
     * Removes the interfaceDescription child, firing a CLEAR_EVENT with the
     * removed value (which may be null if none was present).
     */
    public void clearInterfaceDescription(){
        edu.uci.isr.xarch.instance.IDescription oldElt = getInterfaceDescription();
        DOMUtils.removeChildren(elt, PladiffConstants.NS_URI, INTERFACE_DESCRIPTION_ELT_NAME);
        IXArch context = getXArch();
        if(context != null){
            context.fireXArchEvent(
                new XArchEvent(this,
                XArchEvent.CLEAR_EVENT,
                XArchEvent.ELEMENT_CHANGED,
                "interfaceDescription", oldElt,
                XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
            );
        }
    }

    /**
     * Returns the interfaceDescription child wrapped as an IDescription, or
     * null if absent. Prefers a cached wrapper from the document, then a
     * derived-type wrapper (xsi:type), and finally a plain DescriptionImpl.
     */
    public edu.uci.isr.xarch.instance.IDescription getInterfaceDescription(){
        NodeList nl = DOMUtils.getChildren(elt, PladiffConstants.NS_URI, INTERFACE_DESCRIPTION_ELT_NAME);
        if(nl.getLength() == 0){
            return null;
        }
        else{
            Element el = (Element)nl.item(0);
            IXArch de = getXArch();
            if(de != null){
                // Reuse a previously created wrapper for this DOM element.
                IXArchElement cachedXArchElt = de.getWrapper(el);
                if(cachedXArchElt != null){
                    return (edu.uci.isr.xarch.instance.IDescription)cachedXArchElt;
                }
            }
            // Best-effort: honor a derived xsi:type if one is declared.
            Object o = makeDerivedWrapper(el, "Description");
            if(o != null){
                try{
                    ((edu.uci.isr.xarch.IXArchElement)o).setXArch(getXArch());
                    if(de != null){
                        de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)o));
                    }
                    return (edu.uci.isr.xarch.instance.IDescription)o;
                }
                catch(Exception e){}
            }
            // Fall back to the base Description implementation.
            edu.uci.isr.xarch.instance.DescriptionImpl eltImpl = new edu.uci.isr.xarch.instance.DescriptionImpl(el);
            eltImpl.setXArch(getXArch());
            if(de != null){
                de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)eltImpl));
            }
            return eltImpl;
        }
    }

    /**
     * Returns true when this element's interfaceDescription is equivalent to
     * the given value (both null counts as equivalent).
     */
    public boolean hasInterfaceDescription(edu.uci.isr.xarch.instance.IDescription value){
        edu.uci.isr.xarch.instance.IDescription thisValue = getInterfaceDescription();
        edu.uci.isr.xarch.instance.IDescription thatValue = value;
        if((thisValue == null) && (thatValue == null)){
            return true;
        }
        else if((thisValue == null) && (thatValue != null)){
            return false;
        }
        else if((thisValue != null) && (thatValue == null)){
            return false;
        }
        return thisValue.isEquivalent(thatValue);
    }

    /**
     * Sets the single connectingElementDescription child, replacing any
     * existing one. Mirrors setInterfaceDescription: fires CLEAR_EVENT for
     * the old value and SET_EVENT for the new one.
     */
    public void setConnectingElementDescription(edu.uci.isr.xarch.instance.IDescription value){
        if(!(value instanceof DOMBased)){
            throw new IllegalArgumentException("Cannot handle non-DOM-based xArch entities.");
        }
        {
            // Remove (and announce) the previous value, if any.
            edu.uci.isr.xarch.instance.IDescription oldElt = getConnectingElementDescription();
            DOMUtils.removeChildren(elt, PladiffConstants.NS_URI, CONNECTING_ELEMENT_DESCRIPTION_ELT_NAME);
            IXArch context = getXArch();
            if(context != null){
                context.fireXArchEvent(
                    new XArchEvent(this,
                    XArchEvent.CLEAR_EVENT,
                    XArchEvent.ELEMENT_CHANGED,
                    "connectingElementDescription", oldElt,
                    XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this), true)
                );
            }
        }
        // Clone the incoming node under the expected tag name and adopt it.
        Element newChildElt = (Element)(((DOMBased)value).getDOMNode());
        newChildElt = DOMUtils.cloneAndRename(newChildElt, PladiffConstants.NS_URI, CONNECTING_ELEMENT_DESCRIPTION_ELT_NAME);
        ((DOMBased)value).setDOMNode(newChildElt);
        synchronized(DOMUtils.getDOMLock(elt)){
            elt.appendChild(newChildElt);
            // Restore schema-valid child ordering after the append.
            DOMUtils.order(elt, getSequenceOrder());
        }
        IXArch context = getXArch();
        if(context != null){
            context.fireXArchEvent(
                new XArchEvent(this,
                XArchEvent.SET_EVENT,
                XArchEvent.ELEMENT_CHANGED,
                "connectingElementDescription", value,
                XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
            );
        }
    }

    /**
     * Removes the connectingElementDescription child, firing a CLEAR_EVENT
     * with the removed value (which may be null if none was present).
     */
    public void clearConnectingElementDescription(){
        edu.uci.isr.xarch.instance.IDescription oldElt = getConnectingElementDescription();
        DOMUtils.removeChildren(elt, PladiffConstants.NS_URI, CONNECTING_ELEMENT_DESCRIPTION_ELT_NAME);
        IXArch context = getXArch();
        if(context != null){
            context.fireXArchEvent(
                new XArchEvent(this,
                XArchEvent.CLEAR_EVENT,
                XArchEvent.ELEMENT_CHANGED,
                "connectingElementDescription", oldElt,
                XArchUtils.getDefaultXArchImplementation().isContainedIn(xArch, this))
            );
        }
    }

    /**
     * Returns the connectingElementDescription child wrapped as an
     * IDescription, or null if absent. Same wrapper-resolution strategy as
     * getInterfaceDescription: cache, then derived xsi:type, then base impl.
     */
    public edu.uci.isr.xarch.instance.IDescription getConnectingElementDescription(){
        NodeList nl = DOMUtils.getChildren(elt, PladiffConstants.NS_URI, CONNECTING_ELEMENT_DESCRIPTION_ELT_NAME);
        if(nl.getLength() == 0){
            return null;
        }
        else{
            Element el = (Element)nl.item(0);
            IXArch de = getXArch();
            if(de != null){
                // Reuse a previously created wrapper for this DOM element.
                IXArchElement cachedXArchElt = de.getWrapper(el);
                if(cachedXArchElt != null){
                    return (edu.uci.isr.xarch.instance.IDescription)cachedXArchElt;
                }
            }
            // Best-effort: honor a derived xsi:type if one is declared.
            Object o = makeDerivedWrapper(el, "Description");
            if(o != null){
                try{
                    ((edu.uci.isr.xarch.IXArchElement)o).setXArch(getXArch());
                    if(de != null){
                        de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)o));
                    }
                    return (edu.uci.isr.xarch.instance.IDescription)o;
                }
                catch(Exception e){}
            }
            // Fall back to the base Description implementation.
            edu.uci.isr.xarch.instance.DescriptionImpl eltImpl = new edu.uci.isr.xarch.instance.DescriptionImpl(el);
            eltImpl.setXArch(getXArch());
            if(de != null){
                de.cacheWrapper(el, ((edu.uci.isr.xarch.IXArchElement)eltImpl));
            }
            return eltImpl;
        }
    }

    /**
     * Returns true when this element's connectingElementDescription is
     * equivalent to the given value (both null counts as equivalent).
     */
    public boolean hasConnectingElementDescription(edu.uci.isr.xarch.instance.IDescription value){
        edu.uci.isr.xarch.instance.IDescription thisValue = getConnectingElementDescription();
        edu.uci.isr.xarch.instance.IDescription thatValue = value;
        if((thisValue == null) && (thatValue == null)){
            return true;
        }
        else if((thisValue == null) && (thatValue != null)){
            return false;
        }
        else if((thisValue != null) && (thatValue == null)){
            return false;
        }
        return thisValue.isEquivalent(thatValue);
    }

    /**
     * Structural equivalence: same wrapper class and equivalent values for
     * both child properties.
     */
    public boolean isEquivalent(IInterfaceEndPoint c){
        return (getClass().equals(c.getClass())) &&
        hasInterfaceDescription(c.getInterfaceDescription()) &&
        hasConnectingElementDescription(c.getConnectingElementDescription()) ;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.rcfile;
import com.facebook.presto.spi.type.CharType;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.VarcharType;
import io.airlift.slice.Slice;
import io.airlift.slice.SliceInput;
import io.airlift.slice.Slices;
import java.io.IOException;
import static com.google.common.base.Preconditions.checkArgument;
import static io.airlift.slice.SizeOf.SIZE_OF_INT;
import static io.airlift.slice.SizeOf.SIZE_OF_LONG;
import static io.airlift.slice.SliceUtf8.offsetOfCodePoint;
import static java.lang.Math.min;
import static java.lang.Math.toIntExact;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
// faster versions of org.apache.hadoop.io.WritableUtils methods adapted for Slice
public final class RcFileDecoderUtils
{
// 0xFFFF_FFFF + syncFirst(long) + syncSecond(long)
private static final int SYNC_SEQUENCE_LENGTH = SIZE_OF_INT + SIZE_OF_LONG + SIZE_OF_LONG;
/**
 * Private constructor: static utility class, never instantiated.
 */
private RcFileDecoderUtils()
{
}
/**
 * Returns the total encoded length in bytes of the variable-length integer
 * that starts at {@code offset} in {@code slice}, by inspecting its first byte.
 */
public static int decodeVIntSize(Slice slice, int offset)
{
    return decodeVIntSize(slice.getByte(offset));
}
/**
 * Returns the total encoded length in bytes (1-9) of a Hadoop-style
 * variable-length integer, given its first byte. Values in [-112, 127] are
 * stored directly in a single byte; otherwise the first byte encodes the
 * sign and the number of payload bytes that follow.
 */
public static int decodeVIntSize(byte value)
{
    if (value >= -112) {
        // Single-byte encoding: the byte is the value itself.
        return 1;
    }
    // First byte in [-128, -121] marks a negative multi-byte value,
    // [-120, -113] a positive one; both encode the total length.
    return (value < -120) ? (-119 - value) : (-111 - value);
}
/**
 * Returns true if the variable-length integer starting at {@code offset}
 * in {@code slice} encodes a negative value.
 */
public static boolean isNegativeVInt(Slice slice, int offset)
{
    return isNegativeVInt(slice.getByte(offset));
}
/**
 * Returns true if a variable-length integer whose first byte is
 * {@code value} encodes a negative number.
 */
public static boolean isNegativeVInt(byte value)
{
    if (value >= 0) {
        // Non-negative first byte: single-byte encoding of a positive value.
        return false;
    }
    // Negative markers: length byte below -120, or a directly-encoded
    // negative single-byte value in [-112, -1].
    return value < -120 || value >= -112;
}
/**
 * Reads a Hadoop-style variable-length integer from the given input.
 * The first byte either holds the value directly (single-byte form) or
 * encodes the sign and payload length; payload bytes are big-endian.
 */
public static long readVInt(SliceInput in)
{
    byte firstByte = in.readByte();
    int length = decodeVIntSize(firstByte);
    if (length == 1) {
        // Single-byte form: the first byte is the value.
        return firstByte;
    }
    // Accumulate the remaining payload bytes, most significant first.
    long accumulated = 0;
    for (int remaining = length - 1; remaining > 0; remaining--) {
        accumulated = (accumulated << 8) | (in.readByte() & 0xFF);
    }
    // Negative values are stored as the one's complement of the magnitude.
    return isNegativeVInt(firstByte) ? ~accumulated : accumulated;
}
public static long readVInt(Slice slice, int start)
{
byte firstByte = slice.getByte(start);
int length = decodeVIntSize(firstByte);
if (length == 1) {
return firstByte;
}
return readVIntInternal(slice, start, length);
}
public static long readVInt(Slice slice, int start, int length)
{
if (length == 1) {
return slice.getByte(start);
}
return readVIntInternal(slice, start, length);
}
private static long readVIntInternal(Slice slice, int start, int length)
{
long value = 0;
for (int i = 1; i < length; i++) {
value <<= 8;
value |= (slice.getByte(start + i) & 0xFF);
}
return isNegativeVInt(slice.getByte(start)) ? ~value : value;
}
/**
* Find the beginning of the first full sync sequence that starts within the specified range.
*/
public static long findFirstSyncPosition(RcFileDataSource dataSource, long offset, long length, long syncFirst, long syncSecond)
throws IOException
{
requireNonNull(dataSource, "dataSource is null");
checkArgument(offset >= 0, "offset is negative");
checkArgument(length >= 1, "length must be at least 1");
checkArgument(offset + length <= dataSource.getSize(), "offset plus length is greater than data size");
// The full sync sequence is "0xFFFFFFFF syncFirst syncSecond". If
// this sequence begins the file range, the start position is returned
// even if the sequence finishes after length.
// NOTE: this decision must agree with RcFileReader.nextBlock
Slice sync = Slices.allocate(SIZE_OF_INT + SIZE_OF_LONG + SIZE_OF_LONG);
sync.setInt(0, 0xFFFF_FFFF);
sync.setLong(SIZE_OF_INT, syncFirst);
sync.setLong(SIZE_OF_INT + SIZE_OF_LONG, syncSecond);
// read 1 MB chunks at a time, but only skip ahead 1 MB - SYNC_SEQUENCE_LENGTH bytes
// this causes a re-read of SYNC_SEQUENCE_LENGTH bytes each time, but is much simpler code
byte[] buffer = new byte[toIntExact(min(1 << 20, length + (SYNC_SEQUENCE_LENGTH - 1)))];
Slice bufferSlice = Slices.wrappedBuffer(buffer);
for (long position = 0; position < length; position += bufferSlice.length() - (SYNC_SEQUENCE_LENGTH - 1)) {
// either fill the buffer entirely, or read enough to allow all bytes in offset + length to be a start sequence
int bufferSize = toIntExact(min(buffer.length, length + (SYNC_SEQUENCE_LENGTH - 1) - position));
// don't read off the end of the file
bufferSize = toIntExact(min(bufferSize, dataSource.getSize() - offset - position));
dataSource.readFully(offset + position, buffer, 0, bufferSize);
// find the starting index position of the sync sequence
int index = bufferSlice.indexOf(sync);
if (index >= 0) {
// If the starting position is before the end of the search region, return the
// absolute start position of the sequence.
if (position + index < length) {
long startOfSyncSequence = offset + position + index;
return startOfSyncSequence;
}
else {
// Otherwise, this is not a match for this region
// Note: this case isn't strictly needed as the loop will exit, but it is
// simpler to explicitly call it out.
return -1;
}
}
}
return -1;
}
public static <A, B extends A> B checkType(A value, Class<B> target, String name)
{
if (value == null) {
throw new NullPointerException(format("%s is null", name));
}
checkArgument(
target.isInstance(value),
"%s must be of type %s, not %s",
name,
target.getName(),
value.getClass().getName());
return target.cast(value);
}
public static int calculateTruncationLength(Type type, Slice slice, int offset, int length)
{
requireNonNull(type, "type is null");
if (type instanceof VarcharType) {
return calculateTruncationLength(((VarcharType) type).getLength(), slice, offset, length);
}
if (type instanceof CharType) {
return calculateTruncationLength(((CharType) type).getLength(), slice, offset, length);
}
return length;
}
private static int calculateTruncationLength(int maxCharacterCount, Slice slice, int offset, int length)
{
requireNonNull(slice, "slice is null");
if (maxCharacterCount < 0) {
throw new IllegalArgumentException("Max length must be greater or equal than zero");
}
if (length <= maxCharacterCount) {
return length;
}
int indexEnd = offsetOfCodePoint(slice, offset, maxCharacterCount);
if (indexEnd < 0) {
return length;
}
return indexEnd - offset;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import javax.management.StandardMBean;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.StorageType;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
import org.apache.hadoop.hdfs.protocol.BlockLocalPathInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsBlocksMetadata;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.LengthInputStream;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.ReplicaInputStreams;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.ReplicaOutputStreams;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.RollingLogs;
import org.apache.hadoop.hdfs.server.datanode.metrics.FSDatasetMBean;
import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock;
import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
import org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo;
import org.apache.hadoop.hdfs.server.protocol.StorageReport;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.metrics2.util.MBeans;
import org.apache.hadoop.util.DataChecksum;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;
import edu.cornell.cs.blog.JNIBlog;
import edu.cornell.cs.sa.HybridLogicalClock;
/**
* This class implements a simulated FSDataset.
*
* Blocks that are created are recorded but their data (plus their CRCs) are
* discarded.
* Fixed data is returned when blocks are read; a null CRC meta file is
* created for such data.
*
* This FSDataset does not remember any block information across its
* restarts; it does however offer an operation to inject blocks
* (See the TestInectionForSImulatedStorage()
* for a usage example of injection.
*
* Note the synchronization is coarse grained - it is at each method.
*/
public class SimulatedFSDataset implements FsDatasetSpi<FsVolumeSpi> {
  /**
   * Factory used (via {@link #setFactory}) to plug the simulated dataset
   * into a datanode in place of a disk-backed one.
   */
  static class Factory extends FsDatasetSpi.Factory<SimulatedFSDataset> {
    @Override
    public SimulatedFSDataset newInstance(DataNode datanode,
        DataStorage storage, Configuration conf) throws IOException {
      // the datanode argument is not needed by the simulated implementation
      return new SimulatedFSDataset(storage, conf);
    }
    @Override
    public boolean isSimulated() {
      return true;
    }
  }
  /** Point {@code conf} at {@link Factory} so datanodes use simulated storage. */
  public static void setFactory(Configuration conf) {
    conf.set(DFSConfigKeys.DFS_DATANODE_FSDATASET_FACTORY_KEY,
        Factory.class.getName());
  }
  // configuration key for the simulated capacity of a datanode
  public static final String CONFIG_PROPERTY_CAPACITY =
      "dfs.datanode.simulateddatastorage.capacity";
  public static final long DEFAULT_CAPACITY = 2L<<40; // 2 terabytes (2L << 40 == 2^41 bytes)
  // every simulated data byte reads back as this value
  public static final byte DEFAULT_DATABYTE = 9;
  // configuration key for the DatanodeStorage.State reported to the namenode
  public static final String CONFIG_PROPERTY_STATE =
      "dfs.datanode.simulateddatastorage.state";
  private static final DatanodeStorage.State DEFAULT_STATE =
      DatanodeStorage.State.NORMAL;
  // canned contents of the meta file served for every block: the 2-byte
  // (big-endian) BlockMetadataHeader version followed by a NULL-checksum header
  static final byte[] nullCrcFileData;
  static {
    DataChecksum checksum = DataChecksum.newDataChecksum(
        DataChecksum.Type.NULL, 16*1024 );
    byte[] nullCrcHeader = checksum.getHeader();
    nullCrcFileData = new byte[2 + nullCrcHeader.length];
    nullCrcFileData[0] = (byte) ((BlockMetadataHeader.VERSION >>> 8) & 0xff);
    nullCrcFileData[1] = (byte) (BlockMetadataHeader.VERSION & 0xff);
    for (int i = 0; i < nullCrcHeader.length; i++) {
      nullCrcFileData[i+2] = nullCrcHeader[i];
    }
  }
  // information about a single block: tracks its length/generation stamp and
  // whether it is finalized or still being written (RBW); actual data and CRCs
  // are discarded, so reads are served from SimulatedInputStream
  private class BInfo implements ReplicaInPipelineInterface {
    final Block theBlock;
    private boolean finalized = false; // if not finalized => ongoing creation
    SimulatedOutputStream oStream = null; // byte-counting sink while being written
    private long bytesAcked; // bytes acknowledged by downstream (RBW only)
    private long bytesRcvd;  // bytes received so far (RBW only)
    BInfo(String bpid, Block b, boolean forWriting) throws IOException {
      theBlock = new Block(b);
      if (theBlock.getNumBytes() < 0) {
        theBlock.setNumBytes(0);
      }
      // charge the expected length against the pool's capacity up front
      if (!storage.alloc(bpid, theBlock.getNumBytes())) {
        // expected length - actual length may
        // be more - we find out at finalize
        DataNode.LOG.warn("Lack of free storage on a block alloc");
        throw new IOException("Creating block, no free space available");
      }
      if (forWriting) {
        finalized = false;
        oStream = new SimulatedOutputStream();
      } else {
        finalized = true;
        oStream = null;
      }
    }
    @Override
    public String getStorageUuid() {
      return storage.getStorageUuid();
    }
    @Override
    synchronized public long getGenerationStamp() {
      return theBlock.getGenerationStamp();
    }
    @Override
    synchronized public long getNumBytes() {
      // while being written, the authoritative length is bytesRcvd
      if (!finalized) {
        return bytesRcvd;
      } else {
        return theBlock.getNumBytes();
      }
    }
    @Override
    synchronized public void setNumBytes(long length) {
      if (!finalized) {
        bytesRcvd = length;
      } else {
        theBlock.setNumBytes(length);
      }
    }
    // Serve a stream of repeated DEFAULT_DATABYTE bytes of the current length.
    synchronized SimulatedInputStream getIStream() {
      if (!finalized) {
        // throw new IOException("Trying to read an unfinalized block");
        return new SimulatedInputStream(oStream.getLength(), DEFAULT_DATABYTE);
      } else {
        return new SimulatedInputStream(theBlock.getNumBytes(), DEFAULT_DATABYTE);
      }
    }
    // Transition to the finalized state, fixing the block's length and
    // reconciling the capacity charged at construction time.
    synchronized void finalizeBlock(String bpid, long finalSize)
        throws IOException {
      if (finalized) {
        throw new IOException(
            "Finalizing a block that has already been finalized" +
            theBlock.getBlockId());
      }
      if (oStream == null) {
        DataNode.LOG.error("Null oStream on unfinalized block - bug");
        throw new IOException("Unexpected error on finalize");
      }
      if (oStream.getLength() != finalSize) {
        DataNode.LOG.warn("Size passed to finalize (" + finalSize +
            ")does not match what was written:" + oStream.getLength());
        throw new IOException(
          "Size passed to finalize does not match the amount of data written");
      }
      // We had allocated the expected length when block was created;
      // adjust if necessary
      long extraLen = finalSize - theBlock.getNumBytes();
      if (extraLen > 0) {
        if (!storage.alloc(bpid,extraLen)) {
          DataNode.LOG.warn("Lack of free storage on a block alloc");
          throw new IOException("Creating block, no free space available");
        }
      } else {
        storage.free(bpid, -extraLen);
      }
      theBlock.setNumBytes(finalSize);
      finalized = true;
      oStream = null;
      return;
    }
    // Reopen a finalized replica for append: back to the RBW state with all
    // counters primed to the current block length.
    synchronized void unfinalizeBlock() throws IOException {
      if (!finalized) {
        throw new IOException("Unfinalized a block that's not finalized "
            + theBlock);
      }
      finalized = false;
      oStream = new SimulatedOutputStream();
      long blockLen = theBlock.getNumBytes();
      oStream.setLength(blockLen);
      bytesRcvd = blockLen;
      bytesAcked = blockLen;
    }
    // Meta file contents are the same canned null-CRC header for every block.
    SimulatedInputStream getMetaIStream() {
      return new SimulatedInputStream(nullCrcFileData);
    }
    synchronized boolean isFinalized() {
      return finalized;
    }
    @Override
    synchronized public ReplicaOutputStreams createStreams(boolean isCreate,
        DataChecksum requestedChecksum, long offset/*HDFSRS_RWAPI: not used here*/) throws IOException {
      if (finalized) {
        throw new IOException("Trying to write to a finalized replica "
            + theBlock);
      } else {
        // the CRC stream also just counts bytes and discards them
        SimulatedOutputStream crcStream = new SimulatedOutputStream();
        return new ReplicaOutputStreams(oStream, crcStream, requestedChecksum);
      }
    }
    @Override
    synchronized public long getBlockId() {
      return theBlock.getBlockId();
    }
    @Override
    synchronized public long getVisibleLength() {
      return getBytesAcked();
    }
    @Override
    public ReplicaState getState() {
      // NOTE(review): always returns null; nothing in this file consults the
      // state — confirm external callers tolerate a null ReplicaState
      return null;
    }
    @Override
    synchronized public long getBytesAcked() {
      if (finalized) {
        return theBlock.getNumBytes();
      } else {
        return bytesAcked;
      }
    }
    @Override
    synchronized public void setBytesAcked(long bytesAcked) {
      // acked bytes are only meaningful while the replica is being written
      if (!finalized) {
        this.bytesAcked = bytesAcked;
      }
    }
    @Override
    synchronized public long getBytesOnDisk() {
      if (finalized) {
        return theBlock.getNumBytes();
      } else {
        return oStream.getLength();
      }
    }
    @Override
    public void setLastChecksumAndDataLen(long dataLength, byte[] lastChecksum) {
      // checksum bytes are discarded; only the data length is tracked
      oStream.setLength(dataLength);
    }
    @Override
    public ChunkChecksum getLastChecksumAndDataLen() {
      return new ChunkChecksum(oStream.getLength(), null);
    }
  }
/**
* Class is used for tracking block pool storage utilization similar
* to {@link BlockPoolSlice}
*/
private static class SimulatedBPStorage {
private long used; // in bytes
long getUsed() {
return used;
}
void alloc(long amount) {
used += amount;
}
void free(long amount) {
used -= amount;
}
SimulatedBPStorage() {
used = 0;
}
}
  /**
   * Class used for tracking datanode level storage utilization similar
   * to {@link FSVolumeSet}
   */
  private static class SimulatedStorage {
    // per-block-pool usage accounting, keyed by block pool id
    private final Map<String, SimulatedBPStorage> map =
      new HashMap<String, SimulatedBPStorage>();
    private final long capacity;  // in bytes
    private final DatanodeStorage dnStorage; // identity reported to the namenode
    synchronized long getFree() {
      return capacity - getUsed();
    }
    long getCapacity() {
      return capacity;
    }
    // total bytes charged across all block pools
    synchronized long getUsed() {
      long used = 0;
      for (SimulatedBPStorage bpStorage : map.values()) {
        used += bpStorage.getUsed();
      }
      return used;
    }
    synchronized long getBlockPoolUsed(String bpid) throws IOException {
      return getBPStorage(bpid).getUsed();
    }
    int getNumFailedVolumes() {
      return 0; // simulated volumes never fail
    }
    // Charge amount bytes to bpid if there is room; returns false when full.
    synchronized boolean alloc(String bpid, long amount) throws IOException {
      if (getFree() >= amount) {
        getBPStorage(bpid).alloc(amount);
        return true;
      }
      return false;
    }
    synchronized void free(String bpid, long amount) throws IOException {
      getBPStorage(bpid).free(amount);
    }
    SimulatedStorage(long cap, DatanodeStorage.State state) {
      capacity = cap;
      dnStorage = new DatanodeStorage(
          "SimulatedStorage-" + DatanodeStorage.generateUuid(),
          state, StorageType.DEFAULT);
    }
    synchronized void addBlockPool(String bpid) {
      SimulatedBPStorage bpStorage = map.get(bpid);
      if (bpStorage != null) {
        return; // already registered: keep the existing usage numbers
      }
      map.put(bpid, new SimulatedBPStorage());
    }
    synchronized void removeBlockPool(String bpid) {
      map.remove(bpid);
    }
    private SimulatedBPStorage getBPStorage(String bpid) throws IOException {
      SimulatedBPStorage bpStorage = map.get(bpid);
      if (bpStorage == null) {
        throw new IOException("block pool " + bpid + " not found");
      }
      return bpStorage;
    }
    String getStorageUuid() {
      return dnStorage.getStorageID();
    }
    DatanodeStorage getDnStorage() {
      return dnStorage;
    }
    synchronized StorageReport getStorageReport(String bpid) {
      // NOTE(review): map.get(bpid) NPEs for an unknown pool, unlike
      // getBPStorage which throws IOException — confirm callers only pass
      // registered block pool ids
      return new StorageReport(dnStorage,
          false, getCapacity(), getUsed(), getFree(),
          map.get(bpid).getUsed());
    }
  }
  // block pool id -> (block -> replica info); all dataset state lives here
  private final Map<String, Map<Block, BInfo>> blockMap
      = new HashMap<String, Map<Block,BInfo>>();
  private final SimulatedStorage storage;
  private final String datanodeUuid;
  /**
   * Creates a simulated dataset. When a real {@link DataStorage} is supplied,
   * its datanode UUID is reused (after making sure each of its directories has
   * a storage ID); otherwise a fresh simulated UUID is generated. Capacity and
   * reported state come from the configuration.
   */
  public SimulatedFSDataset(DataStorage storage, Configuration conf) {
    if (storage != null) {
      for (int i = 0; i < storage.getNumStorageDirs(); ++i) {
        storage.createStorageID(storage.getStorageDir(i));
      }
      this.datanodeUuid = storage.getDatanodeUuid();
    } else {
      this.datanodeUuid = "SimulatedDatanode-" + DataNode.generateUuid();
    }
    registerMBean(datanodeUuid);
    this.storage = new SimulatedStorage(
        conf.getLong(CONFIG_PROPERTY_CAPACITY, DEFAULT_CAPACITY),
        conf.getEnum(CONFIG_PROPERTY_STATE, DEFAULT_STATE));
  }
  /**
   * Inject finalized replicas into block pool {@code bpid}. The whole list is
   * validated first so that one bad entry rejects the entire batch before any
   * state is modified.
   */
  public synchronized void injectBlocks(String bpid,
      Iterable<Block> injectBlocks) throws IOException {
    ExtendedBlock blk = new ExtendedBlock();
    if (injectBlocks != null) {
      for (Block b: injectBlocks) { // if any blocks in list is bad, reject list
        if (b == null) {
          throw new NullPointerException("Null blocks in block list");
        }
        blk.set(bpid, b);
        if (isValidBlock(blk)) {
          throw new IOException("Block already exists in block list");
        }
      }
      Map<Block, BInfo> map = blockMap.get(bpid);
      if (map == null) {
        // first injection for this pool: create its replica map
        map = new HashMap<Block, BInfo>();
        blockMap.put(bpid, map);
      }
      for (Block b: injectBlocks) {
        // forWriting == false: injected replicas are immediately finalized
        BInfo binfo = new BInfo(bpid, b, false);
        map.put(binfo.theBlock, binfo);
      }
    }
  }
  /**
   * Get a map for a given block pool Id.
   * @throws IOException if the block pool has not been added
   */
  private Map<Block, BInfo> getMap(String bpid) throws IOException {
    final Map<Block, BInfo> map = blockMap.get(bpid);
    if (map == null) {
      throw new IOException("Non existent blockpool " + bpid);
    }
    return map;
  }
  @Override // FsDatasetSpi
  public synchronized void finalizeBlock(ExtendedBlock b) throws IOException {
    final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
    BInfo binfo = map.get(b.getLocalBlock());
    if (binfo == null) {
      throw new IOException("Finalizing a non existing block " + b);
    }
    // delegate to the replica; b.getNumBytes() is the expected final size
    binfo.finalizeBlock(b.getBlockPoolId(), b.getNumBytes());
  }
@Override // FsDatasetSpi
public synchronized void unfinalizeBlock(ExtendedBlock b) {
if (isValidRbw(b)) {
blockMap.remove(b.getLocalBlock());
}
}
  /** Build the block report for one pool; only finalized replicas are listed. */
  synchronized BlockListAsLongs getBlockReport(String bpid) {
    final List<Block> blocks = new ArrayList<Block>();
    final Map<Block, BInfo> map = blockMap.get(bpid);
    if (map != null) {
      for (BInfo b : map.values()) {
        // replicas still being written are excluded from block reports
        if (b.isFinalized()) {
          blocks.add(b.theBlock);
        }
      }
    }
    return new BlockListAsLongs(blocks, null);
  }
  @Override
  public synchronized Map<DatanodeStorage, BlockListAsLongs> getBlockReports(
      String bpid) {
    // the simulated dataset has exactly one storage
    return Collections.singletonMap(storage.getDnStorage(), getBlockReport(bpid));
  }
@Override // FsDatasetSpi
public List<Long> getCacheReport(String bpid) {
return new LinkedList<Long>();
}
@Override // FSDatasetMBean
public long getCapacity() {
return storage.getCapacity();
}
@Override // FSDatasetMBean
public long getDfsUsed() {
return storage.getUsed();
}
@Override // FSDatasetMBean
public long getBlockPoolUsed(String bpid) throws IOException {
return storage.getBlockPoolUsed(bpid);
}
@Override // FSDatasetMBean
public long getRemaining() {
return storage.getFree();
}
@Override // FSDatasetMBean
public int getNumFailedVolumes() {
return storage.getNumFailedVolumes();
}
@Override // FSDatasetMBean
public long getCacheUsed() {
return 0l;
}
@Override // FSDatasetMBean
public long getCacheCapacity() {
return 0l;
}
@Override // FSDatasetMBean
public long getNumBlocksCached() {
return 0l;
}
@Override
public long getNumBlocksFailedToCache() {
return 0l;
}
@Override
public long getNumBlocksFailedToUncache() {
return 0l;
}
@Override // FsDatasetSpi
public synchronized long getLength(ExtendedBlock b) throws IOException {
final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
BInfo binfo = map.get(b.getLocalBlock());
if (binfo == null) {
throw new IOException("Finalizing a non existing block " + b);
}
return binfo.getNumBytes();
}
  @Override
  @Deprecated
  public Replica getReplica(ExtendedBlock b) {
    final Map<Block, BInfo> map = blockMap.get(b.getBlockPoolId());
    if (map != null) {
      // lookup by block id only; the generation stamp is ignored
      return map.get(new Block(b.getBlockId()));
    }
    return null;
  }
  @Override
  public synchronized String getReplicaString(ExtendedBlock b) {
    Replica r = null;
    final Map<Block, BInfo> map = blockMap.get(b.getBlockPoolId());
    if (map != null) {
      r = map.get(new Block(b.getBlockId()));
    }
    return r == null? "null": r.toString();
  }
  @Override // FsDatasetSpi
  public Block getStoredBlock(ExtendedBlock b) throws IOException {
    final Map<Block, BInfo> map = blockMap.get(b.getBlockPoolId());
    if (map != null) {
      BInfo binfo = map.get(new Block(b.getBlockId()));
      if (binfo == null) {
        return null;
      }
      // return a copy carrying the stored generation stamp and length
      return new Block(b.getBlockId(), binfo.getGenerationStamp(), binfo.getNumBytes());
    }
    return null;
  }
@Override // FsDatasetSpi
public synchronized void invalidate(String bpid, Block[] invalidBlks, HybridLogicalClock mhlc/*HDFSRS_VC*/)
throws IOException {
boolean error = false;
if (invalidBlks == null) {
return;
}
final Map<Block, BInfo> map = getMap(bpid);
for (Block b: invalidBlks) {
if (b == null) {
continue;
}
BInfo binfo = map.get(b);
if (binfo == null) {
error = true;
DataNode.LOG.warn("Invalidate: Missing block");
continue;
}
storage.free(bpid, binfo.getNumBytes());
blockMap.remove(b);
}
if (error) {
throw new IOException("Invalidate: Missing blocks.");
}
}
  @Override // FSDatasetSpi
  public void cache(String bpid, long[] cacheBlks) {
    throw new UnsupportedOperationException(
        "SimulatedFSDataset does not support cache operation!");
  }
  @Override // FSDatasetSpi
  public void uncache(String bpid, long[] uncacheBlks) {
    throw new UnsupportedOperationException(
        "SimulatedFSDataset does not support uncache operation!");
  }
  @Override // FSDatasetSpi
  public boolean isCached(String bpid, long blockId) {
    return false; // nothing is ever cached
  }
  // Replica lookup that tolerates unknown block pools (returns null).
  private BInfo getBInfo(final ExtendedBlock b) {
    final Map<Block, BInfo> map = blockMap.get(b.getBlockPoolId());
    return map == null? null: map.get(b.getLocalBlock());
  }
  @Override // {@link FsDatasetSpi}
  public boolean contains(ExtendedBlock block) {
    return getBInfo(block) != null;
  }
  // A block is "valid" only once it has been finalized.
  @Override // FsDatasetSpi
  public synchronized boolean isValidBlock(ExtendedBlock b) {
    final BInfo binfo = getBInfo(b);
    return binfo != null && binfo.isFinalized();
  }
  /* check if a block is created but not finalized */
  @Override
  public synchronized boolean isValidRbw(ExtendedBlock b) {
    final BInfo binfo = getBInfo(b);
    return binfo != null && !binfo.isFinalized();
  }
  @Override
  public String toString() {
    return getStorageInfo();
  }
  @Override // FsDatasetSpi
  public synchronized ReplicaInPipelineInterface append(ExtendedBlock b, HybridLogicalClock mhlc,
      long newGS, long expectedBlockLen) throws IOException {
    // NOTE(review): newGS, expectedBlockLen and mhlc are not validated or
    // applied here — confirm callers do not depend on them in simulation
    final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
    BInfo binfo = map.get(b.getLocalBlock());
    if (binfo == null || !binfo.isFinalized()) {
      throw new ReplicaNotFoundException("Block " + b
          + " is not valid, and cannot be appended to.");
    }
    // reopen the finalized replica for writing
    binfo.unfinalizeBlock();
    return binfo;
  }
@Override // FsDatasetSpi
public synchronized ReplicaInPipelineInterface recoverAppend(ExtendedBlock b, HybridLogicalClock mhlc,
long newGS, long expectedBlockLen) throws IOException {
final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
BInfo binfo = map.get(b.getLocalBlock());
if (binfo == null) {
throw new ReplicaNotFoundException("Block " + b
+ " is not valid, and cannot be appended to.");
}
if (binfo.isFinalized()) {
binfo.unfinalizeBlock();
}
map.remove(b);
binfo.theBlock.setGenerationStamp(newGS);
map.put(binfo.theBlock, binfo);
return binfo;
}
  /**
   * Recover a close: finalize the replica at its current length if needed,
   * bump the generation stamp, and re-key it in the replica map.
   */
  @Override // FsDatasetSpi
  public String recoverClose(ExtendedBlock b, long newGS, long expectedBlockLen)
      throws IOException {
    final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
    BInfo binfo = map.get(b.getLocalBlock());
    if (binfo == null) {
      throw new ReplicaNotFoundException("Block " + b
          + " is not valid, and cannot be appended to.");
    }
    if (!binfo.isFinalized()) {
      binfo.finalizeBlock(b.getBlockPoolId(), binfo.getNumBytes());
    }
    map.remove(b.getLocalBlock());
    binfo.theBlock.setGenerationStamp(newGS);
    map.put(binfo.theBlock, binfo);
    return binfo.getStorageUuid();
  }
@Override // FsDatasetSpi
public synchronized ReplicaInPipelineInterface recoverRbw(ExtendedBlock b,
long newGS, long minBytesRcvd, long maxBytesRcvd) throws IOException {
final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
BInfo binfo = map.get(b.getLocalBlock());
if ( binfo == null) {
throw new ReplicaNotFoundException("Block " + b
+ " does not exist, and cannot be appended to.");
}
if (binfo.isFinalized()) {
throw new ReplicaAlreadyExistsException("Block " + b
+ " is valid, and cannot be written to.");
}
map.remove(b);
binfo.theBlock.setGenerationStamp(newGS);
map.put(binfo.theBlock, binfo);
return binfo;
}
  @Override // FsDatasetSpi
  public synchronized Replica createRbw(ExtendedBlock b, HybridLogicalClock mhlc/*HDFRS_VC*/)
      throws IOException {
    // in the simulated dataset an RBW replica is the same as a temporary one
    return createTemporary(b,mhlc);
  }
  @Override // FsDatasetSpi
  public synchronized Replica createTemporary(ExtendedBlock b, HybridLogicalClock mhlc/*HDFSRS_VC*/)
      throws IOException {
    if (isValidBlock(b)) {
      throw new ReplicaAlreadyExistsException("Block " + b +
          " is valid, and cannot be written to.");
    }
    if (isValidRbw(b)) {
      throw new ReplicaAlreadyExistsException("Block " + b +
          " is being written, and cannot be written to.");
    }
    final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
    // forWriting == true: the replica starts in the RBW state
    BInfo binfo = new BInfo(b.getBlockPoolId(), b.getLocalBlock(), true);
    map.put(binfo.theBlock, binfo);
    return binfo;
  }
  // Package-private variant without a seek offset; serves the simulated data.
  synchronized InputStream getBlockInputStream(ExtendedBlock b
      ) throws IOException {
    final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
    BInfo binfo = map.get(b.getLocalBlock());
    if (binfo == null) {
      throw new IOException("No such Block " + b );
    }
    return binfo.getIStream();
  }
  @Override // FsDatasetSpi
  public synchronized InputStream getBlockInputStream(ExtendedBlock b,
      long seekOffset) throws IOException {
    // skipping is equivalent to seeking since the data is a repeated byte
    InputStream result = getBlockInputStream(b);
    IOUtils.skipFully(result, seekOffset);
    return result;
  }
  /** Not supported */
  @Override // FsDatasetSpi
  public ReplicaInputStreams getTmpInputStreams(ExtendedBlock b, long blkoff,
      long ckoff) throws IOException {
    throw new IOException("Not supported");
  }
  // Serves the canned null-CRC meta header for any finalized block.
  @Override // FsDatasetSpi
  public synchronized LengthInputStream getMetaDataInputStream(ExtendedBlock b
      ) throws IOException {
    final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
    BInfo binfo = map.get(b.getLocalBlock());
    if (binfo == null) {
      throw new IOException("No such Block " + b );
    }
    if (!binfo.finalized) {
      throw new IOException("Block " + b +
          " is being written, its meta cannot be read");
    }
    final SimulatedInputStream sin = binfo.getMetaIStream();
    return new LengthInputStream(sin, sin.getLength());
  }
  @Override
  public void checkDataDir() throws DiskErrorException {
    // nothing to check for simulated data set
  }
  @Override // FsDatasetSpi
  public synchronized void adjustCrcChannelPosition(ExtendedBlock b,
                                              ReplicaOutputStreams stream,
                                              int checksumSize)
                                              throws IOException {
    // no CRC file exists, so there is nothing to adjust
  }
/**
* Simulated input and output streams
*
*/
static private class SimulatedInputStream extends java.io.InputStream {
byte theRepeatedData = 7;
final long length; // bytes
int currentPos = 0;
byte[] data = null;
/**
* An input stream of size l with repeated bytes
* @param l
* @param iRepeatedData
*/
SimulatedInputStream(long l, byte iRepeatedData) {
length = l;
theRepeatedData = iRepeatedData;
}
/**
* An input stream of of the supplied data
*
* @param iData
*/
SimulatedInputStream(byte[] iData) {
data = iData;
length = data.length;
}
/**
*
* @return the lenght of the input stream
*/
long getLength() {
return length;
}
@Override
public int read() throws IOException {
if (currentPos >= length)
return -1;
if (data !=null) {
return data[currentPos++];
} else {
currentPos++;
return theRepeatedData;
}
}
@Override
public int read(byte[] b) throws IOException {
if (b == null) {
throw new NullPointerException();
}
if (b.length == 0) {
return 0;
}
if (currentPos >= length) { // EOF
return -1;
}
int bytesRead = (int) Math.min(b.length, length-currentPos);
if (data != null) {
System.arraycopy(data, currentPos, b, 0, bytesRead);
} else { // all data is zero
for (int i : b) {
b[i] = theRepeatedData;
}
}
currentPos += bytesRead;
return bytesRead;
}
}
/**
* This class implements an output stream that merely throws its data away, but records its
* length.
*
*/
static private class SimulatedOutputStream extends OutputStream {
long length = 0;
/**
* constructor for Simulated Output Steram
*/
SimulatedOutputStream() {
}
/**
*
* @return the length of the data created so far.
*/
long getLength() {
return length;
}
/**
*/
void setLength(long length) {
this.length = length;
}
@Override
public void write(int arg0) throws IOException {
length++;
}
@Override
public void write(byte[] b) throws IOException {
length += b.length;
}
@Override
public void write(byte[] b,
int off,
int len) throws IOException {
length += len;
}
}
  private ObjectName mbeanName; // handle used to unregister on shutdown
  /**
   * Register the FSDataset MBean using the name
   * "hadoop:service=DataNode,name=FSDatasetState-<storageid>"
   * We use storage id for MBean name since a minicluster within a single
   * Java VM may have multiple Simulated Datanodes.
   */
  void registerMBean(final String storageId) {
    // We wrap to bypass standard mbean naming convention.
    // This wrapping can be removed in java 6 as it is more flexible in
    // package naming for mbeans and their impl.
    StandardMBean bean;
    try {
      bean = new StandardMBean(this,FSDatasetMBean.class);
      mbeanName = MBeans.register("DataNode", "FSDatasetState-"+
                                  storageId, bean);
    } catch (NotCompliantMBeanException e) {
      DataNode.LOG.warn("Error registering FSDatasetState MBean", e);
    }
    DataNode.LOG.info("Registered FSDatasetState MBean");
  }
  @Override
  public void shutdown() {
    if (mbeanName != null) MBeans.unregister(mbeanName);
  }
  @Override
  public String getStorageInfo() {
    return "Simulated FSDataset-" + datanodeUuid;
  }
  @Override
  public boolean hasEnoughResource() {
    return true; // simulated storage never reports resource exhaustion
  }
  // Report the replica's current state so block recovery can proceed.
  @Override
  public ReplicaRecoveryInfo initReplicaRecovery(RecoveringBlock rBlock)
      throws IOException {
    ExtendedBlock b = rBlock.getBlock();
    final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
    BInfo binfo = map.get(b.getLocalBlock());
    if (binfo == null) {
      throw new IOException("No such Block " + b );
    }
    return new ReplicaRecoveryInfo(binfo.getBlockId(), binfo.getBytesOnDisk(),
        binfo.getGenerationStamp(),
        binfo.isFinalized()?ReplicaState.FINALIZED : ReplicaState.RBW);
  }
  @Override // FsDatasetSpi
  public String updateReplicaUnderRecovery(ExtendedBlock oldBlock,
                                          long recoveryId,
                                          long newlength) {
    // Caller does not care about the exact Storage UUID returned.
    return datanodeUuid;
  }
  @Override // FsDatasetSpi
  public long getReplicaVisibleLength(ExtendedBlock block) {
    return block.getNumBytes();
  }
  @Override // FsDatasetSpi
  public long getReplicaVisibleLength(ExtendedBlock block,
      long timestamp, boolean bUserTimestamp) throws IOException {
    // timestamp/snapshot reads are not simulated
    throw new IOException("not implemented.");
  }
  @Override // FsDatasetSpi
  public void addBlockPool(String bpid, Configuration conf) {
    Map<Block, BInfo> map = new HashMap<Block, BInfo>();
    // NOTE(review): unconditionally replaces any existing map for bpid,
    // discarding its replicas — confirm each pool is only added once
    blockMap.put(bpid, map);
    storage.addBlockPool(bpid);
  }
  @Override // FsDatasetSpi
  public void shutdownBlockPool(String bpid) {
    blockMap.remove(bpid);
    storage.removeBlockPool(bpid);
  }
  @Override // FsDatasetSpi
  public void deleteBlockPool(String bpid, boolean force) {
    // nothing exists on disk for a simulated pool, so nothing to delete
    return;
  }
  @Override
  public ReplicaInPipelineInterface convertTemporaryToRbw(ExtendedBlock temporary)
      throws IOException {
    final Map<Block, BInfo> map = blockMap.get(temporary.getBlockPoolId());
    if (map == null) {
      throw new IOException("Block pool not found, temporary=" + temporary);
    }
    final BInfo r = map.get(temporary.getLocalBlock());
    if (r == null) {
      throw new IOException("Block not found, temporary=" + temporary);
    } else if (r.isFinalized()) {
      throw new IOException("Replica already finalized, temporary="
          + temporary + ", r=" + r);
    }
    // temporary and RBW replicas are represented identically in simulation
    return r;
  }
@Override
public BlockLocalPathInfo getBlockLocalPathInfo(ExtendedBlock b) {
throw new UnsupportedOperationException();
}
@Override
public HdfsBlocksMetadata getHdfsBlocksMetadata(String bpid, long[] blockIds)
throws IOException {
throw new UnsupportedOperationException();
}
@Override
public void enableTrash(String bpid) {
throw new UnsupportedOperationException();
}
@Override
public void restoreTrash(String bpid) {
}
@Override
public boolean trashEnabled(String bpid) {
return false;
}
@Override
// Disk/memory reconciliation is meaningless without real files.
public void checkAndUpdate(String bpid, long blockId, File diskFile,
File diskMetaFile, FsVolumeSpi vol) {
throw new UnsupportedOperationException();
}
@Override
// The simulator does not expose volume objects.
public List<FsVolumeSpi> getVolumes() {
throw new UnsupportedOperationException();
}
@Override
public DatanodeStorage getStorage(final String storageUuid) {
    // The simulated datanode has exactly one storage; answer with it only
    // when the requested UUID matches, otherwise report "not found".
    if (storageUuid.equals(storage.getStorageUuid())) {
        return storage.dnStorage;
    }
    return null;
}
@Override
// Single simulated storage => a one-element report array.
public StorageReport[] getStorageReports(String bpid) {
return new StorageReport[] {storage.getStorageReport(bpid)};
}
@Override
// Finalized-block enumeration is not needed by the tests using this stub.
public List<Block> getFinalizedBlocks(String bpid) {
throw new UnsupportedOperationException();
}
@Override
// JMX-style volume info is not provided by the simulator.
public Map<String, Object> getVolumeInfoMap() {
throw new UnsupportedOperationException();
}
@Override
// Rolling scan logs require real storage directories; unsupported here.
public RollingLogs createRollingLogs(String bpid, String prefix) {
throw new UnsupportedOperationException();
}
@Override
// Block-to-volume mapping does not exist in the simulated dataset.
public FsVolumeSpi getVolume(ExtendedBlock b) {
throw new UnsupportedOperationException();
}
// All simulated storage is reported as the default storage type.
public StorageType getStorageType() {
return StorageType.DEFAULT;
}
// Direct seekable write access to a block is not supported.
public OutputStream getBlockOutputStream(ExtendedBlock b, long seekOffset)
throws IOException {
throw new UnsupportedOperationException();
}
// Snapshot creation is not modeled by this simulated dataset.
public void snapshot(long rtc, String bpid)
throws IOException {
throw new UnsupportedOperationException();
}
@Override
public InputStream getBlockInputStream(ExtendedBlock b, long seekOffset, long timestamp, boolean bUserTimestamp)
throws IOException {
    // Only reads of the current snapshot are supported; requests for any
    // historical (timestamped) state are rejected outright.
    if (timestamp == JNIBlog.CURRENT_SNAPSHOT_ID) {
        return getBlockInputStream(b, seekOffset);
    }
    throw new UnsupportedOperationException();
}
}
| |
package com.WazaBe.HoloEverywhere.widget;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.view.KeyEvent;
import android.view.accessibility.AccessibilityEventSource;
import com.WazaBe.HoloEverywhere.sherlock.SBase;
import com.actionbarsherlock.internal.nineoldandroids.view.animation.AnimatorProxy;
import com.actionbarsherlock.view.ActionMode;
/**
 * Drop-in replacement for {@link android.view.View} that backports alpha and
 * translation support to pre-Honeycomb devices via NineOldAndroids'
 * {@link AnimatorProxy}, and re-creates the framework's package-private
 * drawable state-set tables as public {@code SUPPORT_*} constants.
 */
public class View extends android.view.View implements Drawable.Callback,
KeyEvent.Callback, AccessibilityEventSource {
// Backported copies of the framework's drawable state-set arrays, filled in
// by the static initializer below.
public static final int[] PRESSED_STATE_SET, SUPPORT_EMPTY_STATE_SET,
SUPPORT_WINDOW_FOCUSED_STATE_SET, SUPPORT_SELECTED_STATE_SET,
SUPPORT_SELECTED_WINDOW_FOCUSED_STATE_SET,
SUPPORT_FOCUSED_STATE_SET,
SUPPORT_FOCUSED_WINDOW_FOCUSED_STATE_SET,
SUPPORT_FOCUSED_SELECTED_STATE_SET,
SUPPORT_FOCUSED_SELECTED_WINDOW_FOCUSED_STATE_SET,
SUPPORT_ENABLED_STATE_SET,
SUPPORT_ENABLED_WINDOW_FOCUSED_STATE_SET,
SUPPORT_ENABLED_SELECTED_STATE_SET,
SUPPORT_ENABLED_SELECTED_WINDOW_FOCUSED_STATE_SET,
SUPPORT_ENABLED_FOCUSED_STATE_SET,
SUPPORT_ENABLED_FOCUSED_WINDOW_FOCUSED_STATE_SET,
SUPPORT_ENABLED_FOCUSED_SELECTED_STATE_SET,
SUPPORT_ENABLED_FOCUSED_SELECTED_WINDOW_FOCUSED_STATE_SET,
SUPPORT_PRESSED_WINDOW_FOCUSED_STATE_SET,
SUPPORT_PRESSED_STATE_SET, SUPPORT_PRESSED_SELECTED_STATE_SET,
SUPPORT_PRESSED_SELECTED_WINDOW_FOCUSED_STATE_SET,
SUPPORT_PRESSED_FOCUSED_STATE_SET,
SUPPORT_PRESSED_FOCUSED_WINDOW_FOCUSED_STATE_SET,
SUPPORT_PRESSED_FOCUSED_SELECTED_STATE_SET,
SUPPORT_PRESSED_FOCUSED_SELECTED_WINDOW_FOCUSED_STATE_SET,
SUPPORT_PRESSED_ENABLED_STATE_SET,
SUPPORT_PRESSED_ENABLED_WINDOW_FOCUSED_STATE_SET,
SUPPORT_PRESSED_ENABLED_SELECTED_STATE_SET,
SUPPORT_PRESSED_ENABLED_SELECTED_WINDOW_FOCUSED_STATE_SET,
SUPPORT_PRESSED_ENABLED_FOCUSED_STATE_SET,
SUPPORT_PRESSED_ENABLED_FOCUSED_WINDOW_FOCUSED_STATE_SET,
SUPPORT_PRESSED_ENABLED_FOCUSED_SELECTED_STATE_SET,
SUPPORT_PRESSED_ENABLED_FOCUSED_SELECTED_WINDOW_FOCUSED_STATE_SET;
// One bit per view state; used to index into VIEW_STATE_SETS, so a combined
// bitmask selects the state-set array containing exactly those states.
static final int VIEW_STATE_ACCELERATED = 1 << 6;
static final int VIEW_STATE_ACTIVATED = 1 << 5;
static final int VIEW_STATE_DRAG_CAN_ACCEPT = 1 << 8;
static final int VIEW_STATE_DRAG_HOVERED = 1 << 9;
static final int VIEW_STATE_ENABLED = 1 << 3;
static final int VIEW_STATE_FOCUSED = 1 << 2;
static final int VIEW_STATE_HOVERED = 1 << 7;
static final int VIEW_STATE_PRESSED = 1 << 4;
static final int VIEW_STATE_SELECTED = 1 << 1;
private static final int[][] VIEW_STATE_SETS;
static final int VIEW_STATE_WINDOW_FOCUSED = 1;
// Attribute IDs in the order the framework's styleable declares them; the
// static block relies on this ordering when pairing IDs with state bits.
private static final int[] ViewDrawableStates = {
android.R.attr.state_pressed, android.R.attr.state_focused,
android.R.attr.state_selected, android.R.attr.state_window_focused,
android.R.attr.state_enabled, android.R.attr.state_activated,
android.R.attr.state_accelerated, android.R.attr.state_hovered,
android.R.attr.state_drag_can_accept,
android.R.attr.state_drag_hovered };
// Flat (attrId, stateBit) pairs mapping each state attribute to its bit.
static final int[] Z_VIEW_STATE_IDS = new int[] {
android.R.attr.state_window_focused, VIEW_STATE_WINDOW_FOCUSED,
android.R.attr.state_selected, VIEW_STATE_SELECTED,
android.R.attr.state_focused, VIEW_STATE_FOCUSED,
android.R.attr.state_enabled, VIEW_STATE_ENABLED,
android.R.attr.state_pressed, VIEW_STATE_PRESSED,
android.R.attr.state_activated, VIEW_STATE_ACTIVATED,
android.R.attr.state_accelerated, VIEW_STATE_ACCELERATED,
android.R.attr.state_hovered, VIEW_STATE_HOVERED,
android.R.attr.state_drag_can_accept, VIEW_STATE_DRAG_CAN_ACCEPT,
android.R.attr.state_drag_hovered, VIEW_STATE_DRAG_HOVERED };
// Mirrors android.view.View's own static initializer: precompute, for every
// possible combination of state bits, the matching array of attribute IDs.
static {
if (Z_VIEW_STATE_IDS.length / 2 != ViewDrawableStates.length) {
throw new IllegalStateException(
"VIEW_STATE_IDs array length does not match ViewDrawableStates style array");
}
// Reorder the (attrId, bit) pairs to follow ViewDrawableStates order.
int[] orderedIds = new int[Z_VIEW_STATE_IDS.length];
for (int i = 0; i < ViewDrawableStates.length; i++) {
int viewState = ViewDrawableStates[i];
for (int j = 0; j < Z_VIEW_STATE_IDS.length; j += 2) {
if (Z_VIEW_STATE_IDS[j] == viewState) {
orderedIds[i * 2] = viewState;
orderedIds[i * 2 + 1] = Z_VIEW_STATE_IDS[j + 1];
}
}
}
final int NUM_BITS = Z_VIEW_STATE_IDS.length / 2;
VIEW_STATE_SETS = new int[1 << NUM_BITS][];
// For each bitmask i, collect the attribute IDs of the set bits.
for (int i = 0; i < VIEW_STATE_SETS.length; i++) {
int numBits = Integer.bitCount(i);
int[] set = new int[numBits];
int pos = 0;
for (int j = 0; j < orderedIds.length; j += 2) {
if ((i & orderedIds[j + 1]) != 0) {
set[pos++] = orderedIds[j];
}
}
VIEW_STATE_SETS[i] = set;
}
// Publish the commonly used combinations under descriptive names.
SUPPORT_EMPTY_STATE_SET = VIEW_STATE_SETS[0];
SUPPORT_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED];
SUPPORT_SELECTED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_SELECTED];
SUPPORT_SELECTED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_SELECTED];
SUPPORT_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_FOCUSED];
SUPPORT_FOCUSED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_FOCUSED];
SUPPORT_FOCUSED_SELECTED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_SELECTED
| VIEW_STATE_FOCUSED];
SUPPORT_FOCUSED_SELECTED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_SELECTED | VIEW_STATE_FOCUSED];
SUPPORT_ENABLED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_ENABLED];
SUPPORT_ENABLED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_ENABLED];
SUPPORT_ENABLED_SELECTED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_SELECTED
| VIEW_STATE_ENABLED];
SUPPORT_ENABLED_SELECTED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_SELECTED | VIEW_STATE_ENABLED];
SUPPORT_ENABLED_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_FOCUSED
| VIEW_STATE_ENABLED];
SUPPORT_ENABLED_FOCUSED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_FOCUSED | VIEW_STATE_ENABLED];
SUPPORT_ENABLED_FOCUSED_SELECTED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_SELECTED
| VIEW_STATE_FOCUSED | VIEW_STATE_ENABLED];
SUPPORT_ENABLED_FOCUSED_SELECTED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_SELECTED | VIEW_STATE_FOCUSED | VIEW_STATE_ENABLED];
SUPPORT_PRESSED_STATE_SET = PRESSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_PRESSED];
SUPPORT_PRESSED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_PRESSED];
SUPPORT_PRESSED_SELECTED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_SELECTED
| VIEW_STATE_PRESSED];
SUPPORT_PRESSED_SELECTED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_SELECTED | VIEW_STATE_PRESSED];
SUPPORT_PRESSED_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_FOCUSED
| VIEW_STATE_PRESSED];
SUPPORT_PRESSED_FOCUSED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_FOCUSED | VIEW_STATE_PRESSED];
SUPPORT_PRESSED_FOCUSED_SELECTED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_SELECTED
| VIEW_STATE_FOCUSED | VIEW_STATE_PRESSED];
SUPPORT_PRESSED_FOCUSED_SELECTED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_SELECTED | VIEW_STATE_FOCUSED | VIEW_STATE_PRESSED];
SUPPORT_PRESSED_ENABLED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_ENABLED
| VIEW_STATE_PRESSED];
SUPPORT_PRESSED_ENABLED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_ENABLED | VIEW_STATE_PRESSED];
SUPPORT_PRESSED_ENABLED_SELECTED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_SELECTED
| VIEW_STATE_ENABLED | VIEW_STATE_PRESSED];
SUPPORT_PRESSED_ENABLED_SELECTED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_SELECTED | VIEW_STATE_ENABLED | VIEW_STATE_PRESSED];
SUPPORT_PRESSED_ENABLED_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_FOCUSED
| VIEW_STATE_ENABLED | VIEW_STATE_PRESSED];
SUPPORT_PRESSED_ENABLED_FOCUSED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_FOCUSED | VIEW_STATE_ENABLED | VIEW_STATE_PRESSED];
SUPPORT_PRESSED_ENABLED_FOCUSED_SELECTED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_SELECTED
| VIEW_STATE_FOCUSED | VIEW_STATE_ENABLED | VIEW_STATE_PRESSED];
SUPPORT_PRESSED_ENABLED_FOCUSED_SELECTED_WINDOW_FOCUSED_STATE_SET = VIEW_STATE_SETS[VIEW_STATE_WINDOW_FOCUSED
| VIEW_STATE_SELECTED
| VIEW_STATE_FOCUSED
| VIEW_STATE_ENABLED
| VIEW_STATE_PRESSED];
}
// Backport of View.resolveSize: strips the measured-state bits.
public static int supportResolveSize(int size, int measureSpec) {
return View.supportResolveSizeAndState(size, measureSpec, 0)
& MEASURED_SIZE_MASK;
}
// Backport of View.resolveSizeAndState: reconciles a desired size with the
// parent's MeasureSpec, flagging MEASURED_STATE_TOO_SMALL when constrained.
public static int supportResolveSizeAndState(int size, int measureSpec,
int childMeasuredState) {
int result = size;
int specMode = MeasureSpec.getMode(measureSpec);
int specSize = MeasureSpec.getSize(measureSpec);
switch (specMode) {
case MeasureSpec.UNSPECIFIED:
result = size;
break;
case MeasureSpec.AT_MOST:
if (specSize < size) {
// Desired size does not fit; record that in the state bits.
result = specSize | MEASURED_STATE_TOO_SMALL;
} else {
result = size;
}
break;
case MeasureSpec.EXACTLY:
result = specSize;
break;
}
return result | childMeasuredState & MEASURED_STATE_MASK;
}
// Non-null only on platforms that need NineOldAndroids animation proxying.
private final AnimatorProxy proxy;
public View(Context context) {
super(context);
proxy = AnimatorProxy.NEEDS_PROXY ? AnimatorProxy.wrap(this) : null;
}
public View(Context context, AttributeSet attrs) {
super(context, attrs);
proxy = AnimatorProxy.NEEDS_PROXY ? AnimatorProxy.wrap(this) : null;
}
public View(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
proxy = AnimatorProxy.NEEDS_PROXY ? AnimatorProxy.wrap(this) : null;
}
@SuppressLint("NewApi")
@Override
public float getAlpha() {
// Delegate to the proxy when present so pre-Honeycomb reads see the
// value set through the proxy rather than the (absent) native field.
if (proxy != null) {
return proxy.getAlpha();
}
return super.getAlpha();
}
// Backport of getMeasuredState(): combines width/height state bits.
public int getMeasuredStateInt() {
return getMeasuredWidth() & MEASURED_STATE_MASK
| getMeasuredHeight() >> MEASURED_HEIGHT_STATE_SHIFT
& MEASURED_STATE_MASK >> MEASURED_HEIGHT_STATE_SHIFT;
}
@SuppressLint("NewApi")
@Override
public float getTranslationX() {
if (proxy != null) {
return proxy.getTranslationX();
}
return super.getTranslationX();
}
@SuppressLint("NewApi")
@Override
public float getTranslationY() {
if (proxy != null) {
return proxy.getTranslationY();
}
return super.getTranslationY();
}
@SuppressLint("NewApi")
// NOTE(review): parameter type is this class, not android.view.View, so this
// does NOT override the framework callback (and has no @Override) — callers
// must invoke it explicitly. Confirm this widening is intentional.
public void onVisibilityChanged(View changedView, int visibility) {
super.onVisibilityChanged(changedView, visibility);
}
@SuppressLint("NewApi")
@Override
public void setAlpha(float alpha) {
// Write through the proxy first so animated alpha works pre-Honeycomb;
// the super call keeps the native value in sync on newer platforms.
if (proxy != null) {
proxy.setAlpha(alpha);
}
super.setAlpha(alpha);
}
@SuppressLint("NewApi")
@Override
public void setTranslationX(float translationX) {
if (proxy != null) {
proxy.setTranslationX(translationX);
}
super.setTranslationX(translationX);
}
@SuppressLint("NewApi")
@Override
public void setTranslationY(float translationY) {
if (proxy != null) {
proxy.setTranslationY(translationY);
}
super.setTranslationY(translationY);
}
@Override
public void setVisibility(int visibility) {
// NOTE(review): setAnimation(proxy) assumes AnimatorProxy is an
// Animation subclass (NineOldAndroids detail) — verify against the
// bundled library version.
if (proxy != null) {
if (visibility == GONE) {
clearAnimation();
} else if (visibility == VISIBLE) {
setAnimation(proxy);
}
}
super.setVisibility(visibility);
}
// Starts an ActionBarSherlock action mode; requires the hosting Context to
// implement SBase (a ClassCastException here means it does not).
public ActionMode startActionMode(ActionMode.Callback actionModeCallback) {
return ((SBase) getContext()).startActionMode(actionModeCallback);
}
}
| |
/*
* This file is part of SpongeAPI, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.api.util.command.args;
import static org.spongepowered.api.util.SpongeApiTranslationHelper.t;
import com.flowpowered.math.vector.Vector3d;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import org.spongepowered.api.CatalogType;
import org.spongepowered.api.Game;
import org.spongepowered.api.entity.player.Player;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.TextBuilder;
import org.spongepowered.api.text.Texts;
import org.spongepowered.api.util.StartsWithPredicate;
import org.spongepowered.api.util.command.CommandMessageFormatting;
import org.spongepowered.api.util.command.CommandSource;
import org.spongepowered.api.util.command.source.LocatedSource;
import org.spongepowered.api.world.DimensionType;
import org.spongepowered.api.world.Location;
import org.spongepowered.api.world.World;
import org.spongepowered.api.world.storage.WorldProperties;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
/**
* Class containing factory methods for common command elements.
*/
public final class GenericArguments {
private GenericArguments() {} // static factory holder; never instantiated
/**
 * Expects no arguments.
 *
 * @return An expectation of no arguments
 */
public static CommandElement none() {
// An empty sequence trivially succeeds without consuming any input.
return new SequenceCommandElement(ImmutableList.<CommandElement>of());
}
/**
 * Returns an element that consumes no input and stores {@code true} under
 * the given flag name. Used internally by the flag parser.
 *
 * @param flag The flag key to mark
 * @return the argument
 */
static CommandElement markTrue(String flag) {
return new MarkTrueCommandElement(flag);
}
/**
 * Expect an argument to represent an online player,
 * or if nothing matches and the source is a {@link Player}, give the player. If nothing matches and the source is not a player, throw an
 * exception.
 * Gives value of type {@link Player}.
 *
 * @param key The key to store under
 * @param game The game to find players in
 * @return the argument
 */
public static CommandElement playerOrSource(Text key, Game game) {
// The trailing "true" enables the fall-back-to-source behaviour.
return new PlayerCommandElement(key, game, true);
}
/**
 * Expect an argument to represent an online player.
 * Gives value of type {@link Player}.
 *
 * @param key The key to store under
 * @param game The game to find players in
 * @return the argument
 */
public static CommandElement player(Text key, Game game) {
// "false": never substitute the command source for a missing argument.
return new PlayerCommandElement(key, game, false);
}
/**
 * Expect an argument to represent a world. This gives a WorldProperties object rather than an actual world in order to include unloaded worlds
 * as well.
 * Gives values of type {@link WorldProperties}.
 *
 * @param key The key to store under
 * @param game The game to find worlds from
 * @return the argument
 */
public static CommandElement world(Text key, Game game) {
return new WorldPropertiesCommandElement(key, game);
}
/**
 * Expect an argument to represent a dimension.
 * Gives values of type {@link DimensionType}.
 *
 * @param key The key to store under
 * @param game The game to find dimensions from
 * @return the argument
 */
public static CommandElement dimension(Text key, Game game) {
// Dimensions are catalog types, so the generic cataloged lookup applies.
return catalogedElement(key, game, DimensionType.class);
}
/**
 * Expect an argument to represent a {@link Vector3d}.
 *
 * @param key The key to store under
 * @return the argument
 */
public static CommandElement vector3d(Text key) {
return new Vector3dCommandElement(key);
}
/**
 * Expect an argument to represent a {@link Location}.
 *
 * @param key The key to store under
 * @param game The game to find worlds from
 * @return the argument
 */
public static CommandElement location(Text key, Game game) {
return new LocationCommandElement(key, game);
}
/**
 * Expect an argument that is a member of the specified catalog type T.
 *
 * @param key The key to store the resolved value under
 * @param game The game to get the registry from
 * @param catalogType The type expected
 * @param <T> The type to return
 * @return the argument
 */
public static <T extends CatalogType> CommandElement catalogedElement(Text key, Game game, Class<T> catalogType) {
return new CatalogedTypeCommandElement<T>(key, game, catalogType);
}
/**
 * Element that consumes no input and always produces {@code true}; used to
 * record the presence of a bare flag.
 */
static class MarkTrueCommandElement extends CommandElement {
    public MarkTrueCommandElement(String flag) {
        super(Texts.of(flag));
    }

    /** Consumes nothing; the flag's mere presence yields {@code true}. */
    @Override
    protected Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
        return Boolean.TRUE;
    }

    /** A bare flag has nothing to tab-complete. */
    @Override
    public List<String> complete(CommandSource src, CommandArgs args, CommandContext context) {
        return Collections.emptyList();
    }
}
/**
 * Get a builder to create a command element that parses flags.
 *
 * @return the newly created builder
 */
public static CommandFlags.Builder flags() {
return new CommandFlags.Builder();
}
/**
 * Consumes a series of arguments. Usage is the elements concatenated.
 *
 * @param elements The series of arguments to expect
 * @return the element to match the input
 */
public static CommandElement seq(CommandElement... elements) {
return new SequenceCommandElement(ImmutableList.copyOf(elements));
}
/**
 * Element that parses each wrapped element in order against the same
 * argument stream. Has no key of its own; children store their own values.
 */
private static class SequenceCommandElement extends CommandElement {
private final List<CommandElement> elements;
private SequenceCommandElement(List<CommandElement> elements) {
super(null);
this.elements = elements;
}
@Override
public void parse(CommandSource source, CommandArgs args, CommandContext context) throws ArgumentParseException {
// Each child consumes its share of the args; the first failure aborts.
for (CommandElement element : this.elements) {
element.parse(source, args, context);
}
}
@Override
protected Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
// Never called: parse() is overridden and does not delegate here.
return null;
}
@Override
public List<String> complete(CommandSource src, CommandArgs args, CommandContext context) {
// Walk the children, replaying the args; the child that either fails to
// parse or owns the final (incomplete) word supplies the completions.
for (Iterator<CommandElement> it = this.elements.iterator(); it.hasNext(); ) {
CommandElement element = it.next();
Object startState = args.getState();
try {
element.parse(src, args, context);
Object endState = args.getState();
if (!args.hasNext()) {
// This child consumed the last word; ask it for completions.
args.setState(startState);
List<String> inputs = element.complete(src, args, context);
args.previous();
if (!inputs.contains(args.next())) { // Tabcomplete returns results to complete the last word in an argument.
// If the last word is one of the completions, the command is most likely complete
return inputs;
}
args.setState(endState);
}
} catch (ArgumentParseException e) {
// Parse failed here: rewind and complete against this child.
args.setState(startState);
return element.complete(src, args, context);
}
if (!it.hasNext()) {
args.setState(startState);
}
}
return Collections.emptyList();
}
@Override
public Text getUsage(CommandSource commander) {
// Children's usages joined with single spaces.
final TextBuilder build = Texts.builder();
for (Iterator<CommandElement> it = this.elements.iterator(); it.hasNext();) {
build.append(it.next().getUsage(commander));
if (it.hasNext()) {
build.append(CommandMessageFormatting.SPACE_TEXT);
}
}
return build.build();
}
}
/**
 * Return an argument that allows selecting from a limited set of values.
 * If there are 5 or fewer choices available, the choices will be shown in the command usage. Otherwise, the usage
 * will only display only the key. To override this behavior, see {@link #choices(Text, Map, boolean)}.
 *
 * @param key The key to store the resulting value under
 * @param choices The choices users can choose from
 * @return the element to match the input
 */
public static CommandElement choices(Text key, Map<String, ?> choices) {
// Threshold of 5 keeps generated usage strings readable.
return choices(key, choices, choices.size() <= 5);
}
/**
 * Return an argument that allows selecting from a limited set of values.
 * Unless {@code choicesInUsage} is true, general command usage will only display the provided key.
 *
 * @param key The key to store the resulting value under
 * @param choices The choices users can choose from
 * @param choicesInUsage Whether to display the available choices, or simply the provided key, as part of usage
 * @return the element to match the input
 */
public static CommandElement choices(Text key, Map<String, ?> choices, boolean choicesInUsage) {
// Defensive snapshot: later mutation of the caller's map has no effect.
return new ChoicesCommandElement(key, ImmutableMap.copyOf(choices), choicesInUsage);
}
/**
 * Element that maps a single argument onto a fixed choice table, optionally
 * rendering the full choice list (&lt;a|b|c&gt;) in the usage string.
 */
private static class ChoicesCommandElement extends CommandElement {
// Immutable lookup table from user-typed key to resolved value.
private final Map<String, Object> choices;
private final boolean choicesInUsage;
private ChoicesCommandElement(Text key, Map<String, Object> choices, boolean choicesInUsage) {
super(key);
this.choices = choices;
this.choicesInUsage = choicesInUsage;
}
@Override
public Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
Object value = this.choices.get(args.next());
if (value == null) {
throw args.createError(t("Argument was not a valid choice. Valid choices: %s", this.choices.keySet().toString()));
}
return value;
}
@Override
public List<String> complete(CommandSource src, CommandArgs args, CommandContext context) {
// Offer every choice key that starts with what has been typed so far.
final String prefix = args.nextIfPresent().or("");
return ImmutableList.copyOf(Iterables.filter(this.choices.keySet(), new StartsWithPredicate(prefix)));
}
@Override
public Text getUsage(CommandSource commander) {
if (this.choicesInUsage) {
// Render the choices as <a|b|c>.
final TextBuilder build = Texts.builder();
build.append(CommandMessageFormatting.LT_TEXT);
for (Iterator<String> it = this.choices.keySet().iterator(); it.hasNext();) {
build.append(Texts.of(it.next()));
if (it.hasNext()) {
build.append(CommandMessageFormatting.PIPE_TEXT);
}
}
build.append(CommandMessageFormatting.GT_TEXT);
return build.build();
} else {
return super.getUsage(commander);
}
}
}
/**
 * Returns a command element that matches the first of the provided elements that parses.
 * Tab completion matches from all options.
 *
 * @param elements The elements to check against
 * @return The command element matching the first passing of the elements provided
 */
public static CommandElement firstParsing(CommandElement... elements) {
return new FirstParsingCommandElement(ImmutableList.copyOf(elements));
}
/**
 * Element that tries each alternative in order and accepts the first that
 * parses; only the last alternative's failure is reported if all fail.
 */
private static class FirstParsingCommandElement extends CommandElement {
private final List<CommandElement> elements;
private FirstParsingCommandElement(List<CommandElement> elements) {
super(null);
this.elements = elements;
}
@Override
public void parse(CommandSource source, CommandArgs args, CommandContext context) throws ArgumentParseException {
ArgumentParseException lastException = null;
for (CommandElement element : this.elements) {
// Remember the position so a failed alternative leaves no trace.
Object startState = args.getState();
try {
element.parse(source, args, context);
return;
} catch (ArgumentParseException ex) {
lastException = ex;
args.setState(startState);
}
}
if (lastException != null) {
throw lastException;
}
// With an empty alternative list, parsing vacuously succeeds.
}
@Override
protected Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
// Never called: parse() is overridden and does not delegate here.
return null;
}
@Override
public List<String> complete(final CommandSource src, final CommandArgs args, final CommandContext context) {
// Union of every alternative's completions, each computed against a
// rewound copy of the argument state.
return ImmutableList.copyOf(Iterables.concat(Iterables.transform(this.elements, new Function<CommandElement, Iterable<String>>() {
@Nullable
@Override
public Iterable<String> apply(@Nullable CommandElement input) {
if (input == null) {
return ImmutableList.of();
}
Object startState = args.getState();
List<String> ret = input.complete(src, args, context);
args.setState(startState);
return ret;
}
})));
}
@Override
public Text getUsage(CommandSource commander) {
// Alternatives joined with '|'.
final TextBuilder ret = Texts.builder();
for (Iterator<CommandElement> it = this.elements.iterator(); it.hasNext();) {
ret.append(it.next().getUsage(commander));
if (it.hasNext()) {
ret.append(CommandMessageFormatting.PIPE_TEXT);
}
}
return ret.build();
}
}
/**
 * Make the provided command element optional.
 * This means the command element is not required. However, if the element is provided with invalid format and there
 * are no more args specified, any errors will still be passed on.
 *
 * @param element The element to optionally require
 * @return the element to match the input
 */
public static CommandElement optional(CommandElement element) {
return new OptionalCommandElement(element, null, false);
}
/**
 * Make the provided command element optional.
 * This means the command element is not required. However, if the element is provided with invalid format and there
 * are no more args specified, any errors will still be passed on. If the given element's key and {@code value} are not
 * null and this element is not provided the element's key will be set to the given value.
 *
 * @param element The element to optionally require
 * @param value The default value to set
 * @return the element to match the input
 */
public static CommandElement optional(CommandElement element, Object value) {
return new OptionalCommandElement(element, value, false);
}
/**
 * Make the provided command element optional.
 * This means the command element is not required.
 * If the argument is provided but of invalid format, it will be skipped.
 *
 * @param element The element to optionally require
 * @return the element to match the input
 */
public static CommandElement optionalWeak(CommandElement element) {
return new OptionalCommandElement(element, null, true);
}
/**
 * Make the provided command element optional.
 * This means the command element is not required.
 * If the argument is provided but of invalid format, it will be skipped.
 * If the given element's key and {@code value} are not null and this element is not provided the element's key will
 * be set to the given value.
 *
 * @param element The element to optionally require
 * @param value The default value to set
 * @return the element to match the input
 */
public static CommandElement optionalWeak(CommandElement element, Object value) {
return new OptionalCommandElement(element, value, true);
}
/**
 * Wrapper that makes another element optional, optionally substituting a
 * default value and optionally swallowing invalid-format errors ("weak").
 */
private static class OptionalCommandElement extends CommandElement {
private final CommandElement element;
@Nullable
private final Object value;
// "Weak" mode: treat an unparseable argument as absent even at end of input.
private final boolean considerInvalidFormatEmpty;
private OptionalCommandElement(CommandElement element, @Nullable Object value, boolean considerInvalidFormatEmpty) {
super(null);
this.element = element;
this.value = value;
this.considerInvalidFormatEmpty = considerInvalidFormatEmpty;
}
@Override
public void parse(CommandSource source, CommandArgs args, CommandContext context) throws ArgumentParseException {
if (!args.hasNext()) {
// Nothing supplied: store the default, if configured.
// NOTE(review): this branch keys by Texts.toPlain(getKey()) while the
// catch branch below uses getUntranslatedKey() — confirm both resolve
// to the same context key, otherwise defaults land under two names.
if (this.element.getKey() != null && this.value != null) {
context.putArg(Texts.toPlain(this.element.getKey()), this.value);
}
return;
}
Object startState = args.getState();
try {
this.element.parse(source, args, context);
} catch (ArgumentParseException ex) {
if (this.considerInvalidFormatEmpty || args.hasNext()) { // If there are more args, suppress. Otherwise, throw the error
args.setState(startState);
if (this.element.getKey() != null && this.value != null) {
context.putArg(this.element.getUntranslatedKey(), this.value);
}
} else {
throw ex;
}
}
}
@Override
protected Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
// NOTE(review): returns null when input remains and only delegates when
// the args are exhausted — this looks inverted; verify against callers
// before changing.
return args.hasNext() ? null : this.element.parseValue(source, args);
}
@Override
public List<String> complete(CommandSource src, CommandArgs args, CommandContext context) {
return this.element.complete(src, args, context);
}
@Override
public Text getUsage(CommandSource src) {
// Optional elements are conventionally rendered in square brackets.
return Texts.of("[", this.element.getUsage(src), "]");
}
}
/**
 * Require a given command element to be provided a certain number of times.
 * Command values will be stored under their provided keys in the CommandContext.
 *
 * @param element The element to repeat
 * @param times The number of times to repeat the element.
 * @return the element to match the input
 */
public static CommandElement repeated(CommandElement element, int times) {
return new RepeatedCommandElement(element, times);
}
/**
 * Wrapper that parses another element a fixed number of times against the
 * same argument stream; values accumulate under the child element's key.
 */
private static class RepeatedCommandElement extends CommandElement {
    private final CommandElement element;
    private final int times;

    protected RepeatedCommandElement(CommandElement element, int times) {
        super(null);
        this.element = element;
        this.times = times;
    }

    /** Parses the wrapped element exactly {@code times} times in sequence. */
    @Override
    public void parse(CommandSource source, CommandArgs args, CommandContext context) throws ArgumentParseException {
        int remaining = this.times;
        while (remaining-- > 0) {
            this.element.parse(source, args, context);
        }
    }

    @Override
    protected Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
        // Never called: parse() is overridden and does not delegate here.
        return null;
    }

    /** Completes from the first repetition that fails to parse. */
    @Override
    public List<String> complete(CommandSource src, CommandArgs args, CommandContext context) {
        int remaining = this.times;
        while (remaining-- > 0) {
            Object savedState = args.getState();
            try {
                this.element.parse(src, args, context);
            } catch (ArgumentParseException e) {
                args.setState(savedState);
                return this.element.complete(src, args, context);
            }
        }
        return Collections.emptyList();
    }

    /** Usage renders as {@code N*<element usage>}. */
    @Override
    public Text getUsage(CommandSource src) {
        return Texts.of(this.times, '*', this.element.getUsage(src));
    }
}
/**
 * Require all remaining args to match as many instances of CommandElement as will fit.
 * Command element values will be stored under their provided keys in the CommandContext.
 *
 * @param element The element to repeat
 * @return the element to match the input
 */
public static CommandElement allOf(CommandElement element) {
return new AllOfCommandElement(element);
}
/**
 * Wrapper that greedily parses the wrapped element until the argument
 * stream is exhausted; values accumulate under the child element's key.
 */
private static class AllOfCommandElement extends CommandElement {
private final CommandElement element;
protected AllOfCommandElement(CommandElement element) {
super(null);
this.element = element;
}
@Override
public void parse(CommandSource source, CommandArgs args, CommandContext context) throws ArgumentParseException {
// Keep parsing until all input is consumed; a failure propagates.
while (args.hasNext()) {
this.element.parse(source, args, context);
}
}
@Override
protected Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
// Never called: parse() is overridden and does not delegate here.
return null;
}
@Override
public List<String> complete(CommandSource src, CommandArgs args, CommandContext context) {
// The first repetition that fails to parse supplies the completions.
while (args.hasNext()) {
Object startState = args.getState();
try {
this.element.parse(src, args, context);
} catch (ArgumentParseException e) {
args.setState(startState);
return this.element.complete(src, args, context);
}
}
return Collections.emptyList();
}
@Override
public Text getUsage(CommandSource context) {
// Rendered as "<element usage>+".
return Texts.of(this.element.getUsage(context), CommandMessageFormatting.PLUS_TEXT);
}
}
// -- Argument types for basic java types
/**
* Parent class that specifies elemenents as having no tab completions. Useful for inputs with a very large domain, like strings and integers
*/
private abstract static class KeyElement extends CommandElement {
    private KeyElement(Text key) {
        super(key);
    }

    /** Inputs with huge domains cannot be usefully completed, so offer nothing. */
    @Override
    public List<String> complete(CommandSource src, CommandArgs args, CommandContext context) {
        return Collections.emptyList();
    }
}
/**
* Require an argument to be a string. Any provided argument will fit in under this argument.
* Gives values of type {@link String}.
*
* @param key The key to store the parsed argument under
* @return the element to match the input
*/
public static CommandElement string(Text key) {
    // Any single token qualifies as a string argument.
    return new StringElement(key);
}
private static class StringElement extends KeyElement {
    private StringElement(Text key) {
        super(key);
    }

    /** Consumes and returns the next raw token unchanged. */
    @Override
    public Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
        return args.next();
    }
}
/**
* Require an argument to be an integer (base 10).
* Gives values of type {@link Integer}
*
* @param key The key to store the parsed argument under
* @return the element to match the input
*/
public static CommandElement integer(Text key) {
    // Parses the next token as a base-10 int.
    return new IntegerElement(key);
}
private static class IntegerElement extends KeyElement {
    private IntegerElement(Text key) {
        super(key);
    }

    @Override
    public Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
        final String token = args.next();
        try {
            return Integer.parseInt(token);
        } catch (NumberFormatException ex) {
            // Surface a command error pointing at the offending token.
            throw args.createError(t("Expected an integer, but input '%s' was not", token));
        }
    }
}
// Lookup table consumed by bool(Text) via GenericArguments.choices: maps each
// accepted spelling of a boolean argument to its value. Note the numeric forms
// "1"/"0" are accepted in addition to the word forms.
private static final Map<String, Boolean> BOOLEAN_CHOICES = ImmutableMap.<String, Boolean>builder()
        .put("true", true)
        .put("t", true)
        .put("y", true)
        .put("yes", true)
        .put("verymuchso", true)
        .put("1", true)
        .put("false", false)
        .put("f", false)
        .put("n", false)
        .put("no", false)
        .put("notatall", false)
        .put("0", false)
        .build();
/**
* Require an argument to be a boolean.
* The recognized true values are:
* <ul>
* <li>true</li>
* <li>t</li>
* <li>yes</li>
* <li>y</li>
* <li>verymuchso</li>
* <li>1</li>
* </ul>
* The recognized false values are:
* <ul>
* <li>false</li>
* <li>f</li>
* <li>no</li>
* <li>n</li>
* <li>notatall</li>
* <li>0</li>
* </ul>
* Gives values of type {@link Boolean}
*
* @param key The key to store the parsed argument under
* @return the element to match the input
*/
public static CommandElement bool(Text key) {
    // Delegates to the generic choices element with the canned boolean spellings.
    return GenericArguments.choices(key, BOOLEAN_CHOICES);
}
/**
* Require the argument to be a key under the provided enum.
* Gives values of type T
*
* @param key The key to store the matched enum value under
* @param type The enum class to get enum constants from
* @param <T> The type of enum
* @return the element to match the input
*/
public static <T extends Enum<T>> CommandElement enumValue(Text key, Class<T> type) {
    // Input is uppercased before lookup, so constant names match case-insensitively.
    return new EnumValueElement<T>(key, type);
}
private static class EnumValueElement<T extends Enum<T>> extends PatternMatchingCommandElement {
    private final Class<T> type;

    private EnumValueElement(Text key, Class<T> type) {
        super(key);
        this.type = type;
    }

    @Override
    protected Iterable<String> getChoices(CommandSource source) {
        // Offer every enum constant's name as a completion choice.
        final List<T> constants = Arrays.asList(this.type.getEnumConstants());
        return Iterables.transform(constants, new Function<T, String>() {
            @Nullable
            @Override
            public String apply(@Nullable T input) {
                return input == null ? null : input.name();
            }
        });
    }

    @Override
    protected Object getValue(String choice) throws IllegalArgumentException {
        // NOTE(review): toUpperCase() uses the default locale — confirm constant
        // names never contain locale-sensitive characters (e.g. Turkish 'i').
        final String constantName = choice.toUpperCase();
        return Enum.valueOf(this.type, constantName);
    }
}
/**
* Require one or more strings, which are combined into a single, space-separated string.
* Gives values of type {@link String}
*
* @param key The key to store the parsed argument under
* @return the element to match the input
*/
public static CommandElement remainingJoinedStrings(Text key) {
    // Non-raw mode: remaining tokens are re-joined with single spaces.
    return new RemainingJoinedStringsCommandElement(key, false);
}
private static class RemainingJoinedStringsCommandElement extends KeyElement {
    /** When true, return the untouched tail of the raw input instead of re-joining tokens. */
    private final boolean raw;

    private RemainingJoinedStringsCommandElement(Text key, boolean raw) {
        super(key);
        this.raw = raw;
    }

    @Override
    protected Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
        if (this.raw) {
            // Require at least one token, then capture the raw remainder verbatim.
            args.next();
            final String tail = args.getRaw().substring(args.getRawPosition());
            while (args.hasNext()) {
                args.next(); // drain the iterator so the parser sees no leftovers
            }
            return tail;
        }
        // Join the remaining tokens with single spaces.
        final StringBuilder joined = new StringBuilder(args.next());
        while (args.hasNext()) {
            joined.append(' ').append(args.next());
        }
        return joined.toString();
    }

    @Override
    public Text getUsage(CommandSource src) {
        // Rendered as "<key…>".
        return Texts.of(CommandMessageFormatting.LT_TEXT, getKey(), CommandMessageFormatting.ELIPSES_TEXT, CommandMessageFormatting.GT_TEXT);
    }
}
/**
* Expect a literal sequence of arguments. This element matches the input against a predefined array of arguments expected to be present,
* case-insensitively.
*
* @param key The key to add to the context. Will be set to a value of true if this element matches
* @param expectedArgs The sequence of arguments expected
* @return the appropriate command element
*/
public static CommandElement literal(Text key, String... expectedArgs) {
    // The context value defaults to Boolean.TRUE when the literal sequence matches.
    return new LiteralCommandElement(key, ImmutableList.copyOf(expectedArgs), true);
}
/**
* Expect a literal sequence of arguments. This element matches the input against a predefined array of arguments expected to be present,
* case-insensitively.
*
* @param key The key to store this argument as
* @param putValue The value to put at key if this argument matches. May be null
* @param expectedArgs The sequence of arguments expected
* @return the appropriate command element
*/
public static CommandElement literal(Text key, @Nullable Object putValue, String... expectedArgs) {
    // Caller-chosen (possibly null) value stored under key on a match.
    return new LiteralCommandElement(key, ImmutableList.copyOf(expectedArgs), putValue);
}
private static class LiteralCommandElement extends CommandElement {
    // The exact token sequence that must appear next in the input.
    private final List<String> expectedArgs;
    // Value stored in the context under the key when the sequence matches; may be null.
    @Nullable
    private final Object putValue;

    protected LiteralCommandElement(@Nullable Text key, List<String> expectedArgs, @Nullable Object putValue) {
        super(key);
        this.expectedArgs = ImmutableList.copyOf(expectedArgs);
        this.putValue = putValue;
    }

    /**
     * Consumes one token per expected argument, comparing case-insensitively,
     * and fails on the first mismatch.
     */
    @Nullable
    @Override
    protected Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
        for (String arg : this.expectedArgs) {
            String current;
            if (!(current = args.next()).equalsIgnoreCase(arg)) {
                throw args.createError(t("Argument %s did not match expected next argument %s", current, arg));
            }
        }
        return this.putValue;
    }

    @Override
    public List<String> complete(CommandSource src, CommandArgs args, CommandContext context) {
        for (String arg : this.expectedArgs) {
            final Optional<String> next = args.nextIfPresent();
            if (!next.isPresent()) {
                // Ran out of input mid-sequence: nothing to complete.
                break;
            } else if (args.hasNext()) {
                // Not the final typed token: it must already match (case-insensitively)
                // for completion to continue down the sequence.
                if (!next.get().equalsIgnoreCase(arg)) {
                    break;
                }
            } else {
                // Final (possibly partial) token: offer the literal if it extends the prefix.
                if (arg.toLowerCase().startsWith(next.get().toLowerCase())) { // Case-insensitive compare
                    return ImmutableList.of(arg); // TODO: Possibly complete all remaining args? Does that even work
                }
            }
        }
        return ImmutableList.of();
    }

    @Override
    public Text getUsage(CommandSource src) {
        return Texts.of(Joiner.on(' ').join(this.expectedArgs));
    }
}
private static class PlayerCommandElement extends PatternMatchingCommandElement {
    private final Game game;
    // When true and no player name matches, fall back to the command source itself
    // (provided the source is a player).
    private final boolean returnSource;

    protected PlayerCommandElement(Text key, Game game, boolean returnSource) {
        super(key);
        this.game = game;
        this.returnSource = returnSource;
    }

    @Override
    protected Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
        // TODO: Make player name resolution better -- support selectors, etc
        // No argument at all: only the source fallback can apply.
        if (!args.hasNext() && this.returnSource) {
            return tryReturnSource(source, args);
        }
        // Checkpoint so a failed name match can be rewound before falling back.
        Object state = args.getState();
        try {
            return super.parseValue(source, args);
        } catch (ArgumentParseException ex) {
            if (this.returnSource) {
                args.setState(state);
                return tryReturnSource(source, args);
            } else {
                throw ex;
            }
        }
    }

    @Override
    protected Iterable<String> getChoices(CommandSource source) {
        // Completion choices are the names of all currently online players.
        return Iterables.transform(this.game.getServer().getOnlinePlayers(), new Function<Player, String>() {
            @Nullable
            @Override
            public String apply(@Nullable Player input) {
                return input == null ? null : input.getName();
            }
        });
    }

    @Override
    protected Object getValue(String choice) throws IllegalArgumentException {
        Optional<Player> ret = this.game.getServer().getPlayer(choice);
        if (!ret.isPresent()) {
            throw new IllegalArgumentException("Input value " + choice + " was not a player");
        }
        return ret.get();
    }

    // Returns the source itself when it is a player; otherwise fails the parse.
    private Player tryReturnSource(CommandSource source, CommandArgs args) throws ArgumentParseException {
        if (source instanceof Player) {
            return ((Player) source);
        } else {
            throw args.createError(t("No players matched and source was not a player!"));
        }
    }

    @Override
    public Text getUsage(CommandSource src) {
        // Shown as optional ("[name]") when the source itself can satisfy the argument.
        return src instanceof Player && this.returnSource ? Texts.of("[", super.getUsage(src), "]") : super.getUsage(src);
    }
}
private static class WorldPropertiesCommandElement extends PatternMatchingCommandElement {
    private final Game game;

    protected WorldPropertiesCommandElement(@Nullable Text key, Game game) {
        super(key);
        this.game = game;
    }

    // Pass-through that widens the superclass method's visibility to public.
    @Nullable
    @Override
    public Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
        return super.parseValue(source, args);
    }

    @Override
    protected Iterable<String> getChoices(CommandSource source) {
        // Disabled (or null) world properties produce a null name here.
        return Iterables.transform(this.game.getServer().getAllWorldProperties(), new Function<WorldProperties, String>() {
            @Nullable
            @Override
            public String apply(@Nullable WorldProperties input) {
                return input == null || !input.isEnabled() ? null : input.getWorldName();
            }
        });
    }

    @Override
    protected Object getValue(String choice) throws IllegalArgumentException {
        final Optional<WorldProperties> properties = this.game.getServer().getWorldProperties(choice);
        if (!properties.isPresent()) {
            throw new IllegalArgumentException("Provided argument " + choice + " did not match a WorldProperties");
        }
        return properties.get();
    }
}
/**
 * Parses a three-dimensional position.
 *
 * <p>Accepted syntax is either a single comma-separated token ({@code x,y,z})
 * or three separate arguments ({@code x y z}). Each component may be made
 * relative to the source's current location with a {@code ~} prefix,
 * e.g. {@code ~10}; see parseRelativeDouble().</p>
 */
private static class Vector3dCommandElement extends CommandElement {
    protected Vector3dCommandElement(@Nullable Text key) {
        super(key);
    }

    /**
     * Parses either one comma-separated token ("x,y,z") or three space-separated
     * tokens into a {@link Vector3d}. A '~'-prefixed component is resolved
     * relative to the source's position (requires a {@link LocatedSource}).
     */
    @Override
    protected Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
        String xStr;
        String yStr;
        String zStr;
        xStr = args.next();
        if (xStr.contains(",")) {
            String[] split = xStr.split(",");
            if (split.length != 3) {
                throw args.createError(t("Comma-separated location must have 3 elements, not %s", split.length));
            }
            xStr = split[0];
            yStr = split[1];
            zStr = split[2];
        } else {
            yStr = args.next();
            zStr = args.next();
        }
        final double x = parseRelativeDouble(args, xStr, source instanceof LocatedSource ? ((LocatedSource) source).getLocation().getX() : null);
        final double y = parseRelativeDouble(args, yStr, source instanceof LocatedSource ? ((LocatedSource) source).getLocation().getY() : null);
        final double z = parseRelativeDouble(args, zStr, source instanceof LocatedSource ? ((LocatedSource) source).getLocation().getZ() : null);
        return new Vector3d(x, y, z);
    }

    @Override
    public List<String> complete(CommandSource src, CommandArgs args, CommandContext context) {
        Optional<String> arg = args.nextIfPresent();
        // Traverse through the possible arguments. We can't really complete arbitrary integers
        if (arg.isPresent()) {
            if (arg.get().contains(",") || !args.hasNext()) {
                return ImmutableList.of(arg.get());
            } else {
                // NOTE(review): this just echoes back whichever token is last in the
                // input; it never validates a third coordinate — confirm intended.
                arg = args.nextIfPresent();
                if (args.hasNext()) {
                    return ImmutableList.of(args.nextIfPresent().get());
                } else {
                    return ImmutableList.of(arg.get());
                }
            }
        } else {
            return ImmutableList.of();
        }
    }

    /**
     * Parses a double, resolving a leading '~' against {@code relativeTo}.
     *
     * @throws ArgumentParseException if '~' is used without a positioned source,
     *         or the remainder is not a valid double
     */
    private double parseRelativeDouble(CommandArgs args, String arg, @Nullable Double relativeTo) throws ArgumentParseException {
        boolean relative = arg.startsWith("~");
        if (relative) {
            if (relativeTo == null) {
                // Fix: user-facing message previously misspelled "position" as "postion".
                throw args.createError(t("Relative position specified but source does not have a position"));
            }
            arg = arg.substring(1);
        }
        try {
            double ret = Double.parseDouble(arg);
            return relative ? ret + relativeTo : ret;
        } catch (NumberFormatException e) {
            throw args.createError(t("Expected input %s to be a double, but was not", arg));
        }
    }
}
private static class LocationCommandElement extends CommandElement {
    private final Game game;
    private final WorldPropertiesCommandElement worldParser;
    private final Vector3dCommandElement vectorParser;

    protected LocationCommandElement(Text key, Game game) {
        super(key);
        this.game = game;
        this.worldParser = new WorldPropertiesCommandElement(null, game);
        this.vectorParser = new Vector3dCommandElement(null);
    }

    /**
     * Parses "[world] x y z" into a {@link Location}. When no world argument is
     * present (or it fails but the following vector parses), the source's own
     * world is used — which requires the source to be a {@link LocatedSource}.
     */
    @Override
    protected Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
        Object state = args.getState();
        Object world;
        Object vec = null;
        try {
            world = Preconditions.checkNotNull(this.worldParser.parseValue(source, args), "worldVal");
        } catch (ArgumentParseException ex) {
            // No explicit world: fall back to the source's world, but only if the
            // remaining input parses as a vector; otherwise rethrow the original error.
            args.setState(state);
            if (!(source instanceof LocatedSource)) {
                throw args.createError(t("Source must have a location in order to have a fallback world"));
            }
            world = ((LocatedSource) source).getWorld();
            try {
                vec = Preconditions.checkNotNull(this.vectorParser.parseValue(source, args), "vectorVal");
            } catch (ArgumentParseException ex2) {
                args.setState(state);
                throw ex;
            }
        }
        if (vec == null) {
            vec = Preconditions.checkNotNull(this.vectorParser.parseValue(source, args), "vectorVal");
        }
        if (world instanceof Collection<?>) {
            // The world parser may hand back several matches; a location needs exactly one.
            if (((Collection) world).size() != 1) {
                throw args.createError(t("A location must be specified in only one world!"));
            }
            world = ((Collection) world).iterator().next();
        }
        WorldProperties targetWorldProps = ((WorldProperties) world);
        Optional<World> targetWorld = this.game.getServer().getWorld(targetWorldProps.getUniqueId());
        // Fix: previously called targetWorld.get() unchecked, which raised an
        // unhelpful runtime exception when the world matched but was not loaded.
        if (!targetWorld.isPresent()) {
            throw args.createError(t("World %s is not loaded!", targetWorldProps.getWorldName()));
        }
        Vector3d vector = (Vector3d) vec;
        return new Location(targetWorld.get(), vector);
    }

    @Override
    public List<String> complete(CommandSource src, CommandArgs args, CommandContext context) {
        // Try world-name completions first; if none apply, rewind and complete coordinates.
        Object state = args.getState();
        List<String> ret;
        if ((ret = this.worldParser.complete(src, args, context)).isEmpty()) {
            args.setState(state);
            ret = this.vectorParser.complete(src, args, context);
        }
        return ret;
    }
}
private static class CatalogedTypeCommandElement<T extends CatalogType> extends PatternMatchingCommandElement {
    private final Game game;
    private final Class<T> catalogType;

    protected CatalogedTypeCommandElement(Text key, Game game, Class<T> catalogType) {
        super(key);
        this.game = game;
        this.catalogType = catalogType;
    }

    @Override
    protected Iterable<String> getChoices(CommandSource source) {
        // Every registered catalog entry's id is a valid choice.
        return Iterables.transform(this.game.getRegistry().getAllOf(this.catalogType), new Function<T, String>() {
            @Nullable
            @Override
            public String apply(@Nullable T input) {
                return input == null ? null : input.getId(); // TODO: ids or names?
            }
        });
    }

    @Override
    protected Object getValue(String choice) throws IllegalArgumentException {
        final Optional<T> match = this.game.getRegistry().getType(this.catalogType, choice);
        if (!match.isPresent()) {
            throw new IllegalArgumentException("Invalid input " + choice + " was found");
        }
        return match.get();
    }
}
/**
* Restricts the given command element to only insert one value into the context at the provided key.
*
* @param element The element to restrict
* @return the restricted element
*/
public static CommandElement onlyOne(CommandElement element) {
    // Fails the parse if the wrapped element stores more than one value for its key.
    return new OnlyOneCommandElement(element);
}
private static class OnlyOneCommandElement extends CommandElement {
    private final CommandElement element;

    protected OnlyOneCommandElement(CommandElement element) {
        super(element.getKey());
        this.element = element;
    }

    @Override
    public void parse(CommandSource source, CommandArgs args, CommandContext context) throws ArgumentParseException {
        this.element.parse(source, args, context);
        // Reject the parse when the wrapped element stored multiple values.
        final int valueCount = context.getAll(this.element.getUntranslatedKey()).size();
        if (valueCount > 1) {
            throw args.createError(t("Argument %s may have only one value!", this.element.getKey()));
        }
    }

    @Override
    public Text getUsage(CommandSource src) {
        return this.element.getUsage(src);
    }

    @Nullable
    @Override
    protected Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
        return this.element.parseValue(source, args);
    }

    @Override
    public List<String> complete(CommandSource src, CommandArgs args, CommandContext context) {
        return this.element.complete(src, args, context);
    }
}
/**
* Checks a permission for a given command argument to be used.
*
* @param element The element to wrap
* @param permission The permission to check
* @return the element
*/
public static CommandElement requiringPermission(CommandElement element, String permission) {
    // Gate both parsing and completion behind the given permission node.
    return new PermissionCommandElement(element, permission);
}
private static class PermissionCommandElement extends CommandElement {
    private final CommandElement element;
    private final String permission;

    protected PermissionCommandElement(CommandElement element, String permission) {
        super(element.getKey());
        this.element = element;
        this.permission = permission;
    }

    /** Fails the parse when the source lacks the required permission. */
    private void checkPermission(CommandSource source, CommandArgs args) throws ArgumentParseException {
        if (!source.hasPermission(this.permission)) {
            throw args.createError(t("You do not have permission to use the %s argument", getKey()));
        }
    }

    @Nullable
    @Override
    protected Object parseValue(CommandSource source, CommandArgs args) throws ArgumentParseException {
        checkPermission(source, args);
        return this.element.parseValue(source, args);
    }

    @Override
    public void parse(CommandSource source, CommandArgs args, CommandContext context) throws ArgumentParseException {
        checkPermission(source, args);
        super.parse(source, args, context);
    }

    @Override
    public List<String> complete(CommandSource src, CommandArgs args, CommandContext context) {
        // Hide completions entirely from sources that cannot use the argument.
        return src.hasPermission(this.permission) ? this.element.complete(src, args, context) : ImmutableList.<String>of();
    }

    @Override
    public Text getUsage(CommandSource src) {
        return this.element.getUsage(src);
    }
}
}
| |
package com.echomap.kqf.view.ctrl;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.util.Calendar;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.ResourceBundle;
import java.util.prefs.Preferences;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.echomap.kqf.EchoWriteConst;
import com.echomap.kqf.looper.WorkDoneNotify;
import com.echomap.kqf.view.MainFrame;
import com.echomap.kqf.view.gui.ConfirmResult;
import javafx.animation.Animation;
import javafx.animation.Interpolator;
import javafx.animation.Transition;
import javafx.application.Platform;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXMLLoader;
import javafx.fxml.JavaFXBuilderFactory;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Node;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.CheckBox;
import javafx.scene.control.Control;
import javafx.scene.control.Label;
import javafx.scene.control.SplitPane;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.TextArea;
import javafx.scene.control.TextField;
import javafx.scene.control.TextInputControl;
import javafx.scene.control.TitledPane;
import javafx.scene.control.Tooltip;
import javafx.scene.control.skin.TableViewSkin;
import javafx.scene.image.Image;
import javafx.scene.layout.Background;
import javafx.scene.layout.BackgroundFill;
import javafx.scene.layout.CornerRadii;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Pane;
import javafx.scene.layout.VBox;
import javafx.scene.paint.Color;
import javafx.stage.DirectoryChooser;
import javafx.stage.FileChooser;
import javafx.stage.FileChooser.ExtensionFilter;
import javafx.stage.Modality;
import javafx.stage.Stage;
import javafx.stage.WindowEvent;
import javafx.util.Duration;
public abstract class BaseCtrl {
static private final Logger LOGGER = LogManager.getLogger(BaseCtrl.class);
// Count of windows opened through this controller hierarchy; exposed via getWindowCount().
static private int windowCount = 0;
/** Loaded from file "cwc2.properties" */
Properties appProps;
/** Loaded from OS, userNodeForPackage(MainFrame.class), like width, etc */
Preferences appPreferences = null;
/** config params */
Map<String, Object> paramsMap;
//
ResourceBundle messageBundle = null;
//
String appVersion = null;
Stage primaryStage = null;
boolean profileChangeMade = false;
// String lastNofificationMsg = null;
// Reflective handle to TableViewSkin.resizeColumnToFitContent(TableColumn, int).
// NOTE(review): this is a non-public JavaFX API; on versions where the method is
// absent the catch below leaves columnToFitMethod null — confirm callers check.
private static Method columnToFitMethod;
static {
    try {
        columnToFitMethod = TableViewSkin.class.getDeclaredMethod("resizeColumnToFitContent", TableColumn.class,
                int.class);
        columnToFitMethod.setAccessible(true);
    } catch (NoSuchMethodException e) {
        e.printStackTrace();
    }
}
/** Default constructor; preferences/bundles are supplied later by subclasses. */
public BaseCtrl() {
    // appPreferences = Preferences.userNodeForPackage(BaseCtrl.class);
    // messageBundle = cwc2messages.properties
}
/** Returns the number of windows opened through this controller hierarchy. */
final public static int getWindowCount() {
    return BaseCtrl.windowCount;
}
final void loadPreferencesForWindow(final String keyP, final Stage stage) {
    // Restore the stage's last saved geometry. Each preference defaults to -1,
    // meaning "never saved", in which case the stage property is left untouched.
    final String keyY = String.format("%s/%s", keyP, EchoWriteConst.WINDOW_PREF_Y);
    final double storedY = appPreferences.getDouble(keyY, -1);
    LOGGER.debug("loadPreferences: keyY='" + keyY + "' val=" + storedY);
    if (storedY > -1) {
        stage.setY(storedY);
    }

    final String keyX = String.format("%s/%s", keyP, EchoWriteConst.WINDOW_PREF_X);
    final double storedX = appPreferences.getDouble(keyX, -1);
    LOGGER.debug("loadPreferences: keyX='" + keyX + "' val=" + storedX);
    if (storedX > -1) {
        stage.setX(storedX);
    }

    final String keyH = String.format("%s/%s", keyP, EchoWriteConst.WINDOW_PREF_HEIGHT);
    final double storedHeight = appPreferences.getDouble(keyH, -1);
    LOGGER.debug("loadPreferences: keyH='" + keyH + "' val=" + storedHeight);
    if (storedHeight > -1) {
        stage.setHeight(storedHeight);
    }

    final String keyW = String.format("%s/%s", keyP, EchoWriteConst.WINDOW_PREF_WIDTH);
    final double storedWidth = appPreferences.getDouble(keyW, -1);
    LOGGER.debug("loadPreferences: keyW='" + keyW + "' val=" + storedWidth);
    if (storedWidth > -1) {
        stage.setWidth(storedWidth);
    }
}
final void savePreferencesForWindow(final String keyP, final Stage stage) {
    // Persist the stage's current geometry. Non-positive values are skipped
    // rather than stored (e.g. a stage that was never shown).
    final double curX = stage.getX();
    final String keyX = String.format("%s/%s", keyP, EchoWriteConst.WINDOW_PREF_X);
    if (curX > 0) {
        LOGGER.debug("savePreferences: keyX='" + keyX + "' X=" + curX);
        if (appPreferences != null)
            appPreferences.putDouble(keyX, curX);
    }

    final double curY = stage.getY();
    final String keyY = String.format("%s/%s", keyP, EchoWriteConst.WINDOW_PREF_Y);
    if (curY > 0) {
        LOGGER.debug("savePreferences: keyY='" + keyY + "' Y=" + curY);
        if (appPreferences != null)
            appPreferences.putDouble(keyY, curY);
    }

    final double curWidth = stage.getWidth();
    final String keyW = String.format("%s/%s", keyP, EchoWriteConst.WINDOW_PREF_WIDTH);
    if (curWidth > 0) {
        LOGGER.debug("savePreferences: keyW='" + keyW + "' width=" + curWidth);
        if (appPreferences != null)
            appPreferences.putDouble(keyW, curWidth);
    }

    final double curHeight = stage.getHeight();
    final String keyH = String.format("%s/%s", keyP, EchoWriteConst.WINDOW_PREF_HEIGHT);
    if (curHeight > 0) {
        LOGGER.debug("savePreferences: keyH='" + keyH + "' height=" + curHeight);
        if (appPreferences != null)
            appPreferences.putDouble(keyH, curHeight);
    }
}
/**
 * Looks up an application property, returning {@code defaultValue} when the
 * properties are unavailable or the key is missing.
 *
 * NOTE(review): the guard checks for PROP_KEY_VERSION rather than {@code key};
 * this looks like a "did cwc2.properties actually load" sanity check — confirm.
 */
final String loadPropFromAppOrDefault(final String key, final String defaultValue) {
    if (appProps != null && appProps.containsKey(EchoWriteConst.PROP_KEY_VERSION)) {
        final String value = appProps.getProperty(key);
        // Fix: previously returned value unconditionally, so a missing key
        // yielded null instead of the promised default.
        if (value != null) {
            return value;
        }
    }
    return defaultValue;
}
/** Remembers the given directory's absolute path for the next chooser open. */
final void setLastSelectedDirectory(final File lastDir) {
    if (appPreferences == null) {
        return;
    }
    appPreferences.put("LastSelectedDirectory", lastDir.getAbsolutePath());
}
/**
 * Remembers a directory for the next file-chooser open. Accepts either a
 * directory path or a file path; for a file, its parent directory is stored.
 */
final void setLastSelectedDirectory(final String lastDir) {
    if (appPreferences == null) {
        return;
    }
    final File selected = new File(lastDir);
    if (!selected.exists()) {
        return; // nothing sensible to remember
    }
    if (selected.isDirectory()) {
        appPreferences.put("LastSelectedDirectory", lastDir);
    } else {
        // Fix: the old code stored the *file* path when its parent was NOT a
        // directory (inverted check), which could never be a valid chooser
        // start point. Store the containing directory instead.
        final File parent = selected.getParentFile();
        if (parent != null && parent.isDirectory()) {
            appPreferences.put("LastSelectedDirectory", parent.getAbsolutePath());
        }
    }
}
/** Returns the remembered chooser directory, or null when none was stored. */
final File getLastSelectedDirectory() {
    File result = null;
    if (appPreferences != null) {
        final String stored = appPreferences.get("LastSelectedDirectory", null);
        if (stored != null) {
            result = new File(stored);
        }
    }
    if (result == null || !result.exists() || !result.isDirectory()) {
        // Get input file's directory? Or outputs?
    }
    return result;
}
/**
 * Formats "now" using the application's shared date format.
 *
 * NOTE(review): EchoWriteConst.myDateFormat is a shared formatter that is
 * mutated here via setTimeZone; if it is a SimpleDateFormat it is not
 * thread-safe — confirm this is only ever invoked from a single thread.
 */
final public static String getCurrentDateFmt() {
    final Calendar cal = Calendar.getInstance();
    EchoWriteConst.myDateFormat.setTimeZone(cal.getTimeZone());
    String txt = EchoWriteConst.myDateFormat.format(cal.getTime());
    LOGGER.debug("date = " + txt);
    return txt;
}
// TODO different call for Error? w/colors?
/**
 * Shows {@code msg} in the given output area, flashing the area's background
 * red and fading it out over two seconds, and logs the message at INFO level.
 *
 * @param msg the message to display
 * @param clearPrevious when true the area's text is replaced; when false the
 *        message is prepended above the existing text
 * @param outputArea the text area to write into
 */
final void showMessage(final String msg, final boolean clearPrevious, final TextArea outputArea) {
    final Animation animation = new Transition() {
        {
            // Two-second fade, easing out.
            setCycleDuration(Duration.millis(2000));
            setInterpolator(Interpolator.EASE_OUT);
        }
        @Override
        protected void interpolate(double frac) {
            // Red background whose alpha fades from 1 to 0 over the transition.
            Color vColor = new Color(1, 0, 0, 1 - frac);
            outputArea.setBackground(new Background(new BackgroundFill(vColor, CornerRadii.EMPTY, Insets.EMPTY)));
        }
    };
    animation.play();
    if (clearPrevious) {
        outputArea.setText(msg);
    } else {
        outputArea.setText(msg + "\r\n" + outputArea.getText());
    }
    LOGGER.info(msg);
}
/**
 * Opens an application-modal popup with an optional bordered header label
 * ({@code msg1}), a read-only green-on-black text area ({@code msg2}) and a
 * single Close button. Uses show(), so it returns immediately.
 *
 * NOTE(review): the {@code error} flag is never read in this method — confirm
 * whether distinct error styling was intended.
 */
final void showPopupMessage(final String msg1, final String msg2, final boolean error) {
    final Stage dialog = new Stage();
    dialog.setTitle("KQF Message Dialog");
    dialog.setResizable(true);
    dialog.initModality(Modality.APPLICATION_MODAL);
    dialog.setWidth(420);
    dialog.setHeight(280);
    if (primaryStage != null)
        dialog.initOwner(primaryStage);
    final Button closeButton = new Button();
    closeButton.setText("_Close");
    closeButton.setMnemonicParsing(true);
    // closeButton.setStyle("-fx-padding: 8; -fx-margin: 8;");
    closeButton.setOnAction(new EventHandler<ActionEvent>() {
        @Override
        public void handle(ActionEvent event) {
            // Close whatever stage hosts the button.
            final Node source = (Node) event.getSource();
            final Stage stage = (Stage) source.getScene().getWindow();
            // doCleanup();
            stage.close();
        }
    });
    closeButton.setDefaultButton(true);
    // Optional header label, shown only when msg1 was provided.
    final Label text1;
    if (msg1 != null) {
        text1 = new Label(msg1);
        final StringBuilder cssBorder = new StringBuilder();
        cssBorder.append("-fx-padding: 4 12 4 12;");
        cssBorder.append("-fx-border-style: solid inside;");
        cssBorder.append("-fx-border-width: 2;");
        cssBorder.append("-fx-border-insets: 5;");
        cssBorder.append("-fx-border-radius: 5;");
        cssBorder.append("-fx-border-color: black;");
        text1.setStyle(cssBorder.toString());
        text1.autosize();
    } else
        text1 = null;
    // Main message body: read-only, wrapped, terminal-style colors.
    final TextArea text = new TextArea();
    text.appendText(msg2);
    text.setWrapText(true);
    text.setEditable(false);
    text.autosize();
    text.setFocusTraversable(false);
    text.setStyle(
            "-fx-control-inner-background:#000000; -fx-font-family: Consolas; -fx-highlight-fill: #00ff00; -fx-highlight-text-fill: #000000; -fx-text-fill: #00ff00; ");
    // text.setStyle("-fx-background-color: #EEEEA4;");
    // final Text text = new Text(msg);
    // text.autosize();
    final VBox dialogVbox = new VBox(10);
    // dialogVbox.setAlignment(Pos.CENTER);
    dialogVbox.setStyle(
            "-fx-border-color: #2e8b57; -fx-border-width: 2px; -fx-border-insets: 5; -fx-border-style: solid; ");
    VBox.setMargin(closeButton, new Insets(4, 8, 8, 4));
    if (text1 != null)
        dialogVbox.getChildren().add(text1);
    dialogVbox.getChildren().add(text);
    dialogVbox.getChildren().add(closeButton);
    dialogVbox.autosize();
    final Scene dialogScene = new Scene(dialogVbox, 300, 75);
    // dialogScene.
    dialog.setScene(dialogScene);
    // dialog.sizeToScene();
    dialog.setOnCloseRequest(new EventHandler<WindowEvent>() {
        public void handle(final WindowEvent we) {
            LOGGER.debug("SubStage is cleaning up...");
            LOGGER.debug("SubStage is closing");
            dialog.close();
        }
    });
    closeButton.requestFocus();
    dialog.show();
}
/**
 * Opens an application-modal Confirm/Cancel dialog. Confirm invokes
 * {@code confirmResult.actionConfirmed(msg1)}, Cancel invokes
 * {@code confirmResult.actionCancelled(msg1)}. Uses show(), so it returns
 * immediately; results arrive through the callback.
 *
 * NOTE(review): closing via the window's X button invokes NEITHER callback —
 * confirm whether that should count as a cancel.
 */
final void showConfirmDialog(final String msg1, final String msg2, final ConfirmResult confirmResult) {
    final Stage dialog = new Stage();
    dialog.setTitle("KQF Message Dialog");
    dialog.setResizable(true);
    dialog.initModality(Modality.APPLICATION_MODAL);
    dialog.setWidth(420);
    dialog.setHeight(280);
    if (primaryStage != null)
        dialog.initOwner(primaryStage);
    final Button confirmButton = new Button();
    confirmButton.setText("C_onfirm");
    confirmButton.setMnemonicParsing(true);
    // closeButton.setStyle("-fx-padding: 8; -fx-margin: 8;");
    confirmButton.setOnAction(new EventHandler<ActionEvent>() {
        @Override
        public void handle(ActionEvent event) {
            LOGGER.debug("Action Confirmed");
            final Node source = (Node) event.getSource();
            final Stage stage = (Stage) source.getScene().getWindow();
            confirmResult.actionConfirmed(msg1);
            stage.close();
        }
    });
    final Button closeButton = new Button();
    closeButton.setText("_Cancel");
    closeButton.setMnemonicParsing(true);
    // closeButton.setStyle("-fx-padding: 8; -fx-margin: 8;");
    closeButton.setOnAction(new EventHandler<ActionEvent>() {
        @Override
        public void handle(ActionEvent event) {
            LOGGER.debug("Action Cancelled");
            final Node source = (Node) event.getSource();
            final Stage stage = (Stage) source.getScene().getWindow();
            confirmResult.actionCancelled(msg1);
            stage.close();
        }
    });
    // Cancel is the default button (Enter cancels; Confirm must be chosen explicitly).
    closeButton.setDefaultButton(true);
    // Optional header label, shown only when msg1 was provided.
    final Label text1;
    if (msg1 != null) {
        text1 = new Label(msg1);
        final StringBuilder cssBorder = new StringBuilder();
        cssBorder.append("-fx-padding: 4 12 4 12;");
        cssBorder.append("-fx-border-style: solid inside;");
        cssBorder.append("-fx-border-width: 2;");
        cssBorder.append("-fx-border-insets: 5;");
        cssBorder.append("-fx-border-radius: 5;");
        cssBorder.append("-fx-border-color: black;");
        text1.setStyle(cssBorder.toString());
        text1.autosize();
    } else
        text1 = null;
    // Main message body: read-only, wrapped, terminal-style colors.
    final TextArea text = new TextArea();
    text.appendText(msg2);
    text.setWrapText(true);
    text.setEditable(false);
    text.autosize();
    text.setFocusTraversable(false);
    text.setStyle(
            "-fx-control-inner-background:#000000; -fx-font-family: Consolas; -fx-highlight-fill: #00ff00; -fx-highlight-text-fill: #000000; -fx-text-fill: #00ff00; ");
    // text.setStyle("-fx-background-color: #EEEEA4;");
    // final Text text = new Text(msg);
    // text.autosize();
    final VBox dialogVbox = new VBox(10);
    // dialogVbox.setAlignment(Pos.CENTER);
    dialogVbox.setStyle(
            "-fx-border-color: #2e8b57; -fx-border-width: 2px; -fx-border-insets: 5; -fx-border-style: solid; ");
    // VBox.setMargin(dialogVboxI, new Insets(4, 8, 8, 4));
    // Horizontal row holding the two buttons.
    final HBox dialogVboxI = new HBox(10);
    HBox.setMargin(confirmButton, new Insets(4, 8, 8, 4));
    HBox.setMargin(closeButton, new Insets(4, 8, 8, 4));
    if (text1 != null)
        dialogVbox.getChildren().add(text1);
    dialogVbox.getChildren().add(text);
    dialogVbox.getChildren().add(dialogVboxI);
    dialogVboxI.getChildren().add(confirmButton);
    dialogVboxI.getChildren().add(closeButton);
    dialogVboxI.autosize();
    dialogVbox.autosize();
    final Scene dialogScene = new Scene(dialogVbox, 300, 75);
    // dialogScene.
    dialog.setScene(dialogScene);
    // dialog.sizeToScene();
    dialog.setOnCloseRequest(new EventHandler<WindowEvent>() {
        public void handle(final WindowEvent we) {
            LOGGER.debug("SubStage is cleaning up...");
            LOGGER.debug("SubStage is closing");
            dialog.close();
        }
    });
    closeButton.requestFocus();
    dialog.show();
}
/** Null-tolerant coercion of a Boolean or String value; defaults to false. */
final boolean getSafeBooleanOrFalse(final Object val) {
    // instanceof is false for null, so the old explicit null check is subsumed.
    if (val instanceof Boolean) {
        return ((Boolean) val).booleanValue();
    }
    if (val instanceof String) {
        final String text = (String) val;
        if (!StringUtils.isEmpty(text)) {
            return Boolean.valueOf(text);
        }
    }
    // null, blank strings, and any other type default to false.
    return false;
}
/** Null-tolerant coercion of a Boolean or String value; defaults to true. */
final boolean getSafeBooleanOrTrue(final Object val) {
    if (val instanceof Boolean) {
        return ((Boolean) val).booleanValue();
    }
    if (val instanceof String) {
        final String text = (String) val;
        if (!StringUtils.isEmpty(text)) {
            return Boolean.valueOf(text);
        }
    }
    // null, blank strings, and any other type default to true.
    return true;
}
// void openNewWindow(String windowName, String windowTitle, final TextArea
// reportArea, Stage owner,
// final BaseCtrl callingCtrl, final Map<String, Object> paramsMap) {
// openNewWindow(windowName, windowTitle, reportArea, owner, callingCtrl,
// paramsMap, true);
// }
final void tryopenNewWindow(String windowName, String windowTitle, final TextArea reportArea, Stage owner,
final BaseCtrl callingCtrl, final Map<String, Object> paramsMap) {
try {
openNewWindow(windowName, windowTitle, reportArea, owner, callingCtrl, null, paramsMap);
} catch (IOException e) {
e.printStackTrace();
showMessage(e.getMessage(), false, reportArea);
}
}
    /**
     * Convenience overload of the seven-argument {@code openNewWindow} with no
     * {@code WorkDoneNotify} callback.
     *
     * @throws IOException if the FXML screen cannot be loaded
     */
    final void openNewWindow(String windowName, String windowTitle, final TextArea reportArea, Stage owner,
            final BaseCtrl callingCtrl, final Map<String, Object> paramsMap) throws IOException {
        openNewWindow(windowName, windowTitle, reportArea, owner, callingCtrl, null, paramsMap);
    }
    /**
     * Loads an FXML screen by logical window name, wires up its controller and
     * stage lifecycle handlers, and shows it. Modal by default (blocks in
     * {@code showAndWait()} until the window closes).
     *
     * @param windowName  logical frame name used to look up the FXML file and
     *                    controller class in MainFrame's registries; defaults
     *                    to "MainWindow" when blank
     * @param windowTitle title for the new stage; defaults to "MainWindow"
     * @param reportArea  text area used to surface load errors to the user
     * @param owner       owning stage; may be null (a warning is logged)
     * @param callingCtrl controller that requested the window (not used here)
     * @param notifyCtrl  optional callback invoked with the closing
     *                    controller's worktype() when the window is closed
     * @param paramsMap   parameters handed to the new controller; also
     *                    receives the frame name under PARAM_FRAMENAME
     * @throws IOException if the FXML stream cannot be loaded
     */
    final void openNewWindow(String windowName, String windowTitle, final TextArea reportArea, Stage owner,
            final BaseCtrl callingCtrl, final WorkDoneNotify notifyCtrl, final Map<String, Object> paramsMap)
            throws IOException {
        LOGGER.debug("openNewWindow: Called w/windowName='" + windowName + "'");
        // Parent root;
        if (StringUtils.isEmpty(windowName))
            windowName = "MainWindow";
        if (StringUtils.isEmpty(windowTitle))
            windowTitle = "MainWindow";
        paramsMap.put(EchoWriteConst.PARAM_FRAMENAME, windowName);
        try {
            // Resolve the FXML resource and its (optional) controller class
            // from the static registries maintained by MainFrame.
            final String fxmlFile = MainFrame.getFxmlFrames().get(windowName);
            @SuppressWarnings("rawtypes")
            final Class fxmlCtrl = MainFrame.getFxmlCtrl().get(windowName);
            LOGGER.debug("openNewWindow: fxmlFile=" + fxmlFile + " fxmlCtrl=" + fxmlCtrl);
            // check fxmlFile
            final FXMLLoader fxmlLoader = new FXMLLoader();
            final URL location = getClass().getResource(fxmlFile);
            if (location == null) {
                // Resource missing from the classpath: tell the user and bail
                // out without throwing.
                LOGGER.error("Failed to get location!!!!!!");
                showMessage("ERROR loading FXML file for the <" + fxmlFile + "> screen", false, reportArea);
                // throw new Exception("Failed to get location!!!!!!")
                return;
            }
            fxmlLoader.setLocation(location);
            if (fxmlCtrl != null) {
                try {
                    // NOTE(review): instantiation failures are only printed;
                    // loading then proceeds with no explicit controller set —
                    // presumably falling back to the FXML's fx:controller.
                    // Confirm that every registered screen declares one.
                    Object obj = fxmlCtrl.newInstance();
                    fxmlLoader.setController(obj);
                } catch (InstantiationException e) {
                    e.printStackTrace();
                } catch (IllegalAccessException e) {
                    e.printStackTrace();
                }
            }
            fxmlLoader.setBuilderFactory(new JavaFXBuilderFactory());
            final Parent root = (Parent) fxmlLoader.load(location.openStream());
            final Stage stage = new Stage();
            // check windowTitle
            stage.setTitle(windowTitle);
            stage.setScene(new Scene(root));
            if (owner == null) {
                LOGGER.warn("OWNER Is null for this window");
                // owner = primaryStage;
            }
            stage.initOwner(owner);
            stage.setUserData(paramsMap);
            // Modal unless the caller passed PARAMMAP_MODAL=false; non-modal
            // windows are positioned to the right of their owner.
            final boolean modal = getSafeBooleanOrTrue(paramsMap.get(EchoWriteConst.PARAMMAP_MODAL));
            if (modal) {
                stage.initModality(Modality.APPLICATION_MODAL);
            } else {
                stage.initModality(Modality.NONE);
                if (owner != null)
                    stage.setX(owner.getX() + owner.getWidth());
            }
            // TODO Change ICON
            final Image appIcon = new Image(getClass().getResourceAsStream("/62863-books-icon.png"));
            stage.getIcons().add(appIcon);
            //
            // final String key = titleOneText.getValue();
            // LOGGER.debug("Key = '" + key + "'");
            // final Preferences child = getPrefs().node(key);
            // Hand the new controller its configuration before showing.
            final BaseCtrl myController = (BaseCtrl) fxmlLoader.getController();
            myController.setupController(appProps, appPreferences, stage, paramsMap);
            // final FormatDao formatDao = new FormatDao();
            // setupDao(formatDao);
            // myController.setProfileLoaded(child, formatDao, appProps, stage);
            // Lifecycle wiring: each stage event is forwarded to the matching
            // BaseCtrl hook (doSceneShown / doCleanup / doSceneHidden / ...).
            stage.setOnShown(new EventHandler<WindowEvent>() {
                public void handle(WindowEvent we) {
                    String sName = (String) ((Map) stage.getUserData()).get(EchoWriteConst.PARAM_FRAMENAME);
                    if (sName == null)
                        sName = stage.getTitle();
                    LOGGER.info("Stage is being shown...(" + sName + ")");
                    myController.doSceneShown(stage);
                    LOGGER.info("Stage is shown");
                }
            });
            stage.setOnCloseRequest(new EventHandler<WindowEvent>() {
                public void handle(WindowEvent we) {
                    String sName = (String) ((Map) stage.getUserData()).get(EchoWriteConst.PARAM_FRAMENAME);
                    if (sName == null)
                        sName = stage.getTitle();
                    LOGGER.info("Stage has been requested to close...(" + sName + ")");
                    // myController.saveProps();
                    myController.doCleanup();
                    // Tell the opener (if it asked) what kind of work this
                    // window represented, so it can refresh itself.
                    if (notifyCtrl != null)
                        notifyCtrl.finishedWithWork(myController.worktype());
                    // if (owner != null)
                    // TODO callback owner.subviewBackToMe();
                    myController.doSceneCloseRequested();
                    LOGGER.debug("SubStage is closing");
                    stage.close();
                }
            });
            stage.setOnHidden(new EventHandler<WindowEvent>() {
                public void handle(WindowEvent we) {
                    String sName = (String) ((Map) stage.getUserData()).get(EchoWriteConst.PARAM_FRAMENAME);
                    if (sName == null)
                        sName = stage.getTitle();
                    LOGGER.info("Stage is hidding...(" + sName + ")");
                    myController.doSceneHidden();
                    LOGGER.info("Stage is hidden");
                    LOGGER.debug("windowCount=" + BaseCtrl.getWindowCount());
                    // Last window gone: shut the JavaFX platform down.
                    if (BaseCtrl.getWindowCount() <= 0) {
                        LOGGER.info("Closing platform");
                        // TODO DataManagerBiz.getDataManager().close();
                        Platform.exit();
                    }
                }
            });
            stage.setOnHiding(new EventHandler<WindowEvent>() {
                public void handle(WindowEvent we) {
                    String sName = (String) ((Map) stage.getUserData()).get(EchoWriteConst.PARAM_FRAMENAME);
                    if (sName == null)
                        sName = stage.getTitle();
                    LOGGER.info("Stage is being hidden...(" + sName + ")");
                    myController.doSceneHiding(stage);
                }
            });
            stage.setOnShowing(new EventHandler<WindowEvent>() {
                public void handle(WindowEvent we) {
                    String sName = (String) ((Map) stage.getUserData()).get(EchoWriteConst.PARAM_FRAMENAME);
                    if (sName == null)
                        sName = stage.getTitle();
                    LOGGER.info("Stage is being shown...(" + sName + ")");
                    myController.doSceneShowing(stage);
                    LOGGER.info("Stage is shown...(" + sName + ")");
                }
            });
            // Blocks until the window is closed (even for non-modal windows).
            stage.showAndWait();
            // stage.show();
        } catch (IOException e) {
            e.printStackTrace();
            LOGGER.error(e);
            throw e;// TODO
        }
    }
// Virtual, for others to overwrite if they want to use the paramsMap
// public void setupController(final Properties props, final Preferences
// appPreferences, final Stage primaryStage,
// final Map<String, Object> paramsMap) {
// setupController(props, appPreferences, primaryStage);
// }
    /**
     * The work type reported to a {@code WorkDoneNotify} callback when this
     * window closes (see the close-request handler in openNewWindow).
     * Subclasses override to report their real process type.
     */
    protected String worktype() {
        return EchoWriteConst.PROCESS_NONE;
    }
// virtual
    // virtual
    /**
     * Stores the application configuration on this controller and hooks the
     * stage geometry listeners. Called by openNewWindow right after the FXML
     * is loaded; subclasses may override but should call through.
     *
     * @param props          application properties (must contain the version
     *                       key PROP_KEY_VERSION)
     * @param appPreferences user preferences node (may be used by subclasses)
     * @param primaryStage   the stage this controller manages
     * @param paramsMap      caller-supplied parameters for this window
     */
    public void setupController(final Properties props, final Preferences appPreferences, final Stage primaryStage,
            final Map<String, Object> paramsMap) {
        LOGGER.debug("setupController: Called");
        this.appProps = props;
        this.appPreferences = appPreferences;
        this.paramsMap = paramsMap;
        this.primaryStage = primaryStage;
        this.appVersion = appProps.getProperty(EchoWriteConst.PROP_KEY_VERSION);
        // Locale is currently hard-wired to en_US; the commented lines show
        // the intended preference-driven localization.
        // final String local1 = appPreferences.get("localization1", "en");
        // final String local2 = appPreferences.get("localization2", "US");
        final Locale sLocal = new Locale("en", "US");// local1, local2);
        this.messageBundle = ResourceBundle.getBundle("cwc2messages", sLocal);
        // Forward stage geometry changes to the window*Changed hooks (whose
        // preference persistence is currently disabled).
        primaryStage.widthProperty().addListener((obs, oldVal, newVal) -> {
            this.windowWidthChanged(newVal);
        });
        primaryStage.heightProperty().addListener((obs, oldVal, newVal) -> {
            this.windowHeightChanged(newVal);
        });
        primaryStage.xProperty().addListener((obs, oldVal, newVal) -> {
            this.windowXChanged(newVal);
        });
        primaryStage.yProperty().addListener((obs, oldVal, newVal) -> {
            this.windowYChanged(newVal);
        });
        // if (appPreferences != null) {
        // String prefKey = String.format("view/%s/%s",
        // paramsMap.get("FRAMENAME"), Prefs.VIEW_WINDOW_Y);
        // final double windowY = appPreferences.getDouble(prefKey, -1);
        // prefKey = String.format("view/%s/%s", paramsMap.get("FRAMENAME"),
        // Prefs.VIEW_WINDOW_X);
        // final double windowX = appPreferences.getDouble(prefKey, -1);
        // prefKey = String.format("view/%s/%s", paramsMap.get("FRAMENAME"),
        // Prefs.VIEW_WINDOW_W);
        // final double windowW = appPreferences.getDouble(prefKey, -1);
        // prefKey = String.format("view/%s/%s", paramsMap.get("FRAMENAME"),
        // Prefs.VIEW_WINDOW_H);
        // final double windowH = appPreferences.getDouble(prefKey, -1);
        // if (windowY > -1)
        // primaryStage.setY(windowY);
        // if (windowX > -1)
        // primaryStage.setX(windowX);
        // if (windowW > -1)
        // primaryStage.setWidth(windowW);
        // if (windowH > -1)
        // primaryStage.setHeight(windowH);
        // }
        LOGGER.debug("setupController: Done");
        // Subclass hook: runs after all fields above are populated.
        this.doControllerSetupDone();
    }
@SuppressWarnings("rawtypes")
final public static void alignColumnLabelsLeftHack(final TableView inputTable) {
// Hack: align column headers to the center.
inputTable.widthProperty().addListener(new ChangeListener<Number>() {
@Override
public void changed(ObservableValue<? extends Number> ov, final Number t, final Number t1) {
Platform.runLater(new Runnable() {
public void run() {
// System.out.print(listerColumn.getText() + " ");
// System.out.println(t1);
if (t != null && t.intValue() > 0)
return; // already aligned
for (Node node : inputTable.lookupAll(".column-header > .label")) {
if (node instanceof Label)
((Label) node).setAlignment(Pos.TOP_LEFT);
}
}
});
};
});
// TODO when I make this Java 8 or like whatever
// inputTable.widthProperty().addListener((src, o, n) ->
// Platform.runLater(() -> {
// if (o != null && o.intValue() > 0)
// return; // already aligned
// for (Node node : inputTable.lookupAll(".column-header > .label")) {
// if (node instanceof Label)
// ((Label) node).setAlignment(Pos.TOP_LEFT);
// }
// }));
}
@SuppressWarnings({ "unchecked", "rawtypes" })
final public static void autoFitTable(final TableView tableView) {
tableView.getItems().addListener(new ListChangeListener<Object>() {
@Override
public void onChanged(Change<?> c) {
for (final Object column : tableView.getColumns()) {
try {
if (column == null)
continue;
if (tableView == null)
continue;
if (tableView.getSkin() == null)
continue;
// if (columnToFitMethod == null) continue;
if (tableView.getSkin() != null && column != null && columnToFitMethod != null)
columnToFitMethod.invoke(tableView.getSkin(), column, -1);
} catch (IllegalAccessException | InvocationTargetException e) {
e.printStackTrace();
}
}
}
});
}
final String getLocalizedText(final String key, final String defaultText) {
if (!messageBundle.containsKey(key)) {
return defaultText;
}
return messageBundle.getString(key);
}
// todo working/needed?
final void setTooltips(final Pane pane) {
// LOGGER.debug("setTooltips: Called ");
if (pane == null)
return;
for (Node node : pane.getChildren()) {
if (node instanceof TextField) {
final TextField tf = (TextField) node;
if (tf.getTooltip() == null && !StringUtils.isBlank(tf.getPromptText())) {
tf.setTooltip(new Tooltip(tf.getPromptText()));
}
} else if (node instanceof Pane) {
setTooltips((Pane) node);
} else if (node instanceof TitledPane) {
final Node nd2 = ((TitledPane) node).getContent();
if (nd2 instanceof Pane) {
setTooltips((Pane) nd2);
}
}
}
// LOGGER.debug("setTooltips: Done ");
}// todo working?
final void setTooltips(final SplitPane pane) {
// LOGGER.debug("setTooltips: Called ");
if (pane == null)
return;
for (Node node : pane.getItems()) {
if (node instanceof TextField) {
final TextField tf = (TextField) node;
if (tf.getTooltip() == null && !StringUtils.isBlank(tf.getPromptText())) {
tf.setTooltip(new Tooltip(tf.getPromptText()));
}
} else if (node instanceof Pane) {
setTooltips((Pane) node);
} else if (node instanceof TitledPane) {
final Node nd2 = ((TitledPane) node).getContent();
if (nd2 instanceof Pane) {
setTooltips((Pane) nd2);
}
}
}
// LOGGER.debug("setTooltips: Done ");
}
    // Stage-geometry hooks wired up in setupController(). All four are
    // intentionally no-ops: persisting the window position/size to the
    // preferences store is currently disabled (the intended implementation is
    // kept in the commented-out bodies).
    final private void windowYChanged(final Number newVal) {
        // if (appPreferences != null) {
        // final String prefKey = String.format("view/%s/%s",
        // paramsMap.get("FRAMENAME"), Prefs.VIEW_WINDOW_Y);
        // appPreferences.putDouble(prefKey, newVal.doubleValue());
        // }
    }
    final private void windowXChanged(final Number newVal) {
        // if (appPreferences != null) {
        // final String prefKey = String.format("view/%s/%s",
        // paramsMap.get("FRAMENAME"), Prefs.VIEW_WINDOW_X);
        // appPreferences.putDouble(prefKey, newVal.doubleValue());
        // }
    }
    final private void windowHeightChanged(final Number newVal) {
        // if (appPreferences != null) {
        // final String prefKey = String.format("view/%s/%s",
        // paramsMap.get("FRAMENAME"), Prefs.VIEW_WINDOW_H);
        // appPreferences.putDouble(prefKey, newVal.doubleValue());
        // }
    }
    final private void windowWidthChanged(final Number newVal) {
        // if (appPreferences != null) {
        // final String prefKey = String.format("view/%s/%s",
        // paramsMap.get("FRAMENAME"), Prefs.VIEW_WINDOW_W);
        // appPreferences.putDouble(prefKey, newVal.doubleValue());
        // }
    }
// protected boolean findAndSetFieldByInputFile(final String windowTitle,
// final String inText,
// final String prevOutText, final TextField outputTextField, final String
// namePattern,
// final String nameExtension) {
// boolean success = false;
// final File initDirectroy = locateDir(windowTitle, inText);
// if (initDirectroy != null) {
// final File inFile = new File(inputFileText.getText());
//
// String outFilename = "\\ChapterCount1.csv";
// final File nFile = new File(outputFormatSingleFileText.getText(),
// outFilename);
// outputCountFileText.setText(nFile.getAbsolutePath());
// outputCountDir = nFile.getParentFile();
// } else {
// outputTextField.setText(prevOutText);
// }
// return false;
// }
//
// protected File locateDir(final String title, final String
// textForInitialDir) {
// final DirectoryChooser chooser = new DirectoryChooser();
// final File lastDir1 = new File(textForInitialDir);
//
// final File lastDir2 = getLastSelectedDirectory();
// if (lastDir1 != null && lastDir1.exists()) {
// if (lastDir1.isDirectory())
// chooser.setInitialDirectory(lastDir1);
// else if (lastDir1.getParentFile() != null &&
// lastDir1.getParentFile().exists()
// && lastDir1.getParentFile().isDirectory())
// chooser.setInitialDirectory(lastDir1.getParentFile());
// } else if (lastDir2 != null && lastDir2.exists()) {
// if (lastDir2.isDirectory())
// chooser.setInitialDirectory(lastDir2);
// else if (lastDir2.getParentFile() != null &&
// lastDir2.getParentFile().exists()
// && lastDir2.getParentFile().isDirectory())
// chooser.setInitialDirectory(lastDir2.getParentFile());
// }
// chooser.setTitle(title);
// final File file = chooser.showDialog(new Stage());
// // textFieldToSet.setText(file.getAbsolutePath());
// // setLastSelectedDirectory(file);
// return file;
// }
final protected boolean locateDir(final ActionEvent event, final String title,
final TextInputControl textFieldToSet, final TextInputControl textFieldDefault) {
final DirectoryChooser chooser = new DirectoryChooser();
TextInputControl textFieldForInitialDir;
if (textFieldDefault != null && StringUtils.isBlank(textFieldToSet.getText()))
textFieldForInitialDir = textFieldDefault;
else
textFieldForInitialDir = textFieldToSet;
final File lastDir1 = new File(textFieldForInitialDir.getText());
final File lastDir2 = getLastSelectedDirectory();
if (lastDir1 != null && lastDir1.exists()) {
if (lastDir1.isDirectory())
chooser.setInitialDirectory(lastDir1);
else if (lastDir1.getParentFile() != null && lastDir1.getParentFile().exists()
&& lastDir1.getParentFile().isDirectory())
chooser.setInitialDirectory(lastDir1.getParentFile());
} else if (lastDir2 != null && lastDir2.exists()) {
if (lastDir2.isDirectory())
chooser.setInitialDirectory(lastDir2);
else if (lastDir2.getParentFile() != null && lastDir2.getParentFile().exists()
&& lastDir2.getParentFile().isDirectory())
chooser.setInitialDirectory(lastDir2.getParentFile());
}
// if (textField.getText() != null && textField.getText().length() > 0)
// chooser.setInitialDirectory(new File(textField.getText()));
// else
// chooser.setInitialDirectory(lastSelectedDirectory);
chooser.setTitle(title);
final File file = chooser.showDialog(new Stage());
if (file == null) {
// textField.setText("");
// lastSelectedDirectory = null;
return false;
} else {
textFieldToSet.setText(file.getAbsolutePath());
setLastSelectedDirectory(file);
return true;
}
}
    /**
     * Convenience overload: open-file dialog with no default file name and no
     * type-specific extension filter.
     */
    final protected void locateFile(final ActionEvent event, final String title, final TextField textField) {
        locateFile(event, title, textField, null, null);
    }
    /**
     * Convenience overload: open-file dialog with no explicit start directory
     * (it is derived from the text field / last selection / app home).
     *
     * @return the chosen file, or null if the user cancelled
     */
    final protected File locateFile(final ActionEvent event, final String title, final TextField textField,
            final String defaultName, final EchoWriteConst.FILTERTYPE defaultExtension) {
        return locateFile(event, title, textField, defaultName, defaultExtension, null);
    }
final protected File locateFile(final ActionEvent event, final String title, final TextField textField,
final String defaultName, final EchoWriteConst.FILTERTYPE defaultExtension, File startDir) {
LOGGER.debug("locateFile: Called");
final FileChooser chooser = new FileChooser();
if (textField != null && !StringUtils.isEmpty(textField.getText())) {
startDir = new File(textField.getText());
if (!startDir.isDirectory())
startDir = startDir.getParentFile();
if (!startDir.isDirectory())
startDir = null;
}
if (startDir == null)
startDir = getLastSelectedDirectory();
if (startDir == null) {
final String sfhome = (String) appProps.get("home");
if (sfhome != null)
startDir = new File(sfhome);
}
if (startDir != null) {
if (!startDir.isDirectory())
startDir = startDir.getParentFile();
if (!startDir.isDirectory())
startDir = null;
}
if (startDir == null) {
final String sfhome = (String) appProps.get("home");
if (sfhome != null)
startDir = new File(sfhome);
}
if (startDir != null) {
chooser.setInitialDirectory(startDir);
setLastSelectedDirectory(startDir);
}
chooser.setTitle(title);
LOGGER.debug("locateFile: defaultName='" + defaultName + "'");
if (!StringUtils.isEmpty(defaultName))
chooser.setInitialFileName(defaultName);
// chooser.setInitialFileName("ChapterCount1.csv");
// System.out.println("lastSelectedDirectory = '" +
// lastSelectedDirectory + "'");
LOGGER.debug("locateFile: defaultExtension=" + defaultExtension);
if (EchoWriteConst.FILTERTYPE.JSON == (defaultExtension))
chooser.getExtensionFilters().addAll(new ExtensionFilter("JSON", "*.json"));
if (EchoWriteConst.FILTERTYPE.YAML == (defaultExtension))
chooser.getExtensionFilters().addAll(new ExtensionFilter("YAML", "*.yaml"));
if (EchoWriteConst.FILTERTYPE.HTML == (defaultExtension))
chooser.getExtensionFilters().addAll(new ExtensionFilter("HTML", "*.html"));
if (EchoWriteConst.FILTERTYPE.TEXT == (defaultExtension))
chooser.getExtensionFilters().addAll(new ExtensionFilter("TEXT", "*.txt"));
if (EchoWriteConst.FILTERTYPE.CSV == (defaultExtension))
chooser.getExtensionFilters().addAll(new ExtensionFilter("CSV", "*.csv"));
chooser.getExtensionFilters().addAll(new ExtensionFilter("ALL", "*.*"));
final File file = chooser.showOpenDialog(new Stage());
if (file == null) {
if (textField != null)
textField.setText("");
// lastSelectedDirectory = null;
} else {
if (textField != null)
textField.setText(file.getAbsolutePath());
// lastSelectedDirectory = file.getParentFile();
}
LOGGER.debug("locateFile: Done");
return file;
}
// For probably new files, not existing ones
// https://docs.oracle.com/javase/8/javafx/api/javafx/stage/FileChooser.html#setSelectedExtensionFilter-javafx.stage.FileChooser.ExtensionFilter-
    /**
     * Shows a save-file dialog (for probably-new files, unlike locateFile's
     * open dialog) and writes the chosen path into {@code textField}.
     *
     * @param event            the triggering action event (unused)
     * @param title            title for the chooser dialog
     * @param textField        seeds the start directory and receives the
     *                         result; may be null; left untouched on cancel
     * @param defaultName      initial file name suggested in the dialog
     * @param defaultExtension adds a matching extension filter when non-null
     * @return the chosen file, or null if the user cancelled
     */
    final File chooseFile(final ActionEvent event, final String title, final TextField textField,
            final String defaultName, final EchoWriteConst.FILTERTYPE defaultExtension) {
        final FileChooser chooser = new FileChooser();
        // Resolve a start directory: text field path, then last selection,
        // then the application "home" property.
        File startDir = null;
        if (textField != null && !StringUtils.isEmpty(textField.getText())) {
            startDir = new File(textField.getText());
            // NOTE(review): getParentFile() can return null for a relative
            // path with no parent, which would NPE on the next isDirectory()
            // call — same latent issue as in locateFile; verify inputs.
            if (!startDir.isDirectory())
                startDir = startDir.getParentFile();
            if (!startDir.isDirectory())
                startDir = null;
        }
        if (startDir == null)
            startDir = getLastSelectedDirectory();
        if (startDir == null) {
            final String sfhome = (String) appProps.get("home");
            if (sfhome != null)
                startDir = new File(sfhome);
        }
        if (startDir != null) {
            if (!startDir.isDirectory())
                startDir = startDir.getParentFile();
            if (!startDir.isDirectory())
                startDir = null;
        }
        // NOTE(review): this second fallback re-assigns "home" without the
        // directory normalization above, so a non-directory home value would
        // reach setInitialDirectory() — confirm home always points at a dir.
        if (startDir == null) {
            final String sfhome = (String) appProps.get("home");
            if (sfhome != null)
                startDir = new File(sfhome);
        }
        if (startDir != null) {
            chooser.setInitialDirectory(startDir);
            setLastSelectedDirectory(startDir);
        }
        chooser.setTitle(title);
        if (!StringUtils.isEmpty(defaultName))
            chooser.setInitialFileName(defaultName);
        LOGGER.debug("locateFile: defaultExtension=" + defaultExtension);
        // Offer a type-specific filter first (when requested), then ALL.
        if (EchoWriteConst.FILTERTYPE.JSON == (defaultExtension))
            chooser.getExtensionFilters().addAll(new ExtensionFilter("JSON", "*.json"));
        if (EchoWriteConst.FILTERTYPE.YAML == (defaultExtension))
            chooser.getExtensionFilters().addAll(new ExtensionFilter("YAML", "*.yaml"));
        if (EchoWriteConst.FILTERTYPE.HTML == (defaultExtension))
            chooser.getExtensionFilters().addAll(new ExtensionFilter("HTML", "*.html"));
        if (EchoWriteConst.FILTERTYPE.TEXT == (defaultExtension))
            chooser.getExtensionFilters().addAll(new ExtensionFilter("TEXT", "*.txt"));
        if (EchoWriteConst.FILTERTYPE.CSV == (defaultExtension))
            chooser.getExtensionFilters().addAll(new ExtensionFilter("CSV", "*.csv"));
        chooser.getExtensionFilters().addAll(new ExtensionFilter("ALL", "*.*"));
        final File file = chooser.showSaveDialog(new Stage());
        if (file == null) {
            // User cancelled: deliberately leave the field as-is.
            // textField.setText("");
            // lastSelectedDirectory = null;
        } else {
            if (textField != null)
                textField.setText(file.getAbsolutePath());
            // lastSelectedDirectory = file.getParentFile();
        }
        return file;
    }
// void chooseDirectory() {
// // TODO chooseDirectory
// }
final void setDetectChangesNodeElem(final Node node) {
if (node instanceof TextField) {
final TextField tf = (TextField) node;
// LOGGER.debug("setDetectChanges: adding to field=" +
// tf.getId());
tf.textProperty().addListener(new ChangeListener<String>() {
@Override
public void changed(ObservableValue<? extends String> observable, String oldValue, String newValue) {
LOGGER.debug("setDetectChanges: newValue='" + newValue + "'");
if (oldValue == null || newValue.compareTo(oldValue) != 0)
setProfileChangeMade(true);
}
});
} else if (node instanceof TextArea) {
final TextArea tf = (TextArea) node;
// LOGGER.debug("setDetectChanges: adding to field=" +
// tf.getId());
tf.textProperty().addListener(new ChangeListener<String>() {
@Override
public void changed(ObservableValue<? extends String> observable, String oldValue, String newValue) {
LOGGER.debug("setDetectChanges: newValue='" + newValue + "'");
if (oldValue == null || newValue.compareTo(oldValue) != 0)
setProfileChangeMade(true);
}
});
//
} else if (node instanceof CheckBox) {
final CheckBox cb = (CheckBox) node;
cb.selectedProperty().addListener(new ChangeListener<Boolean>() {
@Override
public void changed(ObservableValue<? extends Boolean> observable, Boolean oldValue, Boolean newValue) {
LOGGER.debug("setDetectChanges: newValue='" + newValue + "'");
if (newValue.compareTo(oldValue) != 0)
setProfileChangeMade(true);
}
});
} else if (node instanceof Pane) {
setDetectChanges((Pane) node);
} else if (node instanceof TitledPane)
{
final Node nd2 = ((TitledPane) node).getContent();
if (nd2 instanceof Pane) {
setDetectChanges((Pane) nd2);
}
}
}
final void setDetectChanges(final Pane pane) {
for (Node node : pane.getChildren()) {
setDetectChangesNodeElem(node);
}
}
final void setDetectChanges(final ObservableList<Node> nodes) {
for (Node node : nodes) {
setDetectChangesNodeElem(node);
}
}
final void setDetectChanges(final Control pane) {
for (Node node : pane.getChildrenUnmodifiable()) {
setDetectChangesNodeElem(node);
}
}
    // Records whether the user has modified a tracked field (set by the
    // listeners installed via setDetectChanges*).
    void setProfileChangeMade(boolean b) {
        profileChangeMade = b;
    }
// void setColumnWidth(final ObservableList<TableColumn<?, ?>> columns,
// final String key, final int coli) {
// LOGGER.debug("setColumnWidth: Called w/key=" + key + " for col#" + coli);
// final double colW = appPreferences.getDouble(key, -1);
// LOGGER.debug("setColumnWidth: colW1=" + colW);
// if (colW > -1) {
// final TableColumn<?, ?> col = columns.get(coli);
// col.setPrefWidth(colW);
// }
// }
@SuppressWarnings("rawtypes")
final void setColumnWidth(final ObservableList<TableColumn> columns, final String key, final int coli) {
LOGGER.debug("setColumnWidth: Called w/key=" + key + " for col#" + coli);
final double colW = appPreferences.getDouble(key, -1);
LOGGER.debug("setColumnWidth: colW1=" + colW);
if (colW > -1) {
final TableColumn<?, ?> col = columns.get(coli);
col.setPrefWidth(colW);
}
}
final void columnWidthChanged(final String MYPREFSKEY, final int colNum, final Number newValue) {
if (appPreferences != null && MYPREFSKEY != null) {
final String key = String.format(MYPREFSKEY, colNum);
// LOGGER.debug("columnWidthChanged: col#" + colNum + " val=" +
// newValue.doubleValue());
appPreferences.putDouble(key, newValue.doubleValue());
} else
LOGGER.warn("NO app preferences set");
}
final void lockAllButtons(final Pane pane) {
if (pane == null)
return;
for (Node node : pane.getChildren()) {
if (node instanceof Button) {
final Button tf = (Button) node;
tf.setDisable(true);
} else if (node instanceof Pane) {
lockAllButtons((Pane) node);
} else if (node instanceof TitledPane) {
final Node nd2 = ((TitledPane) node).getContent();
if (nd2 instanceof Pane) {
lockAllButtons((Pane) nd2);
}
}
}
}
final void unlockAllButtons(final Pane pane) {
for (Node node : pane.getChildren()) {
if (node instanceof Button) {
final Button tf = (Button) node;
tf.setDisable(false);
} else if (node instanceof Pane) {
unlockAllButtons((Pane) node);
} else if (node instanceof TitledPane) {
final Node nd2 = ((TitledPane) node).getContent();
if (nd2 instanceof Pane) {
unlockAllButtons((Pane) nd2);
}
}
}
}
final void hideAllInArea(final Pane pane) {
if (pane == null)
return;
for (Node node : pane.getChildren()) {
node.setVisible(false);
if (node instanceof Button) {
// final Button tf = (Button) node;
// tf.setDisable(true);
} else if (node instanceof Pane) {
hideAllInArea((Pane) node);
} else if (node instanceof TitledPane) {
final Node nd2 = ((TitledPane) node).getContent();
if (nd2 instanceof Pane) {
hideAllInArea((Pane) nd2);
}
}
}
}
final void showAllInArea(final Pane pane) {
if (pane == null)
return;
for (Node node : pane.getChildren()) {
node.setVisible(true);
if (node instanceof Button) {
// final Button tf = (Button) node;
// tf.setDisable(true);
} else if (node instanceof Pane) {
showAllInArea((Pane) node);
} else if (node instanceof TitledPane) {
final Node nd2 = ((TitledPane) node).getContent();
if (nd2 instanceof Pane) {
showAllInArea((Pane) nd2);
}
}
}
}
public void handleClose(final ActionEvent event) {
LOGGER.debug("handleClose: Called");
final Node source = (Node) event.getSource();
final Stage stage = (Stage) source.getScene().getWindow();
doCleanup();
// stage.getOwner()
stage.close();
LOGGER.debug("handleClose: Done");
}
    /**
     * Releases whatever this controller holds; invoked before its window
     * closes (see handleClose and the stage close-request handler in
     * openNewWindow).
     */
    public abstract void doCleanup();
    // Presumably disables/enables this screen's controls around long-running
    // work — implementations live in subclasses (TODO confirm).
    abstract void lockGui();
    abstract void unlockGui();
    /** Hook called at the end of setupController(); default is a no-op. */
    public void doControllerSetupDone() {
    }
    /** Stage onHidden hook: one window fewer is on screen. */
    public void doSceneHidden() {
        windowCount -= 1;
        LOGGER.debug("windowCount=" + windowCount);
    }
    /** Stage onHiding hook; default is a no-op. */
    public void doSceneHiding(final Stage stage) {
    }
    /** Stage onShowing hook: one window more is on screen. */
    public void doSceneShowing(final Stage stage) {
        windowCount += 1;
        LOGGER.debug("windowCount=" + windowCount);
    }
    // NOTE(review): not invoked anywhere in the visible code — possibly a
    // leftover near-duplicate of doSceneCloseRequested(); confirm callers.
    public void doSceneCloseRequest() {
    }
    /** Called from the stage close-request handler in openNewWindow. */
    public void doSceneCloseRequested() {
    }
    /**
     * Called after Scene is showing (from the stage onShown handler in
     * openNewWindow).
     *
     * @param stage the stage that was just shown
     */
    public void doSceneShown(final Stage stage) {
    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.collections;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Ordering;
import io.druid.java.util.common.guava.BaseSequence;
import io.druid.java.util.common.guava.MergeSequence;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.SequenceTestHelper;
import io.druid.java.util.common.guava.Sequences;
import io.druid.java.util.common.guava.TestSequence;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
*/
public class OrderedMergeSequenceTest
{
@Test
public void testSanity() throws Exception
{
final ArrayList<TestSequence<Integer>> testSequences = Lists.newArrayList(
TestSequence.create(1, 3, 5, 7, 9),
TestSequence.create(2, 8),
TestSequence.create(4, 6, 8)
);
OrderedMergeSequence<Integer> seq = makeMergedSequence(Ordering.<Integer>natural(), testSequences);
SequenceTestHelper.testAll(seq, Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 8, 9));
for (TestSequence<Integer> sequence : testSequences) {
Assert.assertTrue(sequence.isClosed());
}
}
@Test
public void testMergeEmptySequence() throws Exception
{
final ArrayList<TestSequence<Integer>> testSequences = Lists.newArrayList(
TestSequence.create(ImmutableList.<Integer>of()),
TestSequence.create(2, 8),
TestSequence.create(4, 6, 8)
);
OrderedMergeSequence<Integer> seq = makeMergedSequence(Ordering.<Integer>natural(), testSequences);
SequenceTestHelper.testAll(seq, Arrays.asList(2, 4, 6, 8, 8));
for (TestSequence<Integer> sequence : testSequences) {
Assert.assertTrue(sequence.isClosed());
}
}
@Test
public void testMergeEmptySequenceAtEnd() throws Exception
{
final ArrayList<TestSequence<Integer>> testSequences = Lists.newArrayList(
TestSequence.create(2, 8),
TestSequence.create(4, 6, 8),
TestSequence.create(ImmutableList.<Integer>of())
);
OrderedMergeSequence<Integer> seq = makeMergedSequence(Ordering.<Integer>natural(), testSequences);
SequenceTestHelper.testAll(seq, Arrays.asList(2, 4, 6, 8, 8));
for (TestSequence<Integer> sequence : testSequences) {
Assert.assertTrue(sequence.isClosed());
}
}
@Test
public void testMergeEmptySequenceMiddle() throws Exception
{
final ArrayList<TestSequence<Integer>> testSequences = Lists.newArrayList(
TestSequence.create(2, 8),
TestSequence.create(ImmutableList.<Integer>of()),
TestSequence.create(4, 6, 8)
);
OrderedMergeSequence<Integer> seq = makeMergedSequence(Ordering.<Integer>natural(), testSequences);
SequenceTestHelper.testAll(seq, Arrays.asList(2, 4, 6, 8, 8));
for (TestSequence<Integer> sequence : testSequences) {
Assert.assertTrue(sequence.isClosed());
}
}
  // The two tests below PIN the (documented) misbehavior of
  // OrderedMergeSequence on inputs that violate its ordering contract: the
  // merge assumes the list of sequences is ordered by first element and each
  // sequence is internally sorted, so violating either produces out-of-order
  // output. These expected lists are intentionally not sorted.
  @Test
  public void testScrewsUpOnOutOfOrderBeginningOfList() throws Exception
  {
    // The sequence list itself is out of order (the "2, 8" sequence comes
    // after "4, 6, 8" despite having a smaller head element).
    final ArrayList<TestSequence<Integer>> testSequences = Lists.newArrayList(
        TestSequence.create(1, 3, 5, 7, 9),
        TestSequence.create(4, 6, 8),
        TestSequence.create(2, 8)
    );
    OrderedMergeSequence<Integer> seq = makeMergedSequence(Ordering.<Integer>natural(), testSequences);
    // Note the 4 before the 2: the contract violation leaks into the output.
    SequenceTestHelper.testAll(seq, Arrays.asList(1, 3, 4, 2, 5, 6, 7, 8, 8, 9));
    for (TestSequence<Integer> sequence : testSequences) {
      Assert.assertTrue(sequence.isClosed());
    }
  }
  @Test
  public void testScrewsUpOnOutOfOrderInList() throws Exception
  {
    // The first sequence is internally out of order (… 5, 4, 7 …).
    final ArrayList<TestSequence<Integer>> testSequences = Lists.newArrayList(
        TestSequence.create(1, 3, 5, 4, 7, 9),
        TestSequence.create(2, 8),
        TestSequence.create(4, 6)
    );
    OrderedMergeSequence<Integer> seq = makeMergedSequence(Ordering.<Integer>natural(), testSequences);
    // The stray 4 appears after 5, mirroring the broken input ordering.
    SequenceTestHelper.testAll(seq, Arrays.asList(1, 2, 3, 4, 5, 4, 6, 7, 8, 9));
    for (TestSequence<Integer> sequence : testSequences) {
      Assert.assertTrue(sequence.isClosed());
    }
  }
@Test
public void testLazinessAccumulation() throws Exception
{
final ArrayList<Sequence<Integer>> sequences = makeSyncedSequences();
OrderedMergeSequence<Integer> seq = new OrderedMergeSequence<Integer>(
Ordering.<Integer>natural(), Sequences.simple(sequences)
);
SequenceTestHelper.testAccumulation("", seq, Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9));
}
@Test
public void testLazinessYielder() throws Exception
{
final ArrayList<Sequence<Integer>> sequences = makeSyncedSequences();
OrderedMergeSequence<Integer> seq = new OrderedMergeSequence<Integer>(
Ordering.<Integer>natural(), Sequences.simple(sequences)
);
SequenceTestHelper.testYield("", seq, Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9));
}
// Builds three sequences ([1,2,3], [4,5,6], [7,8,9]) whose iterators assert the
// merge consumes them strictly one after another: each later iterator checks
// that the earlier iterators' cleanup() has already run before it is advanced
// past its first element. The shared done[] flags carry that state.
private ArrayList<Sequence<Integer>> makeSyncedSequences()
{
// done[0]/done[1] are set when the first/second sequence's cleanup runs.
final boolean[] done = new boolean[]{false, false};
final ArrayList<Sequence<Integer>> sequences = Lists.newArrayList();
// First sequence: plain iterator; its cleanup marks done[0].
sequences.add(
new BaseSequence<Integer, Iterator<Integer>>(
new BaseSequence.IteratorMaker<Integer, Iterator<Integer>>()
{
@Override
public Iterator<Integer> make()
{
return Arrays.asList(1, 2, 3).iterator();
}
@Override
public void cleanup(Iterator<Integer> iterFromMake)
{
done[0] = true;
}
}
)
);
// Second sequence: after the first element has been taken (count >= 1), the
// first sequence must already be fully consumed and cleaned up.
sequences.add(
new BaseSequence<Integer, Iterator<Integer>>(
new BaseSequence.IteratorMaker<Integer, Iterator<Integer>>()
{
@Override
public Iterator<Integer> make()
{
return new IteratorShell<Integer>(Arrays.asList(4, 5, 6).iterator())
{
int count = 0;
@Override
public boolean hasNext()
{
if (count >= 1) {
Assert.assertTrue("First iterator not complete", done[0]);
}
return super.hasNext();
}
@Override
public Integer next()
{
if (count >= 1) {
Assert.assertTrue("First iterator not complete", done[0]);
}
++count;
return super.next();
}
};
}
@Override
public void cleanup(Iterator<Integer> iterFromMake)
{
done[1] = true;
}
}
)
);
// Third sequence: the first sequence must be done before ANY access, and the
// second must be done once this one is past its first element.
sequences.add(
new BaseSequence<Integer, Iterator<Integer>>(
new BaseSequence.IteratorMaker<Integer, Iterator<Integer>>()
{
@Override
public Iterator<Integer> make()
{
return new IteratorShell<Integer>(Arrays.asList(7, 8, 9).iterator())
{
int count = 0;
@Override
public boolean hasNext()
{
if (count >= 1) {
Assert.assertTrue("Second iterator not complete", done[1]);
}
Assert.assertTrue("First iterator not complete", done[0]);
return super.hasNext();
}
@Override
public Integer next()
{
if (count >= 1) {
Assert.assertTrue("Second iterator not complete", done[1]);
}
Assert.assertTrue("First iterator not complete", done[0]);
++count;
return super.next();
}
};
}
@Override
public void cleanup(Iterator<Integer> iterFromMake)
{
// Nothing to track for the last sequence.
}
}
)
);
return sequences;
}
// Wraps the given test sequences in an OrderedMergeSequence using the supplied
// ordering. The double cast below hops through the raw List type to narrow
// List<TestSequence<T>> to List<Sequence<T>>; this is unchecked but presumably
// safe since TestSequence appears to implement Sequence and the list is only
// read — TODO(review): confirm against TestSequence's declaration.
private <T> OrderedMergeSequence<T> makeMergedSequence(
Ordering<T> ordering,
List<TestSequence<T>> seqs
)
{
return new OrderedMergeSequence<T>(
ordering,
Sequences.simple((List<Sequence<T>>) (List) seqs)
);
}
// Wraps the given test sequences in a plain MergeSequence (the non-ordered
// variant) with the supplied ordering; used by the hierarchical-merge tests.
private <T> MergeSequence<T> makeUnorderedMergedSequence(
Ordering<T> ordering,
List<TestSequence<T>> seqs
)
{
return new MergeSequence<T>(ordering, Sequences.simple(seqs));
}
@Test
public void testHierarchicalMerge() throws Exception
{
// Two inner unordered merges, each over a single [1] sequence, are merged
// again by an OrderedMergeSequence; both elements must come through.
final Sequence<Integer> left = makeUnorderedMergedSequence(
Ordering.<Integer>natural(),
Lists.newArrayList(TestSequence.create(1))
);
final Sequence<Integer> right = makeUnorderedMergedSequence(
Ordering.<Integer>natural(),
Lists.newArrayList(TestSequence.create(1))
);
final Sequence<Integer> merged = new OrderedMergeSequence<Integer>(
Ordering.<Integer>natural(),
Sequences.simple(Lists.<Sequence<Integer>>newArrayList(left, right))
);
SequenceTestHelper.testAll(merged, Arrays.asList(1, 1));
}
@Test
public void testMergeMerge() throws Exception
{
// An ordered merge over a single inner unordered merge degenerates to the
// inner sequence's single element.
final Sequence<Integer> inner = makeUnorderedMergedSequence(
Ordering.<Integer>natural(),
Lists.newArrayList(TestSequence.create(1))
);
final Sequence<Integer> merged = new OrderedMergeSequence<Integer>(
Ordering.<Integer>natural(),
Sequences.simple(Lists.<Sequence<Integer>>newArrayList(inner))
);
SequenceTestHelper.testAll(merged, Collections.singletonList(1));
}
@Test
public void testOne() throws Exception
{
// Degenerate case: a single unordered merge over one single-element sequence
// yields exactly that element.
SequenceTestHelper.testAll(
makeUnorderedMergedSequence(
Ordering.<Integer>natural(),
Lists.newArrayList(TestSequence.create(1))
),
Collections.singletonList(1)
);
}
}
| |
// Copyright (c) 2014 Intel Corporation. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.xwalk.embedding.test.v1;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.chromium.content.browser.test.util.CallbackHelper;
import org.xwalk.core.XWalkResourceClient;
import org.xwalk.core.XWalkUIClient;
import org.xwalk.core.XWalkView;
import org.xwalk.embedding.MainActivity;
import org.xwalk.embedding.base.OnLoadFinishedHelper;
import org.xwalk.embedding.base.XWalkViewTestBase;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.os.Bundle;
import android.os.SystemClock;
import android.test.suitebuilder.annotation.SmallTest;
import android.util.Pair;
import android.view.KeyEvent;
@SuppressLint("NewApi")
public class XWalkViewTest extends XWalkViewTestBase {

    // Set by client callbacks so tests can verify the callback actually fired.
    boolean haveLoadflag = false;

    /**
     * Runs {@code action} on the instrumentation's main (UI) thread and fails
     * the test if an exception escapes. The stack trace is printed BEFORE
     * failing — the original tests called assertTrue(false) first in several
     * places, which throws and makes the subsequent printStackTrace()
     * unreachable, hiding the root cause from the test log.
     */
    private void runOnUiThreadNoException(Runnable action) {
        try {
            getInstrumentation().runOnMainSync(action);
        } catch (Exception e) {
            e.printStackTrace();
            fail("Unexpected exception on UI thread: " + e);
        }
    }

    /** Prints the failure cause, then fails the test. */
    private void failWith(Throwable t) {
        t.printStackTrace();
        fail("Unexpected exception: " + t);
    }

    /** Verifies a JS interface added via addJavascriptInterface() is reachable from an asset page. */
    @SmallTest
    public void testAddJavascriptInterface() {
        try {
            final String name = "add_js_interface.html";
            addJavascriptInterface();
            loadAssetFile(name);
            assertEquals(mExpectedStr, getTitleOnUiThread());
        } catch (Exception e) {
            failWith(e);
        }
    }

    /** Same as testAddJavascriptInterface but loading via a file:// URL. */
    @SmallTest
    public void testAddJavascriptInterfaceWithUrl() {
        try {
            final String url = "file:///android_asset/add_js_interface.html";
            addJavascriptInterface();
            loadUrlSync(url);
            assertEquals(mExpectedStr, getTitleOnUiThread());
        } catch (Exception e) {
            failWith(e);
        }
    }

    /**
     * Annotated interface methods are callable from JS; un-annotated ones must
     * raise (raisesExceptionAndSetTitle records the raise via the page title).
     */
    @SmallTest
    public void testAddJavascriptInterfaceWithAnnotation() {
        try {
            final String name = "index.html";
            final String xwalkStr = "\"xwalk\"";
            addJavascriptInterface();
            loadAssetFile(name);
            String result = executeJavaScriptAndWaitForResult("testInterface.getText()");
            assertEquals(xwalkStr, result);
            raisesExceptionAndSetTitle("testInterface.getTextWithoutAnnotation()");
            assertEquals(mExpectedStr, getTitleOnUiThread());
        } catch (Throwable t) {
            failWith(t);
        }
    }

    /** evaluateJavascript-style execution can mutate the document title. */
    @SmallTest
    public void testEvaluateJavascript() {
        try {
            String changedTitle = "testEvaluateJavascript_ChangeTitle";
            String url = "file:///android_asset/p1bar.html";
            loadUrlSync(url, null);
            executeJavaScriptAndWaitForResult("document.title='" + changedTitle + "';");
            assertEquals(changedTitle, getTitleOnUiThread());
        } catch (Exception e) {
            failWith(e);
        }
    }

    /** clearCache(true) forces a server round-trip; without it the cached copy is served. */
    @SmallTest
    public void testClearCache() {
        try {
            final String pagePath = "/clear_cache_test.html";
            List<Pair<String, String>> headers = new ArrayList<Pair<String, String>>();
            // Set Cache-Control headers to cache this request. One century should be long enough.
            headers.add(Pair.create("Cache-Control", "max-age=3153600000"));
            headers.add(Pair.create("Last-Modified", "Tues, 12 September 2014 00:00:00 GMT"));
            final String pageUrl = mWebServer.setResponse(
                    pagePath, "<html><body>foo</body></html>", headers);
            // First load to populate cache.
            clearCacheOnUiThread(true);
            loadUrlSync(pageUrl);
            assertEquals(1, mWebServer.getRequestCount(pagePath));
            // Load about:blank so next load is not treated as reload by XWalkView and force
            // revalidate with the server.
            loadUrlSync("about:blank");
            // No clearCache call, so should be loaded from cache.
            loadUrlSync(pageUrl);
            assertEquals(1, mWebServer.getRequestCount(pagePath));
            // Same as above.
            loadUrlSync("about:blank");
            // Clear cache, so should hit server again.
            clearCacheOnUiThread(true);
            loadUrlSync(pageUrl);
            assertEquals(2, mWebServer.getRequestCount(pagePath));
            // Same as above.
            loadUrlSync("about:blank");
            // Do not clear cache, so should be loaded from cache.
            clearCacheOnUiThread(false);
            loadUrlSync(pageUrl);
            assertEquals(2, mWebServer.getRequestCount(pagePath));
        } catch (Exception e) {
            failWith(e);
        }
    }

    /** Smoke test: pauseTimers() must not throw on the UI thread. */
    @SmallTest
    public void testPauseTimers() {
        runOnUiThreadNoException(new Runnable() {
            @Override
            public void run() {
                mXWalkView.pauseTimers();
            }
        });
    }

    /** Smoke test: resumeTimers() after pauseTimers() must not throw. */
    @SmallTest
    public void testResumeTimers() {
        runOnUiThreadNoException(new Runnable() {
            @Override
            public void run() {
                mXWalkView.pauseTimers();
                mXWalkView.resumeTimers();
            }
        });
    }

    /** Smoke test: onHide() must not throw. */
    @SmallTest
    public void testOnHide() {
        runOnUiThreadNoException(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onHide();
            }
        });
    }

    /** Smoke test: onShow() must not throw. */
    @SmallTest
    public void testOnShow() {
        runOnUiThreadNoException(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onShow();
            }
        });
    }

    /** Smoke test: onDestroy() must not throw. */
    @SmallTest
    public void testOnDestroy() {
        runOnUiThreadNoException(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onDestroy();
            }
        });
    }

    /** Smoke test: onActivityResult() with an arbitrary request/result code and null data. */
    @SmallTest
    public void testOnActivityResult() {
        runOnUiThreadNoException(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onActivityResult(WAIT_TIMEOUT_SECONDS, NUM_NAVIGATIONS, null);
            }
        });
    }

    /** Smoke test: onNewIntent() with an explicit intent for MainActivity. */
    @SmallTest
    public void testOnNewIntent() {
        runOnUiThreadNoException(new Runnable() {
            @Override
            public void run() {
                Intent intent = new Intent();
                intent.setClassName("org.xwalk.embedding", MainActivity.class.getName());
                mXWalkView.onNewIntent(intent);
            }
        });
    }

    /** saveState() should report success for a freshly populated bundle. */
    @SmallTest
    public void testSaveState() {
        try {
            final Bundle state = new Bundle();
            state.putByteArray("XWALKVIEW_STATE", "valid state".getBytes());
            boolean result = runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    return mXWalkView.saveState(state);
                }
            });
            assertTrue(result);
        } catch (Exception e) {
            failWith(e);
        }
    }

    // NOTE(review): no @SmallTest annotation here, unlike the sibling tests —
    // kept as-is; confirm whether this omission is intentional.
    public void testSaveRestoreStateWithTitle() throws Throwable {
        setServerResponseAndLoad(1);
        saveAndRestoreStateOnUiThread();
        assertTrue(pollOnUiThread(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return TITLES[0].equals(mRestoreXWalkView.getTitle());
            }
        }));
    }

    /** After save/restore, the restored view must carry the full navigation history. */
    @SmallTest
    public void testSaveRestoreStateWithHistoryItemList() {
        try {
            setServerResponseAndLoad(NUM_NAVIGATIONS);
            saveAndRestoreStateOnUiThread();
            checkHistoryItemList(mRestoreXWalkView);
        } catch (Throwable t) {
            failWith(t);
        }
    }

    /** restoreState() succeeds when given state produced by saveState() after a load. */
    @SmallTest
    public void testRestoreState_trueResult() {
        try {
            final Bundle state = new Bundle();
            state.putByteArray("XWALKVIEW_STATE", "valid state".getBytes());
            loadUrlSync("file:///android_asset/p1bar.html/");
            boolean result = runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    mXWalkView.saveState(state);
                    return mRestoreXWalkView.restoreState(state);
                }
            });
            assertTrue(result);
        } catch (Exception e) {
            failWith(e);
        }
    }

    /** restoreState() rejects a bundle whose serialized state was never produced by saveState(). */
    @SmallTest
    public void testRestoreState_falseResult() {
        try {
            final Bundle state = new Bundle();
            state.putByteArray("XWALKVIEW_STATE", "invalid state".getBytes());
            loadUrlSync("file:///android_asset/p1bar.html/");
            boolean result = runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    return mRestoreXWalkView.restoreState(state);
                }
            });
            assertFalse(result);
        } catch (Exception e) {
            failWith(e);
        }
    }

    /** restoreState() fails when the source view never loaded anything before saving. */
    @SmallTest
    public void testRestoreState_notLoadFirst() {
        try {
            final Bundle state = new Bundle();
            state.putByteArray("XWALKVIEW_STATE", "valid state".getBytes());
            boolean result = runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    mXWalkView.saveState(state);
                    return mRestoreXWalkView.restoreState(state);
                }
            });
            assertFalse(result);
        } catch (Exception e) {
            failWith(e);
        }
    }

    /** The API version string must look like "major.minor". */
    @SmallTest
    public void testGetAPIVersion() {
        try {
            String version = getAPIVersionOnUiThread();
            // BUG FIX: the dot was previously unescaped ("^[0-9]+(.[0-9]+)$"),
            // so it matched ANY character (e.g. "1x2"). Escape it so only a
            // literal "major.minor" form passes.
            Pattern pattern = Pattern.compile("^[0-9]+(\\.[0-9]+)$");
            Matcher matcher = pattern.matcher(version);
            assertTrue("The API version is invalid.", matcher.find());
        } catch (Exception e) {
            failWith(e);
        }
    }

    /** The Crosswalk version string must be four dot-separated numbers. */
    @SmallTest
    public void testGetXWalkVersion() {
        try {
            String version = getXWalkVersionOnUiThread();
            Pattern pattern = Pattern.compile("\\d+\\.\\d+\\.\\d+\\.\\d+");
            Matcher matcher = pattern.matcher(version);
            assertTrue("The Crosswalk version is invalid.", matcher.find());
        } catch (Exception e) {
            failWith(e);
        }
    }

    /** Smoke test: setResourceClient() must accept a default client. */
    @SmallTest
    public void testSetResourceClient() {
        runOnUiThreadNoException(new Runnable() {
            @Override
            public void run() {
                mXWalkView.setResourceClient(new XWalkResourceClient(mXWalkView));
            }
        });
    }

    /** A custom XWalkResourceClient's onLoadFinished must fire for a load. */
    @SmallTest
    public void testSetResourceClient_function() {
        try {
            haveLoadflag = false;
            final String url = "file:///android_asset/index.html";
            getInstrumentation().runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mRestoreXWalkView.setResourceClient(new XWalkResourceClient(mXWalkView) {
                        @Override
                        public void onLoadFinished(XWalkView view, String url) {
                            haveLoadflag = true;
                            mTestHelperBridge.onLoadFinished(url);
                        }
                    });
                }
            });
            OnLoadFinishedHelper onLoadFinishedHelper = mTestHelperBridge.getOnLoadFinishedHelper();
            int currentCallCount = onLoadFinishedHelper.getCallCount();
            getInstrumentation().runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mRestoreXWalkView.load(url, null);
                }
            });
            onLoadFinishedHelper.waitForCallback(currentCallCount);
            assertTrue(haveLoadflag);
        } catch (Exception e) {
            failWith(e);
        }
    }

    /** Smoke test: setUIClient() must accept a default client. */
    @SmallTest
    public void testSetUIClient() {
        runOnUiThreadNoException(new Runnable() {
            @Override
            public void run() {
                mXWalkView.setUIClient(new XWalkUIClient(mXWalkView));
            }
        });
    }

    /** A custom XWalkUIClient's onPageLoadStopped must fire for a load. */
    @SmallTest
    public void testSetUIClient_function() {
        try {
            haveLoadflag = false;
            final String url = "file:///android_asset/index.html";
            getInstrumentation().runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mRestoreXWalkView.setUIClient(new XWalkUIClient(mXWalkView) {
                        @Override
                        public void onPageLoadStopped(XWalkView view,
                                String url, LoadStatus status) {
                            haveLoadflag = true;
                            mTestHelperBridge.onPageFinished(url, status);
                        }
                    });
                }
            });
            CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
            int currentCallCount = pageFinishedHelper.getCallCount();
            getInstrumentation().runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mRestoreXWalkView.load(url, null);
                }
            });
            pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                    TimeUnit.SECONDS);
            assertTrue(haveLoadflag);
        } catch (Exception e) {
            failWith(e);
        }
    }

    /** Smoke test: onKeyUp() with a null event must not throw. */
    @SmallTest
    public void testOnKeyUp() {
        runOnUiThreadNoException(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onKeyUp(0, null);
            }
        });
    }

    /** Smoke test: onKeyDown() with a real KeyEvent must not throw. */
    @SmallTest
    public void testOnKeyDown() {
        runOnUiThreadNoException(new Runnable() {
            @Override
            public void run() {
                mXWalkView.onKeyDown(65, new KeyEvent(0, 65));
            }
        });
    }

    // need to be improved
    /** After pauseTimers(), the page's JS timer must stop updating the title. */
    @SmallTest
    public void testPauseTimers_function() {
        try {
            String url = "file:///android_asset/pause_timers.html";
            addJavascriptInterface();
            loadUrlSync(url);
            SystemClock.sleep(2000);
            String date = new Date().toString();
            pauseTimers();
            SystemClock.sleep(2000);
            // Title was last written by the timer before the pause; it should
            // still hold the timestamp captured at pause time.
            assertEquals(date, getTitleOnUiThread());
        } catch (Exception e) {
            failWith(e);
        }
    }

    /** After resumeTimers(), the page's JS timer must start updating the title again. */
    @SmallTest
    public void testResumeTimers_function() {
        try {
            String url = "file:///android_asset/pause_timers.html";
            addJavascriptInterface();
            loadUrlSync(url);
            SystemClock.sleep(2000);
            pauseTimers();
            SystemClock.sleep(2000);
            resumeTimers();
            SystemClock.sleep(1000);
            String date = new Date().toString();
            assertEquals(date, getTitleOnUiThread());
        } catch (Exception e) {
            failWith(e);
        }
    }

    /** After onDestroy(), the view must no longer report a title. */
    @SmallTest
    public void testOnDestroy_function() {
        try {
            String url = "file:///android_asset/pause_timers.html";
            addJavascriptInterface();
            loadUrlSync(url);
            SystemClock.sleep(2000);
            onDestroy();
            SystemClock.sleep(2000);
            assertNull(getTitleOnUiThread());
        } catch (Exception e) {
            failWith(e);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ThreadFactory;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.types.CopyOnWriteArrayMap;
import org.apache.hadoop.hbase.util.RetryCounter;
import org.apache.hadoop.hbase.util.RetryCounterFactory;
import org.apache.hadoop.hbase.zookeeper.ZKListener;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
/**
 * A cache of meta region location metadata. Registers a listener on ZK to track changes to the
 * meta table znodes. Clients are expected to retry if the meta information is stale. This class
 * is thread-safe (a single instance of this class can be shared by multiple threads without race
 * conditions).
 */
@InterfaceAudience.Private
public class MetaRegionLocationCache extends ZKListener {
  private static final Logger LOG = LoggerFactory.getLogger(MetaRegionLocationCache.class);
  /**
   * Maximum number of times we retry when ZK operation times out.
   */
  private static final int MAX_ZK_META_FETCH_RETRIES = 10;
  /**
   * Sleep interval ms between ZK operation retries.
   */
  private static final int SLEEP_INTERVAL_MS_BETWEEN_RETRIES = 1000;
  private static final int SLEEP_INTERVAL_MS_MAX = 10000;
  private final RetryCounterFactory retryCounterFactory =
      new RetryCounterFactory(MAX_ZK_META_FETCH_RETRIES, SLEEP_INTERVAL_MS_BETWEEN_RETRIES);
  /**
   * Cached meta region locations indexed by replica ID.
   * CopyOnWriteArrayMap ensures synchronization during updates and a consistent snapshot during
   * client requests. Even though CopyOnWriteArrayMap copies the data structure for every write,
   * that should be OK since the size of the list is often small and mutations are not too often
   * and we do not need to block client requests while mutations are in progress.
   */
  private final CopyOnWriteArrayMap<Integer, HRegionLocation> cachedMetaLocations;

  /** The kind of ZK event that triggered a cache update; DELETED gets special handling. */
  private enum ZNodeOpType {
    INIT,
    CREATED,
    CHANGED,
    DELETED
  }

  public MetaRegionLocationCache(ZKWatcher zkWatcher) {
    super(zkWatcher);
    cachedMetaLocations = new CopyOnWriteArrayMap<>();
    watcher.registerListener(this);
    // Populate the initial snapshot of data from meta znodes.
    // This is needed because stand-by masters can potentially start after the initial znode
    // creation. It blocks forever until the initial meta locations are loaded from ZK and watchers
    // are established. Subsequent updates are handled by the registered listener. Also, this runs
    // in a separate thread in the background to not block master init.
    ThreadFactory threadFactory = new ThreadFactoryBuilder().setDaemon(true).build();
    // Unbounded retries for the initial load (with capped exponential backoff), unlike the
    // bounded retryCounterFactory used for listener-driven updates.
    RetryCounterFactory retryFactory = new RetryCounterFactory(
        Integer.MAX_VALUE, SLEEP_INTERVAL_MS_BETWEEN_RETRIES, SLEEP_INTERVAL_MS_MAX);
    threadFactory.newThread(
      ()->loadMetaLocationsFromZk(retryFactory.create(), ZNodeOpType.INIT)).start();
  }

  /**
   * Populates the current snapshot of meta locations from ZK. If no meta znodes exist, it registers
   * a watcher on base znode to check for any CREATE/DELETE events on the children.
   * @param retryCounter controls the number of retries and sleep between retries.
   */
  private void loadMetaLocationsFromZk(RetryCounter retryCounter, ZNodeOpType opType) {
    List<String> znodes = null;
    while (retryCounter.shouldRetry()) {
      try {
        znodes = watcher.getMetaReplicaNodesAndWatchChildren();
        break;
      } catch (KeeperException ke) {
        LOG.debug("Error populating initial meta locations", ke);
        if (!retryCounter.shouldRetry()) {
          // Retries exhausted and watchers not set. This is not a desirable state since the cache
          // could remain stale forever. Propagate the exception.
          watcher.abort("Error populating meta locations", ke);
          return;
        }
        try {
          retryCounter.sleepUntilNextRetry();
        } catch (InterruptedException ie) {
          LOG.error("Interrupted while loading meta locations from ZK", ie);
          // Restore the interrupt status for callers further up the stack.
          Thread.currentThread().interrupt();
          return;
        }
      }
    }
    if (znodes == null || znodes.isEmpty()) {
      // No meta znodes exist at this point but we registered a watcher on the base znode to listen
      // for updates. They will be handled via nodeChildrenChanged().
      return;
    }
    if (znodes.size() == cachedMetaLocations.size()) {
      // No new meta znodes got added.
      return;
    }
    for (String znode: znodes) {
      String path = ZNodePaths.joinZNode(watcher.getZNodePaths().baseZNode, znode);
      updateMetaLocation(path, opType);
    }
  }

  /**
   * Gets the HRegionLocation for a given meta replica ID. Renews the watch on the znode for
   * future updates.
   * @param replicaId ReplicaID of the region.
   * @return HRegionLocation for the meta replica.
   * @throws KeeperException if there is any issue fetching/parsing the serialized data.
   */
  private HRegionLocation getMetaRegionLocation(int replicaId)
      throws KeeperException {
    RegionState metaRegionState;
    try {
      byte[] data = ZKUtil.getDataAndWatch(watcher,
          watcher.getZNodePaths().getZNodeForReplica(replicaId));
      metaRegionState = ProtobufUtil.parseMetaRegionStateFrom(data, replicaId);
    } catch (DeserializationException e) {
      // Surface parse failures as KeeperExceptions so callers have a single failure type.
      throw ZKUtil.convert(e);
    }
    return new HRegionLocation(metaRegionState.getRegion(), metaRegionState.getServerName());
  }

  /**
   * Refreshes (or removes) the cached location for the meta replica named by 'path',
   * retrying transient ZK errors. Non-meta paths are ignored.
   */
  private void updateMetaLocation(String path, ZNodeOpType opType) {
    if (!isValidMetaPath(path)) {
      return;
    }
    LOG.debug("Updating meta znode for path {}: {}", path, opType.name());
    int replicaId = watcher.getZNodePaths().getMetaReplicaIdFromPath(path);
    RetryCounter retryCounter = retryCounterFactory.create();
    HRegionLocation location = null;
    while (retryCounter.shouldRetry()) {
      try {
        if (opType == ZNodeOpType.DELETED) {
          if (!ZKUtil.watchAndCheckExists(watcher, path)) {
            // The path does not exist, we've set the watcher and we can break for now.
            break;
          }
          // If it is a transient error and the node appears right away, we fetch the
          // latest meta state.
        }
        location = getMetaRegionLocation(replicaId);
        break;
      } catch (KeeperException e) {
        LOG.debug("Error getting meta location for path {}", path, e);
        if (!retryCounter.shouldRetry()) {
          LOG.warn("Error getting meta location for path {}. Retries exhausted.", path, e);
          break;
        }
        try {
          retryCounter.sleepUntilNextRetry();
        } catch (InterruptedException ie) {
          Thread.currentThread().interrupt();
          return;
        }
      }
    }
    if (location == null) {
      // Either the znode is gone or fetching failed permanently; drop the stale entry.
      cachedMetaLocations.remove(replicaId);
      return;
    }
    cachedMetaLocations.put(replicaId, location);
  }

  /**
   * @return Optional list of HRegionLocations for meta replica(s), empty if the cache is empty.
   */
  public Optional<List<HRegionLocation>> getMetaRegionLocations() {
    if (cachedMetaLocations.isEmpty()) {
      // BUG FIX: check emptiness BEFORE calling firstKey(). The
      // ConcurrentNavigableMap contract allows firstKey() to throw
      // NoSuchElementException on an empty map, so the isEmpty() check on the
      // tailMap snapshot below could never be reached in that case. This can
      // happen if the master has not successfully initialized yet or meta
      // region is stuck in some weird state.
      return Optional.empty();
    }
    ConcurrentNavigableMap<Integer, HRegionLocation> snapshot =
        cachedMetaLocations.tailMap(cachedMetaLocations.firstKey());
    if (snapshot.isEmpty()) {
      // Entries could have been removed between the isEmpty() check and the snapshot.
      return Optional.empty();
    }
    List<HRegionLocation> result = new ArrayList<>();
    // Explicitly iterate instead of new ArrayList<>(snapshot.values()) because the underlying
    // ArrayValueCollection does not implement toArray().
    snapshot.values().forEach(location -> result.add(location));
    return Optional.of(result);
  }

  /**
   * Helper to check if the given 'path' corresponds to a meta znode. This listener is only
   * interested in changes to meta znodes.
   */
  private boolean isValidMetaPath(String path) {
    return watcher.getZNodePaths().isMetaZNodePath(path);
  }

  @Override
  public void nodeCreated(String path) {
    updateMetaLocation(path, ZNodeOpType.CREATED);
  }

  @Override
  public void nodeDeleted(String path) {
    updateMetaLocation(path, ZNodeOpType.DELETED);
  }

  @Override
  public void nodeDataChanged(String path) {
    updateMetaLocation(path, ZNodeOpType.CHANGED);
  }

  @Override
  public void nodeChildrenChanged(String path) {
    // Only children of the base znode (the meta replica znodes) are interesting.
    if (!path.equals(watcher.getZNodePaths().baseZNode)) {
      return;
    }
    loadMetaLocationsFromZk(retryCounterFactory.create(), ZNodeOpType.CHANGED);
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.bucket.children.Children;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHits;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;
import java.util.*;
import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.*;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.*;
/**
*/
@ElasticsearchIntegrationTest.SuiteScopeTest
public class ChildrenTests extends ElasticsearchIntegrationTest {
private final static Map<String, Control> categoryToControl = new HashMap<>();
// Builds the suite-scoped fixture: a randomized set of "article" parent docs
// spread over random categories, each with 0-5 "comment" children, plus a small
// deterministic set (ids a-d) used by the non-randomized tests. Expected counts
// are recorded in categoryToControl for later verification.
@Override
public void setupSuiteScopeCluster() throws Exception {
assertAcked(
prepareCreate("test")
.addMapping("article", "_id", "index=not_analyzed")
.addMapping("comment", "_parent", "type=article", "_id", "index=not_analyzed")
);
List<IndexRequestBuilder> requests = new ArrayList<>();
String[] uniqueCategories = new String[randomIntBetween(1, 25)];
for (int i = 0; i < uniqueCategories.length; i++) {
uniqueCategories[i] = Integer.toString(i);
}
int catIndex = 0;
int numParentDocs = randomIntBetween(uniqueCategories.length, uniqueCategories.length * 5);
for (int i = 0; i < numParentDocs; i++) {
String id = Integer.toString(i);
// NOTE(review): randomIntBetween(1,1) is always 1, so each article gets
// exactly one category — possibly a leftover from a wider range.
String[] categories = new String[randomIntBetween(1,1)];
for (int j = 0; j < categories.length; j++) {
// Round-robin over the unique categories so every category gets articles.
String category = categories[j] = uniqueCategories[catIndex++ % uniqueCategories.length];
Control control = categoryToControl.get(category);
if (control == null) {
categoryToControl.put(category, control = new Control(category));
}
control.articleIds.add(id);
}
requests.add(client().prepareIndex("test", "article", id).setCreate(true).setSource("category", categories, "randomized", true));
}
String[] commenters = new String[randomIntBetween(5, 50)];
for (int i = 0; i < commenters.length; i++) {
commenters[i] = Integer.toString(i);
}
// Index child "comment" docs under each article, tracking per-commenter ids.
int id = 0;
for (Control control : categoryToControl.values()) {
for (String articleId : control.articleIds) {
int numChildDocsPerParent = randomIntBetween(0, 5);
for (int i = 0; i < numChildDocsPerParent; i++) {
String commenter = commenters[id % commenters.length];
String idValue = Integer.toString(id++);
control.commentIds.add(idValue);
Set<String> ids = control.commenterToCommentId.get(commenter);
if (ids == null) {
control.commenterToCommentId.put(commenter, ids = new HashSet<>());
}
ids.add(idValue);
requests.add(client().prepareIndex("test", "comment", idValue).setCreate(true).setParent(articleId).setSource("commenter", commenter));
}
}
}
// Deterministic docs (randomized=false) used by testParentWithMultipleBuckets.
requests.add(client().prepareIndex("test", "article", "a").setSource("category", new String[]{"a"}, "randomized", false));
requests.add(client().prepareIndex("test", "article", "b").setSource("category", new String[]{"a", "b"}, "randomized", false));
requests.add(client().prepareIndex("test", "article", "c").setSource("category", new String[]{"a", "b", "c"}, "randomized", false));
requests.add(client().prepareIndex("test", "article", "d").setSource("category", new String[]{"c"}, "randomized", false));
requests.add(client().prepareIndex("test", "comment", "a").setParent("a").setSource("{}"));
requests.add(client().prepareIndex("test", "comment", "c").setParent("c").setSource("{}"));
indexRandom(true, requests);
ensureSearchable("test");
}
// Verifies a terms(category) > children(comment) > terms(commenter) > top_hits
// aggregation tree against the counts recorded in categoryToControl during setup.
@Test
public void testChildrenAggs() throws Exception {
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(matchQuery("randomized", true))
.addAggregation(
terms("category").field("category").size(0).subAggregation(
children("to_comment").childType("comment").subAggregation(
terms("commenters").field("commenter").size(0).subAggregation(
topHits("top_comments")
))
)
).get();
assertSearchResponse(searchResponse);
Terms categoryTerms = searchResponse.getAggregations().get("category");
// One bucket per category seen during setup.
assertThat(categoryTerms.getBuckets().size(), equalTo(categoryToControl.size()));
for (Map.Entry<String, Control> entry1 : categoryToControl.entrySet()) {
Terms.Bucket categoryBucket = categoryTerms.getBucketByKey(entry1.getKey());
assertThat(categoryBucket.getKey(), equalTo(entry1.getKey()));
// Parent doc count must match the number of articles indexed in this category.
assertThat(categoryBucket.getDocCount(), equalTo((long) entry1.getValue().articleIds.size()));
Children childrenBucket = categoryBucket.getAggregations().get("to_comment");
assertThat(childrenBucket.getName(), equalTo("to_comment"));
// Child doc count must match the comments indexed under those articles;
// also check the same value via the property path "_count".
assertThat(childrenBucket.getDocCount(), equalTo((long) entry1.getValue().commentIds.size()));
assertThat((long) childrenBucket.getProperty("_count"), equalTo((long) entry1.getValue().commentIds.size()));
Terms commentersTerms = childrenBucket.getAggregations().get("commenters");
assertThat((Terms) childrenBucket.getProperty("commenters"), sameInstance(commentersTerms));
assertThat(commentersTerms.getBuckets().size(), equalTo(entry1.getValue().commenterToCommentId.size()));
// Per-commenter buckets and their top hits must line up with the recorded comment ids.
for (Map.Entry<String, Set<String>> entry2 : entry1.getValue().commenterToCommentId.entrySet()) {
Terms.Bucket commentBucket = commentersTerms.getBucketByKey(entry2.getKey());
assertThat(commentBucket.getKey(), equalTo(entry2.getKey()));
assertThat(commentBucket.getDocCount(), equalTo((long) entry2.getValue().size()));
TopHits topHits = commentBucket.getAggregations().get("top_comments");
for (SearchHit searchHit : topHits.getHits().getHits()) {
assertThat(entry2.getValue().contains(searchHit.getId()), is(true));
}
}
}
}
@Test
// Parent docs whose category field has multiple values land in multiple terms
// buckets; the children aggregation under each bucket must still only count a
// child once per bucket. Expected counts below correspond to the fixed
// (randomized=false) fixture documents indexed during setup.
public void testParentWithMultipleBuckets() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("test")
            .setQuery(matchQuery("randomized", false))
            .addAggregation(
                    terms("category").field("category").size(0).subAggregation(
                            // sort by _id so positional assertions below are deterministic
                            children("to_comment").childType("comment").subAggregation(topHits("top_comments").addSort("_id", SortOrder.ASC))
                    )
            ).get();
    assertSearchResponse(searchResponse);

    Terms categoryTerms = searchResponse.getAggregations().get("category");
    assertThat(categoryTerms.getBuckets().size(), equalTo(3));

    // diagnostic dump of all buckets/hits; assertions follow below
    for (Terms.Bucket bucket : categoryTerms.getBuckets()) {
        logger.info("bucket=" + bucket.getKey());
        Children childrenBucket = bucket.getAggregations().get("to_comment");
        TopHits topHits = childrenBucket.getAggregations().get("top_comments");
        logger.info("total_hits={}", topHits.getHits().getTotalHits());
        for (SearchHit searchHit : topHits.getHits()) {
            logger.info("hit= {} {} {}", searchHit.sortValues()[0], searchHit.getType(), searchHit.getId());
        }
    }

    // category "a": 3 parent docs, 2 distinct child comments ("a" and "c")
    Terms.Bucket categoryBucket = categoryTerms.getBucketByKey("a");
    assertThat(categoryBucket.getKey(), equalTo("a"));
    assertThat(categoryBucket.getDocCount(), equalTo(3l));
    Children childrenBucket = categoryBucket.getAggregations().get("to_comment");
    assertThat(childrenBucket.getName(), equalTo("to_comment"));
    assertThat(childrenBucket.getDocCount(), equalTo(2l));
    TopHits topHits = childrenBucket.getAggregations().get("top_comments");
    assertThat(topHits.getHits().totalHits(), equalTo(2l));
    assertThat(topHits.getHits().getAt(0).sortValues()[0].toString(), equalTo("a"));
    assertThat(topHits.getHits().getAt(0).getId(), equalTo("a"));
    assertThat(topHits.getHits().getAt(0).getType(), equalTo("comment"));
    assertThat(topHits.getHits().getAt(1).sortValues()[0].toString(), equalTo("c"));
    assertThat(topHits.getHits().getAt(1).getId(), equalTo("c"));
    assertThat(topHits.getHits().getAt(1).getType(), equalTo("comment"));

    // category "b": 2 parents, single child comment "c"
    categoryBucket = categoryTerms.getBucketByKey("b");
    assertThat(categoryBucket.getKey(), equalTo("b"));
    assertThat(categoryBucket.getDocCount(), equalTo(2l));
    childrenBucket = categoryBucket.getAggregations().get("to_comment");
    assertThat(childrenBucket.getName(), equalTo("to_comment"));
    assertThat(childrenBucket.getDocCount(), equalTo(1l));
    topHits = childrenBucket.getAggregations().get("top_comments");
    assertThat(topHits.getHits().totalHits(), equalTo(1l));
    assertThat(topHits.getHits().getAt(0).getId(), equalTo("c"));
    assertThat(topHits.getHits().getAt(0).getType(), equalTo("comment"));

    // category "c": 2 parents, single child comment "c"
    categoryBucket = categoryTerms.getBucketByKey("c");
    assertThat(categoryBucket.getKey(), equalTo("c"));
    assertThat(categoryBucket.getDocCount(), equalTo(2l));
    childrenBucket = categoryBucket.getAggregations().get("to_comment");
    assertThat(childrenBucket.getName(), equalTo("to_comment"));
    assertThat(childrenBucket.getDocCount(), equalTo(1l));
    topHits = childrenBucket.getAggregations().get("top_comments");
    assertThat(topHits.getHits().totalHits(), equalTo(1l));
    assertThat(topHits.getHits().getAt(0).getId(), equalTo("c"));
    assertThat(topHits.getHits().getAt(0).getType(), equalTo("comment"));
}
@Test
// Regression test: the children aggregation must keep producing correct counts
// while child docs are repeatedly updated (an update is a delete + reindex at
// the Lucene level, leaving deleted docs behind until merge).
public void testWithDeletes() throws Exception {
    String indexName = "xyz";
    assertAcked(
            prepareCreate(indexName)
                    .addMapping("parent")
                    .addMapping("child", "_parent", "type=parent", "count", "type=long")
    );

    List<IndexRequestBuilder> requests = new ArrayList<>();
    requests.add(client().prepareIndex(indexName, "parent", "1").setSource("{}"));
    requests.add(client().prepareIndex(indexName, "child", "0").setParent("1").setSource("count", 1));
    requests.add(client().prepareIndex(indexName, "child", "1").setParent("1").setSource("count", 1));
    requests.add(client().prepareIndex(indexName, "child", "2").setParent("1").setSource("count", 1));
    requests.add(client().prepareIndex(indexName, "child", "3").setParent("1").setSource("count", 1));
    indexRandom(true, requests);

    for (int i = 0; i < 10; i++) {
        SearchResponse searchResponse = client().prepareSearch(indexName)
                .addAggregation(children("children").childType("child").subAggregation(sum("counts").field("count")))
                .get();

        assertNoFailures(searchResponse);
        Children children = searchResponse.getAggregations().get("children");
        // counts must stay stable: 4 live children, each with count=1
        assertThat(children.getDocCount(), equalTo(4l));

        Sum count = children.getAggregations().get("counts");
        assertThat(count.getValue(), equalTo(4.));

        // update a random child (same doc value) to generate Lucene deletes
        String idToUpdate = Integer.toString(randomInt(3));
        UpdateResponse updateResponse = client().prepareUpdate(indexName, "child", idToUpdate)
                .setRouting("1")
                .setDoc("count", 1)
                .get();
        assertThat(updateResponse.getVersion(), greaterThan(1l));
        refresh();
    }
}
@Test
// A children aggregation over an unmapped child type must not fail the
// request; it simply yields an empty bucket with a zero doc count.
public void testNonExistingChildType() throws Exception {
    final SearchResponse response = client()
            .prepareSearch("test")
            .addAggregation(children("non-existing").childType("xyz"))
            .get();
    assertSearchResponse(response);

    final Children agg = response.getAggregations().get("non-existing");
    assertThat(agg.getName(), equalTo("non-existing"));
    assertThat(agg.getDocCount(), equalTo(0l));
}
@Test
// Exercises post-collection: the top-level query matches only parents with an
// orange child, but the children aggregation must still visit ALL children of
// the matching parent (7 variants of product "2"), not just the ones that
// satisfied the has_child query.
public void testPostCollection() throws Exception {
    String indexName = "prodcatalog";
    String masterType = "masterprod";
    String childType = "variantsku";
    assertAcked(
            prepareCreate(indexName)
                    .addMapping(masterType, "brand", "type=string", "name", "type=string", "material", "type=string")
                    .addMapping(childType, "_parent", "type=masterprod", "color", "type=string", "size", "type=string")
    );

    // product 1 (Levis): 6 variants; product 2 (Wrangler): 7 variants incl. orange
    List<IndexRequestBuilder> requests = new ArrayList<>();
    requests.add(client().prepareIndex(indexName, masterType, "1").setSource("brand", "Levis", "name", "Style 501", "material", "Denim"));
    requests.add(client().prepareIndex(indexName, childType, "0").setParent("1").setSource("color", "blue", "size", "32"));
    requests.add(client().prepareIndex(indexName, childType, "1").setParent("1").setSource("color", "blue", "size", "34"));
    requests.add(client().prepareIndex(indexName, childType, "2").setParent("1").setSource("color", "blue", "size", "36"));
    requests.add(client().prepareIndex(indexName, childType, "3").setParent("1").setSource("color", "black", "size", "38"));
    requests.add(client().prepareIndex(indexName, childType, "4").setParent("1").setSource("color", "black", "size", "40"));
    requests.add(client().prepareIndex(indexName, childType, "5").setParent("1").setSource("color", "gray", "size", "36"));
    requests.add(client().prepareIndex(indexName, masterType, "2").setSource("brand", "Wrangler", "name", "Regular Cut", "material", "Leather"));
    requests.add(client().prepareIndex(indexName, childType, "6").setParent("2").setSource("color", "blue", "size", "32"));
    requests.add(client().prepareIndex(indexName, childType, "7").setParent("2").setSource("color", "blue", "size", "34"));
    requests.add(client().prepareIndex(indexName, childType, "8").setParent("2").setSource("color", "black", "size", "36"));
    requests.add(client().prepareIndex(indexName, childType, "9").setParent("2").setSource("color", "black", "size", "38"));
    requests.add(client().prepareIndex(indexName, childType, "10").setParent("2").setSource("color", "black", "size", "40"));
    requests.add(client().prepareIndex(indexName, childType, "11").setParent("2").setSource("color", "orange", "size", "36"));
    requests.add(client().prepareIndex(indexName, childType, "12").setParent("2").setSource("color", "green", "size", "44"));
    indexRandom(true, requests);

    SearchResponse response = client().prepareSearch(indexName).setTypes(masterType)
            .setQuery(hasChildQuery(childType, termQuery("color", "orange")))
            .addAggregation(children("my-refinements")
                    .childType(childType)
                    .subAggregation(terms("my-colors").field(childType + ".color"))
                    .subAggregation(terms("my-sizes").field(childType + ".size"))
            ).get();
    assertNoFailures(response);
    // only product 2 has an orange variant
    assertHitCount(response, 1);

    // all 7 variants of product 2 must be aggregated, not just the orange one
    Children childrenAgg = response.getAggregations().get("my-refinements");
    assertThat(childrenAgg.getDocCount(), equalTo(7l));

    Terms termsAgg = childrenAgg.getAggregations().get("my-colors");
    assertThat(termsAgg.getBuckets().size(), equalTo(4));
    assertThat(termsAgg.getBucketByKey("black").getDocCount(), equalTo(3l));
    assertThat(termsAgg.getBucketByKey("blue").getDocCount(), equalTo(2l));
    assertThat(termsAgg.getBucketByKey("green").getDocCount(), equalTo(1l));
    assertThat(termsAgg.getBucketByKey("orange").getDocCount(), equalTo(1l));

    termsAgg = childrenAgg.getAggregations().get("my-sizes");
    assertThat(termsAgg.getBuckets().size(), equalTo(6));
    assertThat(termsAgg.getBucketByKey("36").getDocCount(), equalTo(2l));
    assertThat(termsAgg.getBucketByKey("32").getDocCount(), equalTo(1l));
    assertThat(termsAgg.getBucketByKey("34").getDocCount(), equalTo(1l));
    assertThat(termsAgg.getBucketByKey("38").getDocCount(), equalTo(1l));
    assertThat(termsAgg.getBucketByKey("40").getDocCount(), equalTo(1l));
    assertThat(termsAgg.getBucketByKey("44").getDocCount(), equalTo(1l));
}
// Bookkeeping record built while the randomized fixture is indexed; the test
// methods assert the aggregation results against these expected values.
// Field names are read directly by sibling tests, so they must not change.
private static final class Control {
    // category value this control tracks
    final String category;
    // ids of parent (article) docs in this category
    final Set<String> articleIds = new HashSet<>();
    // ids of child (comment) docs in this category
    final Set<String> commentIds = new HashSet<>();
    // commenter name -> ids of the comments that commenter made
    final Map<String, Set<String>> commenterToCommentId = new HashMap<>();

    private Control(String category) {
        this.category = category;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.utils;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.channels.FileChannel;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.sun.jna.LastErrorException;
import org.apache.cassandra.io.FSWriteError;
import static org.apache.cassandra.utils.NativeLibrary.OSType.LINUX;
import static org.apache.cassandra.utils.NativeLibrary.OSType.MAC;
import static org.apache.cassandra.utils.NativeLibrary.OSType.WINDOWS;
import static org.apache.cassandra.utils.NativeLibrary.OSType.AIX;
/**
 * Thin, failure-tolerant facade over the platform's native C library (via JNA).
 * All {@code try*} methods degrade gracefully when JNA or the underlying call
 * is unavailable: they log and return a sentinel instead of throwing, except
 * where an error indicates data-loss risk (fsync/close), which raises
 * {@link FSWriteError}.
 */
public final class NativeLibrary
{
    private static final Logger logger = LoggerFactory.getLogger(NativeLibrary.class);

    /** Operating-system families this class distinguishes between. */
    public enum OSType
    {
        LINUX,
        MAC,
        WINDOWS,
        AIX,
        OTHER;
    }

    public static final OSType osType;

    // mlockall() flags; values are platform-dependent, chosen in the static initializer
    private static final int MCL_CURRENT;
    private static final int MCL_FUTURE;

    // errno value meaning "cannot allocate memory" (e.g. RLIMIT_MEMLOCK exceeded)
    private static final int ENOMEM = 12;

    private static final int F_GETFL   = 3;  /* get file status flags */
    private static final int F_SETFL   = 4;  /* set file status flags */
    private static final int F_NOCACHE = 48; /* Mac OS X specific flag, turns cache on/off */

    private static final int O_DIRECT = 040000;   /* fcntl.h */
    private static final int O_RDONLY = 00000000; /* fcntl.h */

    private static final int POSIX_FADV_NORMAL     = 0; /* fadvise.h */
    private static final int POSIX_FADV_RANDOM     = 1; /* fadvise.h */
    private static final int POSIX_FADV_SEQUENTIAL = 2; /* fadvise.h */
    private static final int POSIX_FADV_WILLNEED   = 3; /* fadvise.h */
    private static final int POSIX_FADV_DONTNEED   = 4; /* fadvise.h */
    private static final int POSIX_FADV_NOREUSE    = 5; /* fadvise.h */

    // OS-specific implementation actually making the native calls
    private static final NativeLibraryWrapper wrappedLibrary;
    // set to true once mlockall() has succeeded at least once
    private static boolean jnaLockable = false;

    // reflective access to the int fd hidden inside FileDescriptor / FileChannelImpl
    private static final Field FILE_DESCRIPTOR_FD_FIELD;
    private static final Field FILE_CHANNEL_FD_FIELD;

    static
    {
        FILE_DESCRIPTOR_FD_FIELD = FBUtilities.getProtectedField(FileDescriptor.class, "fd");
        try
        {
            FILE_CHANNEL_FD_FIELD = FBUtilities.getProtectedField(Class.forName("sun.nio.ch.FileChannelImpl"), "fd");
        }
        catch (ClassNotFoundException e)
        {
            throw new RuntimeException(e);
        }

        // detect the OS type the JVM is running on and then set the CLibraryWrapper
        // instance to a compatible implementation of CLibraryWrapper for that OS type
        osType = getOsType();
        switch (osType)
        {
            case MAC: wrappedLibrary = new NativeLibraryDarwin(); break;
            case WINDOWS: wrappedLibrary = new NativeLibraryWindows(); break;
            case LINUX:
            case AIX:
            case OTHER:
            default: wrappedLibrary = new NativeLibraryLinux();
        }

        // PowerPC kernels define different MCL_* constants than other architectures
        if (System.getProperty("os.arch").toLowerCase().contains("ppc"))
        {
            if (osType == LINUX)
            {
                MCL_CURRENT = 0x2000;
                MCL_FUTURE = 0x4000;
            }
            else if (osType == AIX)
            {
                MCL_CURRENT = 0x100;
                MCL_FUTURE = 0x200;
            }
            else
            {
                MCL_CURRENT = 1;
                MCL_FUTURE = 2;
            }
        }
        else
        {
            MCL_CURRENT = 1;
            MCL_FUTURE = 2;
        }
    }

    private NativeLibrary() {}

    /**
     * @return the detected OSType of the Operating System running the JVM using crude string matching
     */
    private static OSType getOsType()
    {
        String osName = System.getProperty("os.name").toLowerCase();
        if (osName.contains("linux"))
            return LINUX;
        else if (osName.contains("mac"))
            return MAC;
        else if (osName.contains("windows"))
            return WINDOWS;

        logger.warn("the current operating system, {}, is unsupported by cassandra", osName);
        if (osName.contains("aix"))
            return AIX;
        else
            // fall back to the Linux impl for all unknown OS types until otherwise implicitly supported as needed
            return LINUX;
    }

    /**
     * Extracts the errno carried by a JNA {@link LastErrorException}.
     * @param e must be a LastErrorException (asserted)
     * @return the native errno, or 0 when an obsolete JNA cannot report it
     */
    private static int errno(RuntimeException e)
    {
        assert e instanceof LastErrorException;
        try
        {
            return ((LastErrorException) e).getErrorCode();
        }
        catch (NoSuchMethodError x)
        {
            logger.warn("Obsolete version of JNA present; unable to read errno. Upgrade to JNA 3.2.7 or later");
            return 0;
        }
    }

    /**
     * Checks if the library has been successfully linked.
     * @return {@code true} if the library has been successfully linked, {@code false} otherwise.
     */
    public static boolean isAvailable()
    {
        return wrappedLibrary.isAvailable();
    }

    /** @return whether a previous {@link #tryMlockall()} call succeeded */
    public static boolean jnaMemoryLockable()
    {
        return jnaLockable;
    }

    /**
     * Best-effort attempt to lock the process's current memory into RAM
     * (prevents the JVM heap from being swapped out). Failures are logged,
     * never thrown.
     */
    public static void tryMlockall()
    {
        try
        {
            wrappedLibrary.callMlockall(MCL_CURRENT);
            jnaLockable = true;
            logger.info("JNA mlockall successful");
        }
        catch (UnsatisfiedLinkError e)
        {
            // this will have already been logged by CLibrary, no need to repeat it
        }
        catch (RuntimeException e)
        {
            if (!(e instanceof LastErrorException))
                throw e;

            if (errno(e) == ENOMEM && osType == LINUX)
            {
                logger.warn("Unable to lock JVM memory (ENOMEM)."
                        + " This can result in part of the JVM being swapped out, especially with mmapped I/O enabled."
                        + " Increase RLIMIT_MEMLOCK or run Cassandra as root.");
            }
            else if (osType != MAC)
            {
                // OS X allows mlockall to be called, but always returns an error
                logger.warn("Unknown mlockall error {}", errno(e));
            }
        }
    }

    /**
     * Advises the kernel to drop the page cache for a byte range of the file at
     * {@code path}. No-op if the file does not exist; I/O errors are logged.
     *
     * @param path   file whose cached pages should be dropped
     * @param offset start of the range in bytes
     * @param len    length of the range in bytes (0 = whole file)
     */
    public static void trySkipCache(String path, long offset, long len)
    {
        File f = new File(path);
        if (!f.exists())
            return;

        try (FileInputStream fis = new FileInputStream(f))
        {
            trySkipCache(getfd(fis.getChannel()), offset, len, path);
        }
        catch (IOException e)
        {
            logger.warn("Could not skip cache", e);
        }
    }

    /**
     * Long-length variant: splits the range into chunks of at most
     * Integer.MAX_VALUE bytes and forwards each to the int-length overload.
     */
    public static void trySkipCache(int fd, long offset, long len, String path)
    {
        if (len == 0)
            // integer literals make this call bind to the (int, long, int, String)
            // overload below, i.e. "advise the whole file" — it does not recurse here
            trySkipCache(fd, 0, 0, path);

        while (len > 0)
        {
            int sublen = (int) Math.min(Integer.MAX_VALUE, len);
            trySkipCache(fd, offset, sublen, path);
            len -= sublen;
            // BUGFIX: was "offset -= sublen", which rewound the advised range
            // (eventually to negative offsets) for regions larger than 2 GiB;
            // each chunk must start where the previous one ended.
            offset += sublen;
        }
    }

    /**
     * Issues posix_fadvise(POSIX_FADV_DONTNEED) for [offset, offset+len) on
     * Linux; a no-op on other platforms or negative fds. Failures are logged
     * (rate-limited), never thrown.
     */
    public static void trySkipCache(int fd, long offset, int len, String path)
    {
        if (fd < 0)
            return;

        try
        {
            if (osType == LINUX)
            {
                int result = wrappedLibrary.callPosixFadvise(fd, offset, len, POSIX_FADV_DONTNEED);
                if (result != 0)
                    NoSpamLogger.log(
                            logger,
                            NoSpamLogger.Level.WARN,
                            10,
                            TimeUnit.MINUTES,
                            "Failed trySkipCache on file: {} Error: " + wrappedLibrary.callStrerror(result).getString(0),
                            path);
            }
        }
        catch (UnsatisfiedLinkError e)
        {
            // if JNA is unavailable just skipping Direct I/O
            // instance of this class will act like normal RandomAccessFile
        }
        catch (RuntimeException e)
        {
            if (!(e instanceof LastErrorException))
                throw e;

            logger.warn("posix_fadvise({}, {}) failed, errno ({}).", fd, offset, errno(e));
        }
    }

    /**
     * Thin wrapper around fcntl(2).
     * @return the fcntl result, or -1 when the call failed or JNA is unavailable
     */
    public static int tryFcntl(int fd, int command, int flags)
    {
        // fcntl return value may or may not be useful, depending on the command
        int result = -1;

        try
        {
            result = wrappedLibrary.callFcntl(fd, command, flags);
        }
        catch (UnsatisfiedLinkError e)
        {
            // if JNA is unavailable just skipping
        }
        catch (RuntimeException e)
        {
            if (!(e instanceof LastErrorException))
                throw e;

            logger.warn("fcntl({}, {}, {}) failed, errno ({}).", fd, command, flags, errno(e));
        }

        return result;
    }

    /**
     * Opens a directory read-only so it can later be fsync'd (required to make
     * file creation/rename durable on POSIX).
     * @return the directory's fd, or -1 on failure
     */
    public static int tryOpenDirectory(String path)
    {
        int fd = -1;

        try
        {
            return wrappedLibrary.callOpen(path, O_RDONLY);
        }
        catch (UnsatisfiedLinkError e)
        {
            // JNA is unavailable just skipping Direct I/O
        }
        catch (RuntimeException e)
        {
            if (!(e instanceof LastErrorException))
                throw e;

            logger.warn("open({}, O_RDONLY) failed, errno ({}).", path, errno(e));
        }

        return fd;
    }

    /**
     * fsync(2)s the given fd. Unlike the other try* methods, a native failure
     * here is escalated as {@link FSWriteError}, since it means data may not
     * have reached stable storage.
     */
    public static void trySync(int fd)
    {
        if (fd == -1)
            return;

        try
        {
            wrappedLibrary.callFsync(fd);
        }
        catch (UnsatisfiedLinkError e)
        {
            // JNA is unavailable just skipping Direct I/O
        }
        catch (RuntimeException e)
        {
            if (!(e instanceof LastErrorException))
                throw e;

            String errMsg = String.format("fsync(%s) failed, errno (%s) %s", fd, errno(e), e.getMessage());
            logger.warn(errMsg);
            throw new FSWriteError(e, errMsg);
        }
    }

    /**
     * close(2)s the given fd; native failures are escalated as
     * {@link FSWriteError} (close can surface deferred write errors).
     */
    public static void tryCloseFD(int fd)
    {
        if (fd == -1)
            return;

        try
        {
            wrappedLibrary.callClose(fd);
        }
        catch (UnsatisfiedLinkError e)
        {
            // JNA is unavailable just skipping Direct I/O
        }
        catch (RuntimeException e)
        {
            if (!(e instanceof LastErrorException))
                throw e;

            String errMsg = String.format("close(%d) failed, errno (%d).", fd, errno(e));
            logger.warn(errMsg);
            throw new FSWriteError(e, errMsg);
        }
    }

    /**
     * Extracts the OS-level file descriptor backing a FileChannel via reflection.
     * @return the fd, or -1 if it could not be read
     */
    public static int getfd(FileChannel channel)
    {
        try
        {
            return getfd((FileDescriptor) FILE_CHANNEL_FD_FIELD.get(channel));
        }
        catch (IllegalArgumentException | IllegalAccessException e)
        {
            logger.warn("Unable to read fd field from FileChannel");
        }
        return -1;
    }

    /**
     * Get system file descriptor from FileDescriptor object.
     * @param descriptor - FileDescriptor object to get fd from
     * @return file descriptor, -1 or error
     */
    public static int getfd(FileDescriptor descriptor)
    {
        try
        {
            return FILE_DESCRIPTOR_FD_FIELD.getInt(descriptor);
        }
        catch (Exception e)
        {
            JVMStabilityInspector.inspectThrowable(e);
            logger.warn("Unable to read fd field from FileDescriptor");
        }

        return -1;
    }

    /**
     * @return the PID of the JVM or -1 if we failed to get the PID
     */
    public static long getProcessID()
    {
        try
        {
            return wrappedLibrary.callGetpid();
        }
        catch (Exception e)
        {
            logger.info("Failed to get PID from JNA", e);
        }

        return -1;
    }
}
| |
package com.planet_ink.coffee_mud.Abilities.Common;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.Common.CraftingSkill.CraftParms;
import com.planet_ink.coffee_mud.Abilities.Common.CraftingSkill.CraftingActivity;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.ExpertiseLibrary;
import com.planet_ink.coffee_mud.Libraries.interfaces.ListingLibrary;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2014 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Common crafting skill that lets authorized clan members build clan items
 * (banners, propaganda, area flags, ...) by spending raw resources plus the
 * clan's pooled experience points. Recipes are loaded from clancraft.txt.
 */
@SuppressWarnings({"unchecked","rawtypes"})
public class ClanCrafting extends CraftingSkill implements ItemCraftor
{
    @Override public String ID() { return "ClanCrafting"; }
    private final static String localizedName = CMLib.lang()._("Clan Crafting");
    @Override public String name() { return localizedName; }
    private static final String[] triggerStrings =_i(new String[] {"CLANCRAFT"});
    @Override public String[] triggerStrings(){return triggerStrings;}
    @Override public String supportedResourceString(){return "WOODEN|METAL|MITHRIL";}

    // clan experience charged for the build in progress; refunded on a botch
    protected int expRequired = 0;
    // clan whose experience pool was charged for the build in progress
    protected Clan myClan=null;

    // column layout of each recipe row in clancraft.txt (indices below)
    @Override
    public String parametersFormat(){ return
        "ITEM_NAME\tRESOURCE_NAME_AMOUNT_MATERIAL_REQUIRED\tRESOURCE_NAME_AMOUNT_MATERIAL_REQUIRED\t"
        +"CLAN_ITEM_CODENUMBER\tITEM_LEVEL\tBUILD_TIME_TICKS\tCLAN_EXPERIENCE_COST_AMOUNT\t"
        +"ITEM_BASE_VALUE\tITEM_CLASS_ID\tCLAN_AREA_FLAG||CODED_WEAR_LOCATION||READABLE_TEXT\t"
        +"CONTAINER_CAPACITY\tBASE_ARMOR_AMOUNT\tCONTAINER_TYPE\tCODED_SPELL_LIST\tREQUIRED_COMMON_SKILL_ID";}

    // recipe column indices, matching parametersFormat() above
    protected static final int RCP_FINALNAME=0;
    protected static final int RCP_MATERIAL1=1;
    protected static final int RCP_MATERIAL2=2;
    protected static final int RCP_CITYPE=3;
    protected static final int RCP_LEVEL=4;
    protected static final int RCP_TICKS=5;
    protected static final int RCP_EXP=6;
    protected static final int RCP_VALUE=7;
    protected static final int RCP_CLASSTYPE=8;
    protected static final int RCP_MISCTYPE=9;
    protected static final int RCP_CAPACITY=10;
    protected static final int RCP_ARMORDMG=11;
    protected static final int RCP_CONTAINMASK=12;
    protected static final int RCP_SPELL=13;
    protected static final int RCP_REQUIREDSKILL=14;

    public Hashtable parametersFields(){ return new Hashtable();}
    @Override public String parametersFile(){ return "clancraft.txt";}
    @Override protected List<List<String>> loadRecipes(){return super.loadRecipes(parametersFile());}
    @Override public boolean supportsDeconstruction() { return false; }

    /**
     * Called when the crafting effect ends. On a normal finish the item is
     * dropped and auto-picked-up; on a botch the clan's experience is refunded.
     */
    @Override
    public void unInvoke()
    {
        if(canBeUninvoked())
        {
            if((affected!=null)&&(affected instanceof MOB))
            {
                final MOB mob=(MOB)affected;
                if((buildingI!=null)&&(!aborted))
                {
                    if(messedUp)
                    {
                        commonEmote(mob,"<S-NAME> mess(es) up crafting "+buildingI.name()+".");
                        // refund the clan experience charged when invoke() started
                        if(myClan!=null)
                        {
                            myClan.setExp(myClan.getExp()+expRequired);
                            myClan.update();
                        }
                    }
                    else
                    {
                        dropAWinner(mob,buildingI);
                        CMLib.commands().postGet(mob,null,buildingI,true);
                    }
                }
                buildingI=null;
            }
        }
        super.unInvoke();
    }

    /**
     * Students must belong to a clan AND hold the ENCHANT privilege in it
     * before they may learn this skill.
     */
    @Override
    public boolean canBeLearnedBy(MOB teacher, MOB student)
    {
        if(!super.canBeLearnedBy(teacher,student))
            return false;
        if(student==null) return true;
        if(!student.clans().iterator().hasNext())
        {
            teacher.tell(_("@x1 is not a member of a clan.",student.name()));
            student.tell(_("You need to belong to a clan before you can learn @x1.",name()));
            return false;
        }
        final Pair<Clan,Integer> p=CMLib.clans().findPrivilegedClan(student, Clan.Function.ENCHANT);
        if(p==null)
        {
            teacher.tell(_("@x1 is not authorized to draw from the power of @x2 clan.",student.name(),student.charStats().hisher()));
            student.tell(_("You must be authorized to draw from the power of your clan to learn this skill."));
            return false;
        }
        return true;
    }

    // Component descriptions are not supported for clan recipes.
    @Override
    public String getDecodedComponentsDescription(final MOB mob, final List<String> recipe)
    {
        return "Not implemented";
    }

    /**
     * Entry point for the CLANCRAFT command. Handles "list" and "stop",
     * validates clan membership/privilege, checks and consumes resources and
     * clan experience, then starts the timed crafting effect that unInvoke()
     * completes.
     */
    @Override
    public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel)
    {
        if(super.checkStop(mob, commands))
            return true;
        final CraftParms parsedVars=super.parseAutoGenerate(auto,givenTarget,commands);
        givenTarget=parsedVars.givenTarget;
        randomRecipeFix(mob,addRecipes(mob,loadRecipes()),commands,parsedVars.autoGenerate);
        if(commands.size()==0)
        {
            commonTell(mob,_("Make what? Enter \"clancraft list\" for a list, or \"clancraft stop\" to cancel."));
            return false;
        }
        // resolve the invoking mob's clan and ENCHANT authorization
        // (skipped entirely when auto-generating, e.g. for random treasure)
        String clanTypeName="Clan";
        String clanName="None";
        Clan clanC=null;
        if(parsedVars.autoGenerate<=0)
        {
            if(!mob.clans().iterator().hasNext())
            {
                mob.tell(_("You must be a member of a clan to use this skill."));
                return false;
            }
            final Pair<Clan,Integer> p=CMLib.clans().findPrivilegedClan(mob, Clan.Function.ENCHANT);
            if((p==null)
            &&(!CMSecurity.isASysOp(mob)))
            {
                mob.tell(_("You are not authorized to draw from the power of your clan."));
                return false;
            }
            if(p!=null)
            {
                clanName=p.first.getName();
                clanTypeName=p.first.getGovernmentName();
                clanC=p.first;
            }
        }
        final List<List<String>> recipes=addRecipes(mob,loadRecipes());
        final String str=(String)commands.elementAt(0);
        String startStr=null;
        int duration=4;
        // display column widths, adjusted to the player's session width
        final int[] cols={
            ListingLibrary.ColFixer.fixColWidth(24,mob.session()),
            ListingLibrary.ColFixer.fixColWidth(9,mob.session()),
            ListingLibrary.ColFixer.fixColWidth(14,mob.session()),
            ListingLibrary.ColFixer.fixColWidth(4,mob.session()),
            ListingLibrary.ColFixer.fixColWidth(14,mob.session()),
            ListingLibrary.ColFixer.fixColWidth(4,mob.session())
        };
        // "clancraft list [mask|all]" — print the recipe table and return
        if(str.equalsIgnoreCase("list"))
        {
            String mask=CMParms.combine(commands,1);
            boolean allFlag=false;
            if(mask.equalsIgnoreCase("all"))
            {
                allFlag=true;
                mask="";
            }
            final StringBuffer buf=new StringBuffer("");
            buf.append(CMStrings.padRight(_("Item"),cols[0])+" "
                +CMStrings.padRight(_("Exp"),cols[1])+" "
                +CMStrings.padRight(_("Material#1"),cols[2])+" "
                +CMStrings.padRight(_("Amt#1"),cols[3])+" "
                +CMStrings.padRight(_("Material#2"),cols[4])+" "
                +CMStrings.padRight(_("Amt#2"),cols[5])+"\n\r");
            for(int r=0;r<recipes.size();r++)
            {
                final List<String> V=recipes.get(r);
                if(V.size()>0)
                {
                    final String item=replacePercent(V.get(RCP_FINALNAME),"");
                    final int level=CMath.s_int(V.get(RCP_LEVEL));
                    final int exp=CMath.s_int(V.get(RCP_EXP));
                    // show only level-appropriate recipes unless "all" was given
                    if(((level<=xlevel(mob))||allFlag)
                    &&((mask.length()==0)||mask.equalsIgnoreCase("all")||CMLib.english().containsString(item,mask)))
                    {
                        // material columns are encoded "NAME/AMOUNT"
                        String mat1=V.get(RCP_MATERIAL1);
                        String mat2=V.get(RCP_MATERIAL2);
                        String amt1="";
                        String amt2="";
                        final int m1=mat1.indexOf('/');
                        if(m1>=0)
                        {
                            amt1=mat1.substring(m1+1);
                            mat1=mat1.substring(0,m1).toLowerCase();
                            //amt1=""+adjustWoodRequired(CMath.s_int(amt1),mob);
                        }
                        final int m2=mat2.indexOf('/');
                        if(m2>=0)
                        {
                            amt2=mat2.substring(m2+1);
                            mat2=mat2.substring(0,m2).toLowerCase();
                            //amt2=""+adjustWoodRequired(CMath.s_int(amt2),mob);
                        }
                        buf.append(CMStrings.padRight(item,cols[0])+" "
                            +CMStrings.padRight(""+exp,cols[1])+" "
                            +CMStrings.padRight(mat1,cols[2])+" "
                            +CMStrings.padRight(amt1,cols[3])+" "
                            +CMStrings.padRight(mat2,cols[4])+" "
                            +CMStrings.padRight(amt2,cols[5])+"\n\r");
                    }
                }
            }
            buf.append("\n\r");
            commonTell(mob,buf.toString());
            return true;
        }
        // begin an actual crafting attempt
        activity = CraftingActivity.CRAFTING;
        buildingI=null;
        messedUp=false;
        // find the first level-appropriate recipe matching the requested name
        final String recipeName=CMParms.combine(commands,0);
        List<String> foundRecipe=null;
        final List<List<String>> matches=matchingRecipeNames(recipes,recipeName,true);
        for(int r=0;r<matches.size();r++)
        {
            final List<String> V=matches.get(r);
            if(V.size()>0)
            {
                final int level=CMath.s_int(V.get(RCP_LEVEL));
                if((parsedVars.autoGenerate>0)||(level<=xlevel(mob)))
                {
                    foundRecipe=V;
                    break;
                }
            }
        }
        if(foundRecipe==null)
        {
            commonTell(mob,_("You don't know how to make a '@x1'. Try \"clancraft list\" for a list.",recipeName));
            return false;
        }
        // decode the two "NAME/AMOUNT" material requirements
        int amt1=0;
        int amt2=0;
        String mat1=foundRecipe.get(RCP_MATERIAL1);
        String mat2=foundRecipe.get(RCP_MATERIAL2);
        final int m1=mat1.indexOf('/');
        if(m1>=0)
        {
            amt1=CMath.s_int(mat1.substring(m1+1));
            mat1=mat1.substring(0,m1).toLowerCase();
            //amt1=adjustWoodRequired(amt1, mob);
        }
        final int m2=mat2.indexOf('/');
        if(m2>=0)
        {
            amt2=CMath.s_int(mat2.substring(m2+1));
            mat2=mat2.substring(0,m2).toLowerCase();
            //amt2=adjustWoodRequired(amt2, mob);
        }
        // verify the clan can afford the experience cost before consuming anything
        expRequired=CMath.s_int(foundRecipe.get(RCP_EXP));
        expRequired=getXPCOSTAdjustment(mob,expRequired);
        if((clanC!=null)&&(clanC.getExp()<expRequired))
        {
            mob.tell(_("You need @x1 to do that, but your @x2 has only @x3 experience points.",""+expRequired,clanTypeName,""+clanC.getExp()));
            return false;
        }
        // locate the required raw resources in the room
        final int[][] data=fetchFoundResourceData(mob,amt1,mat1,null,amt2,mat2,null,false,parsedVars.autoGenerate,null);
        if(data==null) return false;
        amt1=data[0][FOUND_AMT];
        amt2=data[1][FOUND_AMT];
        // optional prerequisite common skill
        final String reqskill=foundRecipe.get(RCP_REQUIREDSKILL);
        if((parsedVars.autoGenerate<=0)&&(reqskill.trim().length()>0))
        {
            final Ability A=CMClass.findAbility(reqskill.trim());
            if((A!=null)&&(mob.fetchAbility(A.ID())==null))
            {
                commonTell(mob,_("You need to know @x1 to craft this item.",A.name()));
                return false;
            }
        }
        if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
            return false;
        // consume the raw resources (skipped when auto-generating)
        if((amt1>0)&&(parsedVars.autoGenerate<=0))
            CMLib.materials().destroyResourcesValue(mob.location(),amt1,data[0][FOUND_CODE],0,null);
        if((amt2>0)&&(parsedVars.autoGenerate<=0))
            CMLib.materials().destroyResourcesValue(mob.location(),amt2,data[1][FOUND_CODE],0,null);
        // instantiate the item class named by the recipe
        buildingI=CMClass.getItem(foundRecipe.get(RCP_CLASSTYPE));
        if(buildingI==null)
        {
            commonTell(mob,_("There's no such thing as a @x1!!!",foundRecipe.get(RCP_CLASSTYPE)));
            return false;
        }
        duration=getDuration(CMath.s_int(foundRecipe.get(RCP_TICKS)),mob,CMath.s_int(foundRecipe.get(RCP_LEVEL)),4);
        // misctype "area" = clan area flag; anything else = wear location / readable text
        final String misctype=foundRecipe.get(RCP_MISCTYPE);
        String itemName=null;
        if(!misctype.equalsIgnoreCase("area"))
        {
            if(foundRecipe.get(RCP_FINALNAME).trim().startsWith("%"))
                itemName=replacePercent(foundRecipe.get(RCP_FINALNAME),clanTypeName+" "+clanName);
            else
                itemName=replacePercent(foundRecipe.get(RCP_FINALNAME),"of "+clanTypeName+" "+clanName);
            if(misctype.length()>0)
                buildingI.setReadableText(misctype);
        }
        else
        {
            // area flags may only be built in territory legally ruled by the crafter's clan
            final LegalBehavior B=CMLib.law().getLegalBehavior(mob.location().getArea());
            final Area A2=CMLib.law().getLegalObject(mob.location().getArea());
            if((B==null)||(A2==null))
            {
                commonTell(mob,_("This area is controlled by the Archons -- you can't build that here."));
                return false;
            }
            if((B.rulingOrganization().length()==0)||(mob.getClanRole(B.rulingOrganization())==null))
            {
                commonTell(mob,_("This area is not controlled by your clan -- you can't build that here."));
                return false;
            }
            itemName=replacePercent(foundRecipe.get(RCP_FINALNAME),"of "+A2.name()).toLowerCase();
            buildingI.setReadableText(A2.name());
        }
        itemName=CMLib.english().startWithAorAn(itemName);
        buildingI.setName(itemName);
        startStr=_("<S-NAME> start(s) crafting @x1.",buildingI.name());
        displayText=_("You are crafting @x1",buildingI.name());
        playSound="sanding.wav";
        verb=_("crafting @x1",buildingI.name());
        buildingI.setDisplayText(_("@x1 lies here",itemName));
        buildingI.setDescription(itemName+". ");
        buildingI.basePhyStats().setWeight(amt1+amt2);
        buildingI.setBaseValue(CMath.s_int(foundRecipe.get(RCP_VALUE)));
        buildingI.setMaterial(data[0][FOUND_CODE]);
        // harder materials raise the item's effective level
        final int hardness=RawMaterial.CODES.HARDNESS(data[0][FOUND_CODE])-6;
        buildingI.basePhyStats().setLevel(CMath.s_int(foundRecipe.get(RCP_LEVEL))+(hardness*3));
        if(buildingI.basePhyStats().level()<1) buildingI.basePhyStats().setLevel(1);
        final int capacity=CMath.s_int(foundRecipe.get(RCP_CAPACITY));
        final long canContain=getContainerType(foundRecipe.get(RCP_CONTAINMASK));
        final int armordmg=CMath.s_int(foundRecipe.get(RCP_ARMORDMG));
        buildingI.setSecretIdentity(getBrand(mob));
        final String spell=(foundRecipe.size()>RCP_SPELL)?foundRecipe.get(RCP_SPELL).trim():"";
        // clan-item specific setup: bind to the clan, handle propaganda specials
        if(buildingI instanceof ClanItem)
        {
            buildingI.basePhyStats().setSensesMask(PhyStats.SENSE_UNLOCATABLE);
            if(clanC!=null)
                ((ClanItem)buildingI).setClanID(clanC.clanID());
            else
            if(CMLib.clans().numClans()>0)
                ((ClanItem)buildingI).setClanID(CMLib.clans().clans().nextElement().clanID());
            ((ClanItem)buildingI).setCIType(CMath.s_int(foundRecipe.get(RCP_CITYPE)));
            if(((ClanItem)buildingI).ciType()==ClanItem.CI_PROPAGANDA)
            {
                buildingI.setMaterial(RawMaterial.RESOURCE_PAPER);
                CMLib.flags().setReadable(buildingI,true);
                buildingI.setReadableText("Read the glorious propaganda of "+clanTypeName+" "+clanName.toLowerCase()+"! Join and fight for us today!");
            }
        }
        // readable wooden items become paper
        if((buildingI.isReadable())
        &&((buildingI.material()&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_WOODEN))
            buildingI.setMaterial(RawMaterial.RESOURCE_PAPER);
        addSpells(buildingI,spell);
        if((buildingI instanceof Armor)&&(!(buildingI instanceof FalseLimb)))
        {
            ((Armor)buildingI).basePhyStats().setArmor(0);
            if(armordmg!=0)
                ((Armor)buildingI).basePhyStats().setArmor(armordmg+(abilityCode()-1));
            setWearLocation(buildingI,misctype,hardness);
        }
        if(buildingI instanceof Container)
        if(capacity>0)
        {
            ((Container)buildingI).setCapacity(capacity+amt1+amt2);
            ((Container)buildingI).setContainTypes(canContain);
        }
        buildingI.recoverPhyStats();
        buildingI.text();
        buildingI.recoverPhyStats();
        // proficiency roll decides success; unInvoke() acts on messedUp
        messedUp=!proficiencyCheck(mob,0,auto);
        if(parsedVars.autoGenerate>0)
        {
            commands.addElement(buildingI);
            return true;
        }
        final CMMsg msg=CMClass.getMsg(mob,buildingI,this,getActivityMessageType(),startStr);
        if(mob.location().okMessage(mob,msg))
        {
            mob.location().send(mob,msg);
            beneficialAffect(mob,mob,asLevel,duration);
            // charge the clan's experience up front; unInvoke() refunds on a botch
            final ClanCrafting CC=(ClanCrafting)mob.fetchEffect(ID());
            if((CC!=null)&&(clanC!=null))
            {
                clanC.setExp(clanC.getExp()-expRequired);
                clanC.update();
                CC.expRequired=expRequired;
                CC.myClan=clanC;
            }
        }
        return true;
    }
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.tools.transfer.database.page;
import org.eclipse.core.runtime.Status;
import org.eclipse.jface.dialogs.StatusDialog;
import org.eclipse.jface.viewers.*;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.KeyAdapter;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.TableItem;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.model.DBPDataSource;
import org.jkiss.dbeaver.model.DBPDataTypeProvider;
import org.jkiss.dbeaver.model.DBPEvaluationContext;
import org.jkiss.dbeaver.model.DBUtils;
import org.jkiss.dbeaver.model.runtime.VoidProgressMonitor;
import org.jkiss.dbeaver.model.struct.DBSDataType;
import org.jkiss.dbeaver.model.struct.DBSEntity;
import org.jkiss.dbeaver.model.struct.DBSEntityAttribute;
import org.jkiss.dbeaver.tools.transfer.database.DatabaseConsumerSettings;
import org.jkiss.dbeaver.tools.transfer.database.DatabaseMappingAttribute;
import org.jkiss.dbeaver.tools.transfer.database.DatabaseMappingContainer;
import org.jkiss.dbeaver.tools.transfer.database.DatabaseMappingType;
import org.jkiss.dbeaver.tools.transfer.wizard.DataTransferWizard;
import org.jkiss.dbeaver.ui.DBeaverIcons;
import org.jkiss.dbeaver.ui.SharedTextColors;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.controls.CustomComboBoxCellEditor;
import org.jkiss.dbeaver.ui.controls.ListContentProvider;
import org.jkiss.dbeaver.utils.GeneralUtils;
import org.jkiss.utils.CommonUtils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.Set;
/**
 * Dialog that lets the user edit the per-column mappings of one
 * {@link DatabaseMappingContainer} during a data transfer.
 * <p>
 * Shows one row per source column with an editable target column name,
 * an editable target type, and a read-only mapping kind
 * (existing / new / skip / unspecified).
 */
public class ColumnsMappingDialog extends StatusDialog {

    /** Consumer-side transfer settings; used to resolve the target data source. */
    private final DatabaseConsumerSettings settings;
    /** The container (target table) whose attribute mappings are being edited. */
    private final DatabaseMappingContainer mapping;
    /** Attribute (column) mappings displayed as the rows of {@link #mappingViewer}. */
    private final Collection<DatabaseMappingAttribute> attributeMappings;
    /** Viewer over {@link #attributeMappings}; created in {@link #createDialogArea}. */
    private TableViewer mappingViewer;
    /** Bold font for target name/type cells; created in createDialogArea(), disposed in {@link #close()}. */
    private Font boldFont;

    public ColumnsMappingDialog(DataTransferWizard wizard, DatabaseConsumerSettings settings, DatabaseMappingContainer mapping)
    {
        super(wizard.getShell());
        this.settings = settings;
        this.mapping = mapping;
        // Attribute mappings are resolved eagerly, using the wizard container as the runnable context.
        attributeMappings = mapping.getAttributeMappings(wizard.getContainer());
    }

    @Override
    protected boolean isResizable()
    {
        // The mapping table benefits from extra space, so allow resizing.
        return true;
    }

    /**
     * Builds the dialog contents: two informational labels (source/target entity)
     * and the column-mapping table with its label providers, cell editors and
     * keyboard shortcuts (DEL = skip column, SPACE = map to existing column).
     */
    @Override
    protected Control createDialogArea(Composite parent)
    {
        // May be null if no target container has been chosen yet; shown as "?" below.
        DBPDataSource targetDataSource = settings.getTargetDataSource(mapping);
        getShell().setText("Map columns of " + mapping.getTargetName());
        boldFont = UIUtils.makeBoldFont(parent.getFont());
        Composite composite = new Composite(parent, SWT.NONE);
        composite.setLayout(new GridLayout(1, false));
        composite.setLayoutData(new GridData(GridData.FILL_BOTH));
        new Label(composite, SWT.NONE).setText("Source entity: " + DBUtils.getObjectFullName(mapping.getSource(), DBPEvaluationContext.UI) +
            " [" + mapping.getSource().getDataSource().getContainer().getName() + "]");
        new Label(composite, SWT.NONE).setText("Target entity: " + mapping.getTargetName() +
            " [" + (targetDataSource == null ? "?" : targetDataSource.getContainer().getName()) + "]");
        mappingViewer = new TableViewer(composite, SWT.BORDER | SWT.MULTI | SWT.FULL_SELECTION);
        GridData gd = new GridData(GridData.FILL_BOTH);
        gd.widthHint = 600;
        gd.heightHint = 300;
        mappingViewer.getTable().setLayoutData(gd);
        mappingViewer.getTable().setLinesVisible(true);
        mappingViewer.getTable().setHeaderVisible(true);
        mappingViewer.setContentProvider(new ListContentProvider());
        // Keyboard shortcuts on the table:
        //   DEL   - mark all selected columns as skipped
        //   SPACE - map all selected columns onto existing target columns
        mappingViewer.getTable().addKeyListener(new KeyAdapter() {
            @Override
            public void keyReleased(KeyEvent e) {
                if (e.character == SWT.DEL) {
                    for (TableItem item : mappingViewer.getTable().getSelection()) {
                        DatabaseMappingAttribute attribute = (DatabaseMappingAttribute) item.getData();
                        attribute.setMappingType(DatabaseMappingType.skip);
                    }
                    updateStatus(Status.OK_STATUS);
                    mappingViewer.refresh();
                } else if (e.character == SWT.SPACE) {
                    for (TableItem item : mappingViewer.getTable().getSelection()) {
                        DatabaseMappingAttribute attribute = (DatabaseMappingAttribute) item.getData();
                        attribute.setMappingType(DatabaseMappingType.existing);
                        try {
                            // May hit metadata; surface any error in the dialog status line.
                            attribute.updateMappingType(new VoidProgressMonitor());
                        } catch (DBException e1) {
                            updateStatus(GeneralUtils.makeExceptionStatus(e1));
                        }
                    }
                    mappingViewer.refresh();
                }
            }
        });
        {
            // Column 1: read-only source column name + icon.
            TableViewerColumn columnSource = new TableViewerColumn(mappingViewer, SWT.LEFT);
            columnSource.setLabelProvider(new CellLabelProvider() {
                @Override
                public void update(ViewerCell cell)
                {
                    DatabaseMappingAttribute attrMapping = (DatabaseMappingAttribute) cell.getElement();
                    cell.setText(DBUtils.getObjectFullName(attrMapping.getSource(), DBPEvaluationContext.UI));
                    if (attrMapping.getIcon() != null) {
                        cell.setImage(DBeaverIcons.getImage(attrMapping.getIcon()));
                    }
                }
            });
            columnSource.getColumn().setText("Source Column");
            columnSource.getColumn().setWidth(170);
        }
        {
            // Column 2: read-only source column type.
            TableViewerColumn columnSourceType = new TableViewerColumn(mappingViewer, SWT.LEFT);
            columnSourceType.setLabelProvider(new CellLabelProvider() {
                @Override
                public void update(ViewerCell cell)
                {
                    cell.setText(((DatabaseMappingAttribute) cell.getElement()).getSourceType());
                }
            });
            columnSourceType.getColumn().setText("Source Type");
            columnSourceType.getColumn().setWidth(100);
        }
        {
            // Column 3: editable target column name (combo of existing columns + "skip").
            TableViewerColumn columnTarget = new TableViewerColumn(mappingViewer, SWT.LEFT);
            columnTarget.setLabelProvider(new CellLabelProvider() {
                @Override
                public void update(ViewerCell cell)
                {
                    DatabaseMappingAttribute mapping = (DatabaseMappingAttribute) cell.getElement();
                    cell.setText(mapping.getTargetName());
                    // Unmapped columns are highlighted with the shared warning color.
                    if (mapping.getMappingType() == DatabaseMappingType.unspecified) {
                        cell.setBackground(UIUtils.getSharedTextColors().getColor(SharedTextColors.COLOR_WARNING));
                    } else {
                        cell.setBackground(null);
                    }
                    cell.setFont(boldFont);
                }
            });
            columnTarget.getColumn().setText("Target Column");
            columnTarget.getColumn().setWidth(170);
            columnTarget.setEditingSupport(new EditingSupport(mappingViewer) {
                @Override
                protected CellEditor getCellEditor(Object element)
                {
                    try {
                        java.util.List<String> items = new ArrayList<>();
                        DatabaseMappingAttribute mapping = (DatabaseMappingAttribute) element;
                        // Offer the existing target entity's attributes only when mapping onto an existing table.
                        if (mapping.getParent().getMappingType() == DatabaseMappingType.existing &&
                            mapping.getParent().getTarget() instanceof DBSEntity)
                        {
                            DBSEntity parentEntity = (DBSEntity)mapping.getParent().getTarget();
                            for (DBSEntityAttribute attr : CommonUtils.safeCollection(parentEntity.getAttributes(new VoidProgressMonitor()))) {
                                items.add(attr.getName());
                            }
                        }
                        // The special "skip" item is always available.
                        items.add(DatabaseMappingAttribute.TARGET_NAME_SKIP);
                        CustomComboBoxCellEditor editor = new CustomComboBoxCellEditor(
                            mappingViewer,
                            mappingViewer.getTable(),
                            items.toArray(new String[items.size()]),
                            SWT.DROP_DOWN);
                        updateStatus(Status.OK_STATUS);
                        return editor;
                    } catch (DBException e) {
                        // Reading attributes may fail (e.g. connection problems); report via the status line.
                        updateStatus(GeneralUtils.makeExceptionStatus(e));
                        return null;
                    }
                }
                @Override
                protected boolean canEdit(Object element)
                {
                    return true;
                }
                @Override
                protected Object getValue(Object element)
                {
                    return ((DatabaseMappingAttribute)element).getTargetName();
                }
                @Override
                protected void setValue(Object element, Object value)
                {
                    try {
                        String name = CommonUtils.toString(value);
                        DatabaseMappingAttribute attrMapping = (DatabaseMappingAttribute) element;
                        if (DatabaseMappingAttribute.TARGET_NAME_SKIP.equals(name)) {
                            attrMapping.setMappingType(DatabaseMappingType.skip);
                        } else {
                            // If the typed name matches an existing column (case-insensitive), map onto it ...
                            if (attrMapping.getParent().getMappingType() == DatabaseMappingType.existing &&
                                attrMapping.getParent().getTarget() instanceof DBSEntity)
                            {
                                DBSEntity parentEntity = (DBSEntity)attrMapping.getParent().getTarget();
                                for (DBSEntityAttribute attr : CommonUtils.safeCollection(parentEntity.getAttributes(new VoidProgressMonitor()))) {
                                    if (name.equalsIgnoreCase(attr.getName())) {
                                        attrMapping.setTarget(attr);
                                        attrMapping.setMappingType(DatabaseMappingType.existing);
                                        return;
                                    }
                                }
                            }
                            // ... otherwise treat it as a new column to be created.
                            attrMapping.setMappingType(DatabaseMappingType.create);
                            attrMapping.setTargetName(name);
                        }
                        updateStatus(Status.OK_STATUS);
                    } catch (DBException e) {
                        updateStatus(GeneralUtils.makeExceptionStatus(e));
                    } finally {
                        mappingViewer.refresh();
                    }
                }
            });
        }
        {
            // Column 4: editable target data type (combo of the target data source's local types).
            TableViewerColumn columnTargetType = new TableViewerColumn(mappingViewer, SWT.LEFT);
            columnTargetType.setLabelProvider(new CellLabelProvider() {
                @Override
                public void update(ViewerCell cell)
                {
                    DatabaseMappingAttribute attrMapping = (DatabaseMappingAttribute) cell.getElement();
                    DBPDataSource dataSource = settings.getTargetDataSource(attrMapping);
                    cell.setText(attrMapping.getTargetType(dataSource));
                    cell.setFont(boldFont);
                }
            });
            columnTargetType.getColumn().setText("Target Type");
            columnTargetType.getColumn().setWidth(100);
            columnTargetType.setEditingSupport(new EditingSupport(mappingViewer) {
                @Override
                protected CellEditor getCellEditor(Object element)
                {
                    DatabaseMappingAttribute attrMapping = (DatabaseMappingAttribute) element;
                    Set<String> types = new LinkedHashSet<>();
                    DBPDataSource dataSource = settings.getTargetDataSource(attrMapping);
                    if (dataSource instanceof DBPDataTypeProvider) {
                        for (DBSDataType type : ((DBPDataTypeProvider) dataSource).getLocalDataTypes()) {
                            types.add(type.getName());
                        }
                    }
                    // Make sure the currently resolved type is always present in the combo.
                    types.add(attrMapping.getTargetType(dataSource));
                    return new CustomComboBoxCellEditor(mappingViewer, mappingViewer.getTable(), types.toArray(new String[types.size()]), SWT.BORDER);
                }
                @Override
                protected boolean canEdit(Object element)
                {
                    return true;
                }
                @Override
                protected Object getValue(Object element)
                {
                    DatabaseMappingAttribute attrMapping = (DatabaseMappingAttribute) element;
                    return attrMapping.getTargetType(settings.getTargetDataSource(attrMapping));
                }
                @Override
                protected void setValue(Object element, Object value)
                {
                    DatabaseMappingAttribute attrMapping = (DatabaseMappingAttribute) element;
                    attrMapping.setTargetType(CommonUtils.toString(value));
                    mappingViewer.refresh(element);
                }
            });
        }
        {
            // Column 5: read-only mapping kind indicator.
            TableViewerColumn columnType = new TableViewerColumn(mappingViewer, SWT.LEFT);
            columnType.setLabelProvider(new CellLabelProvider() {
                @Override
                public void update(ViewerCell cell)
                {
                    DatabaseMappingAttribute mapping = (DatabaseMappingAttribute) cell.getElement();
                    String text = "";
                    switch (mapping.getMappingType()) {
                        case unspecified: text = "?"; break;
                        case existing: text = "existing"; break;
                        case create: text = "new"; break;
                        case skip: text = "skip"; break;
                    }
                    cell.setText(text);
                }
            });
            columnType.getColumn().setText("Mapping");
            columnType.getColumn().setWidth(60);
        }
        mappingViewer.setInput(attributeMappings);
        // NOTE(review): returns parent rather than the created composite; JFace's
        // createDialogArea() convention is usually to return the composite — confirm intended.
        return parent;
    }

    @Override
    protected void okPressed()
    {
        // All edits are applied directly to the mapping model as the user types,
        // so there is nothing extra to commit here.
        super.okPressed();
    }

    @Override
    public boolean close()
    {
        // Dispose the font allocated in createDialogArea() to avoid a resource leak.
        UIUtils.dispose(boldFont);
        return super.close();
    }
}
| |
/*-
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*
*/
package org.nd4j.linalg.api.buffer.factory;
import org.bytedeco.javacpp.DoublePointer;
import org.bytedeco.javacpp.FloatPointer;
import org.bytedeco.javacpp.IntPointer;
import org.bytedeco.javacpp.Pointer;
import org.bytedeco.javacpp.indexer.Indexer;
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.memory.MemoryWorkspace;
import java.nio.ByteBuffer;
/**
 * DataBufferFactory: creates {@link DataBuffer} instances for a specified
 * data type (double, float, int, half) from arrays, byte buffers, native
 * pointers, or by plain allocation — optionally offset views or
 * workspace-backed buffers.
 *
 * @author Adam Gibson
 */
public interface DataBufferFactory {
    /**
     * Setter for the allocation mode
     * @param allocationMode the allocation mode to use for subsequently created buffers
     */
    void setAllocationMode(DataBuffer.AllocationMode allocationMode);

    /**
     * Allocation mode for the data buffer
     * @return the current allocation mode
     */
    DataBuffer.AllocationMode allocationMode();

    /**
     * Create a databuffer wrapping another one
     * this allows you to create a view of a buffer
     * with a different offset and length
     * backed by the same storage
     * @param underlyingBuffer the underlying buffer to get the storage from
     * @param offset the offset to view the data as
     * @param length the length of the buffer
     * @return the databuffer as a view
     */
    DataBuffer create(DataBuffer underlyingBuffer, long offset, long length);

    /**
     * Create an int buffer from a byte buffer, viewed at the given offset
     * @param offset the offset to view the data at
     * @param buffer the byte buffer to read from
     * @param length the length of the buffer
     * @return the new buffer
     */
    DataBuffer createInt(long offset, ByteBuffer buffer, int length);

    /**
     * Create a float data buffer from a byte buffer, viewed at the given offset
     * @param offset the offset to view the data at
     * @param buffer the byte buffer to read from
     * @param length the length of the buffer
     * @return the new buffer
     */
    DataBuffer createFloat(long offset, ByteBuffer buffer, int length);

    /**
     * Creates a double data buffer from a byte buffer, viewed at the given offset
     * @param offset the offset to view the data at
     * @param buffer the byte buffer to read from
     * @param length the length of the buffer
     * @return the new buffer
     */
    DataBuffer createDouble(long offset, ByteBuffer buffer, int length);

    /**
     * Create a double data buffer of the given length at the given offset
     *
     * @return the new data buffer
     */
    DataBuffer createDouble(long offset, int length);

    /**
     * This method will create a new DataBuffer of the same dataType and same length
     * @param buffer the buffer whose type and length to replicate
     * @param init if true, initialize (zero) the new buffer's memory
     * @return the new buffer
     */
    DataBuffer createSame(DataBuffer buffer, boolean init);

    /**
     * This method will create a new DataBuffer of the same dataType and same length,
     * allocated inside the given workspace
     * @param buffer the buffer whose type and length to replicate
     * @param init if true, initialize (zero) the new buffer's memory
     * @param workspace the workspace to allocate in
     * @return the new buffer
     */
    DataBuffer createSame(DataBuffer buffer, boolean init, MemoryWorkspace workspace);

    /**
     * Create a float data buffer of the given length at the given offset
     *
     * @param length the length of the buffer
     * @return the new data buffer
     */
    DataBuffer createFloat(long offset, int length);

    /**
     * Create an int data buffer of the given length at the given offset
     *
     * @param length the length of the data buffer
     * @return the created data buffer
     */
    DataBuffer createInt(long offset, int length);

    /**
     * Creates a double data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createDouble(long offset, int[] data);

    /**
     * Creates a float data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createFloat(long offset, int[] data);

    /**
     * Creates an int data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createInt(long offset, int[] data);

    /**
     * Creates a double data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createDouble(long offset, double[] data);

    /** Same as {@link #createDouble(long, double[])}, allocated in the given workspace. */
    DataBuffer createDouble(long offset, double[] data, MemoryWorkspace workspace);

    /**
     * Create a double data buffer from raw bytes
     * @param data the raw bytes to create the buffer from
     * @param length the length of the buffer
     * @return the new buffer
     */
    DataBuffer createDouble(long offset, byte[] data, int length);

    /**
     * Create a float data buffer from raw bytes
     * @param data the raw bytes to create the buffer from
     * @param length the length of the buffer
     * @return the new buffer
     */
    DataBuffer createFloat(long offset, byte[] data, int length);

    /**
     * Creates a float data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createFloat(long offset, double[] data);

    /**
     * Creates an int data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createInt(long offset, double[] data);

    /**
     * Creates a double data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createDouble(long offset, float[] data);

    /**
     * Creates a float data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createFloat(long offset, float[] data);

    /** Same as {@link #createFloat(long, float[])}, allocated in the given workspace. */
    DataBuffer createFloat(long offset, float[] data, MemoryWorkspace workspace);

    /**
     * Creates an int data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createInt(long offset, float[] data);

    /**
     * Creates a double data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createDouble(long offset, int[] data, boolean copy);

    /**
     * Creates a float data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createFloat(long offset, int[] data, boolean copy);

    /**
     * Creates an int data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createInt(long offset, int[] data, boolean copy);

    /**
     * Creates a double data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createDouble(long offset, double[] data, boolean copy);

    /**
     * Creates a float data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createFloat(long offset, double[] data, boolean copy);

    /**
     * Creates an int data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createInt(long offset, double[] data, boolean copy);

    /**
     * Creates a double data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createDouble(long offset, float[] data, boolean copy);

    /**
     * Creates a float data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createFloat(long offset, float[] data, boolean copy);

    /**
     * Creates an int data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createInt(long offset, float[] data, boolean copy);

    /**
     * Create an int buffer from a byte buffer
     * @param buffer the byte buffer to read from
     * @param length the length of the buffer
     * @return the new buffer
     */
    DataBuffer createInt(ByteBuffer buffer, int length);

    /**
     * Create a float data buffer from a byte buffer
     * @param buffer the byte buffer to read from
     * @param length the length of the buffer
     * @return the new buffer
     */
    DataBuffer createFloat(ByteBuffer buffer, int length);

    /**
     * Creates a double data buffer from a byte buffer
     * @param buffer the byte buffer to read from
     * @param length the length of the buffer
     * @return the new buffer
     */
    DataBuffer createDouble(ByteBuffer buffer, int length);

    /**
     * Create a double data buffer
     *
     * @return the new data buffer
     */
    DataBuffer createDouble(long length);

    /**
     * Create a double data buffer, with optional initialization
     *
     * @param initialize If true: initialize the buffer. If false: don't initialize.
     *
     * @return the new data buffer
     */
    DataBuffer createDouble(long length, boolean initialize);

    /** Same as {@link #createDouble(long, boolean)}, allocated in the given workspace. */
    DataBuffer createDouble(long length, boolean initialize, MemoryWorkspace workspace);

    /**
     * Create a float data buffer
     *
     * @param length the length of the buffer
     * @return the new data buffer
     */
    DataBuffer createFloat(long length);

    /**
     * Create a float data buffer, with optional initialization
     *
     * @param length the length of the buffer
     * @param initialize If true: initialize the buffer. If false: don't initialize.
     * @return the new data buffer
     */
    DataBuffer createFloat(long length, boolean initialize);

    /** Same as {@link #createFloat(long, boolean)}, allocated in the given workspace. */
    DataBuffer createFloat(long length, boolean initialize, MemoryWorkspace workspace);

    /**
     * Create an int data buffer
     *
     * @param length the length of the data buffer
     * @return the created data buffer
     */
    DataBuffer createInt(long length);

    /**
     * Create an int data buffer, with optional initialization
     *
     * @param length the length of the data buffer
     * @param initialize If true: initialize the buffer. If false: don't initialize.
     * @return the created data buffer
     */
    DataBuffer createInt(long length, boolean initialize);

    /** Same as {@link #createInt(long, boolean)}, allocated in the given workspace. */
    DataBuffer createInt(long length, boolean initialize, MemoryWorkspace workspace);

    /**
     * Creates a double data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createDouble(int[] data);

    /**
     * Creates a float data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createFloat(int[] data);

    /**
     * Creates an int data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createInt(int[] data);

    /** Same as {@link #createInt(int[])}, allocated in the given workspace. */
    DataBuffer createInt(int[] data, MemoryWorkspace workspace);

    /** Same as {@link #createInt(int[])}, with optional copy, allocated in the given workspace. */
    DataBuffer createInt(int[] data, boolean copy, MemoryWorkspace workspace);

    /**
     * Creates a double data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createDouble(double[] data);

    /**
     * Create a double data buffer from raw bytes
     * @param data the raw bytes to create the buffer from
     * @param length the length of the buffer
     * @return the new buffer
     */
    DataBuffer createDouble(byte[] data, int length);

    /**
     * Create a float data buffer from raw bytes
     * @param data the raw bytes to create the buffer from
     * @param length the length of the buffer
     * @return the new buffer
     */
    DataBuffer createFloat(byte[] data, int length);

    /**
     * Creates a float data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createFloat(double[] data);

    /**
     * Creates an int data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createInt(double[] data);

    /**
     * Creates a double data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createDouble(float[] data);

    /**
     * Creates a float data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createFloat(float[] data);

    /** Same as {@link #createFloat(float[])}, allocated in the given workspace. */
    DataBuffer createFloat(float[] data, MemoryWorkspace workspace);

    /**
     * Creates an int data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createInt(float[] data);

    /**
     * Creates a double data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createDouble(int[] data, boolean copy);

    /**
     * Creates a float data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createFloat(int[] data, boolean copy);

    /**
     * Creates an int data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createInt(int[] data, boolean copy);

    /**
     * Creates a double data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createDouble(double[] data, boolean copy);

    /**
     * Creates a double data buffer, allocated in the given workspace
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createDouble(double[] data, MemoryWorkspace workspace);

    /**
     * Creates a double data buffer, with optional copy, allocated in the given workspace
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createDouble(double[] data, boolean copy, MemoryWorkspace workspace);

    /**
     * Creates a float data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createFloat(double[] data, boolean copy);

    /**
     * Creates an int data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createInt(double[] data, boolean copy);

    /**
     * Creates a double data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createDouble(float[] data, boolean copy);

    /**
     * Creates a float data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createFloat(float[] data, boolean copy);

    /** Same as {@link #createFloat(float[], boolean)}, allocated in the given workspace. */
    DataBuffer createFloat(float[] data, boolean copy, MemoryWorkspace workspace);

    /**
     * Creates an int data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createInt(float[] data, boolean copy);

    /**
     * Create a data buffer based on the
     * given pointer, data buffer type,
     * and length of the buffer
     * @param pointer the pointer to use
     * @param type the type of buffer
     * @param length the length of the buffer
     * @param indexer the indexer used to access the pointer's memory
     * @return the data buffer
     * backed by this pointer with the given
     * type and length.
     */
    DataBuffer create(Pointer pointer, DataBuffer.Type type, long length, Indexer indexer);

    /**
     * Create a double data buffer backed by the given native pointer
     * @param doublePointer the pointer holding the data
     * @param length the length of the buffer
     * @return the new buffer
     */
    DataBuffer create(DoublePointer doublePointer, long length);

    /**
     * Create an int data buffer backed by the given native pointer
     * @param intPointer the pointer holding the data
     * @param length the length of the buffer
     * @return the new buffer
     */
    DataBuffer create(IntPointer intPointer, long length);

    /**
     * Create a float data buffer backed by the given native pointer
     * @param floatPointer the pointer holding the data
     * @param length the length of the buffer
     * @return the new buffer
     */
    DataBuffer create(FloatPointer floatPointer, long length);

    /**
     * Creates half-precision data buffer
     *
     * @param length length of new data buffer
     * @return the new buffer
     */
    DataBuffer createHalf(long length);

    /**
     * Creates half-precision data buffer
     *
     * @param length length of new data buffer
     * @param initialize true if memset should be used on allocated memory, false otherwise
     * @return the new buffer
     */
    DataBuffer createHalf(long length, boolean initialize);

    /** Same as {@link #createHalf(long, boolean)}, allocated in the given workspace. */
    DataBuffer createHalf(long length, boolean initialize, MemoryWorkspace workspace);

    /**
     * Creates a half-precision data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createHalf(float[] data, boolean copy);

    /** Same as {@link #createHalf(float[])}, allocated in the given workspace. */
    DataBuffer createHalf(float[] data, MemoryWorkspace workspace);

    /** Same as {@link #createHalf(float[], boolean)}, allocated in the given workspace. */
    DataBuffer createHalf(float[] data, boolean copy, MemoryWorkspace workspace);

    /**
     * Creates a half-precision data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createHalf(double[] data, boolean copy);

    /**
     * Creates a half-precision data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createHalf(long offset, double[] data, boolean copy);

    /**
     * Creates a half-precision data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createHalf(long offset, float[] data, boolean copy);

    /**
     * Creates a half-precision data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createHalf(long offset, int[] data, boolean copy);

    /**
     * Creates a half-precision data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createHalf(long offset, double[] data);

    /**
     * Creates a half-precision data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createHalf(long offset, float[] data);

    /** Same as {@link #createHalf(long, float[])}, allocated in the given workspace. */
    DataBuffer createHalf(long offset, float[] data, MemoryWorkspace workspace);

    /**
     * Creates a half-precision data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createHalf(long offset, int[] data);

    /**
     * Creates a half-precision data buffer from raw bytes
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createHalf(long offset, byte[] data, boolean copy);

    /**
     * Creates a half-precision data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createHalf(int[] data, boolean copy);

    /**
     * Creates a half-precision data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createHalf(float[] data);

    /**
     * Creates a half-precision data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createHalf(double[] data);

    /**
     * Creates a half-precision data buffer
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createHalf(int[] data);

    /**
     * Creates a half-precision data buffer from raw bytes
     *
     * @param data the data to create the buffer from
     * @return the new buffer
     */
    DataBuffer createHalf(long offset, byte[] data, int length);

    /**
     * Creates a half-precision data buffer
     *
     * @return the new buffer
     */
    DataBuffer createHalf(long offset, int length);

    /**
     * Creates a half-precision data buffer from a byte buffer
     *
     * @return the new buffer
     */
    DataBuffer createHalf(ByteBuffer buffer, int length);

    /**
     * Creates a half-precision data buffer from raw bytes
     *
     * @param data the raw bytes to create the buffer from
     * @param length the length of the buffer
     * @return the new buffer
     */
    DataBuffer createHalf(byte[] data, int length);
}
| |
///////////////////////////////////////////////////////////////////////////////
//FILE: ScriptInterface.java
//PROJECT: Micro-Manager
//SUBSYSTEM: mmstudio
//-----------------------------------------------------------------------------
//
// AUTHOR: Nenad Amodaj, nenad@amodaj.com, December 3, 2006
//
// COPYRIGHT: University of California, San Francisco, 2006
//
// LICENSE: This file is distributed under the BSD license.
// License text is included with the source distribution.
//
// This file is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
//
// IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES.
//
package org.micromanager.api;
import ij.gui.ImageWindow;
import java.awt.Color;
import java.awt.Component;
import java.awt.Rectangle;
import java.awt.geom.Point2D;
import java.util.List;
import mmcorej.CMMCore;
import mmcorej.TaggedImage;
// These ought not be part of the public API and methods that refer to them are
// deprecated.
import org.json.JSONObject;
import org.micromanager.dialogs.AcqControlDlg;
import org.micromanager.positionlist.PositionListDlg;
import org.micromanager.acquisition.MMAcquisition;
import org.micromanager.utils.AutofocusManager;
// For historical reasons, this exception class is not in the
// org.micromanager.api package even though it is part of the public API.
import org.micromanager.utils.MMScriptException;
/**
* Interface to execute commands in the main panel. Implemented by
* MMStudio and available as the gui object in the Beanshell
* scripting panel.
*
* Most functions throw MMScriptException
*/
public interface ScriptInterface {
/**
* Request a pause in script execution for the specified number of
* milliseconds. Most systems will guarantee a pause of at least
* the desired duration, but may wait longer than requested before
* continuing execution.
* @param ms Time in millisecond that the script should pause
* @throws org.micromanager.utils.MMScriptException
*/
public void sleep(long ms) throws MMScriptException;
/**
* Displays text in the scripting console output window.
* @param text
* @throws MMScriptException
*/
public void message(String text) throws MMScriptException;
/**
* Clears scripting console output window.
* @throws MMScriptException
*/
public void clearMessageWindow() throws MMScriptException;
/**
* Brings GUI up to date with the recent changes in the mmcore.
*/
public void refreshGUI();
/**
* Brings GUI up to date with the recent changes in the mmcore.
* Does not communicate with hardware, only checks Cache
*/
public void refreshGUIFromCache();
/**
* Snaps image and displays in AcqWindow.
* Opens a new AcqWindow when current one is not open.
* Calling this function is the same as pressing the "Snap" button on the main
* Micro-manager GUI
*/
public void snapSingleImage();
/**
* Opens a new acquisition data set
*
* @param name - Name of the data set
* @param rootDir - Directory where the new data set is going to be created
* @param nrFrames - Number of Frames (time points) in this acquisition
* @param nrChannels - Number of Channels in this acquisition
* @param nrSlices - Number of Slices (Z-positions) in this acquisition
* @param nrPositions Number of (XY) Positions in this acquisition.
* @param show Whether or not to show this acquisition.
* @param save Whether or not save data during acquisition.
* @throws MMScriptException
*/
public void openAcquisition(String name, String rootDir, int nrFrames, int nrChannels, int nrSlices, int nrPositions, boolean show, boolean save) throws MMScriptException;
/**
* Another way to create data set, an alternative to the
* openAcquisition(String name, String rootDir, int nrFrames, int nrChannels, int nrSlices, int nrPositions, boolean show, boolean save)
*
* The caller is responsible for providing all required metadata within the summaryMetadata argument
* @param summaryMetadata The metadata describing the acquisition parameters
* @param diskCached True if images are cached on disk; false if they are kept in RAM only.
* @param displayOff True if no display is to be created or shown.
* @return
*
* @deprecated Use openAcquisition() instead.
*/
@Deprecated
public String createAcquisition(JSONObject summaryMetadata, boolean diskCached,
boolean displayOff);
/**
* Set up image physical dimensions for the data set that has already been opened.
* Once dimensions of the image have been set, they can't be changed, i.e. subsequent calls to this method will generate an error.
* Typically there is no need to call this method, except when display options have to be set before the first image is inserted.
* If this method is not explicitly called after openAcquisition(), the image dimensions will be automatically initialized based
* on the first image inserted in the data set.
*
* @param name - Name of the data set
* @param width - Image width in pixels
* @param height - Image height in pixels
* @param bytesPerPixel - Number of bytes per pixel
* @param bitDepth - Dynamic range in bits per pixel
* @throws MMScriptException
*/
public void initializeAcquisition(String name, int width, int height, int bytesPerPixel, int bitDepth) throws MMScriptException;
/**
* Inserts image into the data set.
* @param name - data set name
* @param frame - 0 based frame number
* @param channel - 0 based channel number
* @param slice - 0 based (z) slice number
* @param position - 0 based position number
* @param taggedImg Tagged Image (image with associated metadata)
* @throws MMScriptException
*/
public void addImageToAcquisition(String name, int frame, int channel, int slice,
int position, TaggedImage taggedImg) throws MMScriptException;
/**
* Change an acquisition so that adding images to it is done asynchronously.
* All calls to e.g. addImageToAcquisition() and other similar functions
* will return nearly-instantly.
* @param name of acquisition
* @throws MMScriptException if the specified acquisition does not exist.
*/
public void setAcquisitionAddImageAsynchronous(String name) throws MMScriptException;
/**
* Snaps an image with the current settings and places pixels in the specified position
* of the Micro-Manager Image viewer
* @param name Name of the acquisition.
* @param frame Frame number (time point, 0-based) in which this image should be inserted.
* @param channel Channel number (0-based) in which this image should be inserted.
* @param z Slice number (0-based) in which this image should be inserted.
* @param position Position number (0-based) in which this image should be inserted.
* @throws MMScriptException
*/
public void snapAndAddImage(String name, int frame, int channel, int z, int position) throws MMScriptException;
/**
* Provides access to the data set through the MMAcquisition interface.
* Typically there is no need to use this low-level method and interfere with the default acquisition execution.
* Intended use is within advanced plugins.
* @param name - data set name
* @return MMAcquisition object
* @throws MMScriptException
*
* @deprecated Because it returns an internal object that is subject to change.
*/
@Deprecated
public MMAcquisition getAcquisition(String name) throws MMScriptException;
/**
* Returns a name beginning with stem that is not yet used.
* @param stem Base name from which a unique name will be constructed
* @return a name beginning with stem that is not yet used
*/
public String getUniqueAcquisitionName(String stem);
/**
* Returns the name of the current album (i.e. the most recently created one)
* In addition to their use through the scripting interface, Albums are used
* by the "Camera --> Album" button in the main window of Micro-Manager and
* the "--> Album" button on the snap/live window
* @return Name of the current Album.
*/
public String getCurrentAlbum();
/**
* Add a TaggedImage to an album; creates a new album if the image and current album
* do not match in image dimensions, bits per pixel, bytes per pixel, or number of channels.
* The current album is the most recently created one
* Albums are also used by the "Camera --> Album" button in the main window of Micro-Manager and
* the "--> Album" button on the snap/live window
* @param image - TaggedImage to be added to album
* @throws org.micromanager.utils.MMScriptException
*/
public void addToAlbum(TaggedImage image) throws MMScriptException;
/**
* Checks whether an acquisition with the given name already exists.
* @param name name to be tested for existence
* @return true when name already exists
*/
public Boolean acquisitionExists(String name);
/**
* Closes the acquisition.
* After this command metadata is complete, all the references to this data set are cleaned-up,
* and no additional images can be added to the acquisition
* Does not close the window in which the acquisition data is displayed
* @param name name of acquisition to be closed
* @throws MMScriptException
*/
public void closeAcquisition(String name) throws MMScriptException;
/**
* Closes all currently open acquisitions.
*/
public void closeAllAcquisitions();
/**
* Gets an Array with names of all open acquisitions
* @return Arrays with names of all acquisitions that are currently open
*/
public String[] getAcquisitionNames();
/**
* Returns the width (in pixels) of images in this acquisition
*/
public int getAcquisitionImageWidth(String acqName) throws MMScriptException;
/**
* Returns the width (in pixels) of images in this acquisition
*/
public int getAcquisitionImageHeight(String acqName) throws MMScriptException;
/**
* Returns the number of bits used per pixel
*/
public int getAcquisitionImageBitDepth(String acqName) throws MMScriptException;
/**
* Returns the number of bytes used per pixel
*/
public int getAcquisitionImageByteDepth(String acqName) throws MMScriptException;
/**
* Returns boolean specifying whether multiple cameras used in this acquisition
*/
public int getAcquisitionMultiCamNumChannels(String acqName) throws MMScriptException;
/**
* Sets custom property attached to the acquisition summary
*/
public void setAcquisitionProperty(String acqName, String propertyName, String value) throws MMScriptException;
/**
* Sets property attached to an individual image.
*/
public void setImageProperty(String acqName, int frame, int channel, int slice, String propName, String value) throws MMScriptException;
/**
* Executes Acquisition with current settings
* Will open the Acquisition Dialog when it is not open yet
* Returns after Acquisition finishes
* Note that this function should not be executed on the EDT (which is the
* thread running the UI).
* @return The name of the acquisition created.
* @throws MMScriptException
*/
public String runAcquisition() throws MMScriptException;
/**
* Executes Acquisition with current settings but allows for changing the data path.
* Will open the Acquisition Dialog when it is not open yet.
* Returns after Acquisition finishes.
* Note that this function should not be executed on the EDT (which is the
* thread running the UI).
* @param name Name of this acquisition.
* @param root Place in the file system where data can be stored.
* @return The name of the acquisition created
* @throws MMScriptException
*/
public String runAcquisition(String name, String root) throws MMScriptException;
/**
* Loads setting for Acquisition Dialog from file
* Will open Acquisition Dialog when it is not open yet
* @throws MMScriptException
*/
public void loadAcquisition(String path) throws MMScriptException;
/**
* Makes this the 'current' PositionList, i.e., the one used by the Acquisition Protocol
* Replaces the list in the PositionList Window
* It will open a position list dialog if it was not already open.
* @throws MMScriptException
*/
public void setPositionList(PositionList pl) throws MMScriptException;
/**
* Returns a copy of the current PositionList, the one used by the Acquisition Protocol
* @throws MMScriptException
*/
public PositionList getPositionList() throws MMScriptException;
/**
* Sets the color of the specified channel in the image viewer. Only has an effect
* for images with 2 or more channels
*/
public void setChannelColor(String title, int channel, Color color) throws MMScriptException;
/**
* Sets the channel name (label)
* @param title - acquisition name
* @param channel - channel index
* @param name - channel label
* @throws MMScriptException
*/
public void setChannelName(String title, int channel, String name) throws MMScriptException;
/**
* Updates the exposure time associated with the given preset
* If the channelgroup and channel name match the current state
* the exposure time will also be updated
*
* @param channelGroup -
*
* @param channel - preset for which to change exposure time
* @param exposure - desired exposure time
*/
public void setChannelExposureTime(String channelGroup, String channel,
double exposure);
/**
* Sets min (black) and max (white or the channel's color) pixel value clipping levels for each channel.
* @param title - acquisition name
* @param channel - channel index (use 0 if there is only a single channel)
* @param min - black clipping level
* @param max - white clipping level
* @throws MMScriptException
*/
public void setChannelContrast(String title, int channel, int min, int max) throws MMScriptException;
/**
* Autoscales contrast for each channel at the current position based on pixel values
* at the current slice and frame
* @param title - acquisition name
* @param frame - frame number
* @param slice - slice number
* @throws MMScriptException
*/
public void setContrastBasedOnFrame(String title, int frame, int slice) throws MMScriptException;
/**
* Returns exposure time for the desired preset in the given channelgroup
* Acquires its info from the preferences
* Same thing is used in MDA window, but this class keeps its own copy
*
* @param channelGroup
* @param channel -
* @param defaultExp - default value
* @return exposure time
*/
public double getChannelExposureTime(String channelGroup, String channel,
double defaultExp);
/**
* Closes the image window corresponding to the acquisition. If being used along with
* closeAcquisition, this method should be called first
* @param acquisitionName - Name of the acquisition
* @throws MMScriptException
*/
public void closeAcquisitionWindow(String acquisitionName) throws MMScriptException;
/**
* Obtain the current XY stage position.
* Returns a point in device coordinates in microns.
* @throws MMScriptException
*/
public Point2D.Double getXYStagePosition() throws MMScriptException;
/**
* Move default Focus (Z) and block until done
* @param z absolute z position
* @throws MMScriptException
*/
public void setStagePosition(double z) throws MMScriptException;
/**
* Move default Focus (Z) relative to current position and block until done
* @param z
* @throws MMScriptException
*/
public void setRelativeStagePosition(double z) throws MMScriptException;
/**
* Move default XY stage and block until done.
* @param x - coordinate in um
* @param y - coordinate in um
* @throws MMScriptException
*/
public void setXYStagePosition(double x, double y) throws MMScriptException ;
/**
* Move default XY stage relative to current position and block until done.
* @param x - coordinate in um
* @param y - coordinate in um
* @throws MMScriptException
*/
public void setRelativeXYStagePosition(double x, double y) throws MMScriptException ;
/**
* There can be multiple XY stage devices in a system. This function returns
* the name of the currently active one
* @return Name of the active XYStage device
*/
public String getXYStageName();
/**
* Assigns the current stage position of the default xy-stage to be (x,y),
* thereby offsetting the coordinates of all other positions.
* @throws MMScriptException
*/
public void setXYOrigin(double x, double y) throws MMScriptException;
/**
* Save current configuration
*/
public void saveConfigPresets();
/**
* Returns the ImageJ ImageWindow instance that is used for Snap and Live display.
*/
public ImageWindow getSnapLiveWin();
/**
* Given an ImageWindow, return the ImageCache associated with it.
*/
public ImageCache getCacheForWindow(ImageWindow window) throws IllegalArgumentException;
/**
* Installs an autofocus plugin class from the class path.
*/
public String installAutofocusPlugin(String className);
/**
* Provides access to the Core and its functionality.
* @return Micro-Manager core object.
*/
public CMMCore getMMCore();
/**
* Currently active autofocus device (can be either a Java or C++ coded device).
* @return currently active autofocus device
*/
public Autofocus getAutofocus();
/**
* Shows the dialog with options for the currently active autofocus device.
*/
public void showAutofocusDialog();
/**
* Adds a message to the Micro-Manager log (found in CoreLog.txt).
* @param msg - message to be added to the log
*/
public void logMessage(String msg);
/**
* Shows a message in the UI.
* @param msg - message to be shown
*/
public void showMessage(String msg);
/**
* Shows a message in the UI.
* @param msg - message to be shown
* @param parent - parent component over which this message should be centered
*/
public void showMessage(String msg, Component parent);
/**
* Writes the stacktrace and a message to the Micro-Manager log (Corelog.txt).
* @param e - Java exception to be logged
* @param msg - message to be shown
*/
public void logError(Exception e, String msg);
/**
* Writes a stacktrace to the Micro-Manager log.
* @param e - Java exception to be logged
*/
public void logError(Exception e);
/**
* Writes an error to the Micro-Manager log (same as logMessage).
* @param msg - message to be logged
*/
public void logError(String msg);
/**
* Shows an error in the UI and logs stacktrace to the Micro-Manager log.
* @param e - Java exception to be shown and logged
* @param msg - Error message to be shown and logged
*/
public void showError(Exception e, String msg);
/**
* Shows and logs a Java exception.
* @param e - Java exception to be shown and logged
*/
public void showError(Exception e);
/**
* Shows an error message in the UI and logs to the Micro-Manager log.
* @param msg - error message to be shown and logged
*/
public void showError(String msg);
/**
* Shows an error in the UI and logs stacktrace to the Micro-Manager log.
* @param e - Java exception to be shown and logged
* @param msg - Error message to be shown and logged
* @param parent - frame in which to show dialog, or null for caller
*/
public void showError(Exception e, String msg, Component parent);
/**
* Shows and logs a Java exception.
* @param e - Java exception to be shown and logged
* @param parent - frame in which to show dialog, or null for caller
*/
public void showError(Exception e, Component parent);
/**
* Shows an error message in the UI and logs to the Micro-Manager log.
* @param msg - error message to be shown and logged
* @param parent - frame in which to show dialog, or null for caller
*/
public void showError(String msg, Component parent);
/**
* Allows MMListeners to register themselves so that they will receive
* alerts as defined in the MMListenerInterface
* @param newL
*/
public void addMMListener(MMListenerInterface newL);
/**
* Allows MMListeners to remove themselves
* @param oldL
*/
public void removeMMListener(MMListenerInterface oldL);
/**
* Lets Components register themselves so that their background can be
* manipulated by the Micro-Manager UI.
* @param frame Component to which the listener should be added.
*/
public void addMMBackgroundListener(Component frame);
/**
* Lets Components remove themselves from the list whose background gets
* changed by the Micro-Manager UI.
* @param frame Component from which the listener should be removed.
*/
public void removeMMBackgroundListener(Component frame);
/**
* Returns the current color of the main window so that it can be used in
* derived windows/plugins as well
* @return Current backgroundColor
*/
public Color getBackgroundColor();
/**
* Show a TaggedImage in the snap/live window (uses current camera settings
* to figure out the shape of the image)
* @param image TaggedImage (pixel data and metadata tags) to be displayed
*/
public boolean displayImage(TaggedImage image);
/**
* Determines whether live mode is currently running.
* @return when true, live mode is running, when false live mode is not running.
*/
public boolean isLiveModeOn();
/**
* Turn live mode on or off (equivalent to pressing the Live mode button).
* @param b true starts live mode, false stops live mode.
*/
public void enableLiveMode(boolean b);
/**
* Get the default camera's ROI -- a convenience function.
* @return default camera's ROI
* @throws MMScriptException
*/
public Rectangle getROI() throws MMScriptException;
/**
* Set the default camera's ROI -- a convenience function.
* @param r
* @throws MMScriptException
*/
public void setROI(Rectangle r) throws MMScriptException;
/**
* Get a reference to the ImageCache object associated with the acquisition.
* @param acquisitionName Name of the acquisition
* @return ImageCache object associated with the acquisition
* @throws org.micromanager.utils.MMScriptException
*/
public ImageCache getAcquisitionImageCache(String acquisitionName) throws MMScriptException;
/**
* Opens the XYPositionList when it is not opened.
* Adds the current position to the list (same as pressing the "Mark" button in the XYPositionList)
*/
public void markCurrentPosition();
/**
* Returns the Multi-Dimensional Acquisition Window.
* To show the window, call:
* AcqControlDlg dlg = gui.getAcqDlg();
* dlg.setVisible(true);
* @return Handle to the MDA acquisition dialog
*
* @deprecated Use the get/setAcquisitionSettings() interface instead.
*/
@Deprecated
public AcqControlDlg getAcqDlg();
/**
* Returns the PositionList Dialog.
* If the Dialog did not yet exist, it will be created.
* The Dialog will not necessarily be shown, call the setVisible method of the dialog to do so
* @return Handle to the positionList Dialog
*
* @deprecated Use the get/setPositionList() interface instead.
*/
@Deprecated
public PositionListDlg getXYPosListDlg();
/**
* Returns true when an acquisition is currently running (note: this function will
* not return true if live mode, snap, or "Camera --> Album" is currently running)
*/
public boolean isAcquisitionRunning();
/**
* Displays an error message and returns true if the run-time Micro-Manager version
* is less than the one specified.
* Versions in Micro-Manager are of the format:
* major.minor.minute date
* where ' date' can be omitted
* Examples:
* 1.4.6
* 1.4.6 20110831
* When a date is appended to a version number, it will be newer than the same version
* without a date
* @param version - minimum version needed to run this code
* @throws MMScriptException
*/
public boolean versionLessThan(String version) throws MMScriptException;
/**
* Write various properties of MM and the OS to the log.
*/
public void logStartupProperties();
/*
* Make the main window the frontmost, active window again
*/
public void makeActive();
/**
* @return the currently selected AutofocusManager object
*/
public AutofocusManager getAutofocusManager();
/**
* @return the current Micro-Manager background style--"Day" or "Night"
*/
public String getBackgroundStyle();
/**
* @return the currently running Micro-Manager version
*/
public String getVersion();
/**
* Sets the background color of the GUI and all its registered components to
* the selected backGroundType
* @param backgroundType either "Day" or "Night"
*/
public void setBackgroundStyle(String backgroundType);
/**
* lets the GUI know that the current configuration has been changed. Activates
* the save button if status is true
* @param status
*/
public void setConfigChanged(boolean status);
/**
* shows the position list dialog
*/
public void showXYPositionList();
/**
* Open an existing data set. Shows the acquisition in a window.
* @return The acquisition object.
*/
public String openAcquisitionData(String location, boolean inRAM) throws MMScriptException;
/**
* Open an existing data set.
* @return The name of the acquisition object.
*/
public String openAcquisitionData(String location, boolean inRAM, boolean show) throws MMScriptException;
/**
* Enabled or disable the ROI buttons on the main window.
*/
public void enableRoiButtons(final boolean enabled);
/**
* Set the format for saving images to disk. Images can be written to disk one per a file
* or multiple images per file. Using multiple images per file should be faster on most systems
* @param imageSavingClass use either org.micromanager.acquisition.TaggedImageStorageDiskDefault.class
* for single-image files or org.micromanager.acquisition.TaggedImageStorageMultipageTiff.class for
* multi-image files
*/
public void setImageSavingFormat(Class imageSavingClass) throws MMScriptException;
/*
* Returns the pipeline
*/
public IAcquisitionEngine2010 getAcquisitionEngine2010();
/*
* Returns true if user has chosen to hide MDA window when it runs.
*/
public boolean getHideMDADisplayOption();
/**
* Adds an image processor to the DataProcessor pipeline.
*/
public void addImageProcessor(DataProcessor<TaggedImage> processor);
/**
* Removes an image processor from the DataProcessor pipeline.
*/
public void removeImageProcessor(DataProcessor<TaggedImage> taggedImageProcessor);
/**
* Retrieve a copy of the current DataProcessor pipeline.
*/
public List<DataProcessor<TaggedImage>> getImageProcessorPipeline();
/**
* Replace the current DataProcessor pipeline with the provided one.
*/
public void setImageProcessorPipeline(List<DataProcessor<TaggedImage>> pipeline);
/**
* Register a new DataProcessor class with the Acquisition Engine. For
* example, if your processor class is named MyProcessor, then you would
* call this function as:
* gui.registerProcessorClass(MyProcessor.class, "My Processor");
*/
public void registerProcessorClass(Class<? extends DataProcessor<TaggedImage>> processorClass, String name);
/**
* Pause/Unpause a running acquistion
*/
public void setPause(boolean state);
/**
* Returns true if the acquisition is currently paused.
*/
public boolean isPaused();
/**
* Attach a runnable to the acquisition engine. Each index (f, p, c, s) can
* be specified. Passing a value of -1 should result in the runnable being attached
* at all values of that index. For example, if the first argument is -1,
* then the runnable should execute at every frame.
*/
public void attachRunnable(int frame, int position, int channel, int slice, Runnable runnable);
/**
* Remove runnables from the acquisition engine
*/
public void clearRunnables();
/**
* Return current acquisition settings
*/
SequenceSettings getAcquisitionSettings();
/**
* Apply new acquisition settings
*/
public void setAcquisitionSettings(SequenceSettings settings);
/**
* Return the current acquisition path, or null if not applicable
*/
public String getAcquisitionPath();
/**
* Display dialog to save data for one of the currently open acquisitions
*/
public void promptToSaveAcquisition(String name, boolean prompt) throws MMScriptException;
/**
* Request that the given object be added to our EventBus for notification
* of events occurring. The available event types that subscribers can
* listen for is in the org.micromanager.api.events package.
*/
public void registerForEvents(Object obj);
/**
* Unregister the given object from EventBus notifications
*/
public void unregisterForEvents(Object obj);
/**
* Autostretch each histogram for the currently-active window, as if the
* "Auto" button had been clicked for each one.
*/
public void autostretchCurrentWindow();
}
| |
package org.knowm.xchange.bitflyer.dto.trade.results;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.math.BigDecimal;
/**
 * DTO for a single order entry returned by bitFlyer's "query child order" REST endpoint
 * (a "child order" is a regular order, as opposed to a bitFlyer "parent" conditional order).
 *
 * <p>Plain Jackson-mapped bean: snake_case JSON keys are bound to camelCase fields via
 * {@code @JsonProperty}, and null fields are omitted on serialization via
 * {@code JsonInclude.Include.NON_NULL}. Date fields are kept as raw strings; presumably
 * ISO-8601 timestamps — TODO confirm against the bitFlyer API documentation.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class BitflyerQueryChildOrderResult {

  // Exchange-assigned numeric row id of this order record.
  private Long id;

  // Exchange-assigned order identifier.
  @JsonProperty("child_order_id")
  private String childOrderId;

  // Market symbol the order was placed on (e.g. a currency-pair code).
  @JsonProperty("product_code")
  private String productCode;

  // Order side as a raw API string (presumably "BUY"/"SELL" — TODO confirm).
  private String side;

  // Order type as a raw API string (presumably "LIMIT"/"MARKET" — TODO confirm).
  @JsonProperty("child_order_type")
  private String childOrderType;

  // Limit price of the order.
  private BigDecimal price;

  // Average fill price across the order's executions.
  @JsonProperty("average_price")
  private BigDecimal averagePrice;

  // Originally requested order size.
  private BigDecimal size;

  // Current order state as reported by the exchange (raw string).
  @JsonProperty("child_order_state")
  private String childOrderState;

  // Expiration date of the order (raw string, format per bitFlyer API).
  @JsonProperty("expire_date")
  private String expireDate;

  // Date the order was placed (raw string, format per bitFlyer API).
  @JsonProperty("child_order_date")
  private String childOrderDate;

  // Acceptance id returned when the order was submitted; used to reference the order later.
  @JsonProperty("child_order_acceptance_id")
  private String childOrderAcceptanceId;

  // Size still open (not yet filled or cancelled).
  @JsonProperty("outstanding_size")
  private BigDecimal outstandingSize;

  // Size that was cancelled.
  @JsonProperty("cancel_size")
  private BigDecimal cancelSize;

  // Size that has been filled.
  @JsonProperty("executed_size")
  private BigDecimal executedSize;

  // Total commission charged for this order.
  @JsonProperty("total_commission")
  private BigDecimal totalCommission;

  // --- Plain bean accessors below; no logic beyond field get/set. ---

  public Long getId() {
    return id;
  }

  public void setId(Long id) {
    this.id = id;
  }

  public String getChildOrderId() {
    return childOrderId;
  }

  public void setChildOrderId(String childOrderId) {
    this.childOrderId = childOrderId;
  }

  public String getProductCode() {
    return productCode;
  }

  public void setProductCode(String productCode) {
    this.productCode = productCode;
  }

  public String getSide() {
    return side;
  }

  public void setSide(String side) {
    this.side = side;
  }

  public String getChildOrderType() {
    return childOrderType;
  }

  public void setChildOrderType(String childOrderType) {
    this.childOrderType = childOrderType;
  }

  public BigDecimal getPrice() {
    return price;
  }

  public void setPrice(BigDecimal price) {
    this.price = price;
  }

  public BigDecimal getAveragePrice() {
    return averagePrice;
  }

  public void setAveragePrice(BigDecimal averagePrice) {
    this.averagePrice = averagePrice;
  }

  public BigDecimal getSize() {
    return size;
  }

  public void setSize(BigDecimal size) {
    this.size = size;
  }

  public String getChildOrderState() {
    return childOrderState;
  }

  public void setChildOrderState(String childOrderState) {
    this.childOrderState = childOrderState;
  }

  public String getExpireDate() {
    return expireDate;
  }

  public void setExpireDate(String expireDate) {
    this.expireDate = expireDate;
  }

  public String getChildOrderDate() {
    return childOrderDate;
  }

  public void setChildOrderDate(String childOrderDate) {
    this.childOrderDate = childOrderDate;
  }

  public String getChildOrderAcceptanceId() {
    return childOrderAcceptanceId;
  }

  public void setChildOrderAcceptanceId(String childOrderAcceptanceId) {
    this.childOrderAcceptanceId = childOrderAcceptanceId;
  }

  public BigDecimal getOutstandingSize() {
    return outstandingSize;
  }

  public void setOutstandingSize(BigDecimal outstandingSize) {
    this.outstandingSize = outstandingSize;
  }

  public BigDecimal getCancelSize() {
    return cancelSize;
  }

  public void setCancelSize(BigDecimal cancelSize) {
    this.cancelSize = cancelSize;
  }

  public BigDecimal getExecutedSize() {
    return executedSize;
  }

  public void setExecutedSize(BigDecimal executedSize) {
    this.executedSize = executedSize;
  }

  public BigDecimal getTotalCommission() {
    return totalCommission;
  }

  public void setTotalCommission(BigDecimal totalCommission) {
    this.totalCommission = totalCommission;
  }

  // Debug/log representation listing every field; not part of the wire format.
  @Override
  public String toString() {
    return "BitflyerQueryChildOrderResult{"
        + "id="
        + id
        + ", childOrderId='"
        + childOrderId
        + '\''
        + ", productCode='"
        + productCode
        + '\''
        + ", side='"
        + side
        + '\''
        + ", childOrderType='"
        + childOrderType
        + '\''
        + ", price="
        + price
        + ", averagePrice="
        + averagePrice
        + ", size="
        + size
        + ", childOrderState='"
        + childOrderState
        + '\''
        + ", expireDate='"
        + expireDate
        + '\''
        + ", childOrderDate='"
        + childOrderDate
        + '\''
        + ", childOrderAcceptanceId='"
        + childOrderAcceptanceId
        + '\''
        + ", outstandingSize="
        + outstandingSize
        + ", cancelSize="
        + cancelSize
        + ", executedSize="
        + executedSize
        + ", totalCommission="
        + totalCommission
        + '}';
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.