gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Copyright 2015, Digium, Inc.
* All rights reserved.
*
* This source code is licensed under The MIT License found in the
* LICENSE file in the root directory of this source tree.
*
* For all details and documentation: https://www.respoke.io
*/
package com.digium.respokesdk;
import android.content.Context;
import android.os.Handler;
import android.os.Looper;

import org.json.JSONException;
import org.json.JSONObject;
import org.webrtc.DataChannel;
import org.webrtc.PeerConnection;

import java.io.UnsupportedEncodingException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.StandardCharsets;
/**
* A direct connection via RTCDataChannel, including state and path negotiation.
*/
public class RespokeDirectConnection implements org.webrtc.DataChannel.Observer {

    /**
     * Weak reference to the client's listener. Held weakly so the SDK never keeps the
     * client's object graph alive on its own.
     */
    private WeakReference<Listener> listenerReference;

    /** Weak reference to the call with which this direct connection is associated. */
    private WeakReference<RespokeCall> callReference;

    /** The underlying WebRTC data channel. Null until created locally or opened by the peer. */
    private DataChannel dataChannel;

    /**
     * A listener interface to notify the receiver of events occurring with the direct connection
     */
    public interface Listener {

        /**
         * The direct connection setup has begun. This does NOT mean it's ready to send messages yet.
         * Listen to onOpen for that notification.
         *
         * @param sender The direct connection for which the event occurred
         */
        public void onStart(RespokeDirectConnection sender);

        /**
         * Called when the direct connection is opened.
         *
         * @param sender The direct connection for which the event occurred
         */
        public void onOpen(RespokeDirectConnection sender);

        /**
         * Called when the direct connection is closed.
         *
         * @param sender The direct connection for which the event occurred
         */
        public void onClose(RespokeDirectConnection sender);

        /**
         * Called when a message is received over the direct connection.
         *
         * @param message The message received
         * @param sender  The direct connection for which the event occurred
         */
        public void onMessage(String message, RespokeDirectConnection sender);
    }

    /**
     * The constructor for this class
     *
     * @param call The call instance with which this direct connection is associated
     */
    public RespokeDirectConnection(RespokeCall call) {
        callReference = new WeakReference<RespokeCall>(call);
    }

    /**
     * Set a receiver for the Listener interface
     *
     * @param listener The new receiver for events from the Listener interface for this instance
     */
    public void setListener(Listener listener) {
        if (null != listener) {
            listenerReference = new WeakReference<Listener>(listener);
        } else {
            listenerReference = null;
        }
    }

    /**
     * Accept the direct connection and start the process of obtaining media.
     *
     * @param context An application context with which to access system resources
     */
    public void accept(Context context) {
        if (null != callReference) {
            RespokeCall call = callReference.get();
            if (null != call) {
                // The call owns the peer connection, so it drives the accept handshake
                call.directConnectionDidAccept(context);
            }
        }
    }

    /**
     * Indicate whether the direct connection is ready to transfer messages.
     *
     * @return True when a data channel exists and its state is OPEN, false otherwise
     */
    public boolean isActive() {
        return ((null != dataChannel) && (dataChannel.state() == DataChannel.State.OPEN));
    }

    /**
     * Get the call object associated with this direct connection
     *
     * @return The call instance, or null if the call has been garbage collected
     */
    public RespokeCall getCall() {
        if (null != callReference) {
            return callReference.get();
        } else {
            return null;
        }
    }

    /**
     * Send a message to the remote client through the direct connection.
     *
     * @param message            The message to send
     * @param completionListener A listener to receive a notification on the success of the asynchronous operation
     */
    public void sendMessage(String message, final Respoke.TaskCompletionListener completionListener) {
        if (isActive()) {
            JSONObject jsonMessage = new JSONObject();
            try {
                jsonMessage.put("message", message);
                // The channel transports raw bytes; WebRTC requires a *direct* ByteBuffer
                byte[] rawMessage = jsonMessage.toString().getBytes(StandardCharsets.UTF_8);
                ByteBuffer directData = ByteBuffer.allocateDirect(rawMessage.length);
                directData.put(rawMessage);
                directData.flip();
                DataChannel.Buffer data = new DataChannel.Buffer(directData, false);
                if (dataChannel.send(data)) {
                    Respoke.postTaskSuccess(completionListener);
                } else {
                    Respoke.postTaskError(completionListener, "Error sending message");
                }
            } catch (JSONException e) {
                Respoke.postTaskError(completionListener, "Unable to encode message to JSON");
            }
        } else {
            Respoke.postTaskError(completionListener, "DataChannel not in an open state");
        }
    }

    /**
     * Establish a new direct connection instance with the peer connection for the call. This is
     * used internally to the SDK and should not be called directly by your client application.
     */
    public void createDataChannel() {
        if (null != callReference) {
            RespokeCall call = callReference.get();
            if (null != call) {
                PeerConnection peerConnection = call.getPeerConnection();
                dataChannel = peerConnection.createDataChannel("respokeDataChannel", new DataChannel.Init());
                dataChannel.registerObserver(this);
            }
        }
    }

    /**
     * Notify the direct connection instance that the peer connection has opened the specified
     * data channel
     *
     * @param newDataChannel The DataChannel that has opened
     */
    public void peerConnectionDidOpenDataChannel(DataChannel newDataChannel) {
        if (null != dataChannel) {
            // Replacing the previous connection, so disable observer messages from the old instance
            dataChannel.unregisterObserver();
        } else {
            // First channel for this connection: tell the listener setup has begun.
            // Listener callbacks are always posted to the main (UI) thread.
            new Handler(Looper.getMainLooper()).post(new Runnable() {
                public void run() {
                    if (null != listenerReference) {
                        Listener listener = listenerReference.get();
                        if (null != listener) {
                            listener.onStart(RespokeDirectConnection.this);
                        }
                    }
                }
            });
        }

        dataChannel = newDataChannel;
        newDataChannel.registerObserver(this);
    }

    // org.webrtc.DataChannel.Observer methods

    public void onStateChange() {
        switch (dataChannel.state()) {
            case CONNECTING:
                break;

            case OPEN: {
                // Notify the owning call synchronously, then the listener on the main thread
                if (null != callReference) {
                    RespokeCall call = callReference.get();
                    if (null != call) {
                        call.directConnectionDidOpen(this);
                    }
                }

                new Handler(Looper.getMainLooper()).post(new Runnable() {
                    public void run() {
                        if (null != listenerReference) {
                            Listener listener = listenerReference.get();
                            if (null != listener) {
                                listener.onOpen(RespokeDirectConnection.this);
                            }
                        }
                    }
                });
            }
            break;

            case CLOSING:
                break;

            case CLOSED: {
                if (null != callReference) {
                    RespokeCall call = callReference.get();
                    if (null != call) {
                        call.directConnectionDidClose(this);
                    }
                }

                new Handler(Looper.getMainLooper()).post(new Runnable() {
                    public void run() {
                        if (null != listenerReference) {
                            Listener listener = listenerReference.get();
                            if (null != listener) {
                                listener.onClose(RespokeDirectConnection.this);
                            }
                        }
                    }
                });
            }
            break;
        }
    }

    public void onMessage(org.webrtc.DataChannel.Buffer buffer) {
        if (buffer.binary) {
            // TODO: binary payloads are currently ignored
        } else {
            CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder();

            try {
                String message = decoder.decode(buffer.data).toString();

                try {
                    // Messages are wrapped as {"message": "..."} by sendMessage on the remote side
                    JSONObject jsonMessage = new JSONObject(message);
                    final String messageText = jsonMessage.getString("message");

                    if (null != messageText) {
                        new Handler(Looper.getMainLooper()).post(new Runnable() {
                            public void run() {
                                if (null != listenerReference) {
                                    Listener listener = listenerReference.get();
                                    if (null != listener) {
                                        listener.onMessage(messageText, RespokeDirectConnection.this);
                                    }
                                }
                            }
                        });
                    }
                } catch (JSONException e) {
                    // If it is not valid json, ignore the message
                }
            } catch (CharacterCodingException e) {
                // If the message can not be decoded, ignore it
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.scan.executor.impl;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import org.apache.carbondata.common.CarbonIterator;
import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.common.logging.impl.StandardLogService;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.constants.CarbonV3DataFormatConstants;
import org.apache.carbondata.core.datamap.Segment;
import org.apache.carbondata.core.datastore.IndexKey;
import org.apache.carbondata.core.datastore.block.AbstractIndex;
import org.apache.carbondata.core.datastore.block.SegmentProperties;
import org.apache.carbondata.core.datastore.block.TableBlockInfo;
import org.apache.carbondata.core.datastore.block.TableBlockUniqueIdentifier;
import org.apache.carbondata.core.indexstore.BlockletDetailInfo;
import org.apache.carbondata.core.indexstore.blockletindex.BlockletDataRefNode;
import org.apache.carbondata.core.indexstore.blockletindex.IndexWrapper;
import org.apache.carbondata.core.keygenerator.KeyGenException;
import org.apache.carbondata.core.memory.UnsafeMemoryManager;
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
import org.apache.carbondata.core.metadata.blocklet.DataFileFooter;
import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.core.metadata.encoder.Encoding;
import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
import org.apache.carbondata.core.scan.executor.QueryExecutor;
import org.apache.carbondata.core.scan.executor.exception.QueryExecutionException;
import org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo;
import org.apache.carbondata.core.scan.executor.util.QueryUtil;
import org.apache.carbondata.core.scan.executor.util.RestructureUtil;
import org.apache.carbondata.core.scan.filter.FilterUtil;
import org.apache.carbondata.core.scan.model.ProjectionDimension;
import org.apache.carbondata.core.scan.model.ProjectionMeasure;
import org.apache.carbondata.core.scan.model.QueryModel;
import org.apache.carbondata.core.stats.QueryStatistic;
import org.apache.carbondata.core.stats.QueryStatisticsConstants;
import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.core.util.CarbonTimeStatisticsFactory;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.util.DataTypeUtil;
import org.apache.carbondata.core.util.ThreadLocalTaskInfo;
import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.commons.lang3.ArrayUtils;
/**
* This class provides a skeletal implementation of the {@link QueryExecutor}
* interface to minimize the effort required to implement this interface. This
* will be used to prepare all the properties required for query execution
*/
public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
// Class-scoped logger used for query execution diagnostics.
private static final LogService LOGGER =
LogServiceFactory.getLogService(AbstractQueryExecutor.class.getName());
/**
* holder for query properties which will be used to execute the query;
* populated by initQuery(QueryModel) before execution
*/
protected QueryExecutorProperties queryProperties;
/**
* query result iterator which will execute the query
* and give the result
*/
protected CarbonIterator queryIterator;
/**
 * Creates the executor with an empty property holder; the holder is filled in
 * later when a query model is processed by {@code initQuery}.
 */
public AbstractQueryExecutor() {
  this.queryProperties = new QueryExecutorProperties();
}
/**
 * Registers the executor service on which query work will be scheduled.
 *
 * @param executorService service to use for query execution
 */
public void setExecutorService(ExecutorService executorService) {
  // stash the service in the shared query properties for later use
  this.queryProperties.executorService = executorService;
}
/**
* Below method will be used to fill the executor properties based on query
* model it will parse the query model and get the detail and fill it in
* query properties
*
* @param queryModel
*/
protected void initQuery(QueryModel queryModel) throws IOException {
// tag the worker thread with partition id + query id so log lines are attributable
StandardLogService.setThreadName(StandardLogService.getPartitionID(
queryModel.getAbsoluteTableIdentifier().getCarbonTableIdentifier().getTableName()),
queryModel.getQueryId());
LOGGER.info("Query will be executed on table: " + queryModel.getAbsoluteTableIdentifier()
.getCarbonTableIdentifier().getTableName());
// Initializing statistics list to record the query statistics
// creating copy on write to handle concurrent scenario
queryProperties.queryStatisticsRecorder = queryModel.getStatisticsRecorder();
if (null == queryProperties.queryStatisticsRecorder) {
// no recorder supplied by the caller: create one and push it back into the model
queryProperties.queryStatisticsRecorder =
CarbonTimeStatisticsFactory.createExecutorRecorder(queryModel.getQueryId());
queryModel.setStatisticsRecorder(queryProperties.queryStatisticsRecorder);
}
QueryStatistic queryStatistic = new QueryStatistic();
// sort the block info
// so block will be loaded in sorted order this will be required for
// query execution
Collections.sort(queryModel.getTableBlockInfos());
// group the blocks by physical file and wrap each group into a single index entry
List<AbstractIndex> indexList = new ArrayList<>();
Map<String, List<TableBlockInfo>> listMap = getFilePathToTableBlockInfoMapping(queryModel);
for (List<TableBlockInfo> tableBlockInfos : listMap.values()) {
indexList.add(new IndexWrapper(tableBlockInfos));
}
queryProperties.dataBlocks = indexList;
queryStatistic
.addStatistics(QueryStatisticsConstants.LOAD_BLOCKS_EXECUTOR, System.currentTimeMillis());
queryProperties.queryStatisticsRecorder.recordStatistics(queryStatistic);
// calculating the total number of aggregated columns
int measureCount = queryModel.getProjectionMeasures().size();
int currentIndex = 0;
DataType[] dataTypes = new DataType[measureCount];
for (ProjectionMeasure carbonMeasure : queryModel.getProjectionMeasures()) {
// adding the data type and aggregation type of all the measure this
// can be used
// to select the aggregator
dataTypes[currentIndex] = carbonMeasure.getMeasure().getDataType();
currentIndex++;
}
queryProperties.measureDataTypes = dataTypes;
// as aggregation will be executed in following order
// 1.aggregate dimension expression
// 2. expression
// 3. query measure
// so calculating the index of the expression start index
// and measure column start index
queryProperties.filterMeasures = new HashSet<>();
queryProperties.complexFilterDimension = new HashSet<>();
// collect every dimension and measure referenced by the filter expression tree
QueryUtil.getAllFilterDimensions(queryModel.getFilterExpressionResolverTree(),
queryProperties.complexFilterDimension, queryProperties.filterMeasures);
CarbonTable carbonTable = queryModel.getTable();
queryStatistic = new QueryStatistic();
// dictionary column unique column id to dictionary mapping
// which will be used to get column actual data
queryProperties.columnToDictionaryMapping =
QueryUtil.getDimensionDictionaryDetail(
queryModel.getProjectionDimensions(),
queryProperties.complexFilterDimension,
carbonTable);
queryStatistic
.addStatistics(QueryStatisticsConstants.LOAD_DICTIONARY, System.currentTimeMillis());
queryProperties.queryStatisticsRecorder.recordStatistics(queryStatistic);
queryModel.setColumnToDictionaryMapping(queryProperties.columnToDictionaryMapping);
}
/**
* Method to prepare file path to table block Info mapping
*
* @param queryModel
* @return
* @throws IOException
*/
/**
 * Groups the query's table block infos by carbon data file path, preserving the
 * (sorted) encounter order of the files.
 *
 * @param queryModel query model holding the selected table block infos
 * @return file path mapped to the block infos belonging to that file
 * @throws IOException if a block's file footer has to be read and the read fails
 */
private Map<String, List<TableBlockInfo>> getFilePathToTableBlockInfoMapping(
    QueryModel queryModel) throws IOException {
  Map<String, List<TableBlockInfo>> filePathToBlockInfos = new LinkedHashMap<>();
  // This cache is introduced to handle the case when CACHE_LEVEL=BLOCK and there are
  // other dataMaps (lucene, bloom, ...) created on the table. In that case all dataMaps
  // prune at blocklet level and blockInfo entries repeat with different blockletIds,
  // so footers already read once are reused instead of re-read.
  Map<String, DataFileFooter> fileFooterCache = new HashMap<>();
  for (TableBlockInfo blockInfo : queryModel.getTableBlockInfos()) {
    List<TableBlockInfo> blocksForFile =
        filePathToBlockInfos.computeIfAbsent(blockInfo.getFilePath(), k -> new ArrayList<>());
    BlockletDetailInfo detailInfo = blockInfo.getDetailInfo();
    // Blocklet info can be absent in 2 scenarios:
    // 1. old stores (1.1 or any prior version) where blocklet information is not
    //    available, so it must be read from the block file footer
    // 2. CACHE_LEVEL is set to block
    if (detailInfo.getBlockletInfo() == null) {
      readAndFillBlockletInfo(fileFooterCache, blocksForFile, blockInfo, detailInfo);
    } else {
      blocksForFile.add(blockInfo);
    }
  }
  return filePathToBlockInfos;
}
/**
* Read the file footer of block file and get the blocklets to query
*/
private void readAndFillBlockletInfo(Map<String, DataFileFooter> filePathToFileFooterMapping,
List<TableBlockInfo> tableBlockInfos, TableBlockInfo blockInfo,
BlockletDetailInfo blockletDetailInfo) throws IOException {
// position the block at its footer before reading metadata
blockInfo.setBlockOffset(blockletDetailInfo.getBlockFooterOffset());
// reuse an already-read footer for this file if available
DataFileFooter fileFooter = filePathToFileFooterMapping.get(blockInfo.getFilePath());
if (null == fileFooter) {
// NOTE(review): detail info is cleared before reading and restored after --
// presumably readMetadatFile behaves differently when detail info is set;
// confirm before reordering these statements
blockInfo.setDetailInfo(null);
fileFooter = CarbonUtil.readMetadatFile(blockInfo);
filePathToFileFooterMapping.put(blockInfo.getFilePath(), fileFooter);
blockInfo.setDetailInfo(blockletDetailInfo);
}
List<BlockletInfo> blockletList = fileFooter.getBlockletList();
// cases when blockletID will be -1
// 1. In case of legacy store
// 2. In case CACHE_LEVEL is block and no other dataMap apart from blockletDataMap is
// created for a table
// In all above cases entries will be according to the number of blocks and not according to
// number of blocklets
if (blockletDetailInfo.getBlockletId() != -1) {
// fill the info only for given blockletId in detailInfo
BlockletInfo blockletInfo = blockletList.get(blockletDetailInfo.getBlockletId());
fillBlockletInfoToTableBlock(tableBlockInfos, blockInfo, blockletDetailInfo, fileFooter,
blockletInfo, blockletDetailInfo.getBlockletId());
} else {
// expand the single block entry into one table block info per blocklet in the footer
short count = 0;
for (BlockletInfo blockletInfo : blockletList) {
fillBlockletInfoToTableBlock(tableBlockInfos, blockInfo, blockletDetailInfo, fileFooter,
blockletInfo, count);
count++;
}
}
}
private void fillBlockletInfoToTableBlock(List<TableBlockInfo> tableBlockInfos,
TableBlockInfo blockInfo, BlockletDetailInfo blockletDetailInfo, DataFileFooter fileFooter,
BlockletInfo blockletInfo, short blockletId) {
// work on a copy so the original block info (reused across blocklets) is not mutated
TableBlockInfo info = blockInfo.copy();
BlockletDetailInfo detailInfo = info.getDetailInfo();
// set column schema details
detailInfo.setColumnSchemas(fileFooter.getColumnInTable());
detailInfo.setRowCount(blockletInfo.getNumberOfRows());
byte[][] maxValues = blockletInfo.getBlockletIndex().getMinMaxIndex().getMaxValues();
byte[][] minValues = blockletInfo.getBlockletIndex().getMinMaxIndex().getMinValues();
if (blockletDetailInfo.isLegacyStore()) {
// update min and max values in case of old store for measures as min and max is written
// opposite for measures in old store ( store <= 1.1 version)
maxValues = CarbonUtil.updateMinMaxValues(fileFooter,
blockletInfo.getBlockletIndex().getMinMaxIndex().getMaxValues(),
blockletInfo.getBlockletIndex().getMinMaxIndex().getMinValues(), false);
minValues = CarbonUtil.updateMinMaxValues(fileFooter,
blockletInfo.getBlockletIndex().getMinMaxIndex().getMaxValues(),
blockletInfo.getBlockletIndex().getMinMaxIndex().getMinValues(), true);
info.setDataBlockFromOldStore(true);
}
// NOTE(review): this writes the (possibly corrected) min/max back into the footer's
// shared blockletInfo object, not a copy -- looks intentional since the same
// blockletInfo is attached to detailInfo below, but confirm no other reader
// depends on the original values
blockletInfo.getBlockletIndex().getMinMaxIndex().setMaxValues(maxValues);
blockletInfo.getBlockletIndex().getMinMaxIndex().setMinValues(minValues);
detailInfo.setBlockletInfo(blockletInfo);
detailInfo.setBlockletId(blockletId);
detailInfo.setPagesCount((short) blockletInfo.getNumberOfPages());
tableBlockInfos.add(info);
}
/**
 * Pairs every table block info with the table identifier to build the unique
 * identifiers used for block lookup.
 *
 * @param tableBlockInfos         blocks selected for the query
 * @param absoluteTableIdentifier identifier of the table being queried
 * @return one unique identifier per input block, in the same order
 */
private List<TableBlockUniqueIdentifier> prepareTableBlockUniqueIdentifier(
    List<TableBlockInfo> tableBlockInfos, AbsoluteTableIdentifier absoluteTableIdentifier) {
  List<TableBlockUniqueIdentifier> identifiers = new ArrayList<>(tableBlockInfos.size());
  for (TableBlockInfo tableBlockInfo : tableBlockInfos) {
    identifiers.add(new TableBlockUniqueIdentifier(absoluteTableIdentifier, tableBlockInfo));
  }
  return identifiers;
}
/**
 * Initializes the query properties and builds one {@link BlockExecutionInfo} per
 * selected data block; execution is later driven from these infos. Also records
 * the number of scanned blocks when a statistics recorder is attached.
 *
 * @param queryModel query model from the user query
 * @return execution info for every selected block
 * @throws IOException             if block metadata cannot be read
 * @throws QueryExecutionException if execution info creation fails
 */
protected List<BlockExecutionInfo> getBlockExecutionInfos(QueryModel queryModel)
    throws IOException, QueryExecutionException {
  initQuery(queryModel);
  List<BlockExecutionInfo> executionInfos = new ArrayList<BlockExecutionInfo>();
  for (AbstractIndex blockIndex : queryProperties.dataBlocks) {
    BlockletDataRefNode dataRefNode = (BlockletDataRefNode) blockIndex.getDataRefNode();
    // all blocklets of one entry share the same file, so the first info is representative
    TableBlockInfo firstBlockInfo = dataRefNode.getBlockInfos().get(0);
    executionInfos.add(
        getBlockExecutionInfoForBlock(
            queryModel,
            blockIndex,
            firstBlockInfo.getBlockletInfos().getStartBlockletNumber(),
            dataRefNode.numberOfNodes(),
            firstBlockInfo.getFilePath(),
            firstBlockInfo.getDeletedDeltaFilePath(),
            firstBlockInfo.getSegment()));
  }
  if (null != queryModel.getStatisticsRecorder()) {
    QueryStatistic scanStatistic = new QueryStatistic();
    scanStatistic.addCountStatistic(QueryStatisticsConstants.SCAN_BLOCKS_NUM,
        executionInfos.size());
    queryModel.getStatisticsRecorder().recordStatistics(scanStatistic);
  }
  return executionInfos;
}
/**
* Below method will be used to get the block execution info which is
* required to execute any block based on query model
*
* @param queryModel query model from user query
* @param blockIndex block index
* @return block execution info
* @throws QueryExecutionException any failure during block info creation
*/
private BlockExecutionInfo getBlockExecutionInfoForBlock(QueryModel queryModel,
AbstractIndex blockIndex, int startBlockletIndex, int numberOfBlockletToScan, String filePath,
String[] deleteDeltaFiles, Segment segment)
throws QueryExecutionException {
BlockExecutionInfo blockExecutionInfo = new BlockExecutionInfo();
SegmentProperties segmentProperties = blockIndex.getSegmentProperties();
List<CarbonDimension> tableBlockDimensions = segmentProperties.getDimensions();
// below is to get only those dimension in query which is present in the
// table block
List<ProjectionDimension> projectDimensions = RestructureUtil
.createDimensionInfoAndGetCurrentBlockQueryDimension(blockExecutionInfo,
queryModel.getProjectionDimensions(), tableBlockDimensions,
segmentProperties.getComplexDimensions(), queryModel.getProjectionMeasures().size(),
queryModel.getTable().getTableInfo().isTransactionalTable());
// derive the short block id used for statistics / delete-delta matching
String blockId = CarbonUtil
.getBlockId(queryModel.getAbsoluteTableIdentifier(), filePath, segment.getSegmentNo(),
queryModel.getTable().getTableInfo().isTransactionalTable(),
queryModel.getTable().isHivePartitionTable());
if (queryModel.getTable().isHivePartitionTable()) {
blockExecutionInfo.setBlockId(CarbonTablePath.getShortBlockIdForPartitionTable(blockId));
} else {
blockExecutionInfo.setBlockId(CarbonTablePath.getShortBlockId(blockId));
}
blockExecutionInfo.setDeleteDeltaFilePath(deleteDeltaFiles);
blockExecutionInfo.setStartBlockletIndex(startBlockletIndex);
blockExecutionInfo.setNumberOfBlockletToScan(numberOfBlockletToScan);
blockExecutionInfo.setProjectionDimensions(projectDimensions
.toArray(new ProjectionDimension[projectDimensions.size()]));
// get measures present in the current block
List<ProjectionMeasure> currentBlockQueryMeasures =
getCurrentBlockQueryMeasures(blockExecutionInfo, queryModel, blockIndex);
blockExecutionInfo.setProjectionMeasures(
currentBlockQueryMeasures.toArray(new ProjectionMeasure[currentBlockQueryMeasures.size()]));
blockExecutionInfo.setDataBlock(blockIndex);
// setting whether raw record query or not
blockExecutionInfo.setRawRecordDetailQuery(queryModel.isForcedDetailRawQuery());
// total number dimension
blockExecutionInfo
.setTotalNumberDimensionToRead(
segmentProperties.getDimensionOrdinalToChunkMapping().size());
// prefetching is disabled for page-by-page reads
blockExecutionInfo.setPrefetchBlocklet(!queryModel.isReadPageByPage());
blockExecutionInfo
.setTotalNumberOfMeasureToRead(segmentProperties.getMeasuresOrdinalToChunkMapping().size());
blockExecutionInfo.setComplexDimensionInfoMap(QueryUtil
.getComplexDimensionsMap(projectDimensions,
segmentProperties.getDimensionOrdinalToChunkMapping(),
segmentProperties.getEachComplexDimColumnValueSize(),
queryProperties.columnToDictionaryMapping, queryProperties.complexFilterDimension));
IndexKey startIndexKey = null;
IndexKey endIndexKey = null;
if (null != queryModel.getFilterExpressionResolverTree()) {
// loading the filter executor tree for filter evaluation
blockExecutionInfo.setFilterExecuterTree(FilterUtil
.getFilterExecuterTree(queryModel.getFilterExpressionResolverTree(), segmentProperties,
blockExecutionInfo.getComlexDimensionInfoMap()));
}
try {
startIndexKey = FilterUtil.prepareDefaultStartIndexKey(segmentProperties);
endIndexKey = FilterUtil.prepareDefaultEndIndexKey(segmentProperties);
} catch (KeyGenException e) {
throw new QueryExecutionException(e);
}
//setting the start index key of the block node
blockExecutionInfo.setStartKey(startIndexKey);
//setting the end index key of the block node
blockExecutionInfo.setEndKey(endIndexKey);
// expression measure
List<CarbonMeasure> expressionMeasures =
new ArrayList<CarbonMeasure>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
// setting all the dimension chunk indexes to be read from file
int numberOfElementToConsider = 0;
// list of dimensions to be projected
Set<Integer> allProjectionListDimensionIdexes = new LinkedHashSet<>();
// create a list of filter dimensions present in the current block
Set<CarbonDimension> currentBlockFilterDimensions =
getCurrentBlockFilterDimensions(queryProperties.complexFilterDimension, segmentProperties);
int[] dimensionChunkIndexes = QueryUtil.getDimensionChunkIndexes(projectDimensions,
segmentProperties.getDimensionOrdinalToChunkMapping(),
currentBlockFilterDimensions, allProjectionListDimensionIdexes);
int numberOfColumnToBeReadInOneIO = Integer.parseInt(CarbonProperties.getInstance()
.getProperty(CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO,
CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE));
if (dimensionChunkIndexes.length > 0) {
// NOTE(review): when the last chunk index equals the last column of the block it is
// excluded from the range grouping -- presumably that column is handled separately;
// confirm before changing
numberOfElementToConsider = dimensionChunkIndexes[dimensionChunkIndexes.length - 1]
== segmentProperties.getBlockTodimensionOrdinalMapping().size() - 1 ?
dimensionChunkIndexes.length - 1 :
dimensionChunkIndexes.length;
blockExecutionInfo.setAllSelectedDimensionColumnIndexRange(
CarbonUtil.getRangeIndex(dimensionChunkIndexes, numberOfElementToConsider,
numberOfColumnToBeReadInOneIO));
} else {
blockExecutionInfo.setAllSelectedDimensionColumnIndexRange(new int[0][0]);
}
// get the list of updated filter measures present in the current block
Set<CarbonMeasure> filterMeasures =
getCurrentBlockFilterMeasures(queryProperties.filterMeasures, segmentProperties);
// list of measures to be projected
List<Integer> allProjectionListMeasureIndexes = new ArrayList<>();
int[] measureChunkIndexes = QueryUtil.getMeasureChunkIndexes(
currentBlockQueryMeasures, expressionMeasures,
segmentProperties.getMeasuresOrdinalToChunkMapping(), filterMeasures,
allProjectionListMeasureIndexes);
if (measureChunkIndexes.length > 0) {
// same last-column exclusion rule as for dimensions above
numberOfElementToConsider = measureChunkIndexes[measureChunkIndexes.length - 1]
== segmentProperties.getMeasures().size() - 1 ?
measureChunkIndexes.length - 1 :
measureChunkIndexes.length;
// setting all the measure chunk indexes to be read from file
blockExecutionInfo.setAllSelectedMeasureIndexRange(
CarbonUtil.getRangeIndex(
measureChunkIndexes, numberOfElementToConsider,
numberOfColumnToBeReadInOneIO));
} else {
blockExecutionInfo.setAllSelectedMeasureIndexRange(new int[0][0]);
}
// setting the indexes of list of dimension in projection list
blockExecutionInfo.setProjectionListDimensionIndexes(ArrayUtils.toPrimitive(
allProjectionListDimensionIdexes
.toArray(new Integer[allProjectionListDimensionIdexes.size()])));
// setting the indexes of list of measures in projection list
blockExecutionInfo.setProjectionListMeasureIndexes(ArrayUtils.toPrimitive(
allProjectionListMeasureIndexes
.toArray(new Integer[allProjectionListMeasureIndexes.size()])));
// setting the size of fixed key column (dictionary column)
blockExecutionInfo
.setFixedLengthKeySize(getKeySize(projectDimensions, segmentProperties));
Set<Integer> dictionaryColumnChunkIndex = new HashSet<Integer>();
List<Integer> noDictionaryColumnChunkIndex = new ArrayList<Integer>();
// get the block index to be read from file for query dimension
// for both dictionary columns and no dictionary columns
QueryUtil.fillQueryDimensionChunkIndexes(projectDimensions,
segmentProperties.getDimensionOrdinalToChunkMapping(), dictionaryColumnChunkIndex,
noDictionaryColumnChunkIndex);
int[] queryDictionaryColumnChunkIndexes = ArrayUtils.toPrimitive(
dictionaryColumnChunkIndex.toArray(new Integer[dictionaryColumnChunkIndex.size()]));
// need to sort the dictionary column as for all dimension
// column key will be filled based on key order
Arrays.sort(queryDictionaryColumnChunkIndexes);
blockExecutionInfo.setDictionaryColumnChunkIndex(queryDictionaryColumnChunkIndexes);
// setting the no dictionary column block indexes
blockExecutionInfo.setNoDictionaryColumnChunkIndexes(ArrayUtils.toPrimitive(
noDictionaryColumnChunkIndex.toArray(new Integer[noDictionaryColumnChunkIndex.size()])));
// setting each column value size
blockExecutionInfo.setEachColumnValueSize(segmentProperties.getEachDimColumnValueSize());
blockExecutionInfo.setComplexColumnParentBlockIndexes(
getComplexDimensionParentBlockIndexes(projectDimensions));
blockExecutionInfo.setVectorBatchCollector(queryModel.isVectorReader());
// set actual query dimensions and measures. It may differ in case of restructure scenarios
blockExecutionInfo.setActualQueryDimensions(queryModel.getProjectionDimensions()
.toArray(new ProjectionDimension[queryModel.getProjectionDimensions().size()]));
blockExecutionInfo.setActualQueryMeasures(queryModel.getProjectionMeasures()
.toArray(new ProjectionMeasure[queryModel.getProjectionMeasures().size()]));
DataTypeUtil.setDataTypeConverter(queryModel.getConverter());
blockExecutionInfo.setRequiredRowId(queryModel.isRequiredRowId());
return blockExecutionInfo;
}
/**
* This method will be used to get fixed key length size this will be used
* to create a row from column chunk
*
* @param queryDimension query dimension
* @param blockMetadataInfo block metadata info
* @return key size
*/
/**
 * Computes the fixed key length (in bytes) contributed by the dictionary-encoded
 * query dimensions that exist in the given block; used to rebuild a row from
 * column chunks.
 *
 * @param queryDimension    query dimensions
 * @param blockMetadataInfo metadata of the block being scanned
 * @return total fixed key size, or 0 when no dictionary dimension is queried
 */
private int getKeySize(List<ProjectionDimension> queryDimension,
    SegmentProperties blockMetadataInfo) {
  // Chunk ordinals of dictionary columns in the current block. A set is used because
  // in case of column groups all columns of a group share the same ordinal.
  Set<Integer> dictionaryChunkOrdinals =
      new HashSet<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
  for (int index = 0; index < queryDimension.size(); ) {
    CarbonDimension dimension = queryDimension.get(index).getDimension();
    if (dimension.getNumberOfChild() > 0) {
      // complex dimension: jump over all of its children in one step
      index += dimension.getNumberOfChild();
    } else {
      if (CarbonUtil.hasEncoding(dimension.getEncoder(), Encoding.DICTIONARY)) {
        // dictionary column: record its chunk ordinal for the size calculation
        dictionaryChunkOrdinals.add(blockMetadataInfo.getDimensionOrdinalToChunkMapping()
            .get(dimension.getOrdinal()));
      }
      index++;
    }
  }
  if (dictionaryChunkOrdinals.isEmpty()) {
    return 0;
  }
  // sum the per-column value sizes of the dictionary columns present in this block
  int[] eachColumnValueSize = blockMetadataInfo.getEachDimColumnValueSize();
  int keySize = 0;
  for (int ordinal : dictionaryChunkOrdinals) {
    keySize += eachColumnValueSize[ordinal];
  }
  return keySize;
}
/**
 * Resolves the query's projection measures against the measures actually present in the
 * given block, covering restructure scenarios where the block schema differs from the
 * current table schema.
 *
 * @param executionInfo block execution info to be populated with measure info
 * @param queryModel query model carrying the projection measures
 * @param tableBlock block whose segment properties supply the available measures
 * @return the projection measures present in the current block
 */
private List<ProjectionMeasure> getCurrentBlockQueryMeasures(BlockExecutionInfo executionInfo,
    QueryModel queryModel, AbstractIndex tableBlock) throws QueryExecutionException {
  // Builds the measure info (used later while filling up measure data) and returns
  // the measures that exist in this block.
  List<ProjectionMeasure> currentBlockMeasures = RestructureUtil
      .createMeasureInfoAndGetCurrentBlockQueryMeasures(executionInfo,
          queryModel.getProjectionMeasures(), tableBlock.getSegmentProperties().getMeasures(),
          queryModel.getTable().getTableInfo().isTransactionalTable());
  // Record the data type of every measure selected by the query.
  executionInfo.getMeasureInfo().setMeasureDataTypes(queryProperties.measureDataTypes);
  return currentBlockMeasures;
}
/**
 * Collects the parent block ordinals of the complex-typed projection dimensions.
 * For a complex dimension itself (or one without a resolved complex parent) its own
 * ordinal is used; for a primitive child of a complex column, the parent's ordinal is used.
 *
 * @param queryDimensions projection dimensions of the query
 * @return parent block ordinals as a primitive int array
 */
private int[] getComplexDimensionParentBlockIndexes(List<ProjectionDimension> queryDimensions) {
  List<Integer> parentBlockOrdinals = new ArrayList<Integer>();
  for (ProjectionDimension projection : queryDimensions) {
    CarbonDimension dimension = projection.getDimension();
    if (!dimension.getDataType().isComplexType()) {
      continue;
    }
    if (null == dimension.getComplexParentDimension() || dimension.isComplex()) {
      parentBlockOrdinals.add(dimension.getOrdinal());
    } else {
      parentBlockOrdinals.add(projection.getParentDimension().getOrdinal());
    }
  }
  return ArrayUtils
      .toPrimitive(parentBlockOrdinals.toArray(new Integer[parentBlockOrdinals.size()]));
}
/**
 * Maps the filter measures of the query onto the measures present in the current block,
 * dropping any measure that does not exist in this block (restructure scenario).
 *
 * @param queryFilterMeasures measures referenced by the filter
 * @param segmentProperties segment properties of the current block
 * @return the block-local filter measures; the input set itself when it is empty
 */
private Set<CarbonMeasure> getCurrentBlockFilterMeasures(Set<CarbonMeasure> queryFilterMeasures,
    SegmentProperties segmentProperties) {
  if (queryFilterMeasures.isEmpty()) {
    return queryFilterMeasures;
  }
  Set<CarbonMeasure> measuresInBlock = new HashSet<>(queryFilterMeasures.size());
  for (CarbonMeasure filterMeasure : queryFilterMeasures) {
    CarbonMeasure blockMeasure =
        segmentProperties.getMeasureFromCurrentBlock(filterMeasure.getColumnId());
    if (blockMeasure != null) {
      measuresInBlock.add(blockMeasure);
    }
  }
  return measuresInBlock;
}
/**
 * Maps the filter dimensions of the query onto the dimensions present in the current
 * block, dropping any dimension that does not exist in this block (restructure scenario).
 *
 * @param queryFilterDimensions dimensions referenced by the filter
 * @param segmentProperties segment properties of the current block
 * @return the block-local filter dimensions; the input set itself when it is empty
 */
private Set<CarbonDimension> getCurrentBlockFilterDimensions(
    Set<CarbonDimension> queryFilterDimensions, SegmentProperties segmentProperties) {
  if (queryFilterDimensions.isEmpty()) {
    return queryFilterDimensions;
  }
  Set<CarbonDimension> dimensionsInBlock = new HashSet<>(queryFilterDimensions.size());
  for (CarbonDimension filterDimension : queryFilterDimensions) {
    CarbonDimension blockDimension =
        segmentProperties.getDimensionFromCurrentBlock(filterDimension);
    if (blockDimension != null) {
      dimensionsInBlock.add(blockDimension);
    }
  }
  return dimensionsInBlock;
}
/**
 * Finishes query execution and releases every held resource: the block cache, the
 * result iterator, the task's unsafe memory and the executor service. Any failure
 * while closing the iterator is captured and rethrown only after all other cleanup
 * has completed.
 *
 * @throws QueryExecutionException if closing the underlying iterator failed
 */
@Override public void finish() throws QueryExecutionException {
  CarbonUtil.clearBlockCache(queryProperties.dataBlocks);
  Throwable iteratorCloseFailure = null;
  if (queryIterator != null) {
    // Capture (do not rethrow yet) so the executor service and unsafe memory below
    // are still released even when closing the iterator fails.
    try {
      queryIterator.close();
    } catch (Throwable t) {
      iteratorCloseFailure = t;
    }
  }
  // Release all the unsafe memory reserved for this task ID.
  UnsafeMemoryManager.INSTANCE.freeMemoryAll(ThreadLocalTaskInfo.getCarbonTaskInfo().getTaskId());
  if (queryProperties.executorService != null) {
    // For limit queries the requested number of rows may already have been produced;
    // stop any still-running executions so they do not keep consuming resources.
    queryProperties.executorService.shutdownNow();
  }
  // Finally surface the deferred iterator-close failure, if any.
  if (iteratorCloseFailure != null) {
    throw new QueryExecutionException(iteratorCloseFailure);
  }
}
}
| |
package org.schoellerfamily.gedbrowser.selenium.pageobjects;
import java.util.NoSuchElementException;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebDriver.Navigation;
import org.openqa.selenium.WebElement;
import org.schoellerfamily.gedbrowser.selenium.base.PageWaiter;
/**
* Base class for various page object classes.
*
* @author Dick Schoeller
*/
public class PageBase {
/** Ten seconds for passing to sleep for some known longish waits. */
private static final int MEDIUM_SLEEP = 10000;
/** The base page URL. */
private final String baseUrl;
/** The rest of the page URL. */
private final String location;
/** */
private final PageBase previous;
/** */
private final PageFactory factory;
/**
* Constructor.
*
* @param factory the factory for creating more page objects
* @param previous the page we are coming here from
* @param baseUrl the URL of the page
* @param location the location part of the URL
*/
public PageBase(final PageFactory factory, final PageBase previous,
final String baseUrl, final String location) {
this.factory = factory;
this.baseUrl = baseUrl;
this.previous = previous;
this.location = location;
}
/**
* @return the factory for creating more pages
*/
protected PageFactory getFactory() {
return factory;
}
/**
* @return the title string for this page.
*/
public final String getTitle() {
return getDriver().getTitle();
}
/**
* Open the page.
*/
public final void open() {
getDriver().get(baseUrl + location);
}
/**
* Like hitting the back button.
*/
public final void navigateBack() {
final Navigation navigate = getDriver().navigate();
navigate.back();
waitForPageLoaded();
}
/**
* @param by the lookup approach
* @return the web element
*/
public final WebElement getWebElement(final By by) {
return getDriver().findElement(by);
}
/**
* @param cssSelector CSS selector string to find Element
* @return the web element
*/
public final WebElement getWebElement(final String cssSelector) {
return getWebElement(By.cssSelector(cssSelector));
}
/**
* Send text keys to the Element that finds by cssSelector.
* It shortens "driver.findElement(By.cssSelector()).sendKeys()".
*
* @param cssSelector CSS selector string to find Element
* @param text the text to send to the Element
*/
protected final void sendText(final String cssSelector, final String text) {
getWebElement(cssSelector).sendKeys(text);
}
/**
* Is the text present in page.
*
* @param text the text to search for
* @return true if the text is found
*/
public final boolean isTextPresent(final String text) {
return getDriver().getPageSource().contains(text);
}
/**
* Is the Element in page.
*
* @param by the lookup approach
* @return true if the element is found
*/
public final boolean isElementPresent(final By by) {
try {
getWebElement(by);
return true;
} catch (NoSuchElementException e) {
return false;
}
}
/**
* Is the Element present in the DOM (by css selector).
*
* @param cssSelector element locater
* @return true if the element is found
*/
public final boolean isElementPresent(final String cssSelector) {
return isElementPresent(By.cssSelector(cssSelector));
}
/**
* Checks if the Element is in the DOM and displayed.
*
* @param by selector to find the element
* @return true if the Element exists and is displayed
*/
public final boolean isElementPresentAndDisplayed(final By by) {
try {
return getWebElement(by).isDisplayed();
} catch (NoSuchElementException e) {
return false;
}
}
/**
* Returns the page URL.
*
* @return the URL string for this page
*/
public final String getBaseUrl() {
return baseUrl;
}
/**
* Returns the location part of the URL.
*
* @return the URL string for this page
*/
public final String getLocation() {
return location;
}
/**
* Returns the associated web driver for this page.
*
* @return the web driver
*/
public final WebDriver getDriver() {
return factory.getDriver();
}
/**
* @return this pages waiter
*/
public final PageWaiter getPageWaiter() {
return factory.getWaiter();
}
/**
* Wait for page load on real browser. Doesn't work for HTML driver.
*/
public final void waitForPageLoaded() {
getPageWaiter().waitForPageLoaded(getDriver());
}
/**
* Wait for page load on real browser. Doesn't work for HTML driver.
*
* @param multiplier timeout multiplier
*/
public final void waitForPageLoaded(final int multiplier) {
getPageWaiter().waitForPageLoaded(getDriver(), multiplier);
}
/**
* Wait for page load on real browser. Doesn't work for HTML driver.
*
* @param newUrl the target URL of the load
*/
public final void waitForPageLoaded(final String newUrl) {
getPageWaiter().waitForPageLoaded(getDriver(), newUrl);
}
/**
* Wait for page load on real browser. Doesn't work for HTML driver.
*
* @param newUrl the target URL of the load
* @param multiplier timeout multiplier
*/
public final void waitForPageLoaded(final String newUrl,
final int multiplier) {
getPageWaiter().waitForPageLoaded(getDriver(), newUrl, multiplier);
}
/**
* @return the URL the driver thinks we are at
*/
public final String getCurrentUrl() {
return getDriver().getCurrentUrl();
}
/**
* Go back to the previous person.
*
* @return the associated page object.
*/
public final PageBase back() {
if (previous != null) {
navigateBack();
}
return previous;
}
/**
* Sleep for a bit to allow slow stuff to happen.
*/
protected void sleep() {
sleep(1);
}
/**
* Sleep for a bit to allow slow stuff to happen.
*
* @param multiplier number of times the basic amount to sleep
*/
protected void sleep(final int multiplier) {
try {
Thread.sleep(MEDIUM_SLEEP * multiplier);
} catch (InterruptedException e) {
// Do nothing
}
}
/**
* @return the letter we expect to go to when we click index in the menu
*/
protected String getIndexLetter() {
return "A";
}
/**
* @return the page that we came from.
*/
protected PageBase getPrevious() {
return previous;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.sql.relational;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import io.trino.Session;
import io.trino.metadata.Metadata;
import io.trino.metadata.ResolvedFunction;
import io.trino.spi.type.DecimalParseResult;
import io.trino.spi.type.Decimals;
import io.trino.spi.type.RowType;
import io.trino.spi.type.TimeWithTimeZoneType;
import io.trino.spi.type.TimestampType;
import io.trino.spi.type.TimestampWithTimeZoneType;
import io.trino.spi.type.Type;
import io.trino.sql.planner.Symbol;
import io.trino.sql.relational.SpecialForm.Form;
import io.trino.sql.relational.optimizer.ExpressionOptimizer;
import io.trino.sql.tree.ArithmeticBinaryExpression;
import io.trino.sql.tree.ArithmeticUnaryExpression;
import io.trino.sql.tree.AstVisitor;
import io.trino.sql.tree.BetweenPredicate;
import io.trino.sql.tree.BinaryLiteral;
import io.trino.sql.tree.BindExpression;
import io.trino.sql.tree.BooleanLiteral;
import io.trino.sql.tree.Cast;
import io.trino.sql.tree.CharLiteral;
import io.trino.sql.tree.CoalesceExpression;
import io.trino.sql.tree.ComparisonExpression;
import io.trino.sql.tree.ComparisonExpression.Operator;
import io.trino.sql.tree.DecimalLiteral;
import io.trino.sql.tree.DoubleLiteral;
import io.trino.sql.tree.Expression;
import io.trino.sql.tree.FieldReference;
import io.trino.sql.tree.FunctionCall;
import io.trino.sql.tree.GenericLiteral;
import io.trino.sql.tree.Identifier;
import io.trino.sql.tree.IfExpression;
import io.trino.sql.tree.InListExpression;
import io.trino.sql.tree.InPredicate;
import io.trino.sql.tree.IntervalLiteral;
import io.trino.sql.tree.IsNotNullPredicate;
import io.trino.sql.tree.IsNullPredicate;
import io.trino.sql.tree.LambdaArgumentDeclaration;
import io.trino.sql.tree.LambdaExpression;
import io.trino.sql.tree.LogicalExpression;
import io.trino.sql.tree.LongLiteral;
import io.trino.sql.tree.NodeRef;
import io.trino.sql.tree.NotExpression;
import io.trino.sql.tree.NullIfExpression;
import io.trino.sql.tree.NullLiteral;
import io.trino.sql.tree.QualifiedName;
import io.trino.sql.tree.Row;
import io.trino.sql.tree.SearchedCaseExpression;
import io.trino.sql.tree.SimpleCaseExpression;
import io.trino.sql.tree.StringLiteral;
import io.trino.sql.tree.SubscriptExpression;
import io.trino.sql.tree.SymbolReference;
import io.trino.sql.tree.TimeLiteral;
import io.trino.sql.tree.TimestampLiteral;
import io.trino.sql.tree.WhenClause;
import io.trino.type.UnknownType;
import java.util.List;
import java.util.Map;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.airlift.slice.SliceUtf8.countCodePoints;
import static io.airlift.slice.Slices.utf8Slice;
import static io.trino.spi.function.OperatorType.EQUAL;
import static io.trino.spi.function.OperatorType.HASH_CODE;
import static io.trino.spi.function.OperatorType.INDETERMINATE;
import static io.trino.spi.function.OperatorType.LESS_THAN_OR_EQUAL;
import static io.trino.spi.function.OperatorType.NEGATION;
import static io.trino.spi.function.OperatorType.SUBSCRIPT;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;
import static io.trino.spi.type.CharType.createCharType;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.spi.type.IntegerType.INTEGER;
import static io.trino.spi.type.VarbinaryType.VARBINARY;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static io.trino.spi.type.VarcharType.createVarcharType;
import static io.trino.sql.analyzer.TypeSignatureProvider.fromTypes;
import static io.trino.sql.relational.Expressions.call;
import static io.trino.sql.relational.Expressions.constant;
import static io.trino.sql.relational.Expressions.constantNull;
import static io.trino.sql.relational.Expressions.field;
import static io.trino.sql.relational.SpecialForm.Form.AND;
import static io.trino.sql.relational.SpecialForm.Form.BETWEEN;
import static io.trino.sql.relational.SpecialForm.Form.BIND;
import static io.trino.sql.relational.SpecialForm.Form.COALESCE;
import static io.trino.sql.relational.SpecialForm.Form.DEREFERENCE;
import static io.trino.sql.relational.SpecialForm.Form.IF;
import static io.trino.sql.relational.SpecialForm.Form.IN;
import static io.trino.sql.relational.SpecialForm.Form.IS_NULL;
import static io.trino.sql.relational.SpecialForm.Form.NULL_IF;
import static io.trino.sql.relational.SpecialForm.Form.OR;
import static io.trino.sql.relational.SpecialForm.Form.ROW_CONSTRUCTOR;
import static io.trino.sql.relational.SpecialForm.Form.SWITCH;
import static io.trino.sql.relational.SpecialForm.Form.WHEN;
import static io.trino.type.DateTimes.parseTime;
import static io.trino.type.DateTimes.parseTimeWithTimeZone;
import static io.trino.type.DateTimes.parseTimestamp;
import static io.trino.type.DateTimes.parseTimestampWithTimeZone;
import static io.trino.type.JsonType.JSON;
import static io.trino.util.DateTimeUtils.parseDayTimeInterval;
import static io.trino.util.DateTimeUtils.parseYearMonthInterval;
import static java.util.Objects.requireNonNull;
public final class SqlToRowExpressionTranslator
{
private SqlToRowExpressionTranslator() {}
public static RowExpression translate(
Expression expression,
Map<NodeRef<Expression>, Type> types,
Map<Symbol, Integer> layout,
Metadata metadata,
Session session,
boolean optimize)
{
Visitor visitor = new Visitor(metadata, types, layout);
RowExpression result = visitor.process(expression, null);
requireNonNull(result, "result is null");
if (optimize) {
ExpressionOptimizer optimizer = new ExpressionOptimizer(metadata, session);
return optimizer.optimize(result);
}
return result;
}
private static class Visitor
extends AstVisitor<RowExpression, Void>
{
private final Metadata metadata;
private final Map<NodeRef<Expression>, Type> types;
private final Map<Symbol, Integer> layout;
private final StandardFunctionResolution standardFunctionResolution;
private Visitor(
Metadata metadata,
Map<NodeRef<Expression>, Type> types,
Map<Symbol, Integer> layout)
{
this.metadata = metadata;
this.types = ImmutableMap.copyOf(requireNonNull(types, "types is null"));
this.layout = layout;
standardFunctionResolution = new StandardFunctionResolution(metadata);
}
private Type getType(Expression node)
{
return types.get(NodeRef.of(node));
}
@Override
protected RowExpression visitExpression(Expression node, Void context)
{
throw new UnsupportedOperationException("not yet implemented: expression translator for " + node.getClass().getName());
}
@Override
protected RowExpression visitFieldReference(FieldReference node, Void context)
{
return field(node.getFieldIndex(), getType(node));
}
@Override
protected RowExpression visitNullLiteral(NullLiteral node, Void context)
{
return constantNull(UnknownType.UNKNOWN);
}
@Override
protected RowExpression visitBooleanLiteral(BooleanLiteral node, Void context)
{
return constant(node.getValue(), BOOLEAN);
}
@Override
protected RowExpression visitLongLiteral(LongLiteral node, Void context)
{
if (node.getValue() >= Integer.MIN_VALUE && node.getValue() <= Integer.MAX_VALUE) {
return constant(node.getValue(), INTEGER);
}
return constant(node.getValue(), BIGINT);
}
@Override
protected RowExpression visitDoubleLiteral(DoubleLiteral node, Void context)
{
return constant(node.getValue(), DOUBLE);
}
@Override
protected RowExpression visitDecimalLiteral(DecimalLiteral node, Void context)
{
DecimalParseResult parseResult = Decimals.parse(node.getValue());
return constant(parseResult.getObject(), parseResult.getType());
}
@Override
protected RowExpression visitStringLiteral(StringLiteral node, Void context)
{
return constant(node.getSlice(), createVarcharType(countCodePoints(node.getSlice())));
}
@Override
protected RowExpression visitCharLiteral(CharLiteral node, Void context)
{
return constant(node.getSlice(), createCharType(node.getValue().length()));
}
@Override
protected RowExpression visitBinaryLiteral(BinaryLiteral node, Void context)
{
return constant(node.getValue(), VARBINARY);
}
@Override
protected RowExpression visitGenericLiteral(GenericLiteral node, Void context)
{
Type type = getType(node);
if (JSON.equals(type)) {
return call(
metadata.resolveFunction(QualifiedName.of("json_parse"), fromTypes(VARCHAR)),
constant(utf8Slice(node.getValue()), VARCHAR));
}
return call(
metadata.getCoercion(VARCHAR, type),
constant(utf8Slice(node.getValue()), VARCHAR));
}
@Override
protected RowExpression visitTimeLiteral(TimeLiteral node, Void context)
{
Type type = getType(node);
Object value;
if (type instanceof TimeWithTimeZoneType) {
value = parseTimeWithTimeZone(((TimeWithTimeZoneType) type).getPrecision(), node.getValue());
}
else {
value = parseTime(node.getValue());
}
return constant(value, type);
}
@Override
protected RowExpression visitTimestampLiteral(TimestampLiteral node, Void context)
{
Type type = getType(node);
Object value;
if (type instanceof TimestampType) {
int precision = ((TimestampType) type).getPrecision();
value = parseTimestamp(precision, node.getValue());
}
else if (type instanceof TimestampWithTimeZoneType) {
int precision = ((TimestampWithTimeZoneType) type).getPrecision();
value = parseTimestampWithTimeZone(precision, node.getValue());
}
else {
throw new IllegalStateException("Unexpected type: " + type);
}
return constant(value, type);
}
@Override
protected RowExpression visitIntervalLiteral(IntervalLiteral node, Void context)
{
long value;
if (node.isYearToMonth()) {
value = node.getSign().multiplier() * parseYearMonthInterval(node.getValue(), node.getStartField(), node.getEndField());
}
else {
value = node.getSign().multiplier() * parseDayTimeInterval(node.getValue(), node.getStartField(), node.getEndField());
}
return constant(value, getType(node));
}
@Override
protected RowExpression visitComparisonExpression(ComparisonExpression node, Void context)
{
RowExpression left = process(node.getLeft(), context);
RowExpression right = process(node.getRight(), context);
Operator operator = node.getOperator();
switch (node.getOperator()) {
case NOT_EQUAL:
return new CallExpression(
metadata.resolveFunction(QualifiedName.of("not"), fromTypes(BOOLEAN)),
ImmutableList.of(visitComparisonExpression(Operator.EQUAL, left, right)));
case GREATER_THAN:
return visitComparisonExpression(Operator.LESS_THAN, right, left);
case GREATER_THAN_OR_EQUAL:
return visitComparisonExpression(Operator.LESS_THAN_OR_EQUAL, right, left);
default:
return visitComparisonExpression(operator, left, right);
}
}
private RowExpression visitComparisonExpression(Operator operator, RowExpression left, RowExpression right)
{
return call(
standardFunctionResolution.comparisonFunction(operator, left.getType(), right.getType()),
left,
right);
}
@Override
protected RowExpression visitFunctionCall(FunctionCall node, Void context)
{
List<RowExpression> arguments = node.getArguments().stream()
.map(value -> process(value, context))
.collect(toImmutableList());
return new CallExpression(metadata.decodeFunction(node.getName()), arguments);
}
@Override
protected RowExpression visitSymbolReference(SymbolReference node, Void context)
{
Integer field = layout.get(Symbol.from(node));
if (field != null) {
return field(field, getType(node));
}
return new VariableReferenceExpression(node.getName(), getType(node));
}
@Override
protected RowExpression visitLambdaExpression(LambdaExpression node, Void context)
{
RowExpression body = process(node.getBody(), context);
Type type = getType(node);
List<Type> typeParameters = type.getTypeParameters();
List<Type> argumentTypes = typeParameters.subList(0, typeParameters.size() - 1);
List<String> argumentNames = node.getArguments().stream()
.map(LambdaArgumentDeclaration::getName)
.map(Identifier::getValue)
.collect(toImmutableList());
return new LambdaDefinitionExpression(argumentTypes, argumentNames, body);
}
@Override
protected RowExpression visitBindExpression(BindExpression node, Void context)
{
ImmutableList.Builder<Type> valueTypesBuilder = ImmutableList.builder();
ImmutableList.Builder<RowExpression> argumentsBuilder = ImmutableList.builder();
for (Expression value : node.getValues()) {
RowExpression valueRowExpression = process(value, context);
valueTypesBuilder.add(valueRowExpression.getType());
argumentsBuilder.add(valueRowExpression);
}
RowExpression function = process(node.getFunction(), context);
argumentsBuilder.add(function);
return new SpecialForm(BIND, getType(node), argumentsBuilder.build());
}
@Override
protected RowExpression visitArithmeticBinary(ArithmeticBinaryExpression node, Void context)
{
RowExpression left = process(node.getLeft(), context);
RowExpression right = process(node.getRight(), context);
return call(
standardFunctionResolution.arithmeticFunction(node.getOperator(), left.getType(), right.getType()),
left,
right);
}
@Override
protected RowExpression visitArithmeticUnary(ArithmeticUnaryExpression node, Void context)
{
RowExpression expression = process(node.getValue(), context);
switch (node.getSign()) {
case PLUS:
return expression;
case MINUS:
return call(
metadata.resolveOperator(NEGATION, ImmutableList.of(expression.getType())),
expression);
}
throw new UnsupportedOperationException("Unsupported unary operator: " + node.getSign());
}
@Override
protected RowExpression visitLogicalExpression(LogicalExpression node, Void context)
{
Form form;
switch (node.getOperator()) {
case AND:
form = AND;
break;
case OR:
form = OR;
break;
default:
throw new IllegalStateException("Unknown logical operator: " + node.getOperator());
}
return new SpecialForm(
form,
BOOLEAN,
node.getTerms().stream()
.map(term -> process(term, context))
.collect(toImmutableList()));
}
@Override
protected RowExpression visitCast(Cast node, Void context)
{
RowExpression value = process(node.getExpression(), context);
Type returnType = getType(node);
if (node.isTypeOnly()) {
return changeType(value, returnType);
}
if (node.isSafe()) {
return call(
metadata.getCoercion(QualifiedName.of("TRY_CAST"), value.getType(), returnType),
value);
}
return call(
metadata.getCoercion(value.getType(), returnType),
value);
}
private static RowExpression changeType(RowExpression value, Type targetType)
{
ChangeTypeVisitor visitor = new ChangeTypeVisitor(targetType);
return value.accept(visitor, null);
}
private static class ChangeTypeVisitor
implements RowExpressionVisitor<RowExpression, Void>
{
private final Type targetType;
private ChangeTypeVisitor(Type targetType)
{
this.targetType = targetType;
}
@Override
public RowExpression visitCall(CallExpression call, Void context)
{
return new CallExpression(call.getResolvedFunction(), call.getArguments());
}
@Override
public RowExpression visitSpecialForm(SpecialForm specialForm, Void context)
{
return new SpecialForm(specialForm.getForm(), targetType, specialForm.getArguments());
}
@Override
public RowExpression visitInputReference(InputReferenceExpression reference, Void context)
{
return field(reference.getField(), targetType);
}
@Override
public RowExpression visitConstant(ConstantExpression literal, Void context)
{
return constant(literal.getValue(), targetType);
}
@Override
public RowExpression visitLambda(LambdaDefinitionExpression lambda, Void context)
{
throw new UnsupportedOperationException();
}
@Override
public RowExpression visitVariableReference(VariableReferenceExpression reference, Void context)
{
return new VariableReferenceExpression(reference.getName(), targetType);
}
}
@Override
protected RowExpression visitCoalesceExpression(CoalesceExpression node, Void context)
{
List<RowExpression> arguments = node.getOperands().stream()
.map(value -> process(value, context))
.collect(toImmutableList());
return new SpecialForm(COALESCE, getType(node), arguments);
}
@Override
protected RowExpression visitSimpleCaseExpression(SimpleCaseExpression node, Void context)
{
ImmutableList.Builder<RowExpression> arguments = ImmutableList.builder();
RowExpression value = process(node.getOperand(), context);
arguments.add(value);
ImmutableList.Builder<ResolvedFunction> functionDependencies = ImmutableList.builder();
for (WhenClause clause : node.getWhenClauses()) {
RowExpression operand = process(clause.getOperand(), context);
RowExpression result = process(clause.getResult(), context);
functionDependencies.add(metadata.resolveOperator(EQUAL, ImmutableList.of(value.getType(), operand.getType())));
arguments.add(new SpecialForm(
WHEN,
getType(clause),
operand,
result));
}
Type returnType = getType(node);
arguments.add(node.getDefaultValue()
.map(defaultValue -> process(defaultValue, context))
.orElse(constantNull(returnType)));
return new SpecialForm(SWITCH, returnType, arguments.build(), functionDependencies.build());
}
@Override
protected RowExpression visitSearchedCaseExpression(SearchedCaseExpression node, Void context)
{
/*
Translates an expression like:
case when cond1 then value1
when cond2 then value2
when cond3 then value3
else value4
end
To:
IF(cond1,
value1,
IF(cond2,
value2,
If(cond3,
value3,
value4)))
*/
RowExpression expression = node.getDefaultValue()
.map((value) -> process(value, context))
.orElse(constantNull(getType(node)));
for (WhenClause clause : Lists.reverse(node.getWhenClauses())) {
expression = new SpecialForm(
IF,
getType(node),
process(clause.getOperand(), context),
process(clause.getResult(), context),
expression);
}
return expression;
}
@Override
protected RowExpression visitIfExpression(IfExpression node, Void context)
{
ImmutableList.Builder<RowExpression> arguments = ImmutableList.builder();
arguments.add(process(node.getCondition(), context))
.add(process(node.getTrueValue(), context));
if (node.getFalseValue().isPresent()) {
arguments.add(process(node.getFalseValue().get(), context));
}
else {
arguments.add(constantNull(getType(node)));
}
return new SpecialForm(IF, getType(node), arguments.build());
}
@Override
protected RowExpression visitInPredicate(InPredicate node, Void context)
{
ImmutableList.Builder<RowExpression> arguments = ImmutableList.builder();
RowExpression value = process(node.getValue(), context);
arguments.add(value);
InListExpression values = (InListExpression) node.getValueList();
for (Expression testValue : values.getValues()) {
arguments.add(process(testValue, context));
}
List<ResolvedFunction> functionDependencies = ImmutableList.<ResolvedFunction>builder()
.add(metadata.resolveOperator(EQUAL, ImmutableList.of(value.getType(), value.getType())))
.add(metadata.resolveOperator(HASH_CODE, ImmutableList.of(value.getType())))
.add(metadata.resolveOperator(INDETERMINATE, ImmutableList.of(value.getType())))
.build();
return new SpecialForm(IN, BOOLEAN, arguments.build(), functionDependencies);
}
@Override
protected RowExpression visitIsNotNullPredicate(IsNotNullPredicate node, Void context)
{
RowExpression expression = process(node.getValue(), context);
return notExpression(new SpecialForm(IS_NULL, BOOLEAN, ImmutableList.of(expression)));
}
@Override
protected RowExpression visitIsNullPredicate(IsNullPredicate node, Void context)
{
RowExpression expression = process(node.getValue(), context);
return new SpecialForm(IS_NULL, BOOLEAN, expression);
}
@Override
protected RowExpression visitNotExpression(NotExpression node, Void context)
{
return notExpression(process(node.getValue(), context));
}
private RowExpression notExpression(RowExpression value)
{
return new CallExpression(
metadata.resolveFunction(QualifiedName.of("not"), fromTypes(BOOLEAN)),
ImmutableList.of(value));
}
@Override
protected RowExpression visitNullIfExpression(NullIfExpression node, Void context)
{
RowExpression first = process(node.getFirst(), context);
RowExpression second = process(node.getSecond(), context);
ResolvedFunction resolvedFunction = metadata.resolveOperator(EQUAL, ImmutableList.of(first.getType(), second.getType()));
List<ResolvedFunction> functionDependencies = ImmutableList.<ResolvedFunction>builder()
.add(resolvedFunction)
.add(metadata.getCoercion(first.getType(), resolvedFunction.getSignature().getArgumentTypes().get(0)))
.add(metadata.getCoercion(second.getType(), resolvedFunction.getSignature().getArgumentTypes().get(0)))
.build();
return new SpecialForm(
NULL_IF,
getType(node),
ImmutableList.of(first, second),
functionDependencies);
}
@Override
protected RowExpression visitBetweenPredicate(BetweenPredicate node, Void context)
{
RowExpression value = process(node.getValue(), context);
RowExpression min = process(node.getMin(), context);
RowExpression max = process(node.getMax(), context);
List<ResolvedFunction> functionDependencies = ImmutableList.<ResolvedFunction>builder()
.add(metadata.resolveOperator(LESS_THAN_OR_EQUAL, ImmutableList.of(value.getType(), max.getType())))
.build();
return new SpecialForm(
BETWEEN,
BOOLEAN,
ImmutableList.of(value, min, max),
functionDependencies);
}
@Override
protected RowExpression visitSubscriptExpression(SubscriptExpression node, Void context)
{
// Translates base[index]. Row field access becomes a DEREFERENCE special form
// with a 0-based field ordinal; array/map access becomes a SUBSCRIPT operator call.
RowExpression base = process(node.getBase(), context);
RowExpression index = process(node.getIndex(), context);
if (getType(node.getBase()) instanceof RowType) {
// For rows the index must already be a constant (the cast assumes earlier
// analysis guarantees this); SQL subscripts are 1-based, DEREFERENCE fields
// are 0-based, hence the "- 1".
long value = (Long) ((ConstantExpression) index).getValue();
return new SpecialForm(DEREFERENCE, getType(node), base, constant((int) value - 1, INTEGER));
}
return call(
metadata.resolveOperator(SUBSCRIPT, ImmutableList.of(base.getType(), index.getType())),
base,
index);
}
@Override
protected RowExpression visitRow(Row node, Void context)
{
    // Translate each field expression of the row literal in order, then assemble
    // them into a ROW_CONSTRUCTOR special form of the row's analyzed type.
    Type rowType = getType(node);
    List<RowExpression> fields = node.getItems().stream()
            .map(item -> process(item, context))
            .collect(toImmutableList());
    return new SpecialForm(ROW_CONSTRUCTOR, rowType, fields);
}
}
}
| |
/**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.runtime.io.text.driver;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.List;
import java.util.function.Function;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.asakusafw.runtime.io.text.FieldReader;
import com.asakusafw.runtime.io.text.TextFormatException;
import com.asakusafw.runtime.io.text.TextInput;
import com.asakusafw.runtime.io.text.TextUtil;
/**
 * A {@link TextInput} that drives a {@link FieldReader} and maps each record's
 * fields onto a data model object through per-field {@link FieldDriver}s.
 * Supports optional header consumption, in-place field trimming, empty-field
 * skipping, and configurable {@link ErrorAction}s for malformed, missing, and
 * extra fields.
 */
final class InputDriver<T> implements TextInput<T> {
static final Log LOG = LogFactory.getLog(InputDriver.class);
// placeholder used in diagnostic messages when a value (path/line/row) is unknown
private static final String NOT_AVAILABLE = "N/A";
private final FieldReader reader;
private final String path;
private final Class<?> dataType;
private final FieldDriver<T, ?>[] fields;
private final HeaderType.Input header;
private final boolean trimExtraInput;
private final boolean skipExtraEmptyInput;
private final ErrorAction onLessInput;
private final ErrorAction onMoreInput;
// true when this input starts at the head of the text, i.e. line/record indices are meaningful
private final boolean fromTextHead;
// cleared after the first record has been checked against the header
private boolean requireConsumeHeader;
// reusable view that exposes a trimmed slice of a field without copying characters
private final TrimBuffer trimmer = new TrimBuffer();
@SuppressWarnings("unchecked")
InputDriver(
FieldReader reader, String path,
Class<? extends T> dataType, List<FieldDriver<T, ?>> fields,
HeaderType.Input header, boolean trimExtraInput, boolean skipExtraEmptyInput,
ErrorAction onLessInput, ErrorAction onMoreInput,
boolean fromTextHead) {
this.reader = reader;
this.path = path;
this.dataType = dataType;
this.fields = (FieldDriver<T, ?>[]) fields.toArray(new FieldDriver<?, ?>[fields.size()]);
this.header = header;
this.trimExtraInput = trimExtraInput;
this.skipExtraEmptyInput = skipExtraEmptyInput;
this.onLessInput = onLessInput;
this.onMoreInput = onMoreInput;
this.fromTextHead = fromTextHead;
// headers can only be consumed when reading from the physical head of the text
this.requireConsumeHeader = fromTextHead && header != HeaderType.Input.NEVER;
}
@Override
public long getLineNumber() {
// -1 means "unknown" (this input is a mid-file split, so absolute numbers are unavailable)
return fromTextHead ? reader.getRecordLineNumber() : -1L;
}
@Override
public long getRecordIndex() {
return fromTextHead ? reader.getRecordIndex() : -1L;
}
/**
 * Reads the next record into the given model object.
 * Returns {@code false} when the input is exhausted; wraps
 * {@link TextFormatException} into {@link IOException} with location info.
 */
@Override
public boolean readTo(T model) throws IOException {
try {
if (reader.nextRecord() == false) {
return false;
}
if (LOG.isTraceEnabled()) {
LOG.trace(String.format(
"reading record: path=%s, line=%,d, fields=%s",
path,
getLineNumberMessage(),
collectFields()));
// collectFields() consumed the field cursor; restore it before processing
reader.rewindFields();
}
if (requireConsumeHeader) {
requireConsumeHeader = false;
// the header check may itself consume the current record
if (doHeaderCheck() == false) {
return false;
}
}
process(model);
return true;
} catch (TextFormatException e) {
throw new IOException(MessageFormat.format(
"text format is not valid: path={0}, line={1}, row={2}",
path != null ? path : NOT_AVAILABLE,
getLineNumberMessage(),
getRecordIndexMessage()), e);
}
}
// Fills the model from the current record and validates the field count.
private void process(T model) throws IOException {
int lessCount = 0;
for (FieldDriver<T, ?> field : fields) {
boolean success = processField(model, field);
if (success == false) {
lessCount++;
}
}
if (lessCount == 0) {
checkRest();
} else {
handleLess(lessCount);
}
}
// Parses the next available field into the property extracted from the model.
// Returns false when the record ran out of fields for this driver.
private <P> boolean processField(T model, FieldDriver<T, P> field) throws IOException {
P property = field.extractor.apply(model);
FieldAdapter<? super P> adapter = field.adapter;
while (reader.nextField()) {
CharSequence value = reader.getContent();
if (value != null) {
if (field.trimInput) {
value = trimmer.wrap(value);
}
// an (optionally trimmed) empty field may be skipped entirely
if (value.length() == 0 && field.skipEmptyInput) {
if (LOG.isTraceEnabled()) {
LOG.trace(String.format(
"skip empty field: path=%s, line=%,d, row=%,d, column=%,d",
path,
getLineNumberMessage(),
getRecordIndexMessage(),
getFieldIndexMessage()));
}
continue;
}
}
try {
adapter.parse(value, property);
} catch (MalformedFieldException e) {
// reset the property before reporting so the model is never half-filled
adapter.clear(property);
handleMalformed(field, value, e);
}
return true;
}
// no more fields: clear the property and report a short record to the caller
adapter.clear(property);
return false;
}
// Reports fields remaining after all drivers were satisfied (record too long).
private void checkRest() throws IOException {
if (onMoreInput == ErrorAction.IGNORE) {
return;
}
int count = countRest();
if (count == 0) {
return;
}
handle(onMoreInput, null, MessageFormat.format(
"record has {0} (of {1}) fields: path={2}, line={3}, row={4}, fields={5}",
count + fields.length,
fields.length,
path != null ? path : NOT_AVAILABLE,
getLineNumberMessage(),
getRecordIndexMessage(),
collectFields()));
}
// Reports a record that had fewer fields than drivers (record too short).
private void handleLess(int lessCount) throws IOException {
if (onLessInput == ErrorAction.IGNORE) {
return;
}
handle(onLessInput, null, MessageFormat.format(
"record has {0} (of {1}) fields: path={2}, line={3}, row={4}, fields={5}",
fields.length - lessCount,
fields.length,
path != null ? path : NOT_AVAILABLE,
getLineNumberMessage(),
getRecordIndexMessage(),
collectFields()));
}
// Reports a field value the adapter could not parse.
private void handleMalformed(
FieldDriver<?, ?> field, CharSequence value,
MalformedFieldException cause) throws IOException {
if (field.onMalformedInput == ErrorAction.IGNORE) {
return;
}
handle(field.onMalformedInput, cause, MessageFormat.format(
"field \"{0}\" (in {1}) is malformed: path={2}, line={3}, row={4}, column={5}, content={6}",
field.name,
dataType.getSimpleName(),
path != null ? path : NOT_AVAILABLE,
getLineNumberMessage(),
getRecordIndexMessage(),
getFieldIndexMessage(),
value == null ? "null" : TextUtil.quote(value))); //$NON-NLS-1$
}
// Dispatches a diagnostic according to the configured action: warn or fail.
private void handle(ErrorAction action, Exception cause, String message) throws IOException {
switch (action) {
case REPORT:
LOG.warn(message, cause);
break;
case ERROR:
throw new IOException(message, cause);
default:
throw new AssertionError(action);
}
}
// Decides whether the current (first) record is a header and, if so, skips it.
// Returns false when skipping the header exhausted the input.
private boolean doHeaderCheck() throws IOException {
// if the first line is filtered out, we never consume headers
if (reader.getRecordLineNumber() != 0L) {
return true;
}
if (testConsumeHeader()) {
return reader.nextRecord();
} else {
// not a header after all: rewind so the record is processed as data
reader.rewindFields();
return true;
}
}
private boolean testConsumeHeader() throws IOException {
switch (header) {
case ALWAYS:
return true;
case OPTIONAL:
// consume only when the first record actually matches the field names
return compareHeader();
default:
throw new AssertionError(header);
}
}
// Compares the current record against the declared field names.
// Returns true only when every field label matches and the field count is acceptable.
private boolean compareHeader() throws IOException {
int matched = 0;
for (FieldDriver<?, ?> field : fields) {
while (reader.nextField()) {
CharSequence value = reader.getContent();
String label = field.name;
if (value != null) {
if (trimExtraInput) {
// trim both sides of the comparison consistently
value = trimmer.wrap(value);
label = label.trim();
}
if (value.length() == 0 && field.skipEmptyInput) {
if (LOG.isTraceEnabled()) {
LOG.trace(String.format(
"skip empty header field: path=%s, column=%,d",
path,
reader.getFieldIndex()));
}
continue;
}
}
if (value == null || label.contentEquals(value) == false) {
if (LOG.isDebugEnabled()) {
LOG.debug(String.format(
"header mismatch: path=%s, column=%,d, expected=%s, appeared=%s", //$NON-NLS-1$
path,
reader.getFieldIndex(),
TextUtil.quote(field.name),
value == null ? "null" : TextUtil.quote(value))); //$NON-NLS-1$
}
return false;
}
matched++;
break;
}
}
// too few header fields?
if (checkHeaderFieldCount(matched, onLessInput) == false) {
return false;
}
// too many header fields?
if (checkHeaderFieldCount(fields.length + countRest(), onMoreInput) == false) {
return false;
}
return true;
}
// Validates the header field count against an error action; returning false
// means "treat the record as data, not a header".
private boolean checkHeaderFieldCount(int count, ErrorAction action) {
if (count == fields.length || action == ErrorAction.IGNORE) {
return true;
}
String message = MessageFormat.format(
"header has {0} (of {1}) fields: path={2}",
count,
fields.length,
path != null ? path : NOT_AVAILABLE);
switch (action) {
case REPORT:
LOG.warn(message);
return true;
case ERROR:
LOG.debug(message);
return false;
default:
throw new AssertionError(action);
}
}
// Renders the remaining fields of the current record for diagnostics.
// Note: consumes the field cursor; callers that still need the fields must rewind.
private String collectFields() {
try {
reader.rewindFields();
StringBuilder buffer = new StringBuilder();
buffer.append('{');
while (reader.nextField()) {
if (buffer.length() > 1) {
buffer.append(", "); //$NON-NLS-1$
}
CharSequence content = reader.getContent();
if (content == null) {
buffer.append((Object) null);
} else {
TextUtil.quoteTo(content, buffer);
}
}
buffer.append('}');
return buffer.toString();
} catch (IOException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("error occurred while peeking the current line", e); //$NON-NLS-1$
}
return NOT_AVAILABLE;
}
}
// Counts the fields left in the current record, optionally ignoring empty ones.
private int countRest() throws IOException {
int count = 0;
while (reader.nextField()) {
if (skipExtraEmptyInput) {
CharSequence cs = reader.getContent();
if (cs != null) {
if (trimExtraInput) {
cs = trimmer.wrap(cs);
}
if (cs.length() == 0) {
continue;
}
}
}
count++;
}
return count;
}
private Object getLineNumberMessage() {
return getIndexMessage(getLineNumber());
}
private Object getRecordIndexMessage() {
return getIndexMessage(getRecordIndex());
}
private Object getFieldIndexMessage() {
return getIndexMessage(reader.getFieldIndex());
}
// Converts a 0-based index to a 1-based display value, or "N/A" when unknown.
private Object getIndexMessage(long index) {
return index < 0 ? NOT_AVAILABLE : index + 1;
}
@Override
public void close() throws IOException {
reader.close();
}
@Override
public String toString() {
return String.format("InputDriver(path=%s, reader=%s)", path, reader); //$NON-NLS-1$
}
/**
 * Binds one field position to a model property: how to extract the property,
 * how to parse text into it, and how to treat empty/malformed input.
 */
static class FieldDriver<TRecord, TProperty> {
final String name;
final Function<? super TRecord, ? extends TProperty> extractor;
final FieldAdapter<? super TProperty> adapter;
final boolean trimInput;
final boolean skipEmptyInput;
final ErrorAction onMalformedInput;
FieldDriver(
String name,
Function<? super TRecord, ? extends TProperty> extractor,
FieldAdapter<? super TProperty> adapter,
boolean trimInput, boolean skipEmptyInput,
ErrorAction onMalformedInput) {
this.name = name;
this.extractor = extractor;
this.adapter = adapter;
this.trimInput = trimInput;
this.skipEmptyInput = skipEmptyInput;
this.onMalformedInput = onMalformedInput;
}
}
/**
 * A single reusable, allocation-free trimmed view over another CharSequence.
 * Only one wrapped value is valid at a time; wrap() mutates this instance.
 */
private static final class TrimBuffer implements CharSequence {
private CharSequence parent;
private int offset;
private int length;
TrimBuffer() {
this.parent = ""; //$NON-NLS-1$
this.offset = 0;
this.length = 0;
}
// Returns a whitespace-trimmed view of cs: "" when all-blank, cs itself when
// nothing to trim, otherwise this buffer re-pointed at the trimmed slice.
CharSequence wrap(CharSequence cs) {
int newLength = cs.length();
int newOffset = TextUtil.countLeadingWhitespaces(cs, 0, newLength);
newLength -= newOffset;
newLength -= TextUtil.countTrailingWhitespaces(cs, newOffset, newLength);
if (newLength == 0) {
return ""; //$NON-NLS-1$
} else if (newOffset == 0 && newLength == cs.length()) {
return cs;
} else {
parent = cs;
offset = newOffset;
length = newLength;
return this;
}
}
@Override
public int length() {
return length;
}
@Override
public char charAt(int index) {
if (index < 0 || index >= length) {
throw new IndexOutOfBoundsException();
}
return parent.charAt(index + offset);
}
@Override
public CharSequence subSequence(int start, int end) {
if (start < 0 || start > end || end > length) {
throw new IndexOutOfBoundsException();
}
return parent.subSequence(start + offset, end + offset);
}
@Override
public String toString() {
return parent.subSequence(offset, offset + length).toString();
}
}
}
| |
/*
Copyright 2015, Strategic Gains, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.orangerhymelabs.helenus.cassandra.table;
import java.util.Date;
import java.util.concurrent.ExecutionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.orangerhymelabs.helenus.cassandra.AbstractCassandraRepository;
import com.orangerhymelabs.helenus.cassandra.SchemaProvider;
import com.orangerhymelabs.helenus.cassandra.document.DocumentRepository;
import com.orangerhymelabs.helenus.cassandra.table.TableRepository.TableStatements;
import com.orangerhymelabs.helenus.cassandra.table.key.KeyDefinitionException;
import com.orangerhymelabs.helenus.cassandra.table.key.KeyDefinitionParser;
import com.orangerhymelabs.helenus.exception.StorageException;
import com.orangerhymelabs.helenus.persistence.Identifier;
import com.orangerhymelabs.helenus.persistence.Query;
import com.orangerhymelabs.helenus.persistence.StatementFactory;
/**
* @author tfredrich
* @since Jun 8, 2015
*/
/**
 * Cassandra-backed repository for {@link Table} metadata.
 * Creating or deleting a table here also creates or drops the corresponding
 * document schema (the keyspace table that stores the table's documents).
 *
 * @author tfredrich
 * @since Jun 8, 2015
 */
public class TableRepository
extends AbstractCassandraRepository<Table, TableStatements>
{
    private static final Logger LOG = LoggerFactory.getLogger(TableRepository.class);
    private static final KeyDefinitionParser KEY_PARSER = new KeyDefinitionParser();

    // Constant holders only — made static so they carry no hidden reference to the
    // enclosing repository instance.
    private static class Tables
    {
        static final String BY_ID = "sys_tbl";
    }

    private static class Columns
    {
        static final String NAME = "tbl_name";
        static final String DATABASE = "db_name";
        static final String DESCRIPTION = "description";
        static final String TYPE = "tbl_type";
        static final String KEYS = "keys";
        static final String TTL = "tbl_ttl";
        static final String VIEWS = "views";
        static final String CREATED_AT = "created_at";
        static final String UPDATED_AT = "updated_at";
    }

    /**
     * Creates/drops the system table that stores table metadata.
     * Failures are logged and reported as {@code false} rather than thrown.
     */
    public static class Schema
    implements SchemaProvider
    {
        private static final String DROP_TABLE = "drop table if exists %s." + Tables.BY_ID;
        private static final String CREATE_TABLE = "create table %s." + Tables.BY_ID +
            "(" +
                Columns.DATABASE + " text," +
                Columns.NAME + " text," +
                Columns.DESCRIPTION + " text," +
                Columns.TYPE + " text," +
                Columns.KEYS + " text," +
                Columns.TTL + " bigint," +
                Columns.VIEWS + " list<text>," +
                Columns.CREATED_AT + " timestamp," +
                Columns.UPDATED_AT + " timestamp," +
                "primary key ((" + Columns.DATABASE + "), " + Columns.NAME + ")" +
            ")";

        @Override
        public boolean drop(Session session, String keyspace)
        {
            ResultSetFuture rs = session.executeAsync(String.format(DROP_TABLE, keyspace));
            try
            {
                return rs.get().wasApplied();
            }
            catch (InterruptedException | ExecutionException e)
            {
                LOG.error("Table schema drop failed", e);
            }
            return false;
        }

        @Override
        public boolean create(Session session, String keyspace)
        {
            ResultSetFuture rs = session.executeAsync(String.format(CREATE_TABLE, keyspace));
            try
            {
                return rs.get().wasApplied();
            }
            catch (InterruptedException | ExecutionException e)
            {
                LOG.error("Table schema create failed", e);
            }
            return false;
        }
    }

    // shared WHERE clause for statements keyed by (database, table name)
    private static final String IDENTITY_CQL = " where " + Columns.DATABASE + " = ? and " + Columns.NAME + " = ?";

    /**
     * CQL statements for the table-metadata system table. The bind order of
     * {@code create()} must match {@link TableRepository#bindCreate}, and of
     * {@code update()} must match {@link TableRepository#bindUpdate}.
     */
    public interface TableStatements
    extends StatementFactory
    {
        @Override
        @Query("insert into %s." + Tables.BY_ID + " ("
            + Columns.NAME + ", "
            + Columns.DATABASE + ", "
            + Columns.DESCRIPTION + ", "
            + Columns.TYPE + ", "
            + Columns.KEYS + ", "
            + Columns.TTL + ", "
            + Columns.CREATED_AT + ", "
            + Columns.UPDATED_AT
            +") values (?, ?, ?, ?, ?, ?, ?, ?) if not exists")
        PreparedStatement create();

        @Override
        @Query("delete from %s." + Tables.BY_ID + IDENTITY_CQL)
        PreparedStatement delete();

        @Override
        @Query("update %s." + Tables.BY_ID + " set " + Columns.DESCRIPTION + " = ?, " + Columns.TTL + " = ?, " + Columns.UPDATED_AT + " = ?" + IDENTITY_CQL + " if exists")
        PreparedStatement update();

        @Override
        @Query("select * from %s." + Tables.BY_ID + IDENTITY_CQL)
        PreparedStatement read();

        @Override
        @Query("select count(*) from %s." + Tables.BY_ID + IDENTITY_CQL + " limit 1")
        PreparedStatement exists();

        @Override
        @Query("select * from %s." + Tables.BY_ID + " where " + Columns.DATABASE + " = ?")
        PreparedStatement readAll();
    }

    private static final DocumentRepository.Schema DOCUMENT_SCHEMA = new DocumentRepository.Schema();

    public TableRepository(Session session, String keyspace)
    {
        super(session, keyspace, TableStatements.class);
    }

    /**
     * Creates the table metadata row after first creating the backing document schema.
     * Fails fast (without writing metadata) when the schema cannot be created.
     */
    @Override
    public ListenableFuture<Table> create(Table table)
    {
        if (createDocumentSchema(table))
        {
            // TODO: what about rollback? (schema exists but metadata insert may fail)
            return super.create(table);
        }
        else
        {
            return Futures.immediateFailedFuture(new StorageException("Failed to create document schema for: " + table.toDbTable()));
        }
    }

    /**
     * Deletes the table metadata row after first dropping the backing document schema.
     */
    @Override
    public ListenableFuture<Boolean> delete(Identifier id)
    {
        if (dropDocumentSchema(id))
        {
            // TODO: what about rollback? (schema dropped but metadata delete may fail)
            return super.delete(id);
        }
        else
        {
            return Futures.immediateFailedFuture(new StorageException("Failed to drop document schema for: " + id.toDbName()));
        }
    }

    // Bind order must match TableStatements.create(); stamps created/updated timestamps.
    @Override
    protected void bindCreate(BoundStatement bs, Table table)
    {
        Date now = new Date();
        table.createdAt(now);
        table.updatedAt(now);
        bs.bind(table.name(),
            table.database().name(),
            table.description(),
            table.type().name(),
            table.keys(),
            table.ttl(),
            table.createdAt(),
            table.updatedAt());
    }

    // Bind order must match TableStatements.update(); refreshes the updated timestamp.
    @Override
    protected void bindUpdate(BoundStatement bs, Table table)
    {
        table.updatedAt(new Date());
        bs.bind(table.description(),
            table.ttl(),
            table.updatedAt(),
            table.database().name(),
            table.name());
    }

    // Maps a result row to a Table; null row yields null.
    // NOTE(review): presumably overrides an abstract method in
    // AbstractCassandraRepository — confirm and add @Override if so.
    protected Table marshalRow(Row row)
    {
        if (row == null) return null;
        Table table = new Table();
        table.name(row.getString(Columns.NAME));
        table.database(row.getString(Columns.DATABASE));
        table.description(row.getString(Columns.DESCRIPTION));
        table.ttl(row.getLong(Columns.TTL));
        table.type(TableType.from(row.getString(Columns.TYPE)));
        table.keys(row.getString(Columns.KEYS));
        table.createdAt(row.getTimestamp(Columns.CREATED_AT));
        table.updatedAt(row.getTimestamp(Columns.UPDATED_AT));
        return table;
    }

    private boolean createDocumentSchema(Table table)
    {
        try
        {
            return DOCUMENT_SCHEMA.create(session(), keyspace(), table.toDbTable(), KEY_PARSER.parse(table.keys()));
        }
        catch (KeyDefinitionException e)
        {
            throw new StorageException(e);
        }
    }

    private boolean dropDocumentSchema(Identifier id)
    {
        return DOCUMENT_SCHEMA.drop(session(), keyspace(), id.toDbName());
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Brian Westrich, Jean-Baptiste Quenot, id:cactusman
* 2015 Kanstantsin Shautsou
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.triggers;
import antlr.ANTLRException;
import com.google.common.base.Preconditions;
import hudson.Extension;
import hudson.Util;
import hudson.console.AnnotatedLargeText;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Action;
import hudson.model.AdministrativeMonitor;
import hudson.model.Cause;
import hudson.model.CauseAction;
import hudson.model.Item;
import hudson.model.Run;
import hudson.scm.SCM;
import hudson.scm.SCMDescriptor;
import hudson.util.FlushProofOutputStream;
import hudson.util.FormValidation;
import hudson.util.IOUtils;
import hudson.util.NamingThreadFactory;
import hudson.util.SequentialExecutionQueue;
import hudson.util.StreamTaskListener;
import hudson.util.TimeUnit2;
import org.apache.commons.io.FileUtils;
import org.apache.commons.jelly.XMLOutput;
import org.jenkinsci.Symbol;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.DoNotUse;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.DataBoundConstructor;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.charset.Charset;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.logging.Level;
import java.util.logging.Logger;
import jenkins.model.Jenkins;
import jenkins.triggers.SCMTriggerItem;
import net.sf.json.JSONObject;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerResponse;
import static java.util.logging.Level.*;
import jenkins.model.RunAction2;
/**
* {@link Trigger} that checks for SCM updates periodically.
*
* You can add UI elements under the SCM section by creating a
* config.jelly or config.groovy in the resources area for
* your class that inherits from SCMTrigger and has the
* @{@link hudson.model.Extension} annotation. The UI should
* be wrapped in an f:section element to denote it.
*
* @author Kohsuke Kawaguchi
*/
public class SCMTrigger extends Trigger<Item> {
private boolean ignorePostCommitHooks;
/**
 * Creates a trigger with the given crontab-style schedule that honors post-commit hooks.
 *
 * @param scmpoll_spec polling schedule, parsed by the {@code Trigger} superclass
 * @throws ANTLRException if the schedule specification cannot be parsed
 */
public SCMTrigger(String scmpoll_spec) throws ANTLRException {
this(scmpoll_spec, false);
}
/**
 * Creates a trigger with the given crontab-style schedule.
 *
 * @param scmpoll_spec polling schedule, parsed by the {@code Trigger} superclass
 * @param ignorePostCommitHooks whether SCM post-commit notifications should be ignored by this trigger
 * @throws ANTLRException if the schedule specification cannot be parsed
 */
@DataBoundConstructor
public SCMTrigger(String scmpoll_spec, boolean ignorePostCommitHooks) throws ANTLRException {
super(scmpoll_spec);
this.ignorePostCommitHooks = ignorePostCommitHooks;
}
/**
* This trigger wants to ignore post-commit hooks.
* <p>
* SCM plugins must respect this and not run this trigger for post-commit notifications.
*
* @since 1.493
*/
public boolean isIgnorePostCommitHooks() {
    // Queried by SCM plugins to decide whether post-commit notifications may fire this trigger.
    return ignorePostCommitHooks;
}
@Override
public void run() {
    // A trigger that has not been attached to a job yet has nothing to poll;
    // otherwise delegate to run(Action[]) with no additional build actions.
    if (job != null) {
        run(null);
    }
}
/**
 * Run the SCM trigger with additional build actions. Used by SubversionRepositoryStatus
 * to trigger a build at a specific revision number.
 *
 * @param additionalActions build actions to attach to any build scheduled from this polling
 * @since 1.375
 */
public void run(Action[] additionalActions) {
if (job == null) {
return;
}
DescriptorImpl d = getDescriptor();
LOGGER.fine("Scheduling a polling for "+job);
if (d.synchronousPolling) {
LOGGER.fine("Running the trigger directly without threading, " +
"as it's already taken care of by Trigger.Cron");
new Runner(additionalActions).run();
} else {
// schedule the polling.
// even if we end up submitting this too many times, that's OK.
// the real exclusion control happens inside Runner.
LOGGER.fine("scheduling the trigger to (asynchronously) run");
d.queue.execute(new Runner(additionalActions));
d.clogCheck();
}
}
@Override
public DescriptorImpl getDescriptor() {
// narrow the return type so callers can reach DescriptorImpl members directly
return (DescriptorImpl)super.getDescriptor();
}
@Override
public Collection<? extends Action> getProjectActions() {
    // Expose the "SCM Polling Log" action only when attached to a job.
    if (job != null) {
        return Collections.singleton(new SCMAction());
    }
    return Collections.emptyList();
}
/**
 * Returns the file that records the last/current polling activity.
 * NOTE(review): dereferences {@code job} without a null check — callers appear
 * to guarantee the trigger is attached; confirm.
 */
public File getLogFile() {
return new File(job.getRootDir(),"scm-polling.log");
}
/**
 * Descriptor for {@code SCMTrigger}: owns the shared polling queue, its thread
 * pool sizing, and the global "synchronous polling" option.
 */
@Extension @Symbol("scm")
public static class DescriptorImpl extends TriggerDescriptor {
// names polling threads "SCMTrigger" for easier thread-dump reading
private static ThreadFactory threadFactory() {
return new NamingThreadFactory(Executors.defaultThreadFactory(), "SCMTrigger");
}
/**
 * Used to control the execution of the polling tasks.
 * <p>
 * This executor implementation has a semantics suitable for polling. Namely, no two threads will try to poll the same project
 * at once, and multiple polling requests to the same job will be combined into one. Note that because executor isn't aware
 * of a potential workspace lock between a build and a polling, we may end up using executor threads unwisely --- they
 * may block.
 */
private transient final SequentialExecutionQueue queue = new SequentialExecutionQueue(Executors.newSingleThreadExecutor(threadFactory()));
/**
 * Whether the projects should be polled all in one go in the order of dependencies. The default behavior is
 * that each project polls for changes independently.
 */
public boolean synchronousPolling = false;
/**
 * Max number of threads for SCM polling.
 * 0 for unbounded.
 */
private int maximumThreads;
public DescriptorImpl() {
// restore persisted settings, then size the pool to match
load();
resizeThreadPool();
}
// applies to any item that supports SCM polling
public boolean isApplicable(Item item) {
return SCMTriggerItem.SCMTriggerItems.asSCMTriggerItem(item) != null;
}
public ExecutorService getExecutor() {
return queue.getExecutors();
}
/**
 * Returns true if the SCM polling thread queue has too many jobs
 * than it can handle.
 */
public boolean isClogged() {
return queue.isStarving(STARVATION_THRESHOLD);
}
/**
 * Checks if the queue is clogged, and if so,
 * activate {@link AdministrativeMonitorImpl}.
 */
public void clogCheck() {
AdministrativeMonitor.all().get(AdministrativeMonitorImpl.class).on = isClogged();
}
/**
 * Gets the snapshot of {@link Runner}s that are performing polling.
 */
public List<Runner> getRunners() {
return Util.filter(queue.getInProgress(),Runner.class);
}
// originally List<SCMedItem> but known to be used only for logging, in which case the instances are not actually cast to SCMedItem anyway
public List<SCMTriggerItem> getItemsBeingPolled() {
List<SCMTriggerItem> r = new ArrayList<SCMTriggerItem>();
for (Runner i : getRunners())
r.add(i.getTarget());
return r;
}
public String getDisplayName() {
return Messages.SCMTrigger_DisplayName();
}
/**
 * Gets the number of concurrent threads used for polling.
 *
 * @return
 * 0 if unlimited.
 */
public int getPollingThreadCount() {
return maximumThreads;
}
/**
 * Sets the number of concurrent threads used for SCM polling and resizes the thread pool accordingly
 * @param n number of concurrent threads, zero or less means unlimited, maximum is 100
 */
public void setPollingThreadCount(int n) {
// fool proof: clamp the requested count into [0, 100]
if(n<0) n=0;
if(n>100) n=100;
maximumThreads = n;
resizeThreadPool();
}
@Restricted(NoExternalUse.class)
public boolean isPollingThreadCountOptionVisible() {
// unless you have a fair number of projects, this option is likely pointless.
// so let's hide this option for new users to avoid confusing them
// unless it was already changed
// TODO switch to check for SCMTriggerItem
return Jenkins.getInstance().getAllItems(AbstractProject.class).size() > 10
|| getPollingThreadCount() != 0;
}
/**
 * Update the {@link ExecutorService} instance.
 */
/*package*/ synchronized void resizeThreadPool() {
// 0 means unbounded -> cached pool; otherwise a fixed-size pool
queue.setExecutors(
(maximumThreads==0 ? Executors.newCachedThreadPool(threadFactory()) : Executors.newFixedThreadPool(maximumThreads, threadFactory())));
}
@Override
public boolean configure(StaplerRequest req, JSONObject json) throws FormException {
// an absent/empty form value resets to "unlimited"
String t = json.optString("pollingThreadCount",null);
if(t==null || t.length()==0)
setPollingThreadCount(0);
else
setPollingThreadCount(Integer.parseInt(t));
// Save configuration
save();
return true;
}
// form validation: blank is allowed (means unlimited), otherwise must be a non-negative integer
public FormValidation doCheckPollingThreadCount(@QueryParameter String value) {
if (value != null && "".equals(value.trim()))
return FormValidation.ok();
return FormValidation.validateNonNegativeInteger(value);
}
}
/**
 * Administrative monitor shown when the SCM polling queue is clogged.
 */
@Extension
public static final class AdministrativeMonitorImpl extends AdministrativeMonitor {
// toggled by DescriptorImpl.clogCheck() based on queue starvation
private boolean on;
public boolean isActivated() {
return on;
}
}
/**
 * Associated with {@link Run} to show the polling log
 * that triggered that build.
 *
 * @since 1.376
 */
public static class BuildAction implements RunAction2 {
// re-bound in onLoad() after deserialization
private transient /*final*/ Run<?,?> run;
@Deprecated
public transient /*final*/ AbstractBuild build;
/**
 * @since 1.568
 */
public BuildAction(Run<?,?> run) {
this.run = run;
// keep the deprecated field populated for legacy callers
build = run instanceof AbstractBuild ? (AbstractBuild) run : null;
}
@Deprecated
public BuildAction(AbstractBuild build) {
this((Run) build);
}
/**
 * @since 1.568
 */
public Run<?,?> getRun() {
return run;
}
/**
 * Polling log that triggered the build.
 */
public File getPollingLogFile() {
return new File(run.getRootDir(),"polling.log");
}
public String getIconFileName() {
return "clipboard.png";
}
public String getDisplayName() {
return Messages.SCMTrigger_BuildAction_DisplayName();
}
public String getUrlName() {
return "pollingLog";
}
/**
 * Sends out the raw polling log output.
 */
public void doPollingLog(StaplerRequest req, StaplerResponse rsp) throws IOException {
rsp.setContentType("text/plain;charset=UTF-8");
// Prevent jelly from flushing stream so Content-Length header can be added afterwards
FlushProofOutputStream out = new FlushProofOutputStream(rsp.getCompressedOutputStream(req));
try {
getPollingLogText().writeLogTo(0, out);
} finally {
IOUtils.closeQuietly(out);
}
}
public AnnotatedLargeText getPollingLogText() {
// "true" marks the log as complete (no live tailing)
return new AnnotatedLargeText<BuildAction>(getPollingLogFile(), Charset.defaultCharset(), true, this);
}
/**
 * Used from <tt>polling.jelly</tt> to write annotated polling log to the given output.
 */
public void writePollingLogTo(long offset, XMLOutput out) throws IOException {
// TODO: resurrect compressed log file support
getPollingLogText().writeHtmlTo(offset, out.asWriter());
}
@Override public void onAttached(Run<?, ?> r) {
// unnecessary, existing constructor does this
}
@Override public void onLoad(Run<?, ?> r) {
// restore the transient references after deserialization
run = r;
build = run instanceof AbstractBuild ? (AbstractBuild) run : null;
}
}
/**
 * Action object for job. Used to display the last polling log.
 */
public final class SCMAction implements Action {
public AbstractProject<?,?> getOwner() {
Item item = getItem();
return item instanceof AbstractProject ? ((AbstractProject) item) : null;
}
/**
 * @since 1.568
 */
public Item getItem() {
return job().asItem();
}
public String getIconFileName() {
return "clipboard.png";
}
public String getDisplayName() {
// if the job polls exactly one kind of SCM, name the action after it;
// otherwise fall back to the generic label
Set<SCMDescriptor<?>> descriptors = new HashSet<SCMDescriptor<?>>();
for (SCM scm : job().getSCMs()) {
descriptors.add(scm.getDescriptor());
}
return descriptors.size() == 1 ? Messages.SCMTrigger_getDisplayName(descriptors.iterator().next().getDisplayName()) : Messages.SCMTrigger_BuildAction_DisplayName();
}
public String getUrlName() {
return "scmPollLog";
}
public String getLog() throws IOException {
return Util.loadFile(getLogFile());
}
/**
 * Writes the annotated log to the given output.
 * @since 1.350
 */
public void writeLogTo(XMLOutput out) throws IOException {
new AnnotatedLargeText<SCMAction>(getLogFile(),Charset.defaultCharset(),true,this).writeHtmlTo(0,out.asWriter());
}
}
private static final Logger LOGGER = Logger.getLogger(SCMTrigger.class.getName());
/**
* {@link Runnable} that actually performs polling.
*/
public class Runner implements Runnable {
/**
* When did the polling start?
*/
private volatile long startTime;
private Action[] additionalActions;
// Convenience constructor: poll with no additional build actions.
public Runner() {
this(null);
}
public Runner(Action[] actions) {
Preconditions.checkNotNull(job, "Runner can't be instantiated when job is null");
if (actions == null) {
additionalActions = new Action[0];
} else {
additionalActions = actions;
}
}
/**
* Where the log file is written.
*/
public File getLogFile() {
return SCMTrigger.this.getLogFile();
}
/**
* For which {@link Item} are we polling?
* @since 1.568
*/
public SCMTriggerItem getTarget() {
return job();
}
/**
* When was this polling started?
*/
public long getStartTime() {
return startTime;
}
/**
* Human readable string of when this polling is started.
*/
public String getDuration() {
return Util.getTimeSpanString(System.currentTimeMillis()-startTime);
}
private boolean runPolling() {
try {
// to make sure that the log file contains up-to-date text,
// don't do buffering.
StreamTaskListener listener = new StreamTaskListener(getLogFile());
try {
PrintStream logger = listener.getLogger();
long start = System.currentTimeMillis();
logger.println("Started on "+ DateFormat.getDateTimeInstance().format(new Date()));
boolean result = job().poll(listener).hasChanges();
logger.println("Done. Took "+ Util.getTimeSpanString(System.currentTimeMillis()-start));
if(result)
logger.println("Changes found");
else
logger.println("No changes");
return result;
} catch (Error | RuntimeException e) {
e.printStackTrace(listener.error("Failed to record SCM polling for "+job));
LOGGER.log(Level.SEVERE,"Failed to record SCM polling for "+job,e);
throw e;
} finally {
listener.close();
}
} catch (IOException e) {
LOGGER.log(Level.SEVERE,"Failed to record SCM polling for "+job,e);
return false;
}
}
public void run() {
if (job == null) {
return;
}
String threadName = Thread.currentThread().getName();
Thread.currentThread().setName("SCM polling for "+job);
try {
startTime = System.currentTimeMillis();
if(runPolling()) {
SCMTriggerItem p = job();
String name = " #"+p.getNextBuildNumber();
SCMTriggerCause cause;
try {
cause = new SCMTriggerCause(getLogFile());
} catch (IOException e) {
LOGGER.log(WARNING, "Failed to parse the polling log",e);
cause = new SCMTriggerCause();
}
Action[] queueActions = new Action[additionalActions.length + 1];
queueActions[0] = new CauseAction(cause);
System.arraycopy(additionalActions, 0, queueActions, 1, additionalActions.length);
if (p.scheduleBuild2(p.getQuietPeriod(), queueActions) != null) {
LOGGER.info("SCM changes detected in "+ job.getFullDisplayName()+". Triggering "+name);
} else {
LOGGER.info("SCM changes detected in "+ job.getFullDisplayName()+". Job is already in the queue");
}
}
} finally {
Thread.currentThread().setName(threadName);
}
}
// as per the requirement of SequentialExecutionQueue, value equality is necessary
@Override
public boolean equals(Object that) {
return that instanceof Runner && job == ((Runner) that)._job();
}
private Item _job() {return job;}
@Override
public int hashCode() {
return job.hashCode();
}
}
/**
 * Adapts the owning {@link #job} to the {@link SCMTriggerItem} polling contract.
 * NOTE(review): presumably returns null when the job does not support SCM
 * polling — confirm against SCMTriggerItems.asSCMTriggerItem.
 */
@SuppressWarnings("deprecation")
private SCMTriggerItem job() {
    return SCMTriggerItem.SCMTriggerItems.asSCMTriggerItem(job);
}
public static class SCMTriggerCause extends Cause {

    /**
     * Polling log text, kept only while this cause sits in the queue.
     * Once attached to a build it is moved into a file to reduce the memory footprint.
     */
    private String pollingLog;

    private transient Run run;

    public SCMTriggerCause(File logFile) throws IOException {
        // TODO: charset of this log file?
        this(FileUtils.readFileToString(logFile));
    }

    public SCMTriggerCause(String pollingLog) {
        this.pollingLog = pollingLog;
    }

    /**
     * @deprecated
     *      Use {@link #SCMTriggerCause(String)}.
     */
    @Deprecated
    public SCMTriggerCause() {
        this("");
    }

    @Override
    public void onLoad(Run run) {
        this.run = run;
    }

    @Override
    public void onAddedTo(Run build) {
        run = build;
        try {
            // move the in-memory polling log into a file attached to the build
            final BuildAction action = new BuildAction(build);
            FileUtils.writeStringToFile(action.getPollingLogFile(), pollingLog);
            build.replaceAction(action);
        } catch (IOException e) {
            LOGGER.log(WARNING,"Failed to persist the polling log",e);
        }
        // release the (potentially large) log text now that it is on disk
        pollingLog = null;
    }

    @Override
    public String getShortDescription() {
        return Messages.SCMTrigger_SCMTriggerCause_ShortDescription();
    }

    @Restricted(DoNotUse.class)
    public Run getRun() {
        return run;
    }

    /** All SCM trigger causes compare equal, so duplicates collapse in the queue. */
    @Override
    public boolean equals(Object o) {
        return o instanceof SCMTriggerCause;
    }

    @Override
    public int hashCode() {
        return 3;
    }
}
/**
 * How long is too long for a polling activity to be in the queue?
 * Milliseconds; overridable via the system property
 * <code>&lt;SCMTrigger class name&gt;.starvationThreshold</code>, defaults to one hour.
 */
public static long STARVATION_THRESHOLD = Long.getLong(SCMTrigger.class.getName()+".starvationThreshold", TimeUnit2.HOURS.toMillis(1));
}
| |
package io.hummer.util.persist;
import io.hummer.util.Configuration;
import io.hummer.util.log.LogUtil;
import io.hummer.util.par.GlobalThreadPool;
import io.hummer.util.str.StringUtil;
import java.text.Normalizer;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import org.apache.commons.codec.binary.StringUtils;
import org.apache.log4j.Logger;
public interface IDocumentCache {
/** JPA entity representing one key/value cache record. */
@Entity(name="CacheEntry")
public static class CacheEntry {
    /** Synthetic primary key, generated by the persistence provider. */
    @Id @GeneratedValue
    public Long ID;
    /** Cache key; stored as LONGVARCHAR to allow very large keys. */
    @Column(name="keyString", columnDefinition="LONGVARCHAR", length=10000000)
    public String key;
    /** Cached value payload; stored as LONGVARCHAR to allow very large documents. */
    @Column(name="valueString", columnDefinition="LONGVARCHAR", length=10000000)
    public String value;
    /** Epoch millis of the most recent store/refresh of this entry. */
    @Column(name="storeTime")
    public long storeTime;
    /** Epoch millis of the first time this entry was ever stored. */
    @Column(name="firstStoreTime")
    public long firstStoreTime;

    /** No-arg constructor required by JPA. */
    public CacheEntry() {}

    public CacheEntry(String key, String value) {
        this.key = key;
        this.value = value;
    }
}
/** Returns the cached entry for the given key, or null if absent or expired. */
CacheEntry get(String key);
/** Stores the given key/value pair and returns the resulting entry. */
CacheEntry put(String key, String value);
/** Fire-and-forget variant of {@link #put(String, String)}; returns immediately. */
void putWithoutWaiting(String key, String value);
/** Lists known keys matching the given pattern (SQL LIKE syntax for the DB-backed part). */
List<String> getKeys(String nameLike);
/**
 * Default {@link IDocumentCache} implementation with two optional tiers:
 * a static in-RAM map and a JPA-backed persistent store, each toggled by
 * configuration flags.
 *
 * FIX: EntityManagers are now always closed (previously leaked in
 * getKeys on every call, and in putPersistent/removePersistent on
 * exception and early-return paths).
 */
public static class DocumentCache implements IDocumentCache {

    /** In-RAM tier; only written to when CACHE_IN_RAM is enabled. */
    public static final Map<String,CacheEntry> cache = new HashMap<String,CacheEntry>();

    private static final Logger logger = LogUtil.getLogger(IDocumentCache.class);

    /** renew documents (at most) every 60 minutes */
    public static final long RENEWAL_INTERVAL = 1000*60*60;

    public static final AtomicReference<String> DEFAULT_PERSISTENCE_UNIT =
            new AtomicReference<String>("InfosysTools");

    private static final boolean CACHE_IN_RAM =
            Configuration.getBoolean(Configuration.PROP_CACHE_IN_RAM, false);
    private static final boolean CACHE_IN_DB =
            Configuration.getBoolean(Configuration.PROP_CACHE_IN_DB, false);
    private static final boolean DO_OVERWRITE =
            Configuration.getBoolean(Configuration.PROP_CACHE_OVERWRITE, true);

    /** Name of the JPA persistence unit used for the DB tier. */
    private String persistenceUnitName;

    public DocumentCache() {
        this.persistenceUnitName = DEFAULT_PERSISTENCE_UNIT.get();
    }

    public DocumentCache(String persistenceUnitName) {
        this.persistenceUnitName = persistenceUnitName;
    }

    /**
     * Looks up an entry, first in RAM then (if enabled) in the DB.
     * Entries older than {@link #RENEWAL_INTERVAL} are evicted and null returned.
     */
    public CacheEntry get(String key) {
        String keyString = keyToString(key);
        if(cache.containsKey(keyString)) {
            CacheEntry existing = cache.get(keyString);
            if((System.currentTimeMillis() - existing.storeTime) < RENEWAL_INTERVAL) {
                return existing;
            } else {
                // expired: drop so it can be re-fetched and re-stored
                cache.remove(keyString);
            }
        }
        if(CACHE_IN_DB) {
            try {
                CacheEntry existing = getPersistent(keyString);
                if(existing != null) {
                    if((System.currentTimeMillis() - existing.storeTime) < RENEWAL_INTERVAL) {
                        return existing;
                    } else {
                        removePersistent(existing);
                    }
                }
            } catch (Exception e) {
                logger.warn("Unable to load cache value from DB:", e);
            }
        }
        return null;
    }

    /** Schedules {@link #put(String, String)} on the shared thread pool. */
    public void putWithoutWaiting(final String key, final String value) {
        Runnable r = new Runnable() {
            public void run() {
                put(key, value);
            }
        };
        GlobalThreadPool.execute(r);
    }

    /** Stores the pair in every enabled tier; DB failures are logged, not thrown. */
    public CacheEntry put(String key, String value) {
        StringUtil util = new StringUtil();
        if(logger.isDebugEnabled()) logger.debug("Putting value to cache: " + key + " = " + util.trim(value, 100));
        String keyString = keyToString(key);
        CacheEntry e = new CacheEntry(keyString, value);
        if(CACHE_IN_RAM) {
            cache.put(keyString, e);
        }
        if(CACHE_IN_DB) {
            try {
                putPersistent(e);
            } catch (Exception e2) {
                logger.error("DocumentCache: Unable to store value for key '" + key + "'", e2);
            }
        }
        return e;
    }

    /** Collects keys from the DB (LIKE match) and/or the RAM tier. */
    public List<String> getKeys(String nameLike) {
        List<String> result = new LinkedList<String>();
        try {
            if(CACHE_IN_DB) {
                EntityManager em = AbstractGenericDAO.get(persistenceUnitName).createEntityManager();
                try {
                    List<?> list = em.createQuery("from " + CacheEntry.class.getSimpleName() +
                            " where keyString like :key").setParameter("key", nameLike).getResultList();
                    for(Object o : list) {
                        if(((CacheEntry)o).key != null)
                            result.add(((CacheEntry)o).key);
                    }
                } finally {
                    em.close(); // FIX: was never closed, leaking a connection per call
                }
            }
            if(CACHE_IN_RAM) {
                result.addAll(cache.keySet());
            }
        } catch (Exception e) {
            logger.warn("Unable to read keys from DB.", e);
        }
        return result;
    }

    /** Deletes the given entry from the persistent store. */
    private void removePersistent(CacheEntry e) throws Exception {
        EntityManager em = AbstractGenericDAO.get(persistenceUnitName).createEntityManager();
        try {
            e = em.merge(e);
            em.getTransaction().begin();
            em.remove(e);
            em.getTransaction().commit();
        } finally {
            em.close(); // FIX: close even if merge/remove/commit throws
        }
    }

    /**
     * Inserts or (if DO_OVERWRITE) updates the entry in the persistent store.
     * On a first failed persist, non-ASCII characters are stripped and the
     * persist retried once.
     */
    private void putPersistent(CacheEntry e) throws Exception {
        EntityManager em = AbstractGenericDAO.get(persistenceUnitName).createEntityManager();
        try {
            CacheEntry existing = getPersistent(e.key);
            if(existing != null) {
                if(!DO_OVERWRITE) {
                    if(logger.isDebugEnabled()) logger.debug("Entity with same key ('" + e.key + "') already exists in cache, please choose new name or set 'overwrite' to true...");
                    return; // FIX: em is now closed via finally on this path too
                } else {
                    if(logger.isDebugEnabled()) logger.debug("Overwriting object store entry with key '" + e.key + "'");
                }
                // copy new key/value onto the managed instance so the update is tracked
                CacheEntry e1 = e;
                existing = em.merge(existing);
                e = existing;
                e.key = e1.key;
                e.value = e1.value;
            }
            e.storeTime = System.currentTimeMillis();
            if(e.firstStoreTime <= 0) {
                e.firstStoreTime = e.storeTime;
            }
            // normalize to well-formed UTF-8 before persisting
            e.value = StringUtils.newStringUtf8(StringUtils.getBytesUtf8(e.value));
            e.key = StringUtils.newStringUtf8(StringUtils.getBytesUtf8(e.key));
            em.getTransaction().begin();
            try {
                em.persist(e);
            } catch (Exception e2) {
                if(e.value != null) {
                    logger.info("Could not persist cache entry. Removing non-mappable characters and re-trying: " + e2);
                    String convertedString = Normalizer
                            .normalize(e.value, Normalizer.Form.NFD)
                            .replaceAll("[^\\p{ASCII}]", "");
                    e.value = convertedString;
                }
                em.getTransaction().rollback();
                em.getTransaction().begin();
                em.persist(e);
            }
            em.getTransaction().commit();
        } finally {
            em.close(); // FIX: close on all exit paths (was leaked on exception/early return)
        }
    }

    /** Fetches the entry with the given key, or null; query errors are swallowed. */
    private CacheEntry getPersistent(String key) {
        EntityManager em = AbstractGenericDAO.get(persistenceUnitName).createEntityManager();
        try {
            List<?> list = em.createQuery("from " +
                    CacheEntry.class.getSimpleName() + " where keyString=:key")
                    .setParameter("key", key).getResultList();
            if(!list.isEmpty()) {
                CacheEntry existing = (CacheEntry)list.get(0);
                if(existing != null) {
                    return existing;
                }
            }
        } catch (Exception e) { /* swallow */ }
        finally {
            em.close();
        }
        return null;
    }

    /** Canonical string form used as the cache key for both tiers. */
    private String keyToString(Object key) {
        return key.toString();
    }
}
}
| |
package ru.job4j.banktransfer;
import org.junit.Test;
import java.util.List;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
/**
 * Class for testing the Bank class.
 *
 * @author Dmitrii Eskov (eskovdmi@gmail.com)
 * @since 30.01.2019
 * @version 1.0
 */
public class BankTest {
    /**
     * Tests when it needs to transfer from person A to person B.
     */
    @Test
    public void whenTransferFromOneToAnotherUserThenSuccess() {
        Bank bank = new Bank();
        User first = new User("Peter", "4123");
        User second = new User("Jack", "9720");
        bank.addUser(first);
        bank.addUser(second);
        Account firstAccount = new Account(10000.00, "73298479324");
        Account secondAccount = new Account(20000.00, "8453045");
        // NOTE(review): a *new* Account with the same values is registered while
        // firstAccount is used only for its requisites — presumably Bank looks
        // accounts up by requisites; confirm against Bank.transferMoney.
        bank.addAccountToUser(first.getPassport(), new Account(10000.00, "73298479324"));
        bank.addAccountToUser(second.getPassport(), secondAccount);
        bank.transferMoney(first.getPassport(), firstAccount.getRequisites(), second.getPassport(),
        secondAccount.getRequisites(), 5000), is(true));
    }
    /**
     * Tests when it needs to transfer from person A to person B and the result is unsuccessful.
     */
    @Test
    public void whenTransferFromOneToAnotherUserThenUnsuccessful() {
        Bank bank = new Bank();
        User first = new User("Peter", "4123");
        User second = new User("Jack", "9720");
        bank.addUser(first);
        bank.addUser(second);
        Account firstAccount = new Account(10000.00, "73298479324");
        Account secondAccount = new Account(20000.00, "8453045");
        bank.addAccountToUser(first.getPassport(), new Account(10000.00, "73298479324"));
        bank.addAccountToUser(second.getPassport(), secondAccount);
        // 12000 exceeds the 10000.00 balance, so the transfer must fail
        assertThat(bank.transferMoney(first.getPassport(), firstAccount.getRequisites(), second.getPassport(),
        secondAccount.getRequisites(), 12000), is(false));
    }
    /**
     * Tests when it needs to delete a user's account.
     */
    @Test
    public void whenToDeleteUserAccountThenSuccess() {
        Bank bank = new Bank();
        User first = new User("Peter", "1234123");
        User second = new User("Dima", "212345");
        User third = new User("Ivan", "21263");
        User fourth = new User("Vlad", "51823");
        bank.addUser(first);
        bank.addUser(second);
        bank.addUser(third);
        bank.addUser(fourth);
        Account firstAcc = new Account(10000.00, "73298479324");
        Account secondAcc = new Account(20000.00, "63298479324");
        Account thirdAcc = new Account(30000.00, "83298479324");
        Account fourthAcc = new Account(10000.00, "93298479324");
        bank.addAccountToUser(first.getPassport(), firstAcc);
        bank.addAccountToUser(second.getPassport(), secondAcc);
        bank.addAccountToUser(third.getPassport(), thirdAcc);
        bank.addAccountToUser(fourth.getPassport(), fourthAcc);
        assertThat(bank.deleteAccountFromUser(third.getPassport(), thirdAcc), is(true));
    }
    /**
     * Tests when it needs to delete a user's account and the deletion fails:
     * the account belongs to a different user, or the user was never registered.
     */
    @Test
    public void whenToDeleteUserAccountThenFail() {
        Bank bank = new Bank();
        User first = new User("Peter", "1234123");
        // "second" is deliberately NOT added to the bank (see addUser calls below)
        User second = new User("Dima", "010346");
        User third = new User("Jack", "212345");
        User fourth = new User("Vlad", "51823");
        bank.addUser(first);
        bank.addUser(third);
        bank.addUser(fourth);
        Account firstAcc = new Account(10000.00, "73298479324");
        Account secondAcc = new Account(20000.00, "63298479324");
        Account thirdAcc = new Account(200400.00, "13298479324");
        Account fourthAcc = new Account(10000.00, "93298479324");
        bank.addAccountToUser(first.getPassport(), firstAcc);
        bank.addAccountToUser(second.getPassport(), secondAcc);
        bank.addAccountToUser(third.getPassport(), thirdAcc);
        bank.addAccountToUser(fourth.getPassport(), fourthAcc);
        // fourthAcc belongs to "fourth", not "third"
        assertThat(bank.deleteAccountFromUser(third.getPassport(), fourthAcc), is(false));
        // "second" was never registered with the bank
        assertThat(bank.deleteAccountFromUser(second.getPassport(), secondAcc), is(false));
    }
    /**
     * Tests when it needs to find a user's account and it is not found.
     */
    @Test
    public void whenAccountNotFound() {
        Bank bank = new Bank();
        User first = new User("Peter", "4123");
        // firstAcc is never added to the bank, so the lookup must return null
        Account firstAcc = new Account(10000.00, "73298479324");
        bank.addUser(first);
        assertNull(bank.getActualAccount(first.getPassport(), firstAcc));
    }
    /**
     * Tests when it needs to delete a user.
     */
    @Test
    public void whenToDeleteUserThenSuccess() {
        Bank bank = new Bank();
        User first = new User("Peter", "4123");
        bank.addUser(first);
        Account firstAcc = new Account(10000.00, "73298479324");
        bank.addAccountToUser(first.getPassport(), firstAcc);
        bank.deleteUser(first);
        // after deletion the user's accounts are no longer retrievable
        assertNull(bank.getUserAccounts(first.getPassport()));
    }
    /**
     * Tests when it needs to delete a user that was never added;
     * only verifies that no exception is thrown.
     */
    @Test
    public void whenToDeleteUserThenNoSuccess() {
        Bank bank = new Bank();
        User first = new User("Peter", "4123");
        bank.deleteUser(first);
    }
    /**
     * Tests when it needs to get all users' accounts.
     */
    @Test
    public void whenToGetAllUserAccountsThenSuccess() {
        Bank bank = new Bank();
        User first = new User("Peter", "4123");
        bank.addUser(first);
        Account firstAccount = new Account(10000.00, "73298479324");
        Account secondAccount = new Account(20000.00, "8453045");
        bank.addAccountToUser(first.getPassport(), firstAccount);
        bank.addAccountToUser(first.getPassport(), secondAccount);
        List<Account> expect = List.of(firstAccount, secondAccount);
        assertThat(bank.getUserAccounts(first.getPassport()).containsAll(expect), is(true));
    }
    /**
     * Tests when it needs to get an actual user account and the result is successful.
     */
    @Test
    public void whenToGetActualUserAccountThenSuccess() {
        Bank bank = new Bank();
        User first = new User("Peter", "4123");
        bank.addUser(first);
        Account firstAccount = new Account(10000.00, "73298479324");
        bank.addAccountToUser(first.getPassport(), firstAccount);
        assertThat(bank.getActualAccount(first.getPassport(), firstAccount), is(firstAccount));
    }
    /**
     * Tests when it needs to get an actual user account and the result is unsuccessful.
     */
    @Test
    public void whenToGetActualUserAccountThenNoSuccess() {
        Bank bank = new Bank();
        User first = new User("Peter", "4123");
        bank.addUser(first);
        // the account is constructed but never registered
        Account firstAccount = new Account(10000.00, "73298479324");
        assertNull(bank.getActualAccount(first.getPassport(), firstAccount));
    }
    /**
     * Tests when it needs to get an actual user account by a passport and requisites.
     */
    @Test
    public void whenFindAccountByPassportAndRequisitesThenSuccess() {
        Bank bank = new Bank();
        User first = new User("Peter", "4123");
        bank.addUser(first);
        Account firstAccount = new Account(10000.00, "73298479324");
        Account secondAccount = new Account(20000.00, "83298479324");
        Account thirdAccount = new Account(30000.00, "93218479324");
        bank.addAccountToUser(first.getPassport(), firstAccount);
        bank.addAccountToUser(first.getPassport(), secondAccount);
        bank.addAccountToUser(first.getPassport(), thirdAccount);
        assertThat(bank.findAccountByPassportAndRequisites(first.getPassport(), thirdAccount.getRequisites()), is(thirdAccount));
    }
    /**
     * When it tries to add a new user to a bank.
     */
    @Test
    public void whenTryToAddNewUserThenSuccess() {
        Bank bank = new Bank();
        User first = new User("Peter", "4123");
        assertThat(bank.addUser(first), is(true));
    }
    /**
     * When it tries to add an existing user to a bank.
     */
    @Test
    public void whenTryToAddExistingUserThenFail() {
        Bank bank = new Bank();
        User first = new User("Peter", "4123");
        bank.addUser(first);
        assertThat(bank.addUser(first), is(false));
    }
    /**
     * When it needs to add a new account for a user.
     */
    @Test
    public void whenTryToAddNewAccountForUserThenSuccess() {
        Bank bank = new Bank();
        User first = new User("Peter", "4123");
        bank.addUser(first);
        boolean result = bank.addAccountToUser(
        first.getPassport(), new Account(10000.00, "73298479324"
        )
        );
        assertThat(result, is(true));
    }
    /**
     * When it needs to add an existing account for a user.
     */
    @Test
    public void whenTryToAddExistingAccountForUserThenFail() {
        Bank bank = new Bank();
        User first = new User("Peter", "4123");
        bank.addUser(first);
        Account firstAccount = new Account(10000.00, "73298479324");
        bank.addAccountToUser(first.getPassport(), firstAccount);
        // a distinct instance with identical values counts as "existing"
        boolean result = bank.addAccountToUser(
        first.getPassport(), new Account(10000.00, "73298479324"
        )
        );
        assertThat(result, is(false));
    }
}
| |
/*
*
* Copyright 2017 Robert Winkler, Lucas Lech
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package io.github.resilience4j.bulkhead.internal;
import io.github.resilience4j.bulkhead.Bulkhead;
import io.github.resilience4j.bulkhead.BulkheadConfig;
import io.github.resilience4j.bulkhead.BulkheadFullException;
import io.github.resilience4j.bulkhead.event.BulkheadEvent;
import io.github.resilience4j.bulkhead.event.BulkheadOnCallFinishedEvent;
import io.github.resilience4j.bulkhead.event.BulkheadOnCallPermittedEvent;
import io.github.resilience4j.bulkhead.event.BulkheadOnCallRejectedEvent;
import io.github.resilience4j.core.EventConsumer;
import io.github.resilience4j.core.EventProcessor;
import io.github.resilience4j.core.exception.AcquirePermissionCancelledException;
import io.github.resilience4j.core.lang.Nullable;
import java.util.Map;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import static java.util.Collections.emptyMap;
import static java.util.Objects.requireNonNull;
/**
 * A Bulkhead implementation based on a semaphore.
 */
public class SemaphoreBulkhead implements Bulkhead {

    private static final String CONFIG_MUST_NOT_BE_NULL = "Config must not be null";
    // FIX: constant was misspelled TAGS_MUST_NOTE_BE_NULL (private, so rename is safe)
    private static final String TAGS_MUST_NOT_BE_NULL = "Tags must not be null";

    private final String name;
    private final Semaphore semaphore;
    private final BulkheadMetrics metrics;
    private final BulkheadEventProcessor eventProcessor;
    // serializes concurrent changeConfig() calls so permit adjustments don't interleave
    private final Object configChangesLock = new Object();
    private final Map<String, String> tags;
    @SuppressWarnings("squid:S3077")
    // this object is immutable and we replace ref entirely during config change.
    private volatile BulkheadConfig config;

    /**
     * Creates a bulkhead using a configuration supplied
     *
     * @param name           the name of this bulkhead
     * @param bulkheadConfig custom bulkhead configuration
     */
    public SemaphoreBulkhead(String name, @Nullable BulkheadConfig bulkheadConfig) {
        this(name, bulkheadConfig, emptyMap());
    }

    /**
     * Creates a bulkhead using a configuration supplied
     *
     * @param name           the name of this bulkhead
     * @param bulkheadConfig custom bulkhead configuration
     * @param tags           the tags to add to the Bulkhead
     */
    public SemaphoreBulkhead(String name, @Nullable BulkheadConfig bulkheadConfig,
        Map<String, String> tags) {
        this.name = name;
        this.config = requireNonNull(bulkheadConfig, CONFIG_MUST_NOT_BE_NULL);
        this.tags = requireNonNull(tags, TAGS_MUST_NOT_BE_NULL);
        // init semaphore
        this.semaphore = new Semaphore(config.getMaxConcurrentCalls(), config.isFairCallHandlingEnabled());
        this.metrics = new BulkheadMetrics();
        this.eventProcessor = new BulkheadEventProcessor();
    }

    /**
     * Creates a bulkhead with a default config.
     *
     * @param name the name of this bulkhead
     */
    public SemaphoreBulkhead(String name) {
        this(name, BulkheadConfig.ofDefaults(), emptyMap());
    }

    /**
     * Create a bulkhead using a configuration supplier
     *
     * @param name           the name of this bulkhead
     * @param configSupplier BulkheadConfig supplier
     */
    public SemaphoreBulkhead(String name, Supplier<BulkheadConfig> configSupplier) {
        this(name, configSupplier.get(), emptyMap());
    }

    /**
     * Create a bulkhead using a configuration supplier
     *
     * @param name           the name of this bulkhead
     * @param configSupplier BulkheadConfig supplier
     * @param tags           tags to add to the Bulkhead
     */
    public SemaphoreBulkhead(String name, Supplier<BulkheadConfig> configSupplier,
        Map<String, String> tags) {
        this(name, configSupplier.get(), tags);
    }

    /**
     * {@inheritDoc}
     *
     * Shrinking the limit blocks (uninterruptibly) until the excess permits can
     * be reclaimed; growing releases the additional permits immediately.
     */
    @Override
    public void changeConfig(final BulkheadConfig newConfig) {
        synchronized (configChangesLock) {
            int delta = newConfig.getMaxConcurrentCalls() - config.getMaxConcurrentCalls();
            if (delta < 0) {
                semaphore.acquireUninterruptibly(-delta);
            } else if (delta > 0) {
                semaphore.release(delta);
            }
            config = newConfig;
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean tryAcquirePermission() {
        boolean callPermitted = tryEnterBulkhead();
        publishBulkheadEvent(
            () -> callPermitted ? new BulkheadOnCallPermittedEvent(name)
                : new BulkheadOnCallRejectedEvent(name)
        );
        return callPermitted;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void acquirePermission() {
        boolean permitted = tryAcquirePermission();
        if (permitted) {
            return;
        }
        // distinguish "interrupted while waiting" from "bulkhead full"
        if (Thread.currentThread().isInterrupted()) {
            throw new AcquirePermissionCancelledException();
        }
        throw BulkheadFullException.createBulkheadFullException(this);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void releasePermission() {
        semaphore.release();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void onComplete() {
        semaphore.release();
        publishBulkheadEvent(() -> new BulkheadOnCallFinishedEvent(name));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getName() {
        return this.name;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public BulkheadConfig getBulkheadConfig() {
        return config;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Metrics getMetrics() {
        return metrics;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Map<String, String> getTags() {
        return tags;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public EventPublisher getEventPublisher() {
        return eventProcessor;
    }

    @Override
    public String toString() {
        return String.format("Bulkhead '%s'", this.name);
    }

    /**
     * @return true if caller was able to wait for permission without {@link Thread#interrupt}
     */
    boolean tryEnterBulkhead() {
        long timeout = config.getMaxWaitDuration().toMillis();
        try {
            return semaphore.tryAcquire(timeout, TimeUnit.MILLISECONDS);
        } catch (InterruptedException ex) {
            // restore the interrupt flag so callers can observe it
            Thread.currentThread().interrupt();
            return false;
        }
    }

    /** Publishes lazily: the event is only materialized if someone is listening. */
    private void publishBulkheadEvent(Supplier<BulkheadEvent> eventSupplier) {
        if (eventProcessor.hasConsumers()) {
            eventProcessor.consumeEvent(eventSupplier.get());
        }
    }

    /** Routes bulkhead events to consumers registered per concrete event type. */
    private class BulkheadEventProcessor extends EventProcessor<BulkheadEvent> implements
        EventPublisher, EventConsumer<BulkheadEvent> {

        @Override
        public EventPublisher onCallPermitted(
            EventConsumer<BulkheadOnCallPermittedEvent> onCallPermittedEventConsumer) {
            registerConsumer(BulkheadOnCallPermittedEvent.class.getName(),
                onCallPermittedEventConsumer);
            return this;
        }

        @Override
        public EventPublisher onCallRejected(
            EventConsumer<BulkheadOnCallRejectedEvent> onCallRejectedEventConsumer) {
            registerConsumer(BulkheadOnCallRejectedEvent.class.getName(),
                onCallRejectedEventConsumer);
            return this;
        }

        @Override
        public EventPublisher onCallFinished(
            EventConsumer<BulkheadOnCallFinishedEvent> onCallFinishedEventConsumer) {
            registerConsumer(BulkheadOnCallFinishedEvent.class.getName(),
                onCallFinishedEventConsumer);
            return this;
        }

        @Override
        public void consumeEvent(BulkheadEvent event) {
            super.processEvent(event);
        }
    }

    /** Live metrics view backed directly by the semaphore and current config. */
    private final class BulkheadMetrics implements Metrics {

        private BulkheadMetrics() {
        }

        @Override
        public int getAvailableConcurrentCalls() {
            return semaphore.availablePermits();
        }

        @Override
        public int getMaxAllowedConcurrentCalls() {
            return config.getMaxConcurrentCalls();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.resourcemanager;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.clusterframework.types.ResourceID;
import org.apache.flink.runtime.clusterframework.types.ResourceProfile;
import org.apache.flink.runtime.heartbeat.HeartbeatServices;
import org.apache.flink.runtime.highavailability.TestingHighAvailabilityServices;
import org.apache.flink.runtime.instance.HardwareDescription;
import org.apache.flink.runtime.io.network.partition.NoOpResourceManagerPartitionTracker;
import org.apache.flink.runtime.jobmaster.utils.TestingJobMasterGateway;
import org.apache.flink.runtime.jobmaster.utils.TestingJobMasterGatewayBuilder;
import org.apache.flink.runtime.leaderelection.TestingLeaderElectionService;
import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService;
import org.apache.flink.runtime.leaderretrieval.SettableLeaderRetrievalService;
import org.apache.flink.runtime.metrics.groups.UnregisteredMetricGroups;
import org.apache.flink.runtime.registration.RegistrationResponse;
import org.apache.flink.runtime.resourcemanager.slotmanager.SlotManager;
import org.apache.flink.runtime.resourcemanager.slotmanager.SlotManagerBuilder;
import org.apache.flink.runtime.rest.messages.taskmanager.TaskManagerInfo;
import org.apache.flink.runtime.rpc.RpcUtils;
import org.apache.flink.runtime.rpc.TestingRpcService;
import org.apache.flink.runtime.taskexecutor.TaskExecutorGateway;
import org.apache.flink.runtime.taskexecutor.TaskExecutorMemoryConfiguration;
import org.apache.flink.runtime.taskexecutor.TestingTaskExecutorGatewayBuilder;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.runtime.util.TestingFatalErrorHandler;
import org.apache.flink.util.TestLogger;
import org.apache.flink.util.function.ThrowingConsumer;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeoutException;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
/**
* Tests for the {@link ResourceManager}.
*/
public class ResourceManagerTest extends TestLogger {
private static final Time TIMEOUT = Time.minutes(2L);

// NOTE(review): presumably (interval, timeout) millis — "fast" variant forces
// near-immediate heartbeat timeouts for timeout tests; confirm HeartbeatServices ctor.
private static final HeartbeatServices heartbeatServices = new HeartbeatServices(1000L, 10000L);
private static final HeartbeatServices fastHeartbeatServices = new HeartbeatServices(1L, 1L);

// Fixed hardware profile reported by the fake task executor registrations.
private static final HardwareDescription hardwareDescription = new HardwareDescription(
    42,
    1337L,
    1337L,
    0L);

private static final int dataPort = 1234;
private static final int jmxPort = 23456;

// Shared across all tests; created once in setupClass, torn down in tearDownClass.
private static TestingRpcService rpcService;

// Per-test fixtures, re-created in setup() and cleaned up in after().
private TestingHighAvailabilityServices highAvailabilityServices;
private TestingLeaderElectionService resourceManagerLeaderElectionService;
private TestingFatalErrorHandler testingFatalErrorHandler;
private ResourceID resourceManagerResourceId;
private TestingResourceManager resourceManager;
private ResourceManagerId resourceManagerId;
/** Starts the RPC service shared by every test in this class. */
@BeforeClass
public static void setupClass() {
    rpcService = new TestingRpcService();
}
/** Creates fresh HA services, leader election, error handler and resource ID per test. */
@Before
public void setup() throws Exception {
    highAvailabilityServices = new TestingHighAvailabilityServices();
    // the leader election service must exist before being registered with HA services
    resourceManagerLeaderElectionService = new TestingLeaderElectionService();
    highAvailabilityServices.setResourceManagerLeaderElectionService(resourceManagerLeaderElectionService);
    testingFatalErrorHandler = new TestingFatalErrorHandler();
    resourceManagerResourceId = ResourceID.generate();
}
/** Tears down per-test fixtures; order matters (endpoint before HA services). */
@After
public void after() throws Exception {
    if (resourceManager != null) {
        RpcUtils.terminateRpcEndpoint(resourceManager, TIMEOUT);
    }
    if (highAvailabilityServices != null) {
        highAvailabilityServices.closeAndCleanupAllData();
    }
    // surface any fatal error recorded during the test so the test fails loudly
    if (testingFatalErrorHandler.hasExceptionOccurred()) {
        testingFatalErrorHandler.rethrowError();
    }
}
/** Stops the class-wide RPC service once all tests have run. */
@AfterClass
public static void tearDownClass() throws Exception {
    if (rpcService == null) {
        return;
    }
    RpcUtils.terminateRpcServices(TIMEOUT, rpcService);
}
/**
 * Tests that we can retrieve the correct {@link TaskManagerInfo} from the {@link ResourceManager}.
 */
@Test
public void testRequestTaskManagerInfo() throws Exception {
    final ResourceID taskManagerId = ResourceID.generate();
    // fake task executor with a unique address so the RM can address it
    final TaskExecutorGateway taskExecutorGateway = new TestingTaskExecutorGatewayBuilder().setAddress(UUID.randomUUID().toString()).createTestingTaskExecutorGateway();
    rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway);
    resourceManager = createAndStartResourceManager(heartbeatServices);
    final ResourceManagerGateway resourceManagerGateway = resourceManager.getSelfGateway(ResourceManagerGateway.class);
    registerTaskExecutor(resourceManagerGateway, taskManagerId, taskExecutorGateway.getAddress());
    CompletableFuture<TaskManagerInfo> taskManagerInfoFuture = resourceManagerGateway.requestTaskManagerInfo(
        taskManagerId,
        TestingUtils.TIMEOUT());
    TaskManagerInfo taskManagerInfo = taskManagerInfoFuture.get();
    // the reported info must echo exactly what was registered
    assertEquals(taskManagerId, taskManagerInfo.getResourceId());
    assertEquals(hardwareDescription, taskManagerInfo.getHardwareDescription());
    assertEquals(taskExecutorGateway.getAddress(), taskManagerInfo.getAddress());
    assertEquals(dataPort, taskManagerInfo.getDataPort());
    assertEquals(jmxPort, taskManagerInfo.getJmxPort());
    // no slots were offered, so both counts are zero
    assertEquals(0, taskManagerInfo.getNumberSlots());
    assertEquals(0, taskManagerInfo.getNumberAvailableSlots());
}
private void registerTaskExecutor(ResourceManagerGateway resourceManagerGateway, ResourceID taskExecutorId, String taskExecutorAddress) throws Exception {
TaskExecutorRegistration taskExecutorRegistration = new TaskExecutorRegistration(
taskExecutorAddress,
taskExecutorId,
dataPort,
jmxPort,
hardwareDescription,
new TaskExecutorMemoryConfiguration(1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L),
ResourceProfile.ZERO,
ResourceProfile.ZERO);
final CompletableFuture<RegistrationResponse> registrationFuture = resourceManagerGateway.registerTaskExecutor(
taskExecutorRegistration,
TestingUtils.TIMEOUT());
assertThat(registrationFuture.get(), instanceOf(RegistrationResponse.Success.class));
}
@Test
public void testHeartbeatTimeoutWithJobMaster() throws Exception {
final CompletableFuture<ResourceID> heartbeatRequestFuture = new CompletableFuture<>();
final CompletableFuture<ResourceManagerId> disconnectFuture = new CompletableFuture<>();
final TestingJobMasterGateway jobMasterGateway = new TestingJobMasterGatewayBuilder()
.setResourceManagerHeartbeatConsumer(heartbeatRequestFuture::complete)
.setDisconnectResourceManagerConsumer(disconnectFuture::complete)
.build();
rpcService.registerGateway(jobMasterGateway.getAddress(), jobMasterGateway);
final JobID jobId = new JobID();
final ResourceID jobMasterResourceId = ResourceID.generate();
final LeaderRetrievalService jobMasterLeaderRetrievalService = new SettableLeaderRetrievalService(jobMasterGateway.getAddress(), jobMasterGateway.getFencingToken().toUUID());
highAvailabilityServices.setJobMasterLeaderRetrieverFunction(requestedJobId -> {
assertThat(requestedJobId, is(equalTo(jobId)));
return jobMasterLeaderRetrievalService;
});
runHeartbeatTimeoutTest(
resourceManagerGateway -> {
final CompletableFuture<RegistrationResponse> registrationFuture = resourceManagerGateway.registerJobManager(
jobMasterGateway.getFencingToken(),
jobMasterResourceId,
jobMasterGateway.getAddress(),
jobId,
TIMEOUT);
assertThat(registrationFuture.get(), instanceOf(RegistrationResponse.Success.class));
},
resourceManagerResourceId -> {
// might have been completed or not depending whether the timeout was triggered first
final ResourceID optionalHeartbeatRequestOrigin = heartbeatRequestFuture.getNow(null);
assertThat(optionalHeartbeatRequestOrigin, anyOf(is(resourceManagerResourceId), is(nullValue())));
assertThat(disconnectFuture.get(), is(equalTo(resourceManagerId)));
});
}
@Test
public void testHeartbeatTimeoutWithTaskExecutor() throws Exception {
final ResourceID taskExecutorId = ResourceID.generate();
final CompletableFuture<ResourceID> heartbeatRequestFuture = new CompletableFuture<>();
final CompletableFuture<Exception> disconnectFuture = new CompletableFuture<>();
final TaskExecutorGateway taskExecutorGateway = new TestingTaskExecutorGatewayBuilder()
.setDisconnectResourceManagerConsumer(disconnectFuture::complete)
.setHeartbeatResourceManagerConsumer(heartbeatRequestFuture::complete)
.createTestingTaskExecutorGateway();
rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway);
runHeartbeatTimeoutTest(
resourceManagerGateway -> {
registerTaskExecutor(resourceManagerGateway, taskExecutorId, taskExecutorGateway.getAddress());
},
resourceManagerResourceId -> {
// might have been completed or not depending whether the timeout was triggered first
final ResourceID optionalHeartbeatRequestOrigin = heartbeatRequestFuture.getNow(null);
assertThat(optionalHeartbeatRequestOrigin, anyOf(is(resourceManagerResourceId), is(nullValue())));
assertThat(disconnectFuture.get(), instanceOf(TimeoutException.class));
}
);
}
private void runHeartbeatTimeoutTest(
ThrowingConsumer<ResourceManagerGateway, Exception> registerComponentAtResourceManager,
ThrowingConsumer<ResourceID, Exception> verifyHeartbeatTimeout) throws Exception {
resourceManager = createAndStartResourceManager(fastHeartbeatServices);
final ResourceManagerGateway resourceManagerGateway = resourceManager.getSelfGateway(ResourceManagerGateway.class);
registerComponentAtResourceManager.accept(resourceManagerGateway);
verifyHeartbeatTimeout.accept(resourceManagerResourceId);
}
private TestingResourceManager createAndStartResourceManager(HeartbeatServices heartbeatServices) throws Exception {
final SlotManager slotManager = SlotManagerBuilder.newBuilder()
.setScheduledExecutor(rpcService.getScheduledExecutor())
.build();
final JobLeaderIdService jobLeaderIdService = new JobLeaderIdService(
highAvailabilityServices,
rpcService.getScheduledExecutor(),
TestingUtils.infiniteTime());
final TestingResourceManager resourceManager = new TestingResourceManager(
rpcService,
resourceManagerResourceId,
highAvailabilityServices,
heartbeatServices,
slotManager,
NoOpResourceManagerPartitionTracker::get,
jobLeaderIdService,
testingFatalErrorHandler,
UnregisteredMetricGroups.createUnregisteredResourceManagerMetricGroup());
resourceManager.start();
// first make the ResourceManager the leader
resourceManagerId = ResourceManagerId.generate();
resourceManagerLeaderElectionService.isLeader(resourceManagerId.toUUID()).get();
return resourceManager;
}
}
| |
package com.beatsportable.beats;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.LinkedList;
import java.util.Queue;
import java.util.Scanner;
import com.beatsportable.beats.DataNote.NoteType;
import com.beatsportable.beats.DataNotesData.Difficulty;
import com.beatsportable.beats.DataNotesData.NotesType;
/*
* See http://www.stepmania.com/wiki/The_.SM_file_format
*
* #TITLE:...; - The "main title" of the song.
* #SUBTITLE:...; - This text will appear underneath the main title of the song on the Select Music screen. e.g. "~Dirty Mix~" or "(remix)".
* #ARTIST:...; - The artist of the song.
* #TITLETRANSLIT:...; - Transliteration of song's main title.
* #SUBTITLETRANSLIT:...; - Transliteration of song's subtitle.
* #ARTISTTRANSLIT:...; - Transliteration of the artist's name.
* #CREDIT:...; - Give yourself some credit here for creating a wonderful song.
* #BANNER:...; - The file name of the banner image. e.g. "b4u-banner.png". This image must reside in the song folder.
* #BACKGROUND:...; - The file name of the background image. e.g. "b4u-bg.png". This image must reside in the song folder.
* #CDTITLE:...; - The file name of the spinning CD logo. e.g. "b4u-cdtitle.png". This image must reside in the song folder.
* #MUSIC:...; - The file name of the music file. e.g. "b4u.mp3". This file must reside in the song folder.
* #OFFSET:...; - The time in seconds at which beat 0 occurs in the music. This is specified as a floating point value. e.g. "2.34".
* #SAMPLESTART:...; - The time in seconds to start the music sample that plays on the Select Music screen. This is specified as a floating point value. e.g. "32.34".
* #SAMPLELENGTH:...; - The time in seconds to let the sample music play after starting. This is specified as a floating point value. e.g. "16.00". Note that in the last 1 second of playing the music will fade out.
* #SELECTABLE:...; - If "NO", the song can not be selected manually and can only be played as part of a course. If "ROULETTE", the song can also be selected via roulette. The default value is "YES".
* #BPMS:...; - A value of the format "beat=bpm". Indicates that at 'beat', the speed of the arrows will change to "bpm". Both of these values are specified as (positive) floating point values. You must specify a BPM value for beat 0. Multiple BPMs can be given by separating them with commas. e.g. "0=160,120=80".
* #STOPS:...; - A value of the format "beat=sec". Indicates that at 'beat', the motion of the arrows should stop for "sec" seconds. Both of these values are specified as floating point values. Multiple stops can be given by separating them with commas. e.g. "60=2.23,80=1.12".
* #BGCHANGE:...; - A value of the format "beat=bg name". Indicates that at 'beat', the background should begin playing a new background named 'bg name'. 'beat' is a fractional value and 'bg name' is a string. Different bg changes are separated by commas. e.g. "60=falling,80=flower". When StepMania looks for a background, it searches in this order:
1. Looks for a movie with file name = "bg name" in the song folder. You must include the file extension in "bg name". e.g. "60=falling.avi,80=flower.mpg".
2. Looks for a BGAnimation folder with the name "bg name" in the song folder.
3. Looks for a movie with file name "bg name" in the RandomMovies folder. You must include the file extension in "bg name". e.g. "60=falling.avi,80=flower.mpg".
4. Looks for a BGAnimation with file name "bg name" in the BGAnimations folder.
5. Looks for a Visualization with the file name "bg name" in the Visualizations folder. For example, if you have a song B4U and special B4U-specific BGAnimations called "robot" and "electric". First, move the robot and electric BGAnimation folders into the B4U song folder (e.g. "Songs\4th Mix\B4U\robot" and "Songs\4th Mix\B4U\electric"). Then, using the editor, insert a new background change at each point in the song where you want to switch to a new BGAnimation.
* #NOTES...; - The main part of the file, that describes the steps.
*
*/
public class DataParserSM {
private static void parseBPM(DataFile df, String buffer) throws DataParserException {
Scanner vsc = new Scanner(buffer);
vsc.useDelimiter(",");
while (vsc.hasNext()) {
String pair = vsc.next().trim();
try {
if (pair.indexOf('=') < 0) {
throw new Exception("No '=' found");
} else {
float beat = Float.parseFloat(pair.substring(0, pair.indexOf('=')));
float value = Float.parseFloat(pair.substring(pair.indexOf('=') + 1));
df.addBPM(beat, value);
}
} catch (Exception e) { // Also catch NumberFormatExceptions
vsc.close();
throw new DataParserException(
e.getClass().getSimpleName(),
"Improperly formatted #BPMS pair \"" + pair + "\": " +
e.getMessage(), e
);
}
}
vsc.close();
}
private static void parseStop(DataFile df, String buffer) throws DataParserException {
Scanner vsc = new Scanner(buffer);
vsc.useDelimiter(",");
while (vsc.hasNext()) {
String pair = vsc.next().trim();
try {
if (pair.indexOf('=') < 0) {
throw new Exception("No '=' found");
} else {
float beat = Float.parseFloat(pair.substring(0, pair.indexOf('=')));
float value = Float.parseFloat(pair.substring(pair.indexOf('=') + 1));
df.addStop(beat, value);
}
} catch (Exception e) { // Also catch NumberFormatExceptions
vsc.close();
throw new DataParserException(
e.getClass().getSimpleName(),
"Improperly formatted #STOPS pair \"" + pair + "\": " +
e.getMessage(), e
);
}
}
vsc.close();
}
@SuppressWarnings("unused")
private static void parseBGChange(DataFile df, String buffer) throws DataParserException {
Scanner vsc = new Scanner(buffer);
vsc.useDelimiter(",");
while (vsc.hasNext()) {
String pair = vsc.next().trim();
try {
if (pair.indexOf('=') < 0) {
throw new Exception("No '=' found");
} else {
float beat = Float.parseFloat(pair.substring(0, pair.indexOf('=')));
String value = pair.substring(pair.indexOf('=') + 1);
df.addBGChange(beat, value);
}
} catch (Exception e) { // Also catch NumberFormatExceptions
vsc.close();
throw new DataParserException(
e.getClass().getSimpleName(),
"Improperly formatted #BGCHANGES pair \"" + pair + "\": " +
e.getMessage(), e
);
}
}
vsc.close();
}
/*
* See http://www.stepmania.com/wiki/The_.SM_file_format
*
* Each note is represented by a character:
* 0 = no note here
* 1 = a regular "tap note"
* 2 = beginning of a "hold note"
* 3 = end of a "hold note"
* 4 = beginning of a roll (3.9+, 3.95+, 4.0)
* M = Mine
* L = Lift (3.9+ and 4.0)
* a-z,A-z = tap notes reserved for game types that have sounds associated with notes
*
*/
private static NoteType parseNoteType(char c, boolean holds) {
switch (c) {
case '0': return NoteType.NO_NOTE;
case '1': return NoteType.TAP_NOTE;
case '2':
if (holds) {
return NoteType.HOLD_START;
} else {
return NoteType.TAP_NOTE;
}
case '3':
if (holds) {
return NoteType.HOLD_END;
} else {
return NoteType.NO_NOTE;
}
case '4': return NoteType.ROLL;
case 'M': return NoteType.MINE;
case 'L': return NoteType.LIFT;
default: return NoteType.NO_NOTE;
}
}
private static int parseFraction(int lineIndex, int lineCount) throws DataParserException {
int fraction = lineIndex * 192 / lineCount;
if (fraction % (192/4) == 0) {
return 4;
} else if (fraction % (192/8) == 0) {
return 8;
} else if (fraction % (192/12) == 0) {
return 12;
} else if (fraction % (192/16) == 0) {
return 16;
} else if (fraction % (192/24) == 0) {
return 24;
} else if (fraction % (192/32) == 0) {
return 32;
} else if (fraction % (192/48) == 0) {
return 48;
} else if (fraction % (192/64) == 0) {
return 64;
} else if (fraction % (192/192) == 0) {
return 192;
} else {
throw new DataParserException (
"Unable to determine fraction type with lineIndex " +
lineIndex +
" and lineCount " +
lineCount
);
}
}
public static boolean isSupportedNoteType(NoteType nt) {
// TODO - Support mines and actual holds later
return (nt.equals(NoteType.TAP_NOTE) || nt.equals(NoteType.HOLD_START) ||
nt.equals(NoteType.HOLD_END));
}
// Confusing hold logic but pretty much its to ensure that holds end when jump is on
private static LinkedList<Integer> activeHolds;
private static int osu_num, osu_fraction;
private static Randomizer rand;
private static void addNotes(
DataNotesData nd, String line,
boolean holds, boolean jumps, boolean osu, boolean randomize,
int lineIndex, int lineCount, float beat, float time, float timeIncrease, float offset)
throws DataParserException {
boolean noteAdded = false;
for (int i = 0; i < line.length(); i++) {
// No holds if randomize - logic and keeping track is too confusing (I tried and failed)
NoteType nt = parseNoteType(line.charAt(i), (holds && !randomize));
if (isSupportedNoteType(nt)) {
// Series of checks whether or not the note should be added, confusinggg~
boolean addNote = false;
if (jumps) addNote = true;
if (osu) addNote = false;
if (!noteAdded) addNote = true;
if (!jumps && !activeHolds.isEmpty()) addNote = false;
if (nt.equals(NoteType.HOLD_END) && activeHolds.contains(i)) addNote = true;
if (addNote) {
int pitch;
int fraction;
int noteTime = (int)(time + timeIncrease - offset);
float[] coords;
if (osu) {
coords = rand.nextCoords(lineIndex, lineCount);
pitch = osu_num;
fraction = osu_fraction;
} else { // holds pitch logic put in else since osu! Mod doesn't use pitches
coords = new float[4];
pitch = i;
fraction = parseFraction(lineIndex, lineCount);
if (nt.equals(NoteType.HOLD_START)) {
activeHolds.add(i);
} else if (nt.equals(NoteType.HOLD_END) && activeHolds.contains(i)) {
activeHolds.remove(activeHolds.indexOf(i));
} else if (randomize) {
pitch = rand.nextPitch(jumps);
}
}
DataNote n = new DataNote(
nt,
fraction,
pitch,
noteTime,
beat,
coords,
osu_num
);
nd.addNote(n);
osu_num++;
noteAdded = true;
}
}
}
}
/*
* See http://www.stepmania.com/wiki/The_.SM_file_format
*
* Note rows are grouped into measures. The number of note rows you specify in a measure
* will determine the time value of each note. For example,
* If there are 4 notes rows in a measure, each note will be treated as a quarter note.
* If there are 8 notes rows in a measure, each note will be treated as a eighth note.
* If there are 12 notes rows in a measure, each note will be treated as a triplet (1/12th) note.
* Measures are separated by a comma.
*/
public static void parseNotesData(DataFile df, DataNotesData nd,
boolean jumps, boolean holds, boolean osu, boolean randomize)
throws DataParserException {
Scanner nsc = new Scanner(nd.getNotesData());
nsc.useDelimiter(",");
float beat = 0f;
float time = 0;
String line = "";
String measure = "";
Queue<Float> stopsBeat = df.getStopsBeat();
Queue<Float> stopsValue = df.getStopsValue();
activeHolds = new LinkedList<Integer>();
rand = new Randomizer(df.md5hash.hashCode());
try {
// Measure
osu_fraction = 0; // ++ -> 1
while (nsc.hasNext()) {
measure = nsc.next().trim();
osu_num = 1;
osu_fraction++;
if (osu_fraction > GUINoteImage.OSU_FRACTION_MAX) osu_fraction = 1;
rand.setupNextMeasure();
// Get measure count
Scanner msc = new Scanner(measure);
int lineCount = 0;
while (msc.hasNextLine()) {
if (msc.nextLine().trim().charAt(0) != '/') { // not comment
lineCount++;
}
}
msc.close();
// Assume that stepfile makers are nice and separate lines within a measure by page breaks
int lineIndex = 0;
float timeIncrease = 0;
float offset = df.getOffset();
msc = new Scanner(measure);
// Line
while (msc.hasNextLine()) {
line = msc.nextLine().trim();
if (line.charAt(0) == '/') { // comment
continue;
}
if (line.length() != nd.getNotesType().getNotesCount()) {
msc.close();
throw new DataParserException(
"line length " +
line.length() +
" does not match note type " +
nd.getNotesType().toString()
);
}
// Note
rand.setupNextLine();
addNotes(
nd,
line, holds, jumps, osu, randomize,
lineIndex, lineCount,
beat, time, timeIncrease, offset
);
// TIME_PER_MEASURE = 60s * 1000ms/s * 4 beats/measure
timeIncrease += (60f * 1000f * 4f) / ((float)lineCount * df.getBPM(beat));
if (!stopsBeat.isEmpty() && beat >= stopsBeat.peek()) {
stopsBeat.poll();
timeIncrease += stopsValue.poll() * 1000;
}
lineIndex++;
beat += 4.0 / (float)lineCount;
}
msc.close();
time += timeIncrease;
}
} catch (Exception e) {
throw new DataParserException(
e.getClass().getSimpleName(),
e.getMessage() +
" for line " +
line, e
);
} finally {
nsc.close();
}
}
private static void parseNotes(DataFile df, String buffer)
throws DataParserException {
// Expected format:
// #NOTES:
// <NotesType>:
// <Description>:
// <DifficultyClass>:
// <DifficultyMeter>:
// <RadarValues>:
// <NoteData>;
Scanner ndsc = new Scanner(buffer);
ndsc.useDelimiter(":");
String nbuffer = "";
DataNotesData nd = new DataNotesData();
try {
// Notes Type
nbuffer = ndsc.next().trim();
for (NotesType nt : NotesType.values()) {
if (nbuffer.equals(nt.toString())) {
nd.setNotesType(nt);
break;
}
}
// Because only 4-keys (dance-single) is currently supported
// TODO - add a note reduction system maybe that can reduce more than 4-keys to 4-keys?
if (!nd.getNotesType().equals(NotesType.DANCE_SINGLE)) {
ndsc.close();
return;
}
// Description
nbuffer = ndsc.next().trim();
nd.setDescription(nbuffer);
// Difficulty
nbuffer = ndsc.next().trim();
if (nbuffer.equalsIgnoreCase("beginner")) {
nd.setDifficulty(Difficulty.BEGINNER);
} else if (nbuffer.equalsIgnoreCase("easy")) {
nd.setDifficulty(Difficulty.EASY);
} else if (nbuffer.equalsIgnoreCase("medium")) {
nd.setDifficulty(Difficulty.MEDIUM);
} else if (nbuffer.equalsIgnoreCase("hard")) {
nd.setDifficulty(Difficulty.HARD);
} else if (nbuffer.equalsIgnoreCase("challenge")) {
nd.setDifficulty(Difficulty.CHALLENGE);
} else if (nbuffer.equalsIgnoreCase("edit")) {
nd.setDifficulty(Difficulty.EDIT);
} else {
nd.setDifficulty(Difficulty.UNKNOWN);
}
// Difficulty Meter
nbuffer = ndsc.next().trim();
if (nbuffer.length() > 0) {
nd.setDifficultyMeter(Integer.parseInt(nbuffer));
}
// Radar Values
nbuffer = ndsc.next().trim();
Scanner rsc = new Scanner(nbuffer);
rsc.useDelimiter(",");
int radarValueCount = 0;
while (rsc.hasNext()) {
try {
nd.addRadarValue(Float.valueOf(rsc.next().trim()));
radarValueCount++;
} catch (Exception e) {
rsc.close();
if (radarValueCount < 5) {
throw new DataParserException(
e.getClass().getSimpleName(),
e.getMessage(), e
);
}
}
}
// Notes Data
nbuffer = ndsc.next().trim();
nd.setNotesData(nbuffer);
df.addNotesData(nd);
ndsc.close();
} catch (Exception e) {
ndsc.close();
throw new DataParserException(
e.getClass().getSimpleName(),
"Improperly formatted #NOTES data: " +
e.getMessage(), e
);
}
}
private static String stripSM(String buffer) throws DataParserException {
if (!buffer.contains(":")) {
throw new DataParserException("Info tag missing ':' char: " + buffer);
} else {
return buffer.substring(buffer.indexOf(":") + 1).trim();
}
}
public static void parse(DataFile df) throws DataParserException, FileNotFoundException {
// Setup
File f = new File(df.getFilename());
Scanner sc = new Scanner(f, "UTF-8"); // For all us otaku out there!
sc.useDelimiter(";");
String buffer = "";
try {
while (sc.hasNext()) {
buffer = sc.next().trim();
if (buffer.contains("#")) { // Info tag
// Ignore comments and the byte order mark (xEF BB BF)
if (buffer.charAt(0) != '#') {
buffer = buffer.substring(buffer.indexOf('#'));
}
// Start filling in the info...
if (buffer.contains("#TITLE:")) {
df.setTitle(stripSM(buffer));
} else if (buffer.contains("#SUBTITLE:")) {
df.setSubTitle(stripSM(buffer));
} else if (buffer.contains("#ARTIST:")) {
df.setArtist(stripSM(buffer));
} else if (buffer.contains("#TITLETRANSLIT:")) {
df.setTitleTranslit(stripSM(buffer));
} else if (buffer.contains("#SUBTITLETRANSLIT:")) {
df.setSubTitleTranslit(stripSM(buffer));
} else if (buffer.contains("#ARTISTTRANSLIT:")) {
df.setArtistTranslit(stripSM(buffer));
} else if (buffer.contains("#CREDIT:")) {
df.setCredit(stripSM(buffer));
} else if (buffer.contains("#BANNER:")) {
df.setBanner(stripSM(buffer));
} else if (buffer.contains("#BACKGROUND:")) {
df.setBackground(stripSM(buffer));
} else if (buffer.contains("#CDTITLE:")) {
// Unimplemented
//df.setCDTitle(stripSM(buffer));
} else if (buffer.contains("#MUSIC:")) {
df.setMusic(stripSM(buffer));
} else if (buffer.contains("#OFFSET:")) {
df.setOffset(Float.parseFloat(stripSM(buffer)) * 1000f);
} else if (buffer.contains("#SAMPLESTART:")) {
// Unimplemented
//df.setSampleStart(Float.parseFloat(stripSM(buffer)));
} else if (buffer.contains("#SAMPLELENGTH:")) {
// Unimplemented
//df.setSampleLength(Float.parseFloat(stripSM(buffer)));
} else if (buffer.contains("#SELECTABLE:")) {
// Unimplemented
//df.setSelectable(stripSM(buffer).equalsIgnoreCase("YES"));
} else if (buffer.contains("#BPMS:")) {
parseBPM(df, stripSM(buffer));
} else if (buffer.contains("#STOPS:")) {
parseStop(df, stripSM(buffer));
} else if (buffer.contains("#BGCHANGES:")) {
// Unimplemented
//parseBGChange(df, stripSM(buffer));
} else if (buffer.contains("#NOTES:")) {
parseNotes(df, stripSM(buffer));
} else if (buffer.contains("#LYRICSPATH:")) {
// Unsupported
} else if (buffer.contains("#GENRE:")) {
// Unsupported
} else {
// Unsupported tag outside of SM 3.9's specification?
}
} else {
//Ignore, probably a comment
}
// Because some lazy people don't fully add all tags
df.setMusicBackup();
df.setBackgroundBackup();
}
} catch (Exception e) {
sc.close();
throw new DataParserException(e.getMessage(), e);
}
sc.close();
}
}
| |
/*
* Copyright (C) 2017 Dirk Lemmermann Software & Consulting (dlsc.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package impl.com.calendarfx.view;
import com.calendarfx.model.Calendar;
import com.calendarfx.model.CalendarEvent;
import com.calendarfx.model.CalendarSource;
import com.calendarfx.model.Entry;
import com.calendarfx.util.LoggingDomain;
import com.calendarfx.view.AgendaView;
import com.calendarfx.view.AgendaView.AgendaEntry;
import com.calendarfx.view.Messages;
import impl.com.calendarfx.view.util.Util;
import javafx.beans.InvalidationListener;
import javafx.beans.Observable;
import javafx.beans.WeakInvalidationListener;
import javafx.scene.control.Control;
import javafx.scene.control.Label;
import javafx.scene.control.ListView;
import javafx.scene.layout.BorderPane;
import java.text.MessageFormat;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.FormatStyle;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
public class AgendaViewSkin extends DateControlSkin<AgendaView> implements LoadDataSettingsProvider {
private static final String AGENDA_VIEW_PLACEHOLDER_LABEL = "placeholder-label"; //$NON-NLS-1$
// List showing one AgendaEntry per day within the load window.
private ListView<AgendaEntry> listView;
// Loads calendar entries for the window defined by getLoadStartDate()/getLoadEndDate().
private DataLoader dataLoader = new DataLoader(this);
// Shows the currently displayed time range at the top of the view.
private Label statusLabel;
/**
* Creates the skin: configures the view's list, the status label, the
* placeholder shown when no entries exist, and wires listeners so the list
* reloads whenever a relevant view property or calendar changes.
*
* @param view the agenda view being skinned
*/
public AgendaViewSkin(AgendaView view) {
super(view);
listView = view.getListView();
listView.setMinWidth(1);
listView.setFixedCellSize(-1);
// Entries are not selectable in the agenda list.
listView.setSelectionModel(Util.createEmptySelectionModel());
listView.getStyleClass().add("agenda-view-list");
statusLabel = new Label();
statusLabel.getStyleClass().add("status-label"); //$NON-NLS-1$
statusLabel.setMaxWidth(Double.MAX_VALUE);
// Label visibility follows the view's showStatusLabel property; managed is
// bound to visible so a hidden label takes no layout space.
statusLabel.visibleProperty().bind(view.showStatusLabelProperty());
statusLabel.managedProperty().bind(statusLabel.visibleProperty());
Label placeholderLabel = new Label(Messages.getString("AgendaViewSkin.NO_ENTRIES")); //$NON-NLS-1$
placeholderLabel.getStyleClass().add(AGENDA_VIEW_PLACEHOLDER_LABEL);
listView.setPlaceholder(placeholderLabel);
BorderPane borderPane = new BorderPane();
borderPane.getStyleClass().add("container"); //$NON-NLS-1$
getChildren().add(borderPane);
borderPane.setCenter(listView);
borderPane.setTop(statusLabel);
// Any of the properties below invalidating triggers a full list reload.
InvalidationListener reloadListener = it -> updateList("a view property has changed, property = " + it.toString());
view.lookAheadPeriodInDaysProperty().addListener(reloadListener);
view.lookBackPeriodInDaysProperty().addListener(reloadListener);
view.enableHyperlinksProperty().addListener(reloadListener);
view.getCalendars().addListener(reloadListener);
updateList("initial loading");
listenToCalendars();
// Re-attach visibility listeners whenever the calendar list itself changes.
view.getCalendars().addListener((Observable observable) -> listenToCalendars());
view.dateProperty().addListener(reloadListener);
}
private InvalidationListener calendarVisibilityChanged = it -> updateList("calendar visibility changed");
// Weak wrapper so this skin can be garbage collected while calendars live on.
private WeakInvalidationListener weakCalendarVisibilityChanged = new WeakInvalidationListener(calendarVisibilityChanged);
// Attaches the (weak) visibility listener to every current calendar.
private void listenToCalendars() {
for (Calendar c : getSkinnable().getCalendars()) {
getSkinnable().getCalendarVisibilityProperty(c).addListener(weakCalendarVisibilityChanged);
}
}
@Override
protected void calendarChanged(Calendar calendar) {
updateList("calendar changed");
}
@Override
protected void entryIntervalChanged(CalendarEvent evt) {
updateList(evt, "entry interval changed, entry = " + evt.getEntry());
}
@Override
protected void entryRecurrenceRuleChanged(CalendarEvent evt) {
updateList(evt, "entry recurrence rule changed, entry = " + evt.getEntry());
}
@Override
protected void entryFullDayChanged(CalendarEvent evt) {
updateList(evt, "entry full day changed changed, entry = " + evt.getEntry());
}
@Override
protected void entryCalendarChanged(CalendarEvent evt) {
updateList(evt, "entry calendar changed, entry = " + evt.getEntry());
}
@Override
protected void refreshData() {
updateList("data refresh");
}
// Reloads the list only when the changed entry falls inside the load window.
private void updateList(final CalendarEvent evt, String reason) {
Entry<?> entry = evt.getEntry();
// TODO: this can be optimized more to only update when really needed
if (isRelevant(entry)) {
updateList(reason);
}
}
// Rebuilds the whole list: loads entries per day, groups them into
// AgendaEntry items, sorts, and refreshes the status label's time range.
private void updateList(String reason) {
if (LoggingDomain.VIEW.isLoggable(Level.FINE)) {
LoggingDomain.VIEW.fine("updating list inside agenda view, reason = " + reason);
}
Map<LocalDate, List<Entry<?>>> dataMap = new HashMap<>();
dataLoader.loadEntries(dataMap);
List<AgendaEntry> listEntries = new ArrayList<>();
for (LocalDate date : dataMap.keySet()) {
AgendaEntry listViewEntry = new AgendaEntry(date);
for (Entry<?> entry : dataMap.get(date)) {
listViewEntry.getEntries().add(entry);
}
listEntries.add(listViewEntry);
}
Collections.sort(listEntries);
listView.getItems().setAll(listEntries);
DateTimeFormatter formatter = DateTimeFormatter.ofLocalizedDate(FormatStyle.LONG);
String startTime = formatter.format(getLoadStartDate());
String endTime = formatter.format(getLoadEndDate());
statusLabel.setText(MessageFormat.format(Messages.getString("AgendaViewSkin.AGENDA_TIME_RANGE"), startTime, endTime)); //$NON-NLS-1$
}
@Override
public String getLoaderName() {
return "Agenda View"; //$NON-NLS-1$
}
// Load window start: view date minus the look-back period.
@Override
public LocalDate getLoadStartDate() {
return getSkinnable().getDate().minusDays(
getSkinnable().getLookBackPeriodInDays());
}
// Load window end: view date plus the look-ahead period.
@Override
public LocalDate getLoadEndDate() {
return getSkinnable().getDate().plusDays(
getSkinnable().getLookAheadPeriodInDays());
}
@Override
public ZoneId getZoneId() {
return ZoneId.systemDefault();
}
@Override
public List<CalendarSource> getCalendarSources() {
return getSkinnable().getCalendarSources();
}
@Override
public Control getControl() {
return getSkinnable();
}
@Override
public boolean isCalendarVisible(Calendar calendar) {
return getSkinnable().isCalendarVisible(calendar);
}
}
| |
/*
* The MIT License
* Copyright (c) 2012 Microsoft Corporation
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package microsoft.exchange.webservices.data.core.service.schema;
import microsoft.exchange.webservices.data.attribute.Schema;
import microsoft.exchange.webservices.data.core.XmlElementNames;
import microsoft.exchange.webservices.data.core.enumeration.service.ConversationFlagStatus;
import microsoft.exchange.webservices.data.core.enumeration.misc.ExchangeVersion;
import microsoft.exchange.webservices.data.core.enumeration.property.Importance;
import microsoft.exchange.webservices.data.core.enumeration.property.PropertyDefinitionFlags;
import microsoft.exchange.webservices.data.property.complex.ConversationId;
import microsoft.exchange.webservices.data.property.complex.ICreateComplexPropertyDelegate;
import microsoft.exchange.webservices.data.property.complex.ItemIdCollection;
import microsoft.exchange.webservices.data.property.complex.StringList;
import microsoft.exchange.webservices.data.property.definition.BoolPropertyDefinition;
import microsoft.exchange.webservices.data.property.definition.ComplexPropertyDefinition;
import microsoft.exchange.webservices.data.property.definition.DateTimePropertyDefinition;
import microsoft.exchange.webservices.data.property.definition.GenericPropertyDefinition;
import microsoft.exchange.webservices.data.property.definition.IntPropertyDefinition;
import microsoft.exchange.webservices.data.property.definition.PropertyDefinition;
import microsoft.exchange.webservices.data.property.definition.StringPropertyDefinition;
import java.util.EnumSet;
/**
* Represents the schema for Conversation.
*/
@Schema
public class ConversationSchema extends ServiceObjectSchema {
/**
* Field URIs for Item.
*/
private static class FieldUris {
/**
* The Constant ConversationId.
*/
public static final String ConversationId =
"conversation:ConversationId";
/**
* The Constant ConversationTopic.
*/
public static final String ConversationTopic =
"conversation:ConversationTopic";
/**
* The Constant UniqueRecipients.
*/
public static final String UniqueRecipients =
"conversation:UniqueRecipients";
/**
* The Constant GlobalUniqueRecipients.
*/
public static final String GlobalUniqueRecipients =
"conversation:GlobalUniqueRecipients";
/**
* The Constant UniqueUnreadSenders.
*/
public static final String UniqueUnreadSenders =
"conversation:UniqueUnreadSenders";
/**
* The Constant GlobalUniqueUnreadSenders.
*/
public static final String GlobalUniqueUnreadSenders =
"conversation:GlobalUniqueUnreadSenders";
/**
* The Constant UniqueSenders.
*/
public static final String UniqueSenders = "conversation:UniqueSenders";
/**
* The Constant GlobalUniqueSenders.
*/
public static final String GlobalUniqueSenders =
"conversation:GlobalUniqueSenders";
/**
* The Constant LastDeliveryTime.
*/
public static final String LastDeliveryTime =
"conversation:LastDeliveryTime";
/**
* The Constant GlobalLastDeliveryTime.
*/
public static final String GlobalLastDeliveryTime =
"conversation:GlobalLastDeliveryTime";
/**
* The Constant Categories.
*/
public static final String Categories = "conversation:Categories";
/**
* The Constant GlobalCategories.
*/
public static final String GlobalCategories =
"conversation:GlobalCategories";
/**
* The Constant FlagStatus.
*/
public static final String FlagStatus = "conversation:FlagStatus";
/**
* The Constant GlobalFlagStatus.
*/
public static final String GlobalFlagStatus =
"conversation:GlobalFlagStatus";
/**
* The Constant HasAttachments.
*/
public static final String HasAttachments =
"conversation:HasAttachments";
/**
* The Constant GlobalHasAttachments.
*/
public static final String GlobalHasAttachments =
"conversation:GlobalHasAttachments";
/**
* The Constant MessageCount.
*/
public static final String MessageCount = "conversation:MessageCount";
/**
* The Constant GlobalMessageCount.
*/
public static final String GlobalMessageCount =
"conversation:GlobalMessageCount";
/**
* The Constant UnreadCount.
*/
public static final String UnreadCount = "conversation:UnreadCount";
/**
* The Constant GlobalUnreadCount.
*/
public static final String GlobalUnreadCount =
"conversation:GlobalUnreadCount";
/**
* The Constant Size.
*/
public static final String Size = "conversation:Size";
/**
* The Constant GlobalSize.
*/
public static final String GlobalSize = "conversation:GlobalSize";
/**
* The Constant ItemClasses.
*/
public static final String ItemClasses = "conversation:ItemClasses";
/**
* The Constant GlobalItemClasses.
*/
public static final String GlobalItemClasses =
"conversation:GlobalItemClasses";
/**
* The Constant Importance.
*/
public static final String Importance = "conversation:Importance";
/**
* The Constant GlobalImportance.
*/
public static final String GlobalImportance =
"conversation:GlobalImportance";
/**
* The Constant ItemIds.
*/
public static final String ItemIds = "conversation:ItemIds";
/**
* The Constant GlobalItemIds.
*/
public static final String GlobalItemIds = "conversation:GlobalItemIds";
}
/**
* Defines the Id property.
*/
public static final PropertyDefinition Id = new ComplexPropertyDefinition<ConversationId>(
ConversationId.class,
XmlElementNames.ConversationId, FieldUris.ConversationId, EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<ConversationId>() {
public ConversationId createComplexProperty() {
return new ConversationId();
}
});
/**
* Defines the Topic property.
*/
public static final PropertyDefinition Topic =
new StringPropertyDefinition(
XmlElementNames.ConversationTopic,
FieldUris.ConversationTopic,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the UniqueRecipients property.
*/
public static final PropertyDefinition UniqueRecipients = new
ComplexPropertyDefinition<StringList>(
StringList.class,
XmlElementNames.UniqueRecipients,
FieldUris.UniqueRecipients, EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<StringList>() {
public StringList createComplexProperty() {
return new StringList();
}
});
/**
* Defines the GlobalUniqueRecipients property.
*/
public static final PropertyDefinition GlobalUniqueRecipients =
new ComplexPropertyDefinition<StringList>(
StringList.class,
XmlElementNames.GlobalUniqueRecipients,
FieldUris.GlobalUniqueRecipients,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<StringList>() {
public StringList createComplexProperty() {
return new StringList();
}
});
/**
* Defines the UniqueUnreadSenders property.
*/
public static final PropertyDefinition UniqueUnreadSenders =
new ComplexPropertyDefinition<StringList>(
StringList.class,
XmlElementNames.UniqueUnreadSenders,
FieldUris.UniqueUnreadSenders,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<StringList>() {
public StringList createComplexProperty() {
return new StringList();
}
});
/**
* Defines the GlobalUniqueUnreadSenders property.
*/
public static final PropertyDefinition GlobalUniqueUnreadSenders =
new ComplexPropertyDefinition<StringList>(
StringList.class,
XmlElementNames.GlobalUniqueUnreadSenders,
FieldUris.GlobalUniqueUnreadSenders,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<StringList>() {
public StringList createComplexProperty() {
return new StringList();
}
});
/**
* Defines the UniqueSenders property.
*/
public static final PropertyDefinition UniqueSenders =
new ComplexPropertyDefinition<StringList>(
StringList.class,
XmlElementNames.UniqueSenders,
FieldUris.UniqueSenders,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<StringList>() {
public StringList createComplexProperty() {
return new StringList();
}
});
/**
* Defines the GlobalUniqueSenders property.
*/
public static final PropertyDefinition GlobalUniqueSenders =
new ComplexPropertyDefinition<StringList>(
StringList.class,
XmlElementNames.GlobalUniqueSenders,
FieldUris.GlobalUniqueSenders,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<StringList>() {
public StringList createComplexProperty() {
return new StringList();
}
});
/**
* Defines the LastDeliveryTime property.
*/
public static final PropertyDefinition LastDeliveryTime =
new DateTimePropertyDefinition(
XmlElementNames.LastDeliveryTime,
FieldUris.LastDeliveryTime,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the GlobalLastDeliveryTime property.
*/
public static final PropertyDefinition GlobalLastDeliveryTime =
new DateTimePropertyDefinition(
XmlElementNames.GlobalLastDeliveryTime,
FieldUris.GlobalLastDeliveryTime,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the Categories property.
*/
public static final PropertyDefinition Categories =
new ComplexPropertyDefinition<StringList>(
StringList.class,
XmlElementNames.Categories,
FieldUris.Categories,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<StringList>() {
public StringList createComplexProperty() {
return new StringList();
}
});
/**
* Defines the GlobalCategories property.
*/
public static final PropertyDefinition GlobalCategories =
new ComplexPropertyDefinition<StringList>(
StringList.class,
XmlElementNames.GlobalCategories,
FieldUris.GlobalCategories,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<StringList>() {
public StringList createComplexProperty() {
return new StringList();
}
});
/**
* Defines the FlagStatus property.
*/
public static final PropertyDefinition FlagStatus =
new GenericPropertyDefinition<ConversationFlagStatus>(
ConversationFlagStatus.class,
XmlElementNames.FlagStatus,
FieldUris.FlagStatus,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the GlobalFlagStatus property.
*/
public static final PropertyDefinition GlobalFlagStatus =
new GenericPropertyDefinition<ConversationFlagStatus>(
ConversationFlagStatus.class,
XmlElementNames.GlobalFlagStatus,
FieldUris.GlobalFlagStatus,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the HasAttachments property.
*/
public static final PropertyDefinition HasAttachments =
new BoolPropertyDefinition(
XmlElementNames.HasAttachments,
FieldUris.HasAttachments,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the GlobalHasAttachments property.
*/
public static final PropertyDefinition GlobalHasAttachments =
new BoolPropertyDefinition(
XmlElementNames.GlobalHasAttachments,
FieldUris.GlobalHasAttachments,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the MessageCount property.
*/
public static final PropertyDefinition MessageCount =
new IntPropertyDefinition(
XmlElementNames.MessageCount,
FieldUris.MessageCount,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the GlobalMessageCount property.
*/
public static final PropertyDefinition GlobalMessageCount =
new IntPropertyDefinition(
XmlElementNames.GlobalMessageCount,
FieldUris.GlobalMessageCount,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the UnreadCount property.
*/
public static final PropertyDefinition UnreadCount =
new IntPropertyDefinition(
XmlElementNames.UnreadCount,
FieldUris.UnreadCount,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the GlobalUnreadCount property.
*/
public static final PropertyDefinition GlobalUnreadCount =
new IntPropertyDefinition(
XmlElementNames.GlobalUnreadCount,
FieldUris.GlobalUnreadCount,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the Size property.
*/
public static final PropertyDefinition Size =
new IntPropertyDefinition(
XmlElementNames.Size,
FieldUris.Size,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the GlobalSize property.
*/
public static final PropertyDefinition GlobalSize =
new IntPropertyDefinition(
XmlElementNames.GlobalSize,
FieldUris.GlobalSize,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the ItemClasses property.
*/
public static final PropertyDefinition ItemClasses =
new ComplexPropertyDefinition<StringList>(
StringList.class,
XmlElementNames.ItemClasses,
FieldUris.ItemClasses,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<StringList>() {
public StringList createComplexProperty() {
return new StringList(XmlElementNames.
ItemClass);
}
});
/**
* Defines the GlobalItemClasses property.
*/
public static final PropertyDefinition GlobalItemClasses =
new ComplexPropertyDefinition<StringList>(
StringList.class,
XmlElementNames.GlobalItemClasses,
FieldUris.GlobalItemClasses,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<StringList>() {
public StringList createComplexProperty() {
return new StringList(XmlElementNames.
ItemClass);
}
});
/**
* Defines the Importance property.
*/
public static final PropertyDefinition Importance =
new GenericPropertyDefinition<microsoft.exchange.webservices.data.core.enumeration.property.Importance>(
Importance.class,
XmlElementNames.Importance,
FieldUris.Importance,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the GlobalImportance property.
*/
public static final PropertyDefinition GlobalImportance =
new GenericPropertyDefinition<Importance>(
Importance.class,
XmlElementNames.GlobalImportance,
FieldUris.GlobalImportance,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1);
/**
* Defines the ItemIds property.
*/
public static final PropertyDefinition ItemIds =
new ComplexPropertyDefinition<ItemIdCollection>(
ItemIdCollection.class,
XmlElementNames.ItemIds,
FieldUris.ItemIds,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<ItemIdCollection>() {
public ItemIdCollection createComplexProperty() {
return new ItemIdCollection();
}
});
/**
* Defines the GlobalItemIds property.
*/
public static final PropertyDefinition GlobalItemIds =
new ComplexPropertyDefinition<ItemIdCollection>(
ItemIdCollection.class,
XmlElementNames.GlobalItemIds,
FieldUris.GlobalItemIds,
EnumSet
.of(PropertyDefinitionFlags.CanFind),
ExchangeVersion.Exchange2010_SP1,
new ICreateComplexPropertyDelegate<ItemIdCollection>() {
public ItemIdCollection createComplexProperty() {
return new ItemIdCollection();
}
});
/**
* This must be declared after the property definitions
*/
public static final ConversationSchema Instance =
new ConversationSchema();
/**
* Registers property.
*/
@Override
protected void registerProperties() {
super.registerProperties();
this.registerProperty(Id);
this.registerProperty(Topic);
this.registerProperty(UniqueRecipients);
this.registerProperty(GlobalUniqueRecipients);
this.registerProperty(UniqueUnreadSenders);
this.registerProperty(GlobalUniqueUnreadSenders);
this.registerProperty(UniqueSenders);
this.registerProperty(GlobalUniqueSenders);
this.registerProperty(LastDeliveryTime);
this.registerProperty(GlobalLastDeliveryTime);
this.registerProperty(Categories);
this.registerProperty(GlobalCategories);
this.registerProperty(FlagStatus);
this.registerProperty(GlobalFlagStatus);
this.registerProperty(HasAttachments);
this.registerProperty(GlobalHasAttachments);
this.registerProperty(MessageCount);
this.registerProperty(GlobalMessageCount);
this.registerProperty(UnreadCount);
this.registerProperty(GlobalUnreadCount);
this.registerProperty(Size);
this.registerProperty(GlobalSize);
this.registerProperty(ItemClasses);
this.registerProperty(GlobalItemClasses);
this.registerProperty(Importance);
this.registerProperty(GlobalImportance);
this.registerProperty(ItemIds);
this.registerProperty(GlobalItemIds);
}
/**
* Initializes a new instance of
* the ConversationSchema class.
*/
protected ConversationSchema() {
super();
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See License.txt in the project root.
package com.microsoft.alm.gitcredentialmanager;
import com.microsoft.alm.authentication.BaseVsoAuthentication;
import com.microsoft.alm.authentication.BasicAuthentication;
import com.microsoft.alm.authentication.Configuration;
import com.microsoft.alm.authentication.Credential;
import com.microsoft.alm.authentication.IAuthentication;
import com.microsoft.alm.authentication.ISecureStore;
import com.microsoft.alm.authentication.ITokenStore;
import com.microsoft.alm.authentication.IVsoAadAuthentication;
import com.microsoft.alm.authentication.IVsoMsaAuthentication;
import com.microsoft.alm.authentication.SecretStore;
import com.microsoft.alm.authentication.VsoAadAuthentication;
import com.microsoft.alm.authentication.VsoMsaAuthentication;
import com.microsoft.alm.authentication.VsoTokenScope;
import com.microsoft.alm.authentication.Where;
import com.microsoft.alm.helpers.Debug;
import com.microsoft.alm.helpers.Environment;
import com.microsoft.alm.helpers.Func;
import com.microsoft.alm.helpers.Guid;
import com.microsoft.alm.helpers.IOHelper;
import com.microsoft.alm.helpers.NotImplementedException;
import com.microsoft.alm.helpers.Path;
import com.microsoft.alm.helpers.StringHelper;
import com.microsoft.alm.helpers.Trace;
import com.microsoft.alm.helpers.UriHelper;
import com.microsoft.alm.oauth2.useragent.Provider;
import com.microsoft.alm.oauth2.useragent.Version;
import com.microsoft.alm.oauth2.useragent.subprocess.DefaultProcessFactory;
import com.microsoft.alm.oauth2.useragent.subprocess.ProcessCoordinator;
import com.microsoft.alm.oauth2.useragent.subprocess.TestableProcess;
import com.microsoft.alm.oauth2.useragent.subprocess.TestableProcessFactory;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicReference;
public class Program
{
    /** Prefix for all Git configuration keys read by this helper. */
    private static final String ConfigPrefix = "credential";
    /** Namespace under which secrets are filed in the secure store. */
    private static final String SecretsNamespace = "git";
    /** Folder name used for this program's on-disk data. */
    private static final String ProgramFolderName = "git-credential-manager";
    /** Token scope requested when creating VSO personal access tokens. */
    private static final VsoTokenScope VsoCredentialScope = VsoTokenScope.CodeWrite;
    /** Response telling Git to abort authentication; printed WITHOUT a trailing newline. */
    private static final String AbortAuthenticationProcessResponse = "quit=true";
    /** Git configuration section holding the credential helper setting. */
    private static final String CredentialHelperSection = "credential.helper";
    /** Matches this helper's jar file name, with an optional -SNAPSHOT suffix. */
    private static final String CredentialHelperValueRegex = "git-credential-manager-[0-9]+\\.[0-9]+\\.[0-9]+(-SNAPSHOT)?.jar";
    /** Shared default implementation of the file-existence check used by store(). */
    private static final DefaultFileChecker DefaultFileCheckerSingleton = new DefaultFileChecker();
    // Injected standard streams and component factory (allow testing without real stdin/stdout).
    private final InputStream standardIn;
    private final PrintStream standardOut;
    private final IComponentFactory componentFactory;
    // http://stackoverflow.com/a/6773868/
    /**
     * Lazily reads the implementation version from the jar manifest.
     * May return null when run outside a packaged jar (e.g. from an IDE).
     * Benign race: concurrent first calls compute the same value.
     */
    static String getVersion()
    {
        if (_version == null)
        {
            _version = Program.class.getPackage().getImplementationVersion();
        }
        return _version;
    }
    // Cached manifest version; lazily initialized by getVersion().
    private static String _version;
static String getTitle()
{
if (_title == null)
{
_title = Program.class.getPackage().getImplementationTitle();
}
return _title;
}
private static String _title;
    /**
     * Process entry point. Delegates to innerMain and converts any uncaught
     * exception into the abort response so Git does not hang on partial output.
     *
     * @param args credential operation names passed by Git (get/store/erase/...)
     */
    public static void main(final String[] args)
    {
        try
        {
            enableDebugTrace();
            final Program program = new Program(System.in, System.out, new ComponentFactory());
            program.innerMain(args);
        }
        catch (final Exception exception)
        {
            // Full stack trace only in debug builds; keep release output terse.
            if (Debug.IsDebug)
            {
                System.err.println("Fatal error encountered. Details:");
                exception.printStackTrace(System.err);
            }
            else
            {
                System.err.println("Fatal: " + exception.getClass().getName() + " encountered. Details:");
                System.err.println(exception.getMessage());
            }
            logEvent(exception.getMessage(), "EventLogEntryType.Error");
            // notice the lack of a new line; Git needs it that way
            System.out.print(AbortAuthenticationProcessResponse);
        }
        Trace.flush();
    }
static File determineParentFolder()
{
return findFirstValidFolder(
Environment.SpecialFolder.LocalApplicationData,
Environment.SpecialFolder.ApplicationData,
Environment.SpecialFolder.UserProfile);
}
static File findFirstValidFolder(final Environment.SpecialFolder... candidates)
{
for (final Environment.SpecialFolder candidate : candidates)
{
final String path = Environment.getFolderPath(candidate);
if (path == null)
continue;
final File result = new File(path);
if (result.isDirectory())
{
return result;
}
}
final String path = System.getenv("HOME");
final File result = new File(path);
return result;
}
    /**
     * Dispatches each recognized argument to its credential operation.
     * With no arguments, or a "?" in the first argument, prints usage instead.
     *
     * @param args operation names passed by Git (approve/erase/fill/get/reject/
     *             store/version/install/uninstall); unknown args are silently ignored
     */
    void innerMain(String[] args) throws Exception
    {
        if (args.length == 0 || args[0].contains("?"))
        {
            printHelpMessage();
            return;
        }
        // list of arg => method associations (case-insensitive)
        final Map<String, Callable<Void>> actions = new TreeMap<String, Callable<Void>>(String.CASE_INSENSITIVE_ORDER);
        actions.put("approve", Store);
        actions.put("erase", Erase);
        actions.put("fill", Get);
        actions.put("get", Get);
        actions.put("reject", Erase);
        actions.put("store", Store);
        actions.put("version", PrintVersion);
        actions.put("install", Install);
        actions.put("uninstall", Uninstall);
        // run every matching action, in the order the arguments were given
        for (final String arg : args)
        {
            if (actions.containsKey(arg))
            {
                actions.get(arg).call();
            }
        }
    }
    /**
     * Creates a Program with injected streams and component factory,
     * enabling tests to substitute fakes for stdin/stdout and collaborators.
     *
     * @param standardIn       stream Git writes the credential protocol to
     * @param standardOut      stream the credential protocol response is written to
     * @param componentFactory factory for authentication/configuration components
     */
    public Program(final InputStream standardIn, final PrintStream standardOut, final IComponentFactory componentFactory)
    {
        this.standardIn = standardIn;
        this.standardOut = standardOut;
        this.componentFactory = componentFactory;
    }
    /**
     * Prints usage instructions and the supported per-host Git configuration
     * options (authority, eraseosxkeychain, interactive, validate, writelog).
     */
    private void printHelpMessage()
    {
        Trace.writeLine("Program::printHelpMessage");
        standardOut.println("usage: git credential <command> [<args>]");
        standardOut.println();
        standardOut.println("   authority      Defines the type of authentication to be used.");
        standardOut.println("                  Supports Auto, Basic, AAD, MSA, and Integrated.");
        standardOut.println("                  Default is Auto.");
        standardOut.println();
        standardOut.println("      `git config --global credential.microsoft.visualstudio.com.authority AAD`");
        standardOut.println();
        standardOut.println("   eraseosxkeychain   Enables a workaround when running on Mac OS X");
        standardOut.println("                  and using 'Apple Git' (which includes the osxkeychain");
        standardOut.println("                  credential helper, hardcoded before all other helpers).");
        standardOut.println("                  The problem is osxkeychain may return expired or");
        standardOut.println("                  revoked credentials, aborting the Git operation.");
        standardOut.println("                  The workaround is to preemptively erase from osxkeychain");
        standardOut.println("                  any Git credentials that can be refreshed or re-acquired");
        standardOut.println("                  by this credential helper.");
        standardOut.println("                  Defaults to FALSE. Ignored by Basic authority.");
        standardOut.println("                  Does nothing if Apple Git on Mac OS X isn't detected.");
        standardOut.println();
        standardOut.println("      `git config --global credential.microsoft.visualstudio.com.eraseosxkeychain false`");
        standardOut.println();
        standardOut.println("   interactive    Specifies if user can be prompted for credentials or not.");
        standardOut.println("                  Supports Auto, Always, or Never. Defaults to Auto.");
        standardOut.println("                  Only used by AAD and MSA authority.");
        standardOut.println();
        standardOut.println("      `git config --global credential.microsoft.visualstudio.com.interactive never`");
        standardOut.println();
        standardOut.println("   validate       Causes validation of credentials before supplying them");
        standardOut.println("                  to Git. Invalid credentials get a refresh attempt");
        standardOut.println("                  before failing. Incurs some minor overhead.");
        standardOut.println("                  Defaults to TRUE. Ignored by Basic authority.");
        standardOut.println();
        standardOut.println("      `git config --global credential.microsoft.visualstudio.com.validate false`");
        standardOut.println();
        standardOut.println("   writelog       Enables trace logging of all activities. Logs are written to");
        standardOut.println("                  the .git/ folder at the root of the repository.");
        standardOut.println("                  Defaults to FALSE.");
        standardOut.println();
        standardOut.println("      `git config --global credential.writelog true`");
        standardOut.println();
        standardOut.println("Sample Configuration:");
        standardOut.println("   [credential \"microsoft.visualstudio.com\"]");
        standardOut.println("       authority = AAD");
        standardOut.println("   [credential \"visualstudio.com\"]");
        standardOut.println("       authority = MSA");
        standardOut.println("   [credential]");
        standardOut.println("       helper = manager");
    }
    // Dispatch target for the "erase"/"reject" commands (see innerMain).
    private final Callable<Void> Erase = new Callable<Void>()
    {
        @Override public Void call() throws IOException, URISyntaxException
        {
            erase();
            return null;
        }
    };
private void erase() throws IOException, URISyntaxException
{
final AtomicReference<OperationArguments> operationArgumentsRef = new AtomicReference<OperationArguments>();
final AtomicReference<IAuthentication> authenticationRef = new AtomicReference<IAuthentication>();
initialize("erase", operationArgumentsRef, authenticationRef);
erase(operationArgumentsRef.get(), authenticationRef.get());
}
    /**
     * Deletes any credentials stored for the target URI.
     *
     * @param operationArguments parsed key/value arguments from Git; supplies TargetUri
     * @param authentication     the backend that owns the stored secret
     */
    public static void erase(final OperationArguments operationArguments, final IAuthentication authentication)
    {
        authentication.deleteCredentials(operationArguments.TargetUri);
    }
    // Dispatch target for the "get"/"fill" commands (see innerMain).
    private final Callable<Void> Get = new Callable<Void>()
    {
        @Override public Void call() throws IOException, URISyntaxException
        {
            get();
            return null;
        }
    };
    /**
     * Reads the operation arguments from standard input, retrieves (or acquires)
     * credentials, and writes the protocol response to standard output.
     */
    private void get() throws IOException, URISyntaxException
    {
        final AtomicReference<OperationArguments> operationArgumentsRef = new AtomicReference<OperationArguments>();
        final AtomicReference<IAuthentication> authenticationRef = new AtomicReference<IAuthentication>();
        initialize("get", operationArgumentsRef, authenticationRef);
        final String result = get(operationArgumentsRef.get(), authenticationRef.get());
        // the response is a protocol payload for Git; print without extra formatting
        standardOut.print(result);
    }
    /**
     * Produces the credential response for the target URI according to the
     * configured authority. For AAD/MSA the logon attempts escalate:
     * cached credentials, then token refresh, then interactive logon —
     * each gated by the Interactivity setting and optionally validated.
     *
     * @param operationArguments parsed arguments (target URI, authority, interactivity, validation)
     * @param authentication     backend matching the authority (must be the right subtype)
     * @return the Git protocol response, or the abort response on logon failure
     */
    public static String get(final OperationArguments operationArguments, final IAuthentication authentication)
    {
        final String AuthFailureMessage = "Logon failed, aborting authentication process.";
        final AtomicReference<Credential> credentials = new AtomicReference<Credential>();
        switch (operationArguments.Authority)
        {
            default:
            case Basic:
                // Basic authority: only serve what is already stored; never prompt here.
                if (authentication.getCredentials(operationArguments.TargetUri, credentials))
                {
                    Trace.writeLine("  credentials found");
                    operationArguments.setCredentials(credentials.get());
                }
                break;
            case AzureDirectory:
                final IVsoAadAuthentication aadAuth = (IVsoAadAuthentication) authentication;
                // attempt to get cached creds -> refresh creds -> non-interactive logon -> interactive logon
                // note that AAD "credentials" are always scoped access tokens
                if (((operationArguments.Interactivity != Interactivity.Always
                        && aadAuth.getCredentials(operationArguments.TargetUri, credentials)
                        && (!operationArguments.ValidateCredentials
                            || aadAuth.validateCredentials(operationArguments.TargetUri, credentials.get())))
                    || (operationArguments.Interactivity != Interactivity.Always
                        && aadAuth.refreshCredentials(operationArguments.TargetUri, true)
                        && aadAuth.getCredentials(operationArguments.TargetUri, credentials)
                        && (!operationArguments.ValidateCredentials
                            || aadAuth.validateCredentials(operationArguments.TargetUri, credentials.get())))
//                        || (operationArguments.Interactivity != Interactivity.Always
//                            && aadAuth.noninteractiveLogon(operationArguments.TargetUri, true)
//                            && aadAuth.getCredentials(operationArguments.TargetUri, credentials)
//                            && (!operationArguments.ValidateCredentials
//                                || aadAuth.validateCredentials(operationArguments.TargetUri, credentials.get())))
                    || (operationArguments.Interactivity != Interactivity.Never
                        && aadAuth.interactiveLogon(operationArguments.TargetUri, true))
                        && aadAuth.getCredentials(operationArguments.TargetUri, credentials)
                        && (!operationArguments.ValidateCredentials
                            || aadAuth.validateCredentials(operationArguments.TargetUri, credentials.get()))))
                {
                    Trace.writeLine("  credentials found");
                    operationArguments.setCredentials(credentials.get());
                    logEvent("Azure Directory credentials for " + operationArguments.TargetUri + " successfully retrieved.", "SuccessAudit");
                }
                else
                {
                    System.err.println(AuthFailureMessage);
                    logEvent("Failed to retrieve Azure Directory credentials for " + operationArguments.TargetUri + ".", "FailureAudit");
                    // abort response tells Git to stop the authentication process
                    return AbortAuthenticationProcessResponse;
                }
                break;
            case MicrosoftAccount:
                final IVsoMsaAuthentication msaAuth = (IVsoMsaAuthentication) authentication;
                // attempt to get cached creds -> refresh creds -> interactive logon
                // note that MSA "credentials" are always scoped access tokens
                if (((operationArguments.Interactivity != Interactivity.Always
                        && msaAuth.getCredentials(operationArguments.TargetUri, credentials)
                        && (!operationArguments.ValidateCredentials
                            || msaAuth.validateCredentials(operationArguments.TargetUri, credentials.get())))
                    || (operationArguments.Interactivity != Interactivity.Always
                        && msaAuth.refreshCredentials(operationArguments.TargetUri, true)
                        && msaAuth.getCredentials(operationArguments.TargetUri, credentials)
                        && (!operationArguments.ValidateCredentials
                            || msaAuth.validateCredentials(operationArguments.TargetUri, credentials.get())))
                    || (operationArguments.Interactivity != Interactivity.Never
                        && msaAuth.interactiveLogon(operationArguments.TargetUri, true))
                        && msaAuth.getCredentials(operationArguments.TargetUri, credentials)
                        && (!operationArguments.ValidateCredentials
                            || msaAuth.validateCredentials(operationArguments.TargetUri, credentials.get()))))
                {
                    Trace.writeLine("  credentials found");
                    operationArguments.setCredentials(credentials.get());
                    logEvent("Microsoft Live credentials for " + operationArguments.TargetUri + " successfully retrieved.", "SuccessAudit");
                }
                else
                {
                    System.err.println(AuthFailureMessage);
                    logEvent("Failed to retrieve Microsoft Live credentials for " + operationArguments.TargetUri + ".", "FailureAudit");
                    return AbortAuthenticationProcessResponse;
                }
                break;
            case GitHub:
                // not supported yet; tracked by work item 449515
                throw new NotImplementedException(449515);
            case Integrated:
                // integrated (NTLM/Kerberos) auth: hand Git empty credentials
                credentials.set(new Credential(StringHelper.Empty, StringHelper.Empty));
                operationArguments.setCredentials(credentials.get());
                break;
        }
        return operationArguments.toString();
    }
    // Dispatch target for the "store"/"approve" commands (see innerMain).
    private final Callable<Void> Store = new Callable<Void>()
    {
        @Override public Void call() throws IOException, URISyntaxException
        {
            store();
            return null;
        }
    };
    /**
     * Reads the operation arguments from standard input and persists the
     * supplied credentials, gathering the environment details (OS name, PATH)
     * needed for the osxkeychain workaround.
     */
    private void store() throws IOException, URISyntaxException
    {
        final AtomicReference<OperationArguments> operationArgumentsRef = new AtomicReference<OperationArguments>();
        final AtomicReference<IAuthentication> authenticationRef = new AtomicReference<IAuthentication>();
        initialize("store", operationArgumentsRef, authenticationRef);
        final String osName = System.getProperty("os.name");
        final TestableProcessFactory processFactory = new DefaultProcessFactory();
        final String pathString = System.getenv("PATH");
        final String pathSeparator = File.pathSeparator;
        store(operationArgumentsRef.get(), authenticationRef.get(), osName, processFactory, DefaultFileCheckerSingleton, pathString, pathSeparator);
    }
public static void store(final OperationArguments operationArguments, final IAuthentication authentication, final String osName, final TestableProcessFactory processFactory, final Func<File, Boolean> fileChecker, final String pathString, final String pathSeparator)
{
Debug.Assert(operationArguments.getUserName() != null, "The operationArguments.Username is null");
final Credential credentials = new Credential(operationArguments.getUserName(), operationArguments.getPassword());
if (authentication instanceof BasicAuthentication)
{
authentication.setCredentials(operationArguments.TargetUri, credentials);
}
else
{
if (operationArguments.EraseOsxKeyChain && Provider.isMac(osName))
{
final String gitResponse = fetchGitVersion(processFactory);
if (gitResponse.contains("Apple Git-"))
{
// check for the presence of git-credential-osxkeychain by scanning PATH
final File osxkeychainFile = findProgram(pathString, pathSeparator, "git-credential-osxkeychain", fileChecker);
if (osxkeychainFile != null)
{
// erase these credentials from osxkeychain
try
{
final String program = osxkeychainFile.getAbsolutePath();
final TestableProcess process = processFactory.create(program, "erase");
final ProcessCoordinator coordinator = new ProcessCoordinator(process);
coordinator.print(operationArguments.toString());
coordinator.waitFor();
}
catch (final IOException e)
{
throw new Error(e);
}
catch (final InterruptedException e)
{
throw new Error(e);
}
}
}
}
}
}
// Command entry point for "version"; wraps printVersion() as a Callable so
// it can be dispatched from the command table.
private final Callable<Void> PrintVersion = new Callable<Void>()
{
@Override public Void call()
{
printVersion();
return null;
}
};
// Writes the program title and version to standard output.
private void printVersion()
{
Trace.writeLine("Program::printVersion");
standardOut.println(String.format("%1$s version %2$s", getTitle(), getVersion()));
}
// Command entry point for "install"; wraps install() as a Callable so it
// can be dispatched from the command table.
private final Callable<Void> Install = new Callable<Void>()
{
@Override public Void call()
{
install();
return null;
}
};
/**
 * Entry point for the "install" command: supplies the live environment
 * (OS details, registered providers, default process factory) to the
 * testable static overload.
 */
private void install()
{
    install(System.getProperty("os.name"),
            System.getProperty("os.version"),
            standardOut,
            Provider.PROVIDERS,
            new DefaultProcessFactory());
}
/**
 * Performs the "install" command: checks all prerequisites (a working user
 * agent provider, a suitable git version, a supported operating system) and,
 * only when every check passes, re-registers this program as Git's
 * credential helper. Otherwise the unmet requirements are printed.
 *
 * @param osName         value of the os.name system property
 * @param osVersion      value of the os.version system property
 * @param standardOut    stream used to report unmet requirements
 * @param providers      user agent providers to interrogate
 * @param processFactory factory used to spawn git processes
 */
static void install(final String osName, final String osVersion, final PrintStream standardOut, final List<Provider> providers, final TestableProcessFactory processFactory)
{
List<String> missedRequirements = new ArrayList<String>();
missedRequirements.addAll(checkUserAgentProviderRequirements(providers));
missedRequirements.addAll(checkGitRequirements(processFactory));
missedRequirements.addAll(checkOsRequirements(osName, osVersion));
if (missedRequirements.isEmpty())
{
try
{
// TODO: 457304: Add option to configure for global or system
final String configLocation = "global";
// Uninstall first so repeated installs do not stack duplicate
// credential.helper entries in the config.
uninstall(processFactory);
configureGit(processFactory, configLocation);
}
catch (IOException e)
{
throw new Error(e);
}
catch (InterruptedException e)
{
throw new Error(e);
}
}
else
{
standardOut.println("Installation failed due to the following unmet requirements:");
for (String msg : missedRequirements)
{
standardOut.println(msg);
}
standardOut.println();
standardOut.println("If you think we are excluding many users with one or more of these requirements, please let us know.");
}
}
/**
 * Gathers the current JVM executable path, the path of this program's JAR,
 * and the debug flag, then delegates to the testable overload to register
 * the credential helper in the given Git config scope.
 *
 * @param processFactory factory used to spawn the git process
 * @param configLocation the config scope to write to ("global" or "system")
 */
static void configureGit(final TestableProcessFactory processFactory, final String configLocation) throws IOException, InterruptedException
{
// A resource URL inside our own JAR lets us recover the JAR's path.
final URL resourceURL = Program.class.getResource("");
final String javaHome = System.getProperty("java.home");
final File javaExecutable = new File(javaHome, "bin/java");
final String pathToJava = javaExecutable.getAbsolutePath();
final String pathToJar = determinePathToJar(resourceURL);
final boolean isDebug = Debug.IsDebug;
configureGit(processFactory, configLocation, pathToJava, pathToJar, isDebug);
}
/**
 * Registers this program as Git's credential helper by adding a
 * credential.helper entry of the form
 * {@code !/path/to/java -Ddebug=... -jar /path/to/gcm.jar}.
 *
 * @param processFactory factory used to spawn the git process
 * @param configLocation the config scope to write to ("global" or "system")
 * @param pathToJava     absolute path to the java executable
 * @param pathToJar      absolute path to this program's JAR
 * @param isDebug        whether the helper should run with -Ddebug=true
 */
static void configureGit(final TestableProcessFactory processFactory, final String configLocation, final String pathToJava, final String pathToJar, final boolean isDebug) throws IOException, InterruptedException
{
final StringBuilder sb = new StringBuilder();
// escape spaces (if any) in paths to java and path to JAR
// i.e. !/usr/bin/jre\ 1.6/bin/java -Ddebug=false -jar /home/example/with\ spaces/gcm.jar
sb.append("!").append(escapeSpaces(pathToJava));
sb.append(" -Ddebug=").append(isDebug).append(" -jar ");
sb.append(escapeSpaces(pathToJar));
final String gcmCommandLine = sb.toString();
final String[] command =
{
"git",
"config",
"--" + configLocation,
"--add",
CredentialHelperSection,
gcmCommandLine,
};
final TestableProcess process = processFactory.create(command);
final ProcessCoordinator coordinator = new ProcessCoordinator(process);
final int exitCode = coordinator.waitFor();
// Translate git's exit code into an Error on failure.
checkGitConfigExitCode(configLocation, exitCode);
}
/**
 * Escapes every space in the input with a backslash, as required when a
 * path is embedded in a Git credential.helper command line.
 *
 * @param input the raw string (typically a filesystem path)
 * @return the input with each ' ' replaced by "\ "
 */
static String escapeSpaces(final String input)
{
    final StringBuilder escaped = new StringBuilder(input.length());
    for (int i = 0; i < input.length(); i++)
    {
        final char c = input.charAt(i);
        if (c == ' ')
        {
            escaped.append('\\');
        }
        escaped.append(c);
    }
    return escaped.toString();
}
// Command entry point for "uninstall"; wraps uninstall() as a Callable so
// it can be dispatched from the command table.
private final Callable<Void> Uninstall = new Callable<Void>()
{
@Override public Void call()
{
uninstall();
return null;
}
};
/**
 * Entry point for the "uninstall" command using the default process factory.
 */
private void uninstall()
{
    uninstall(new DefaultProcessFactory());
}
/**
 * Performs the "uninstall" command: removes this program's credential.helper
 * entry from the global Git config, if one is currently configured.
 *
 * @param processFactory factory used to spawn git processes
 */
static void uninstall(final TestableProcessFactory processFactory)
{
try
{
final String configLocation = "global";
// TODO: 457304: unconfigure from both global and system (if we can!), to be sure
// Only attempt the unset when an entry exists; `git config --unset`
// fails with a non-zero exit code otherwise.
if (isGitConfigured(processFactory, configLocation))
{
unconfigureGit(processFactory, configLocation);
}
}
catch (IOException e)
{
throw new Error(e);
}
catch (InterruptedException e)
{
throw new Error(e);
}
}
/**
 * Determines whether this credential helper is already registered in the
 * given Git config scope by running `git config --get` and checking for
 * any output.
 *
 * @param processFactory factory used to spawn the git process
 * @param configLocation the config scope to inspect ("global" or "system")
 * @return true when a matching credential.helper entry exists
 */
static boolean isGitConfigured(final TestableProcessFactory processFactory, final String configLocation) throws IOException, InterruptedException
{
    final String[] gitConfigGet =
    {
        "git",
        "config",
        "--" + configLocation,
        "--get",
        CredentialHelperSection,
        CredentialHelperValueRegex,
    };
    final TestableProcess process = processFactory.create(gitConfigGet);
    final ProcessCoordinator coordinator = new ProcessCoordinator(process);
    coordinator.waitFor();
    // Non-empty stdout means `git config --get` found a matching entry.
    final String capturedOutput = coordinator.getStdOut();
    return capturedOutput.length() > 0;
}
/**
 * Removes this credential helper's entry from the given Git config scope
 * via `git config --unset`.
 *
 * @param processFactory factory used to spawn the git process
 * @param configLocation the config scope to modify ("global" or "system")
 */
static void unconfigureGit(final TestableProcessFactory processFactory, final String configLocation) throws IOException, InterruptedException
{
    final String[] gitConfigUnset =
    {
        "git",
        "config",
        "--" + configLocation,
        "--unset",
        CredentialHelperSection,
        CredentialHelperValueRegex,
    };
    final TestableProcess process = processFactory.create(gitConfigUnset);
    final ProcessCoordinator coordinator = new ProcessCoordinator(process);
    // Surface any failure exit code as an Error.
    checkGitConfigExitCode(configLocation, coordinator.waitFor());
}
/**
 * Translates a `git config` exit code into an Error for failure cases.
 * Exit code 0 is success; 3 (invalid config file) and 4 (cannot write)
 * have documented meanings; any other code is reported as unexpected.
 *
 * @param configLocation the config scope, used in the error message
 * @param exitCode       the exit code returned by `git config`
 * @throws Error when the exit code is non-zero
 */
static void checkGitConfigExitCode(final String configLocation, final int exitCode)
{
    if (exitCode == 0)
    {
        return;
    }
    final String message;
    if (exitCode == 3)
    {
        message = "The '" + configLocation + "' Git config file is invalid.";
    }
    else if (exitCode == 4)
    {
        message = "Can not write to the '" + configLocation + "' Git config file.";
    }
    else
    {
        message = "Unexpected exit code '" + exitCode + "' received from `git config`.";
    }
    throw new Error(message);
}
/**
 * Determines the path of the JAR, given a URL to a resource inside the current JAR.
 *
 * @param resourceURL a URL pointing at a resource within this program's JAR
 * @return the decoded filesystem path to the JAR itself
 */
static String determinePathToJar(final URL resourceURL)
{
    final String packageName = Program.class.getPackage().getName();
    final String resourcePath = resourceURL.getPath();
    final String decodedResourcePath;
    try
    {
        decodedResourcePath = URLDecoder.decode(resourcePath, UriHelper.UTF_8);
    }
    catch (final UnsupportedEncodingException e)
    {
        // UTF-8 support is mandated by the JVM spec; this is unreachable.
        throw new Error(e);
    }
    // Drop the "!/package/path/" suffix that jar: URLs append after the
    // archive path, leaving "file:/path/to/gcm.jar".
    final String packagePath = packageName.replace(".", "/");
    final String resourceSuffix = "!/" + packagePath + "/";
    String jarPath = decodedResourcePath.replace(resourceSuffix, "");
    // Strip only a LEADING "file:" scheme. The previous blanket
    // replace("file:", "") would also corrupt any path that legitimately
    // contained the substring "file:" elsewhere.
    if (jarPath.startsWith("file:"))
    {
        jarPath = jarPath.substring("file:".length());
    }
    return jarPath;
}
/**
 * Asks all the supplied {@link Provider} implementations to check their requirements and
 * report only if all of them are missing something.
 *
 *
 * For example, suppose we have support for both JavaFX- and SWT-based browsers:
 * we just need to have one of those working on the user's computer.
 *
 * So, if they are running on Java 6, they can't use JavaFX, but that's fine,
 * because they installed xulrunner and the SWT-based browser should work.
 *
 * @param providers a list of {@link Provider} implementations to interrogate
 * @return a list of requirements, per provider,
 *         if no single user agent provider had all its requirements satisfied
 */
static List<String> checkUserAgentProviderRequirements(final List<Provider> providers)
{
final List<String> results = new ArrayList<String>();
// Preserve provider order when reporting, hence the LinkedHashMap.
final LinkedHashMap<Provider, List<String>> requirementsByProvider = new LinkedHashMap<Provider, List<String>>();
int numberOfProvidersWithSatisfiedRequirements = 0;
for (final Provider provider : providers)
{
final List<String> requirements = provider.checkRequirements();
if (requirements == null || requirements.isEmpty())
{
numberOfProvidersWithSatisfiedRequirements++;
}
else
{
requirementsByProvider.put(provider, requirements);
}
}
// Only report when NO provider is usable; a single working provider is
// enough for the program to function.
if (numberOfProvidersWithSatisfiedRequirements == 0)
{
for (final Map.Entry<Provider, List<String>> entry : requirementsByProvider.entrySet())
{
final Provider provider = entry.getKey();
final List<String> requirements = entry.getValue();
results.add("The " + provider.getClassName() + " user agent provider has the following unmet requirements:");
for (final String requirement : requirements)
{
results.add("  - " + requirement);
}
}
}
return results;
}
/**
 * Checks if git version can be found and if it is the correct version.
 *
 * @param processFactory factory used to spawn the git process
 * @return a list of unmet requirements; empty when git requirements are met
 */
static List<String> checkGitRequirements(final TestableProcessFactory processFactory)
{
    return isValidGitVersion(fetchGitVersion(processFactory));
}
/**
 * Runs `git --version` and returns its trimmed stdout (e.g.
 * "git version 2.x.y").
 *
 * @param processFactory factory used to spawn the git process
 * @return the trimmed output of `git --version`
 * @throws Error wrapping any IOException/InterruptedException from the spawn
 */
static String fetchGitVersion(final TestableProcessFactory processFactory)
{
try
{
// finding git version via commandline
final TestableProcess gitProcess = processFactory.create("git", "--version");
final ProcessCoordinator coordinator = new ProcessCoordinator(gitProcess);
coordinator.waitFor();
final String gitResponse = coordinator.getStdOut();
return gitResponse.trim();
}
catch (final IOException e)
{
throw new Error(e);
}
catch (final InterruptedException e)
{
throw new Error(e);
}
}
// Default Func<File, Boolean> used by findProgram: a candidate counts only
// when it is an existing regular file.
private static class DefaultFileChecker implements Func<File, Boolean>
{
@Override public Boolean call(final File file)
{
return file.isFile();
}
}
/**
 * Scans the directories in a PATH-style string for the named executable.
 *
 * @param pathString     the PATH-style list of directories
 * @param pathSeparator  the separator between directories (File.pathSeparator)
 * @param executableName the program file name to look for
 * @param fileChecker    predicate deciding whether a candidate file counts
 * @return the first matching file, or null when none is found
 */
static File findProgram(final String pathString, final String pathSeparator, final String executableName, final Func<File, Boolean> fileChecker)
{
    // String.split() interprets its argument as a REGEX. ":" and ";" happen
    // to be safe, but quoting the separator makes the split literal and
    // robust against any separator containing regex metacharacters.
    final String[] partArray = pathString.split(java.util.regex.Pattern.quote(pathSeparator));
    final List<String> parts = Arrays.asList(partArray);
    return findProgram(parts, executableName, fileChecker);
}
/**
 * Returns the first (directory, executableName) combination accepted by
 * the file checker, or null when no directory contains the executable.
 *
 * @param directories    candidate directories, checked in order
 * @param executableName the program file name to look for
 * @param fileChecker    predicate deciding whether a candidate file counts
 * @return the first matching file, or null
 */
static File findProgram(final List<String> directories, final String executableName, final Func<File, Boolean> fileChecker)
{
    for (final String directoryName : directories)
    {
        final File candidate = new File(new File(directoryName), executableName);
        if (fileChecker.call(candidate))
        {
            return candidate;
        }
    }
    return null;
}
/**
 * Parses git version response for major and minor version and checks if it's 1.9 or above.
 *
 * @param gitResponse the output from 'git --version'
 * @return a list of unmet requirements; empty when the git version meets the requirement
 */
protected static List<String> isValidGitVersion(final String gitResponse)
{
Trace.writeLine("Program::isValidGitVersion");
Trace.writeLine(" gitResponse:" + gitResponse);
final String GitNotFound = "Git is a requirement for installation and cannot be found. Please check that Git is installed and is added to your PATH";
final List<String> result = new ArrayList<String>();
// if git responded with a version then parse it for the version number
if (gitResponse != null)
{
// TODO: 450002: Detect "Apple Git" and warn the user
// git version numbers are in the form of x.y.z and we only need x.y to ensure the requirements are met
Version version = null;
try
{
version = Version.parseVersion(gitResponse);
}
catch (final IllegalArgumentException ignored)
{
// An unparseable response is treated the same as git being absent.
Trace.writeLine(" " + ignored.getMessage());
result.add(GitNotFound);
}
if (version != null)
{
// Requirement: git 1.9 or newer.
if (version.getMajor() < 1
|| (version.getMajor() == 1 && version.getMinor() < 9))
{
result.add("Git version " + version.getMajor() + "." + version.getMinor() + " was found but version 1.9 or above is required.");
}
}
}
else
{
result.add(GitNotFound);
}
return result;
}
/**
 * Checks if the OS meets the requirements to run installation.
 *
 * @param osName the name of the operating system, as retrieved from the os.name property.
 * @param osVersion the version of the operating system, as retrieved from the os.version property.
 * @return a list of strings representing unmet requirements.
 */
protected static List<String> checkOsRequirements(final String osName, final String osVersion)
{
final ArrayList<String> result = new ArrayList<String>();
if (Provider.isMac(osName))
{
// Minimum supported Mac OS X version is 10.10.5; compare major,
// then minor, then patch.
final Version version = Version.parseVersion(osVersion);
final String badVersionMessage = "The version of Mac OS X running is " + version.getMajor() + "." + version.getMinor() + "." + version.getPatch() +
" which does not meet the minimum version of 10.10.5 needed for installation.  Please upgrade to Mac OS X 10.10.5 or above to proceed.";
if (version.getMajor() < 10)
{
result.add(badVersionMessage);
}
else if (version.getMajor() == 10)
{
if (version.getMinor() < 10)
{
result.add(badVersionMessage);
}
else if (version.getMinor() == 10)
{
if (version.getPatch() < 5)
{
result.add(badVersionMessage);
}
}
}
}
else if (Provider.isLinux(osName))
{
// only needs a desktop env; already checked by checkUserAgentProviderRequirements()
// TODO: check for supported major distributions and versions (Ubuntu 14+, Fedora 22+, etc.)
}
else if (Provider.isWindows(osName))
{
result.add("It looks like you are running on Windows, please consider using the Git Credential Manager for Windows: https://github.com/Microsoft/Git-Credential-Manager-for-Windows");
}
else
{
result.add("The Git Credential Manager for Mac and Linux is only supported on, well, Mac OS X and Linux.  The operating system detected is " + osName + ", which is not supported.");
}
return result;
}
/**
 * Common setup for the credential-helper operations (get/store/erase):
 * reads the operation arguments from stdin per the git-credential protocol,
 * applies configuration overrides, enables tracing, and creates the
 * authentication object, publishing both results through the references.
 *
 * @param methodName            caller name, used only for trace output
 * @param operationArgumentsRef receives the parsed operation arguments
 * @param authenticationRef     receives the created authentication object
 */
private void initialize(
final String methodName,
final AtomicReference<OperationArguments> operationArgumentsRef,
final AtomicReference<IAuthentication> authenticationRef
) throws IOException, URISyntaxException
{
// parse the operations arguments from stdin (this is how git sends commands)
// see: https://www.kernel.org/pub/software/scm/git/docs/technical/api-credentials.html
// see: https://www.kernel.org/pub/software/scm/git/docs/git-credential.html
final OperationArguments operationArguments;
final BufferedReader reader = new BufferedReader(new InputStreamReader(standardIn));
try
{
operationArguments = new OperationArguments(reader);
}
finally
{
IOHelper.closeQuietly(reader);
}
Debug.Assert(operationArguments.TargetUri != null, "The operationArguments.TargetUri is null");
// Let git config values override/augment the stdin arguments.
final Configuration config = componentFactory.createConfiguration();
loadOperationArguments(operationArguments, config);
enableTraceLogging(operationArguments);
Trace.writeLine("Program::" + methodName);
Trace.writeLine("   targetUri = " + operationArguments.TargetUri);
final ISecureStore secureStore = componentFactory.createSecureStore();
final IAuthentication authentication = componentFactory.createAuthentication(operationArguments, secureStore);
operationArgumentsRef.set(operationArguments);
authenticationRef.set(authentication);
}
/**
 * Creates the authentication object matching the requested (or detected)
 * authority type. When the authority is Auto, the target URI is probed to
 * decide between Microsoft Account, Azure Directory, and Basic; the chosen
 * type is written back into operationArguments.Authority.
 *
 * @param operationArguments the parsed operation arguments (Authority may be updated)
 * @param secureStore        backing store for secrets
 * @return an IAuthentication implementation for the resolved authority
 */
static IAuthentication createAuthentication(final OperationArguments operationArguments, final ISecureStore secureStore)
{
Debug.Assert(operationArguments != null, "The operationArguments is null");
Trace.writeLine("Program::createAuthentication");
final SecretStore secrets = new SecretStore(secureStore, SecretsNamespace);
final AtomicReference<IAuthentication> authorityRef = new AtomicReference<IAuthentication>();
final ITokenStore adaRefreshTokenStore = null;
if (operationArguments.Authority == AuthorityType.Auto)
{
Trace.writeLine("   detecting authority type");
// detect the authority
if (BaseVsoAuthentication.getAuthentication(operationArguments.TargetUri,
VsoCredentialScope,
secrets,
adaRefreshTokenStore,
authorityRef)
/* TODO: 449515: add GitHub support
|| GithubAuthentication.GetAuthentication(operationArguments.TargetUri,
GithubCredentialScope,
secrets,
authorityRef)*/)
{
// set the authority type based on the returned value
if (authorityRef.get() instanceof VsoMsaAuthentication)
{
operationArguments.Authority = AuthorityType.MicrosoftAccount;
}
else if (authorityRef.get() instanceof VsoAadAuthentication)
{
operationArguments.Authority = AuthorityType.AzureDirectory;
}
/* TODO: 449515: add GitHub support
else if (authorityRef instanceof GithubAuthentication)
{
operationArguments.Authority = AuthorityType.GitHub;
}
*/
}
else
{
// Detection found nothing VSO-specific: fall back to basic auth.
operationArguments.Authority = AuthorityType.Basic;
}
}
switch (operationArguments.Authority)
{
case AzureDirectory:
Trace.writeLine("   authority is Azure Directory");
// return the allocated authority or a generic AAD backed VSO authentication object
return authorityRef.get() != null ? authorityRef.get() : new VsoAadAuthentication(Guid.Empty, VsoCredentialScope, secrets, adaRefreshTokenStore);
case Basic:
default:
Trace.writeLine("   authority is basic");
// return a generic username + password authentication object
return authorityRef.get() != null ? authorityRef.get() : new BasicAuthentication(secrets);
/* TODO: 449515: add GitHub support
case GitHub:
Trace.writeLine("    authority it GitHub");
// return a GitHub authentication object
return new GithubAuthentication(GithubCredentialScope, secrets);
*/
case MicrosoftAccount:
Trace.writeLine("   authority is Microsoft Live");
// return the allocated authority or a generic MSA backed VSO authentication object
return authorityRef.get() != null ? authorityRef.get() : new VsoMsaAuthentication(VsoCredentialScope, secrets, adaRefreshTokenStore);
}
}
/**
 * Applies git-config overrides (credential.*.authority, interactive,
 * validate, writelog, eraseosxkeychain) to the parsed operation arguments.
 * Unrecognized values for "authority" fall back to Basic; unrecognized
 * values for the boolean-ish keys leave the current setting untouched.
 *
 * @param operationArguments the arguments to update in place
 * @param config             the git configuration to read from
 */
private static void loadOperationArguments(final OperationArguments operationArguments, final Configuration config) throws IOException
{
Debug.Assert(operationArguments != null, "The operationsArguments parameter is null.");
Trace.writeLine("Program::loadOperationArguments");
final AtomicReference<Configuration.Entry> entryRef = new AtomicReference<Configuration.Entry>();
if (config.tryGetEntry(ConfigPrefix, operationArguments.TargetUri, "authority", entryRef))
{
Trace.writeLine("   authority = " + entryRef.get().Value);
// Several aliases map to each authority type.
if ("MSA".equalsIgnoreCase(entryRef.get().Value)
|| "Microsoft".equalsIgnoreCase(entryRef.get().Value)
|| "MicrosoftAccount".equalsIgnoreCase(entryRef.get().Value)
|| "Live".equalsIgnoreCase(entryRef.get().Value)
|| "LiveConnect".equalsIgnoreCase(entryRef.get().Value)
|| "LiveID".equalsIgnoreCase(entryRef.get().Value))
{
operationArguments.Authority = AuthorityType.MicrosoftAccount;
}
else if ("AAD".equalsIgnoreCase(entryRef.get().Value)
|| "Azure".equalsIgnoreCase(entryRef.get().Value)
|| "AzureDirectory".equalsIgnoreCase(entryRef.get().Value))
{
operationArguments.Authority = AuthorityType.AzureDirectory;
}
else if ("Integrated".equalsIgnoreCase(entryRef.get().Value)
|| "NTLM".equalsIgnoreCase(entryRef.get().Value)
|| "Kerberos".equalsIgnoreCase(entryRef.get().Value)
|| "SSO".equalsIgnoreCase(entryRef.get().Value))
{
operationArguments.Authority = AuthorityType.Integrated;
}
else
{
operationArguments.Authority = AuthorityType.Basic;
}
}
if (config.tryGetEntry(ConfigPrefix, operationArguments.TargetUri, "interactive", entryRef))
{
Trace.writeLine("   interactive = " + entryRef.get().Value);
if ("always".equalsIgnoreCase(entryRef.get().Value)
|| "true".equalsIgnoreCase(entryRef.get().Value)
|| "force".equalsIgnoreCase(entryRef.get().Value))
{
operationArguments.Interactivity = Interactivity.Always;
}
else if ("never".equalsIgnoreCase(entryRef.get().Value)
|| "false".equalsIgnoreCase(entryRef.get().Value))
{
operationArguments.Interactivity = Interactivity.Never;
}
}
if (config.tryGetEntry(ConfigPrefix, operationArguments.TargetUri, "validate", entryRef))
{
Trace.writeLine("   validate = " + entryRef.get().Value);
if ("true".equalsIgnoreCase(entryRef.get().Value))
{
operationArguments.ValidateCredentials = true;
}
else if ("false".equalsIgnoreCase(entryRef.get().Value))
{
operationArguments.ValidateCredentials = false;
}
}
if (config.tryGetEntry(ConfigPrefix, operationArguments.TargetUri, "writelog", entryRef))
{
Trace.writeLine("   writelog = " + entryRef.get().Value);
if ("true".equalsIgnoreCase(entryRef.get().Value))
{
operationArguments.WriteLog = true;
}
else if ("false".equalsIgnoreCase(entryRef.get().Value))
{
operationArguments.WriteLog = false;
}
}
if (config.tryGetEntry(ConfigPrefix, operationArguments.TargetUri, "eraseosxkeychain", entryRef))
{
Trace.writeLine("   eraseosxkeychain = " + entryRef.get().Value);
if ("true".equalsIgnoreCase(entryRef.get().Value))
{
operationArguments.EraseOsxKeyChain = true;
}
else if ("false".equalsIgnoreCase(entryRef.get().Value))
{
operationArguments.EraseOsxKeyChain = false;
}
}
}
/**
 * Intended to log an event to the OS event log; currently a no-op because
 * the Windows event-log implementation (below, commented out) requires
 * elevation that only a proper installer can provide.
 *
 * @param message   the event text (currently unused)
 * @param eventType the event severity (currently unused)
 */
private static void logEvent(final String message, final Object eventType)
{
final String eventSource = "Git Credential Manager";
/*** commented out due to UAC issues which require a proper installer to work around ***/
//Trace.WriteLine("Program::LogEvent");
//if (!EventLog.SourceExists(EventSource))
//{
//    EventLog.CreateEventSource(EventSource, "Application");
//    Trace.WriteLine("   event source created");
//}
//EventLog.WriteEntry(EventSource, message, eventType);
//Trace.WriteLine("   " + eventType + "event written");
}
/**
 * When operationArguments.WriteLog is set, attaches a PrintStream listener
 * that appends trace output to a log file alongside the local .git config,
 * rotating the file once it exceeds 8 MB.
 *
 * @param operationArguments the parsed operation arguments
 * @throws IOException if the log file cannot be opened
 */
private static void enableTraceLogging(final OperationArguments operationArguments) throws IOException
{
    final int LogFileMaxLength = 8 * 1024 * 1024; // 8 MB
    Trace.writeLine("Program::EnableTraceLogging");
    if (operationArguments.WriteLog)
    {
        Trace.writeLine(" trace logging enabled");
        final AtomicReference<String> gitConfigPath = new AtomicReference<String>();
        if (Where.gitLocalConfig(gitConfigPath))
        {
            Trace.writeLine(" git local config found at " + gitConfigPath.get());
            final String dotGitPath = Path.getDirectoryName(gitConfigPath.get());
            final String logFilePath = Path.combine(dotGitPath, Path.changeExtension(ConfigPrefix, ".log"));
            final File logFileInfo = new File(logFilePath);
            // Rotate when the log exceeds the cap: move it to the first
            // unused "<prefix>NNN.log" name so history is preserved.
            if (logFileInfo.exists() && logFileInfo.length() > LogFileMaxLength)
            {
                for (int i = 1; i < Integer.MAX_VALUE; i++)
                {
                    final String moveName = String.format("%1$s%2$03d.log", ConfigPrefix, i);
                    final String movePath = Path.combine(dotGitPath, moveName);
                    final File moveFile = new File(movePath);
                    if (!moveFile.isFile())
                    {
                        logFileInfo.renameTo(moveFile);
                        break;
                    }
                }
            }
            Trace.writeLine(" trace log destination is " + logFilePath);
            // Open in APPEND mode. new PrintStream(String) truncates the
            // file on every run, which defeats both the size-based rotation
            // above and the "new log entries" header written below.
            final PrintStream listener = new PrintStream(new java.io.FileOutputStream(logFilePath, true));
            Trace.getListeners().add(listener);
            // write a small header to help with identifying new log entries
            listener.println(Environment.NewLine);
            listener.println(String.format("Log Start (%1$tFT%1$tT%1$tZ)", Calendar.getInstance()));
            listener.println(String.format("%1$s version %2$s", getTitle(), getVersion()));
        }
    }
}
// In debug builds, mirrors all trace output to stderr.
private static void enableDebugTrace()
{
if (Debug.IsDebug)
{
// use the stderr stream for the trace as stdout is used in the cross-process communications protocol
Trace.getListeners().add(System.err);
}
}
/**
 * Default IComponentFactory: wires the program's real collaborators
 * (authentication, git configuration, secret store). Tests substitute
 * their own factory to inject fakes.
 */
static class ComponentFactory implements IComponentFactory
{
@Override public IAuthentication createAuthentication(final OperationArguments operationArguments, final ISecureStore secureStore)
{
return Program.createAuthentication(operationArguments, secureStore);
}
@Override public Configuration createConfiguration() throws IOException
{
return new Configuration();
}
@Override public ISecureStore createSecureStore()
{
// TODO: 449516: detect the operating system/capabilities and create the appropriate instance
// Currently always uses the plain-XML InsecureStore under the program folder.
final File parentFolder = determineParentFolder();
final File programFolder = new File(parentFolder, ProgramFolderName);
//noinspection ResultOfMethodCallIgnored
programFolder.mkdirs();
final File insecureFile = new File(programFolder, "insecureStore.xml");
return new InsecureStore(insecureFile);
}
}
}
| |
/*******************************************************************************
* Copyright 2012-present Pixate, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.pixate.util;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.BitmapDrawable;
import android.os.Build;
import android.test.AndroidTestCase;
import android.util.Base64;
import android.util.Base64OutputStream;
import com.pixate.PixateFreestyle;
/**
 * Android instrumentation tests for UrlStreamOpener, covering the bundle://,
 * documents://, file://, tmp://, data: and implicit (scheme-less) cases.
 * setUp() prepares fixture files and a canonical bitmap for comparison.
 */
public class UrlStreamOpenerTests extends AndroidTestCase {

    // Fixture resource/file names used by the tests.
    private static final String RAW_TEST_FILE = "url_test_file";
    private static final String IMAGE_RES = "ic_launcher.png";
    private static final String IMAGE_ASSET = "urlOpener/asset_test.png";
    private static final String DOCUMENT_FILE = "doc_test";
    private static final String TMP_FILE = "tmp_test";

    // Expected contents of the raw test file, read once in setUp().
    private String testFileContents;
    // file:// URI of the document file created in setUp().
    private String documentFileUri;
    // Temp file created in the cache dir; deleted in tearDown().
    private File tempFile;
    // Canonical bitmap decoded straight from the asset, for comparisons.
    private Bitmap assetBitmap;
    // Base64 encoding of the asset image bytes, for the data: scheme test.
    private String assetBitmapBase64;

    public UrlStreamOpenerTests() {
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        Context context = this.getContext();
        PixateFreestyle.init(context.getApplicationContext());
        // Grab the bitmap placed in the assets. We can use it to compare
        // results later.
        InputStream is = context.getAssets().open(IMAGE_ASSET);
        assetBitmap = BitmapFactory.decodeStream(is);
        is.close();
        Resources resources = context.getResources();
        int rawFileId = resources.getIdentifier(RAW_TEST_FILE, "raw", this.getContext().getPackageName());
        testFileContents = readStream(resources.openRawResource(rawFileId));
        // Create a document file.
        OutputStreamWriter writer =
                new OutputStreamWriter(getContext().openFileOutput(DOCUMENT_FILE, Context.MODE_PRIVATE));
        try {
            writer.write(testFileContents);
        } finally {
            writer.close();
        }
        // Learn the document file's file:// uri so we can test that scheme.
        documentFileUri = new File(context.getFilesDir(), DOCUMENT_FILE).toURI().toString();
        // Clean it up to make it look like someone would type it in css
        // (file:// instead of just file:/)
        if (documentFileUri.startsWith("file:/") && !documentFileUri.startsWith("file://")) {
            documentFileUri = documentFileUri.replace("file:", "file://");
        }
        // Create a temp file.
        tempFile = new File(context.getCacheDir(), TMP_FILE);
        writer = new OutputStreamWriter(new FileOutputStream(tempFile));
        try {
            writer.write(testFileContents);
        } finally {
            writer.close();
        }
        // Get a base64 of the test asset image bytes so we can do a data: call
        // and compare results.
        is = context.getAssets().open(IMAGE_ASSET);
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        Base64OutputStream bos = new Base64OutputStream(output, Base64.DEFAULT);
        try {
            byte[] buffer = new byte[2048];
            int count = is.read(buffer);
            while (count > 0) {
                bos.write(buffer, 0, count);
                count = is.read(buffer);
            }
        } finally {
            is.close();
        }
        // Base64OutputStream only emits the final partial block and padding
        // on close(), so close it BEFORE reading the encoded bytes; reading
        // `output` earlier could yield a truncated base64 string.
        bos.close();
        assetBitmapBase64 = output.toString();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
        // Remove files created for the tests; failures here must not mask
        // the result of the test that just ran.
        // The document file.
        try {
            getContext().deleteFile(DOCUMENT_FILE);
        } catch (Exception e) {
            // No-op
        }
        // The temp file.
        try {
            if (tempFile != null && tempFile.exists()) {
                tempFile.delete();
            }
        } catch (Exception e) {
            // no-op
        }
    }

    // bundle:// pointing at a drawable resource should decode to a usable bitmap.
    public void testResourceBitmap() {
        Bitmap bitmap =
                new BitmapDrawable(getContext().getResources(), UrlStreamOpener.open("bundle://" + IMAGE_RES)).getBitmap();
        assertNotNull("Bitmap not created", bitmap);
        assertTrue("Returned bitmap too small to be valid", bitmap.getHeight() > 10 && bitmap.getWidth() > 10);
    }

    // bundle:// pointing at a raw resource should return its text contents.
    public void testResourceText() throws Exception {
        assertEquals(testFileContents, readStream(UrlStreamOpener.open("bundle://" + RAW_TEST_FILE)));
    }

    // bundle:// pointing at an asset should decode to the canonical bitmap.
    public void testAssetBitmap() throws Exception {
        InputStream is = UrlStreamOpener.open("bundle://" + IMAGE_ASSET);
        try {
            Bitmap bitmap = BitmapFactory.decodeStream(is);
            boolean result;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1) {
                result = checkSameBitmapHC(assetBitmap, bitmap);
            } else {
                result = checkSameBitmapGB(assetBitmap, bitmap);
            }
            assertTrue("Returned bitmap different than canonical bitmap", result);
        } finally {
            is.close();
        }
    }

    public void testDocument() throws Exception {
        assertEquals(testFileContents, readStream(UrlStreamOpener.open("documents://" + DOCUMENT_FILE)));
    }

    public void testFile() throws Exception {
        assertEquals(testFileContents, readStream(UrlStreamOpener.open(documentFileUri)));
    }

    public void testTmp() throws Exception {
        assertEquals(testFileContents, readStream(UrlStreamOpener.open("tmp://" + TMP_FILE)));
    }

    // A data: URI carrying the base64-encoded asset should decode to the
    // canonical bitmap.
    public void testData() throws Exception {
        InputStream is = UrlStreamOpener.open("data:image/png;base64," + assetBitmapBase64);
        try {
            Bitmap bitmap = BitmapFactory.decodeStream(is);
            boolean result;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1) {
                result = checkSameBitmapHC(assetBitmap, bitmap);
            } else {
                result = checkSameBitmapGB(assetBitmap, bitmap);
            }
            assertTrue("Returned bitmap different than canonical bitmap", result);
        } finally {
            is.close();
        }
    }

    public void testImplicitDocument() throws Exception {
        // Document is checked first. This matches the test
        // document file name, so it should work.
        assertEquals(testFileContents, readStream(UrlStreamOpener.open(DOCUMENT_FILE)));
    }

    public void testImplicitBundleAsset() throws Exception {
        // Bundle is checked after documents. This is a valid
        // file name in assets (a bundle source), so it should be found.
        InputStream is = UrlStreamOpener.open(IMAGE_ASSET);
        assertNotNull(is);
        try {
            Bitmap bitmap = BitmapFactory.decodeStream(is);
            boolean result;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1) {
                result = checkSameBitmapHC(assetBitmap, bitmap);
            } else {
                result = checkSameBitmapGB(assetBitmap, bitmap);
            }
            assertTrue("Returned bitmap different than canonical bitmap", result);
        } finally {
            is.close();
        }
    }

    // Private

    // Reads the stream fully as UTF-8 text. Also closes the stream.
    private String readStream(InputStream is) throws Exception {
        if (is == null) {
            return null;
        }
        char[] buffer = new char[2048];
        StringBuilder sb = new StringBuilder();
        InputStreamReader reader = new InputStreamReader(is, "utf-8");
        int count = reader.read(buffer);
        while (count > 0) {
            sb.append(buffer, 0, count);
            count = reader.read(buffer);
        }
        is.close();
        return sb.toString();
    }

    // Honeycomb MR1+ has Bitmap.sameAs for exact pixel comparison.
    @TargetApi(Build.VERSION_CODES.HONEYCOMB_MR1)
    private boolean checkSameBitmapHC(Bitmap b1, Bitmap b2) {
        if (b1 == null || b2 == null) {
            return false;
        }
        return b1.sameAs(b2);
    }

    // Pre-HC fallback: compare dimensions, row stride, and corner pixels.
    private boolean checkSameBitmapGB(Bitmap b1, Bitmap b2) {
        if (b1 != null && b1 == b2) {
            return true;
        }
        if (b1 == null || b2 == null) {
            return false;
        }
        // Poor man's comparison for Gingerbread.
        return b1.getHeight() == b2.getHeight()
                && b1.getWidth() == b2.getWidth()
                && b1.getRowBytes() == b2.getRowBytes()
                && b1.getPixel(0, 0) == b2.getPixel(0, 0)
                && b1.getPixel(b1.getWidth() - 1, b1.getHeight() - 1) == b2.getPixel(b2.getWidth() - 1,
                        b2.getHeight() - 1);
    }
}
| |
//GamePanel.java
//Sad Heart
//Sarah MacDonald + Joel Kuntz
/*
* This class contains the main game loop,
* the dimensions of the window, the graphics and the
* mouse position.
*/
package main;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import java.awt.image.BufferedImage;
import javax.swing.JPanel;
import states.GameStateManager;
public class GamePanel extends JPanel implements Runnable, KeyListener, MouseListener, MouseMotionListener
{
/**
 * Serialization version for this JPanel subclass.
 */
private static final long serialVersionUID = 1L;
// Game State Manager: owns and dispatches to the active game state.
private GameStateManager gsm;
// Panel dimensions: logical size (WIDTH x HEIGHT) scaled by SCALE on screen.
public static final int WIDTH = 400;
public static final int HEIGHT = 240;
public static final int SCALE = 2;
// Main Game Loop
private Thread thread;
private boolean running = false;
private int fps = 60;
private long targetTime = 1000/fps;	// target time to take one game loop
// Graphics: off-screen buffer the game renders into before scaling to screen.
private Graphics2D g;
private BufferedImage image;
// Mouse Positions (updated from mouse events, read by game states).
public static int mouseX, mouseY;
/*
 * Constructor for the panel.
 * Sets the dimensions of the window, and the focus
 * of the window to be on this panel
 */
public GamePanel()
{
// Preferred size is the logical resolution scaled up for display.
setPreferredSize(new Dimension(WIDTH*SCALE, HEIGHT*SCALE));
setFocusable(true);
requestFocus();
}
/*
 * Initializes the GamePanel and GameStateManager.
 * The graphics will be created for display on this panel
 * and the GameStateManager will be instantiated.
 */
public void init()
{
// Off-screen buffer at logical resolution; drawToScreen() scales it up.
image = new BufferedImage(WIDTH, HEIGHT, BufferedImage.TYPE_INT_RGB);
g = (Graphics2D)image.getGraphics();
gsm = new GameStateManager();
}
/*
* Adds the listeners for user input.
* Starts a new thread if none is made yet and
* initializes listeners for keyboard and mouse input.
*/
public void addNotify()
{
super.addNotify();
if (thread == null)
{
running = true;
addKeyListener(this);
addMouseListener(this);
addMouseMotionListener(this);
thread = new Thread(this);
thread.start();
}
}
/*
* This is the main game loop
* It will update the game variables,
* and draw the images to the screen.
*/
public void run()
{
init();
//For keeping track of nanoTime
long start;
long elapsed;
long wait;
while(running)
{
//The start time of the function call
start = System.nanoTime();
//update variables, create and draw images
update();
draw();
drawToScreen();
//set the amount of time the function took
elapsed = System.nanoTime()-start;
//pauses the thread to maintain 60 fps
wait = targetTime-elapsed/1000000;
if (wait < 0) wait = 5;
try
{
Thread.sleep(wait);
}
catch(Exception e)
{
System.out.println(e.getMessage());
e.printStackTrace();
}
}
}
/*
* Calls the update function from the gamestate manager
* which calls the update function of the current state
*/
public void update()
{
gsm.update();
}
/*
* Clears the panel and then calls the GameStateManager to
* create the graphics.
*/
public void draw()
{
g.clearRect(0, 0, WIDTH, HEIGHT);
gsm.draw(g);
}
/*
* Gets the graphics that were drawn, then displays them on the panel.
*/
public void drawToScreen()
{
Graphics g2 = getGraphics();
g2.drawImage(image, 0, 0, WIDTH*SCALE, HEIGHT*SCALE, null);
g2.dispose();
}
/*
* User Input passed to the GameStateManager
*/
public void keyPressed(KeyEvent k)
{
gsm.keyPressed(k.getKeyCode());
}
public void keyReleased(KeyEvent k)
{
gsm.keyReleased(k.getKeyCode());
}
public void mousePressed(MouseEvent e)
{
gsm.mousePressed(e);
}
public void mouseReleased(MouseEvent e)
{
gsm.mouseReleased(e);
}
public void mouseDragged(MouseEvent e)
{
mouseX = e.getX() / SCALE;
mouseY = e.getY() / SCALE;
}
public void mouseMoved(MouseEvent e)
{
mouseX = e.getX() / SCALE;
mouseY = e.getY() / SCALE;
}
public void mouseClicked(MouseEvent e){}
public void mouseEntered(MouseEvent e){}
public void mouseExited(MouseEvent e) {}
public void keyTyped(KeyEvent k){}
}
| |
/*
* Copyright 2010-2013 Ning, Inc.
* Copyright 2014-2015 Groupon, Inc
* Copyright 2014-2015 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.payment.provider;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import org.killbill.billing.catalog.api.Currency;
import org.killbill.billing.payment.api.PaymentMethodPlugin;
import org.killbill.billing.payment.api.PluginProperty;
import org.killbill.billing.payment.api.TestPaymentMethodPlugin;
import org.killbill.billing.payment.api.TransactionType;
import org.killbill.billing.payment.plugin.api.GatewayNotification;
import org.killbill.billing.payment.plugin.api.HostedPaymentPageFormDescriptor;
import org.killbill.billing.payment.plugin.api.PaymentMethodInfoPlugin;
import org.killbill.billing.payment.plugin.api.PaymentPluginApi;
import org.killbill.billing.payment.plugin.api.PaymentPluginApiException;
import org.killbill.billing.payment.plugin.api.PaymentPluginStatus;
import org.killbill.billing.payment.plugin.api.PaymentTransactionInfoPlugin;
import org.killbill.billing.util.callcontext.CallContext;
import org.killbill.billing.util.callcontext.TenantContext;
import org.killbill.billing.util.entity.DefaultPagination;
import org.killbill.billing.util.entity.Pagination;
import org.killbill.clock.Clock;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.inject.Inject;
/**
 * This MockPaymentProviderPlugin only works for a single account as we don't specify the accountId
* for operations such as addPaymentMethod.
*/
public class MockPaymentProviderPlugin implements PaymentPluginApi {

    // Gateway error code/message attached to transactions that are forced into ERROR state.
    public static final String GATEWAY_ERROR_CODE = "gatewayErrorCode";
    public static final String GATEWAY_ERROR = "gatewayError";

    // Plugin property key that, when present, overrides the resulting PaymentPluginStatus.
    public static final String PLUGIN_PROPERTY_PAYMENT_PLUGIN_STATUS_OVERRIDE = "paymentPluginStatusOverride";

    public static final String PLUGIN_NAME = "__NO_OP__";

    // Failure toggles: the "next" flags are one-shot (consumed via getAndSet(false)
    // in getPaymentTransactionInfoPluginResult), the "all" flag is sticky until cleared.
    private final AtomicBoolean makeNextInvoiceFailWithError = new AtomicBoolean(false);
    private final AtomicBoolean makeNextInvoiceFailWithException = new AtomicBoolean(false);
    private final AtomicBoolean makeAllInvoicesFailWithError = new AtomicBoolean(false);

    // In-memory plugin state, keyed by the string form of the Kill Bill UUIDs.
    private final Map<String, InternalPaymentInfo> payments = new ConcurrentHashMap<String, InternalPaymentInfo>();
    private final Map<String, List<PaymentTransactionInfoPlugin>> paymentTransactions = new ConcurrentHashMap<String, List<PaymentTransactionInfoPlugin>>();
    // Note: we can't use HashMultiMap as we care about storing duplicate key/value pairs
    private final Map<String, PaymentMethodPlugin> paymentMethods = new ConcurrentHashMap<String, PaymentMethodPlugin>();
    private final Map<String, PaymentMethodInfoPlugin> paymentMethodsInfo = new ConcurrentHashMap<String, PaymentMethodInfoPlugin>();

    private final Clock clock;

    /**
     * Running per-payment amount totals, one instance per kbPaymentId.
     */
    private class InternalPaymentInfo {
        private BigDecimal authAmount;
        private BigDecimal captureAmount;
        private BigDecimal purchasedAmount;
        private BigDecimal refundAmount;
        private BigDecimal creditAmount;

        private InternalPaymentInfo() {
            this.authAmount = BigDecimal.ZERO;
            this.captureAmount = BigDecimal.ZERO;
            this.purchasedAmount = BigDecimal.ZERO;
            this.refundAmount = BigDecimal.ZERO;
            this.creditAmount = BigDecimal.ZERO;
        }

        public BigDecimal getAuthAmount() {
            return authAmount;
        }

        public BigDecimal getCaptureAmount() {
            return captureAmount;
        }

        public BigDecimal getPurchasedAmount() {
            return purchasedAmount;
        }

        public BigDecimal getRefundAmount() {
            return refundAmount;
        }

        public BigDecimal getCreditAmount() {
            return creditAmount;
        }

        // Returns the running total for the given transaction type (VOID has no amount).
        public BigDecimal getAmount(TransactionType type) {
            switch (type) {
                case AUTHORIZE:
                    return getAuthAmount();
                case CAPTURE:
                    return getCaptureAmount();
                case PURCHASE:
                    return getPurchasedAmount();
                case VOID:
                    return BigDecimal.ZERO;
                case CREDIT:
                    return getCreditAmount();
                case REFUND:
                    return getRefundAmount();
                default:
                    throw new RuntimeException("Unsupported type " + type);
            }
        }

        // Adds the amount to the matching running total; VOID zeroes the auth total
        // instead (the amount parameter is ignored in that case).
        public void addAmount(TransactionType type, BigDecimal amount) {
            switch (type) {
                case AUTHORIZE:
                    addAuthAmount(amount);
                    break;
                case CAPTURE:
                    addCaptureAmount(amount);
                    break;
                case PURCHASE:
                    addPurchasedAmount(amount);
                    break;
                case VOID:
                    voidAuthAmount();
                    break;
                case CREDIT:
                    addCreditAmount(amount);
                    break;
                case REFUND:
                    addRefundAmount(amount);
                    break;
            }
        }

        public void addAuthAmount(final BigDecimal authAmount) {
            this.authAmount = this.authAmount.add(authAmount);
        }

        public void addCaptureAmount(final BigDecimal captureAmount) {
            this.captureAmount = this.captureAmount.add(captureAmount);
        }

        public void addPurchasedAmount(final BigDecimal purchasedAmount) {
            this.purchasedAmount = this.purchasedAmount.add(purchasedAmount);
        }

        public void addRefundAmount(final BigDecimal refundAmount) {
            this.refundAmount = this.refundAmount.add(refundAmount);
        }

        public void addCreditAmount(final BigDecimal creditAmount) {
            this.creditAmount = this.creditAmount.add(creditAmount);
        }

        public void voidAuthAmount() {
            this.authAmount = BigDecimal.ZERO;
        }
    }

    @Inject
    public MockPaymentProviderPlugin(final Clock clock) {
        this.clock = clock;
        clear();
    }

    /**
     * Resets all failure toggles and clears all in-memory state.
     */
    public void clear() {
        makeNextInvoiceFailWithException.set(false);
        makeAllInvoicesFailWithError.set(false);
        makeNextInvoiceFailWithError.set(false);
        paymentMethods.clear();
        payments.clear();
        paymentTransactions.clear();
        paymentMethodsInfo.clear();
    }

    // Arms a one-shot failure: the next transaction returns PaymentPluginStatus.ERROR.
    public void makeNextPaymentFailWithError() {
        makeNextInvoiceFailWithError.set(true);
    }

    // Arms a one-shot failure: the next transaction throws PaymentPluginApiException.
    public void makeNextPaymentFailWithException() {
        makeNextInvoiceFailWithException.set(true);
    }

    // Sticky failure toggle: while true, every transaction returns ERROR.
    public void makeAllInvoicesFailWithError(final boolean failure) {
        makeAllInvoicesFailWithError.set(failure);
    }

    // Replaces the stored transaction list for an already-known payment; no-op otherwise.
    public void updatePaymentTransactions(final UUID paymentId, final List<PaymentTransactionInfoPlugin> newTransactions) {
        if (paymentTransactions.containsKey(paymentId.toString())) {
            paymentTransactions.put (paymentId.toString(), newTransactions);
        }
    }

    @Override
    public PaymentTransactionInfoPlugin authorizePayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context)
            throws PaymentPluginApiException {
        return getPaymentTransactionInfoPluginResult(kbPaymentId, kbTransactionId, TransactionType.AUTHORIZE, amount, currency, properties);
    }

    @Override
    public PaymentTransactionInfoPlugin capturePayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context)
            throws PaymentPluginApiException {
        return getPaymentTransactionInfoPluginResult(kbPaymentId, kbTransactionId, TransactionType.CAPTURE, amount, currency, properties);
    }

    @Override
    public PaymentTransactionInfoPlugin purchasePayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException {
        return getPaymentTransactionInfoPluginResult(kbPaymentId, kbTransactionId, TransactionType.PURCHASE, amount, currency, properties);
    }

    @Override
    public PaymentTransactionInfoPlugin voidPayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final Iterable<PluginProperty> properties, final CallContext context)
            throws PaymentPluginApiException {
        // VOID carries no amount and no currency.
        return getPaymentTransactionInfoPluginResult(kbPaymentId, kbTransactionId, TransactionType.VOID, BigDecimal.ZERO, null, properties);
    }

    @Override
    public PaymentTransactionInfoPlugin creditPayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context)
            throws PaymentPluginApiException {
        return getPaymentTransactionInfoPluginResult(kbPaymentId, kbTransactionId, TransactionType.CREDIT, amount, currency, properties);
    }

    @Override
    public List<PaymentTransactionInfoPlugin> getPaymentInfo(final UUID kbAccountId, final UUID kbPaymentId, final Iterable<PluginProperty> properties, final TenantContext context) throws PaymentPluginApiException {
        // Unknown payments yield an empty (immutable) list rather than null.
        final List<PaymentTransactionInfoPlugin> result = paymentTransactions.get(kbPaymentId.toString());
        return result != null ? result : ImmutableList.<PaymentTransactionInfoPlugin>of();
    }

    @Override
    public Pagination<PaymentTransactionInfoPlugin> searchPayments(final String searchKey, final Long offset, final Long limit, final Iterable<PluginProperty> properties, final TenantContext tenantContext) throws PaymentPluginApiException {
        throw new IllegalStateException("Not implemented");
    }

    @Override
    public void addPaymentMethod(final UUID kbAccountId, final UUID kbPaymentMethodId, final PaymentMethodPlugin paymentMethodProps, final boolean setDefault, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException {
        // externalPaymentMethodId is set to a random value
        final PaymentMethodPlugin realWithID = new TestPaymentMethodPlugin(kbPaymentMethodId, paymentMethodProps, UUID.randomUUID().toString());
        paymentMethods.put(kbPaymentMethodId.toString(), realWithID);

        final PaymentMethodInfoPlugin realInfoWithID = new DefaultPaymentMethodInfoPlugin(kbAccountId, kbPaymentMethodId, setDefault, UUID.randomUUID().toString());
        paymentMethodsInfo.put(kbPaymentMethodId.toString(), realInfoWithID);
    }

    @Override
    public void deletePaymentMethod(final UUID kbAccountId, final UUID kbPaymentMethodId, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException {
        paymentMethods.remove(kbPaymentMethodId.toString());
        paymentMethodsInfo.remove(kbPaymentMethodId.toString());
    }

    @Override
    public PaymentMethodPlugin getPaymentMethodDetail(final UUID kbAccountId, final UUID kbPaymentMethodId, final Iterable<PluginProperty> properties, final TenantContext context) throws PaymentPluginApiException {
        return paymentMethods.get(kbPaymentMethodId.toString());
    }

    @Override
    public void setDefaultPaymentMethod(final UUID kbAccountId, final UUID kbPaymentMethodId, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException {
        // Intentionally a no-op in this mock.
    }

    @Override
    public List<PaymentMethodInfoPlugin> getPaymentMethods(final UUID kbAccountId, final boolean refreshFromGateway, final Iterable<PluginProperty> properties, final CallContext context) {
        return ImmutableList.<PaymentMethodInfoPlugin>copyOf(paymentMethodsInfo.values());
    }

    @Override
    public Pagination<PaymentMethodPlugin> searchPaymentMethods(final String searchKey, final Long offset, final Long limit, final Iterable<PluginProperty> properties, final TenantContext tenantContext) throws PaymentPluginApiException {
        // Matches when any plugin property value equals the search key, or when the
        // key is the payment method id itself.
        final ImmutableList<PaymentMethodPlugin> results = ImmutableList.<PaymentMethodPlugin>copyOf(Iterables.<PaymentMethodPlugin>filter(paymentMethods.values(), new Predicate<PaymentMethodPlugin>() {
            @Override
            public boolean apply(final PaymentMethodPlugin input) {
                if (input.getProperties() != null) {
                    for (PluginProperty cur : input.getProperties()) {
                        if (cur.getValue().equals(searchKey)) {
                            return true;
                        }
                    }
                }
                return (input.getKbPaymentMethodId().toString().equals(searchKey));
            }
        }));
        return DefaultPagination.<PaymentMethodPlugin>build(offset, limit, results);
    }

    @Override
    public void resetPaymentMethods(final UUID kbAccountId, final List<PaymentMethodInfoPlugin> input, final Iterable<PluginProperty> properties, final CallContext callContext) {
        paymentMethodsInfo.clear();
        if (input != null) {
            for (final PaymentMethodInfoPlugin cur : input) {
                paymentMethodsInfo.put(cur.getPaymentMethodId().toString(), cur);
            }
        }
    }

    @Override
    public HostedPaymentPageFormDescriptor buildFormDescriptor(final UUID kbAccountId, final Iterable<PluginProperty> customFields, final Iterable<PluginProperty> properties, final CallContext callContext) {
        return new DefaultNoOpHostedPaymentPageFormDescriptor(kbAccountId);
    }

    @Override
    public GatewayNotification processNotification(final String notification, final Iterable<PluginProperty> properties, final CallContext callContext) throws PaymentPluginApiException {
        return new DefaultNoOpGatewayNotification();
    }

    @Override
    public PaymentTransactionInfoPlugin refundPayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal refundAmount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException {
        final InternalPaymentInfo info = payments.get(kbPaymentId.toString());
        if (info == null) {
            throw new PaymentPluginApiException("", String.format("No payment found for payment id %s (plugin %s)", kbPaymentId.toString(), PLUGIN_NAME));
        }
        BigDecimal maxAmountRefundable = info.getCaptureAmount().add(info.getPurchasedAmount());
        // NOTE(review): this compares the refundable maximum against the amount already
        // refunded only; the new refundAmount is not included in the check, so a single
        // over-refund can slip through -- confirm this leniency is intended for the mock.
        if (maxAmountRefundable.compareTo(info.getRefundAmount()) < 0) {
            throw new PaymentPluginApiException("", String.format("Refund amount of %s for payment id %s is bigger than the payment amount %s (plugin %s)",
                                                                  refundAmount, kbPaymentId.toString(), maxAmountRefundable, PLUGIN_NAME));
        }
        return getPaymentTransactionInfoPluginResult(kbPaymentId, kbTransactionId, TransactionType.REFUND, refundAmount, currency, properties);
    }

    /**
     * Builds the plugin result for one transaction: applies the armed failure toggles
     * and the status-override plugin property, updates the per-payment running totals,
     * and records the transaction (replacing any earlier entry with the same
     * kbTransactionId after backing out its amount).
     */
    private PaymentTransactionInfoPlugin getPaymentTransactionInfoPluginResult(final UUID kbPaymentId, final UUID kbTransactionId, final TransactionType type, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> pluginProperties) throws PaymentPluginApiException {
        if (makeNextInvoiceFailWithException.getAndSet(false)) {
            throw new PaymentPluginApiException("", "test error");
        }

        final PluginProperty paymentPluginStatusOverride = Iterables.tryFind(pluginProperties, new Predicate<PluginProperty>() {
            @Override
            public boolean apply(final PluginProperty input) {
                return PLUGIN_PROPERTY_PAYMENT_PLUGIN_STATUS_OVERRIDE.equals(input.getKey());
            }
        }).orNull();

        final PaymentPluginStatus status;
        if (paymentPluginStatusOverride != null && paymentPluginStatusOverride.getValue() != null) {
            status = PaymentPluginStatus.valueOf(paymentPluginStatusOverride.getValue().toString());
        } else {
            status = (makeAllInvoicesFailWithError.get() || makeNextInvoiceFailWithError.getAndSet(false)) ? PaymentPluginStatus.ERROR : PaymentPluginStatus.PROCESSED;
        }
        final String errorCode = status == PaymentPluginStatus.PROCESSED ? "" : GATEWAY_ERROR_CODE;
        final String error = status == PaymentPluginStatus.PROCESSED ? "" : GATEWAY_ERROR;

        InternalPaymentInfo info = payments.get(kbPaymentId.toString());
        if (info == null) {
            info = new InternalPaymentInfo();
            payments.put(kbPaymentId.toString(), info);
        }

        final PaymentTransactionInfoPlugin result = new DefaultNoOpPaymentInfoPlugin(kbPaymentId, kbTransactionId, type, amount, currency, clock.getUTCNow(), clock.getUTCNow(), status, errorCode, error);

        List<PaymentTransactionInfoPlugin> existingTransactions = paymentTransactions.get(kbPaymentId.toString());
        if (existingTransactions == null) {
            existingTransactions = new ArrayList<PaymentTransactionInfoPlugin>();
            paymentTransactions.put(kbPaymentId.toString(), existingTransactions);
        }

        // Retries replace the earlier attempt: back its amount out of the totals first.
        final Iterator<PaymentTransactionInfoPlugin> iterator = existingTransactions.iterator();
        while (iterator.hasNext()) {
            final PaymentTransactionInfoPlugin existingTransaction = iterator.next();
            if (existingTransaction.getKbTransactionPaymentId().equals(kbTransactionId)) {
                info.addAmount(type, existingTransaction.getAmount().negate());
                iterator.remove();
            }
        }
        existingTransactions.add(result);
        info.addAmount(type, result.getAmount());

        return result;
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package git4idea.update;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.VcsNotifier;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.changes.ChangeListManager;
import com.intellij.openapi.vcs.changes.ContentRevision;
import com.intellij.openapi.vcs.changes.ui.ChangeListViewerDialog;
import com.intellij.openapi.vcs.update.UpdatedFiles;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import com.intellij.vcsUtil.VcsUtil;
import git4idea.GitUtil;
import git4idea.branch.GitBranchPair;
import git4idea.commands.*;
import git4idea.i18n.GitBundle;
import git4idea.merge.GitConflictResolver;
import git4idea.merge.GitMerger;
import git4idea.repo.GitRepository;
import git4idea.util.GitUntrackedFilesHelper;
import git4idea.util.LocalChangesWouldBeOverwrittenHelper;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.util.*;
import static git4idea.GitNotificationIdsHolder.MERGE_ERROR;
import static git4idea.GitNotificationIdsHolder.MERGE_RESET_ERROR;
import static java.util.Arrays.asList;
/**
* Handles {@code git pull} via merge.
*/
public class GitMergeUpdater extends GitUpdater {
  private static final Logger LOG = Logger.getInstance(GitMergeUpdater.class);

  @NotNull private final ChangeListManager myChangeListManager;
  // Source (local) and target (remote-tracking) branches for the merge.
  @NotNull private final GitBranchPair myBranchPair;

  public GitMergeUpdater(@NotNull Project project,
                         @NotNull Git git,
                         @NotNull GitRepository repository,
                         @NotNull GitBranchPair branchPair,
                         @NotNull ProgressIndicator progressIndicator,
                         @NotNull UpdatedFiles updatedFiles) {
    super(project, git, repository, progressIndicator, updatedFiles);
    myBranchPair = branchPair;
    myChangeListManager = ChangeListManager.getInstance(myProject);
  }

  /**
   * Runs {@code git merge} against the target branch and maps the outcome to a
   * {@link GitUpdateResult}; failures are dispatched to {@link #handleMergeFailure}.
   */
  @Override
  @NotNull
  protected GitUpdateResult doUpdate() {
    LOG.info("doUpdate ");
    final GitMerger merger = new GitMerger(myProject);

    // Detectors watch the merge output for conflict / local-change / untracked-file errors.
    MergeLineListener mergeLineListener = new MergeLineListener();
    GitUntrackedFilesOverwrittenByOperationDetector untrackedFilesDetector = new GitUntrackedFilesOverwrittenByOperationDetector(myRoot);

    String originalText = myProgressIndicator.getText();
    myProgressIndicator.setText(GitBundle.message("progress.text.merging.repository", GitUtil.mention(myRepository)));
    try {
      GitCommandResult result = myGit.merge(myRepository, myBranchPair.getTarget().getName(),
                                            asList("--no-stat", "-v"), mergeLineListener, untrackedFilesDetector,
                                            GitStandardProgressAnalyzer.createListener(myProgressIndicator));
      myProgressIndicator.setText(originalText);
      return result.success()
             ? GitUpdateResult.SUCCESS
             : handleMergeFailure(mergeLineListener, untrackedFilesDetector, merger, result);
    }
    catch (ProcessCanceledException pce) {
      // User cancelled: roll the half-done merge back with `git reset --merge`.
      cancel();
      return GitUpdateResult.CANCEL;
    }
  }

  /**
   * Maps a failed merge to a result: resolve conflicts interactively, show the
   * local changes that block the merge, report overwritten untracked files, or
   * surface an unknown error notification.
   */
  @NotNull
  private GitUpdateResult handleMergeFailure(MergeLineListener mergeLineListener,
                                             GitMessageWithFilesDetector untrackedFilesWouldBeOverwrittenByMergeDetector,
                                             final GitMerger merger,
                                             GitCommandResult commandResult) {
    final MergeError error = mergeLineListener.getMergeError();
    LOG.info("merge error: " + error);
    if (error == MergeError.CONFLICT) {
      LOG.info("Conflict detected");
      final boolean allMerged =
        new MyConflictResolver(myProject, myGit, merger, myRoot).merge();
      return allMerged ? GitUpdateResult.SUCCESS_WITH_RESOLVED_CONFLICTS : GitUpdateResult.INCOMPLETE;
    }
    else if (error == MergeError.LOCAL_CHANGES) {
      LOG.info("Local changes would be overwritten by merge");
      // Show the user which of their local changes block the merge.
      final List<FilePath> paths = getFilesOverwrittenByMerge(mergeLineListener.getOutput());
      final Collection<Change> changes = getLocalChangesFilteredByFiles(paths);
      UIUtil.invokeAndWaitIfNeeded((Runnable)() -> {
        ChangeListViewerDialog dialog = new ChangeListViewerDialog(myProject, changes);
        dialog.setDescription(LocalChangesWouldBeOverwrittenHelper.getErrorNotificationDescription());
        dialog.show();
      });
      return GitUpdateResult.ERROR;
    }
    else if (untrackedFilesWouldBeOverwrittenByMergeDetector.wasMessageDetected()) {
      LOG.info("handleMergeFailure: untracked files would be overwritten by merge");
      GitUntrackedFilesHelper.notifyUntrackedFilesOverwrittenBy(myProject, myRoot,
                                                                untrackedFilesWouldBeOverwrittenByMergeDetector.getRelativeFilePaths(),
                                                                GitBundle.message("merge.operation.name"), null);
      return GitUpdateResult.ERROR;
    }
    else {
      LOG.info("Unknown error: " + commandResult.getErrorOutputAsJoinedString());
      VcsNotifier.getInstance(myProject)
        .notifyError(MERGE_ERROR, GitBundle.message("notification.title.error.merging"), commandResult.getErrorOutputAsHtmlString());
      return GitUpdateResult.ERROR;
    }
  }

  /**
   * Returns true if local changes must be saved (stashed/shelved) before updating:
   * either the staging area is dirty, or a file changed locally was also changed
   * remotely. Errs on the side of saving (returns true) when a check fails.
   */
  @Override
  public boolean isSaveNeeded() {
    try {
      if (GitUtil.hasLocalChanges(true, myProject, myRoot)) {
        return true;
      }
    }
    catch (VcsException e) {
      LOG.info("isSaveNeeded failed to check staging area", e);
      return true;
    }

    // git log --name-status master..origin/master
    String currentBranch = myBranchPair.getSource().getName();
    String remoteBranch = myBranchPair.getTarget().getName();
    try {
      GitRepository repository = GitUtil.getRepositoryManager(myProject).getRepositoryForRoot(myRoot);
      if (repository == null) {
        LOG.error("Repository is null for root " + myRoot);
        return true; // fail safe
      }
      final Collection<String> remotelyChanged = GitUtil.getPathsDiffBetweenRefs(Git.getInstance(), repository,
                                                                                 currentBranch, remoteBranch);
      final List<File> locallyChanged = myChangeListManager.getAffectedPaths();
      for (final File localPath : locallyChanged) {
        if (ContainerUtil.exists(remotelyChanged, remotelyChangedPath -> FileUtil.pathsEqual(localPath.getPath(), remotelyChangedPath))) {
          // found a file which was changed locally and remotely => need to save
          return true;
        }
      }
      return false;
    } catch (VcsException e) {
      LOG.info("failed to get remotely changed files for " + currentBranch + ".." + remoteBranch, e);
      return true; // fail safe
    }
  }

  // Rolls back a cancelled merge via `git reset --merge`; notifies on failure.
  private void cancel() {
    GitLineHandler h = new GitLineHandler(myProject, myRoot, GitCommand.RESET);
    h.addParameters("--merge");
    GitCommandResult result = Git.getInstance().runCommand(h);
    if (!result.success()) {
      LOG.info("cancel git reset --merge: " + result.getErrorOutputAsJoinedString());
      VcsNotifier.getInstance(myProject)
        .notifyError(MERGE_RESET_ERROR, GitBundle.message("notification.title.couldn.t.reset.merge"),
                     result.getErrorOutputAsHtmlString());
    }
  }

  // parses the output of merge conflict returning files which would be overwritten by merge. These files will be stashed.
  private List<FilePath> getFilesOverwrittenByMerge(@NotNull List<String> mergeOutput) {
    final List<FilePath> paths = new ArrayList<>();
    for (String line : mergeOutput) {
      if (StringUtil.isEmptyOrSpaces(line)) {
        continue;
      }
      // The advice line terminates the file list in git's error output.
      if (line.contains("Please, commit your changes or stash them before you can merge")) {
        break;
      }
      line = line.trim();

      final String path;
      try {
        path = myRoot.getPath() + "/" + GitUtil.unescapePath(line);
        final File file = new File(path);
        // Only report paths that actually exist on disk.
        if (file.exists()) {
          paths.add(VcsUtil.getFilePath(file, false));
        }
      } catch (VcsException e) { // just continue
      }
    }
    return paths;
  }

  // Restricts the full local change list to changes touching the given paths.
  private Collection<Change> getLocalChangesFilteredByFiles(List<FilePath> paths) {
    final Collection<Change> changes = new HashSet<>();
    for (Change change : myChangeListManager.getAllChanges()) {
      final ContentRevision afterRevision = change.getAfterRevision();
      final ContentRevision beforeRevision = change.getBeforeRevision();
      if ((afterRevision != null && paths.contains(afterRevision.getFile())) ||
          (beforeRevision != null && paths.contains(beforeRevision.getFile()))) {
        changes.add(change);
      }
    }
    return changes;
  }

  @Override
  public String toString() {
    return "Merge updater";
  }

  private enum MergeError {
    CONFLICT,
    LOCAL_CHANGES,
    OTHER
  }

  /**
   * Scans merge output lines for known error patterns. Note: when no pattern
   * matches, myMergeError stays null (OTHER is never assigned here).
   */
  private static class MergeLineListener implements GitLineHandlerListener {
    private MergeError myMergeError;
    private final List<String> myOutput = new ArrayList<>();
    // Once the local-changes error is seen, subsequent lines are the affected files.
    private boolean myLocalChangesError = false;

    @Override
    public void onLineAvailable(String line, Key outputType) {
      if (myLocalChangesError) {
        myOutput.add(line);
      } else if (line.contains("Automatic merge failed; fix conflicts and then commit the result")) {
        myMergeError = MergeError.CONFLICT;
      } else if (line.contains("Your local changes to the following files would be overwritten by merge")) {
        myMergeError = MergeError.LOCAL_CHANGES;
        myLocalChangesError = true;
      }
    }

    public MergeError getMergeError() {
      return myMergeError;
    }

    public List<String> getOutput() {
      return myOutput;
    }
  }

  // Resolves merge conflicts and commits the merge once everything is merged.
  private static class MyConflictResolver extends GitConflictResolver {
    private final GitMerger myMerger;
    private final VirtualFile myRoot;

    // NOTE(review): the git parameter is unused here; super() does not take it -- confirm.
    MyConflictResolver(Project project, @NotNull Git git, GitMerger merger, VirtualFile root) {
      super(project, Collections.singleton(root), makeParams(project));
      myMerger = merger;
      myRoot = root;
    }

    private static Params makeParams(Project project) {
      Params params = new Params(project);
      params.setErrorNotificationTitle(GitBundle.message("merge.update.project.generic.error.title"));
      params.setMergeDescription(GitBundle.message("merge.update.project.conflict.merge.description.label"));
      return params;
    }

    @Override protected boolean proceedIfNothingToMerge() throws VcsException {
      myMerger.mergeCommit(myRoot);
      return true;
    }

    @Override protected boolean proceedAfterAllMerged() throws VcsException {
      myMerger.mergeCommit(myRoot);
      return true;
    }
  }
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.client.file;
import alluxio.AlluxioURI;
import alluxio.Constants;
import alluxio.annotation.PublicApi;
import alluxio.client.file.options.CreateDirectoryOptions;
import alluxio.client.file.options.CreateFileOptions;
import alluxio.client.file.options.DeleteOptions;
import alluxio.client.file.options.ExistsOptions;
import alluxio.client.file.options.FreeOptions;
import alluxio.client.file.options.GetStatusOptions;
import alluxio.client.file.options.ListStatusOptions;
import alluxio.client.file.options.LoadMetadataOptions;
import alluxio.client.file.options.MountOptions;
import alluxio.client.file.options.OpenFileOptions;
import alluxio.client.file.options.RenameOptions;
import alluxio.client.file.options.SetAttributeOptions;
import alluxio.client.file.options.UnmountOptions;
import alluxio.exception.AlluxioException;
import alluxio.exception.DirectoryNotEmptyException;
import alluxio.exception.ExceptionMessage;
import alluxio.exception.FileAlreadyExistsException;
import alluxio.exception.FileDoesNotExistException;
import alluxio.exception.InvalidPathException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.List;
import javax.annotation.concurrent.ThreadSafe;
/**
* Default implementation of the {@link FileSystem} interface. Developers can extend this class
* instead of implementing the interface. This implementation reads and writes data through
* {@link FileInStream} and {@link FileOutStream}. This class is thread safe.
*/
@PublicApi
@ThreadSafe
public class BaseFileSystem implements FileSystem {
private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE);
protected final FileSystemContext mFileSystemContext;
/**
* @param context file system context
* @return a {@link BaseFileSystem}
*/
public static BaseFileSystem get(FileSystemContext context) {
return new BaseFileSystem(context);
}
  /**
   * Constructs a {@link BaseFileSystem}.
   *
   * @param context the {@link FileSystemContext} used to acquire and release master clients
   */
  protected BaseFileSystem(FileSystemContext context) {
    mFileSystemContext = context;
  }
@Override
public void createDirectory(AlluxioURI path)
throws FileAlreadyExistsException, InvalidPathException, IOException, AlluxioException {
createDirectory(path, CreateDirectoryOptions.defaults());
}
@Override
public void createDirectory(AlluxioURI path, CreateDirectoryOptions options)
throws FileAlreadyExistsException, InvalidPathException, IOException, AlluxioException {
FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
try {
masterClient.createDirectory(path, options);
LOG.debug("Created directory " + path.getPath());
} finally {
mFileSystemContext.releaseMasterClient(masterClient);
}
}
@Override
public FileOutStream createFile(AlluxioURI path)
throws FileAlreadyExistsException, InvalidPathException, IOException, AlluxioException {
return createFile(path, CreateFileOptions.defaults());
}
@Override
public FileOutStream createFile(AlluxioURI path, CreateFileOptions options)
throws FileAlreadyExistsException, InvalidPathException, IOException, AlluxioException {
FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
try {
masterClient.createFile(path, options);
LOG.debug("Created file " + path.getPath());
} finally {
mFileSystemContext.releaseMasterClient(masterClient);
}
return new FileOutStream(path, options.toOutStreamOptions());
}
@Override
public void delete(AlluxioURI path)
throws DirectoryNotEmptyException, FileDoesNotExistException, IOException, AlluxioException {
delete(path, DeleteOptions.defaults());
}
@Override
public void delete(AlluxioURI path, DeleteOptions options)
throws DirectoryNotEmptyException, FileDoesNotExistException, IOException, AlluxioException {
FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
try {
masterClient.delete(path, options);
LOG.debug("Deleted file " + path.getName());
} finally {
mFileSystemContext.releaseMasterClient(masterClient);
}
}
@Override
public boolean exists(AlluxioURI path)
throws InvalidPathException, IOException, AlluxioException {
return exists(path, ExistsOptions.defaults());
}
@Override
public boolean exists(AlluxioURI path, ExistsOptions options)
throws InvalidPathException, IOException, AlluxioException {
FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
try {
// TODO(calvin): Make this more efficient
masterClient.getStatus(path);
return true;
} catch (FileDoesNotExistException e) {
return false;
} catch (InvalidPathException e) {
return false;
} finally {
mFileSystemContext.releaseMasterClient(masterClient);
}
}
@Override
public void free(AlluxioURI path)
throws FileDoesNotExistException, IOException, AlluxioException {
free(path, FreeOptions.defaults());
}
@Override
public void free(AlluxioURI path, FreeOptions options)
throws FileDoesNotExistException, IOException, AlluxioException {
FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
try {
masterClient.free(path, options);
LOG.debug("Freed file " + path.getPath());
} finally {
mFileSystemContext.releaseMasterClient(masterClient);
}
}
@Override
public URIStatus getStatus(AlluxioURI path)
throws FileDoesNotExistException, IOException, AlluxioException {
return getStatus(path, GetStatusOptions.defaults());
}
@Override
public URIStatus getStatus(AlluxioURI path, GetStatusOptions options)
throws FileDoesNotExistException, IOException, AlluxioException {
FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
try {
return masterClient.getStatus(path);
} catch (FileDoesNotExistException | InvalidPathException e) {
throw new FileDoesNotExistException(ExceptionMessage.PATH_DOES_NOT_EXIST.getMessage(path));
} finally {
mFileSystemContext.releaseMasterClient(masterClient);
}
}
@Override
public List<URIStatus> listStatus(AlluxioURI path)
throws FileDoesNotExistException, IOException, AlluxioException {
return listStatus(path, ListStatusOptions.defaults());
}
@Override
public List<URIStatus> listStatus(AlluxioURI path, ListStatusOptions options)
throws FileDoesNotExistException, IOException, AlluxioException {
FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
// TODO(calvin): Fix the exception handling in the master
try {
return masterClient.listStatus(path, options);
} catch (FileDoesNotExistException e) {
throw new FileDoesNotExistException(ExceptionMessage.PATH_DOES_NOT_EXIST.getMessage(path));
} finally {
mFileSystemContext.releaseMasterClient(masterClient);
}
}
/**
* {@inheritDoc}
*
* @deprecated since version 1.1 and will be removed in version 2.0
*/
@Deprecated
@Override
public void loadMetadata(AlluxioURI path)
throws FileDoesNotExistException, IOException, AlluxioException {
loadMetadata(path, LoadMetadataOptions.defaults());
}
/**
* {@inheritDoc}
*
* @deprecated since version 1.1 and will be removed in version 2.0
*/
@Deprecated
@Override
public void loadMetadata(AlluxioURI path, LoadMetadataOptions options)
throws FileDoesNotExistException, IOException, AlluxioException {
FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
try {
masterClient.loadMetadata(path, options);
LOG.debug("loaded metadata {} with options {}", path.getParent(), options);
} finally {
mFileSystemContext.releaseMasterClient(masterClient);
}
}
@Override
public void mount(AlluxioURI alluxioPath, AlluxioURI ufsPath)
throws IOException, AlluxioException {
mount(alluxioPath, ufsPath, MountOptions.defaults());
}
@Override
public void mount(AlluxioURI alluxioPath, AlluxioURI ufsPath, MountOptions options)
throws IOException, AlluxioException {
FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
try {
// TODO(calvin): Make this fail on the master side
masterClient.mount(alluxioPath, ufsPath, options);
LOG.info("Mount " + ufsPath.toString() + " to " + alluxioPath.getPath());
} finally {
mFileSystemContext.releaseMasterClient(masterClient);
}
}
@Override
public FileInStream openFile(AlluxioURI path)
throws FileDoesNotExistException, IOException, AlluxioException {
return openFile(path, OpenFileOptions.defaults());
}
@Override
public FileInStream openFile(AlluxioURI path, OpenFileOptions options)
throws FileDoesNotExistException, IOException, AlluxioException {
URIStatus status = getStatus(path);
if (status.isFolder()) {
throw new FileNotFoundException(
ExceptionMessage.CANNOT_READ_DIRECTORY.getMessage(status.getName()));
}
return FileInStream.create(status, options.toInStreamOptions(), mFileSystemContext);
}
@Override
public void rename(AlluxioURI src, AlluxioURI dst)
throws FileDoesNotExistException, IOException, AlluxioException {
rename(src, dst, RenameOptions.defaults());
}
@Override
public void rename(AlluxioURI src, AlluxioURI dst, RenameOptions options)
throws FileDoesNotExistException, IOException, AlluxioException {
FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
try {
// TODO(calvin): Update this code on the master side.
masterClient.rename(src, dst);
LOG.debug("Renamed file " + src.getPath() + " to " + dst.getPath());
} finally {
mFileSystemContext.releaseMasterClient(masterClient);
}
}
@Override
public void setAttribute(AlluxioURI path)
throws FileDoesNotExistException, IOException, AlluxioException {
setAttribute(path, SetAttributeOptions.defaults());
}
@Override
public void setAttribute(AlluxioURI path, SetAttributeOptions options)
throws FileDoesNotExistException, IOException, AlluxioException {
FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
try {
masterClient.setAttribute(path, options);
LOG.debug("Set attributes for path {} with options {}", path.getPath(), options);
} finally {
mFileSystemContext.releaseMasterClient(masterClient);
}
}
@Override
public void unmount(AlluxioURI path) throws IOException, AlluxioException {
unmount(path, UnmountOptions.defaults());
}
@Override
public void unmount(AlluxioURI path, UnmountOptions options)
throws IOException, AlluxioException {
FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
try {
masterClient.unmount(path);
LOG.info("Unmount " + path);
} finally {
mFileSystemContext.releaseMasterClient(masterClient);
}
}
}
| |
/*
* ice4j, the OpenSource Java Solution for NAT and Firewall Traversal.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ice4j.socket.jdk8;
import java.io.*;
import java.net.*;
import java.nio.*;
import java.nio.channels.*;
import java.util.*;
import java.util.function.*;
import org.ice4j.ice.harvest.*;
import org.ice4j.socket.*;
/**
* Shares a listening endpoint (i.e. an open and bound
* {@link ServerSocketChannel}) among multiple {@code MuxServerSocketChannel}s.
* Accepted {@link SocketChannel}s are demultiplexed based on
* {@link DatagramPacketFilter}s and dispatched for acceptance through matching
* {@code MuxServerSocketChannel}s.
*
* @author Lyubomir Marinov
*/
class MuxingServerSocketChannel
extends DelegatingServerSocketChannel<ServerSocketChannel>
{
    /**
     * Reference to 0.0.0.0 IPv4 or 0::0 IPv6 address for "wildcard" matching
     * purposes (see {@link #findMuxingServerSocketChannel(SocketAddress)}).
     */
    private static final InetAddress ANY_LOCAL_ADDRESS;

    /**
     * The {@code Selector} which waits for incoming network connections on all
     * {@link #muxingServerSocketChannels}. Guarded by
     * {@code muxingServerSocketChannels}.
     */
    private static Selector acceptSelector;

    /**
     * The {@code Thread} which waits for and accepts incoming network
     * connections on all {@link #muxingServerSocketChannels}. Guarded by
     * {@code muxingServerSocketChannels}.
     */
    private static Thread acceptThread;

    /**
     * The (global) list of existing <tt>MuxingServerSocketChannel</tt>s. Also
     * serves as the lock/monitor for the class-level accept machinery.
     */
    private static final List<MuxingServerSocketChannel>
        muxingServerSocketChannels
            = new LinkedList<>();

    /**
     * The maximum number of milliseconds to wait in a
     * {@link Selector#select(long)}. The timeout should be a precaution though
     * i.e. (1) it should better not be necessary and (2) it should be long
     * enough to not unnecessarily hurt the performance of the application.
     */
    private static final int SELECTOR_SELECT_TIMEOUT
        = MuxServerSocketChannelFactory.SOCKET_CHANNEL_READ_TIMEOUT;

    /**
     * The maximum number of milliseconds to wait for an accepted
     * {@code SocketChannel} to provide incoming/readable data before it is
     * considered abandoned by the client.
     */
    static final int SOCKET_CHANNEL_READ_TIMEOUT
        = MuxServerSocketChannelFactory.SOCKET_CHANNEL_READ_TIMEOUT;

    /**
     * The maximum number of {@code byte}s to be read from
     * {@link SocketChannel}s accepted by {@link MuxingServerSocketChannel}s in
     * order to demultiplex (i.e. filter) them into
     * {@code MuxServerSocketChannel}s. Sized to the largest prefix any of the
     * known demux filters needs to examine.
     */
    private static final int SOCKET_CHANNEL_READ_CAPACITY
        = Math.max(
                GoogleTurnSSLCandidateHarvester.SSL_CLIENT_HANDSHAKE.length,
                Math.max(
                        HttpDemuxFilter.REQUEST_METHOD_MAX_LENGTH + 1 /* SP */,
                        HttpDemuxFilter.TLS_MIN_LENGTH));
    // Resolve the wildcard address used by findMuxingServerSocketChannel()
    // for "any local address" comparisons. Without it the demultiplexing
    // logic cannot work, so fail class initialization outright.
    static
    {
        try
        {
            ANY_LOCAL_ADDRESS = InetAddress.getByName("::");
        }
        catch (UnknownHostException e)
        {
            throw new RuntimeException(e);
        }
    }
    /**
     * Adds a specific {@code MuxingServerSocketChannel} to the (global) list of
     * existing {@code MuxingServerSocketChannel}s and schedules acceptance of
     * incoming network connections on it.
     *
     * @param channel the {@code MuxingServerSocketChannel} to add to the
     * (global) list of existing {@code MuxingServerSocketChannel}s and to
     * schedule for acceptance of incoming network connections
     * @throws IOException if an I/O error occurs
     */
    private static void addMuxingServerSocketChannel(
            MuxingServerSocketChannel channel)
        throws IOException
    {
        synchronized (muxingServerSocketChannels)
        {
            muxingServerSocketChannels.add(channel);
            // Wake up any thread (e.g. the accept loop) waiting on the
            // (global) list for new channels to appear.
            muxingServerSocketChannels.notifyAll();

            scheduleAccept(channel);
        }
    }
    /**
     * Closes a {@code Channel} and swallows any {@link IOException}.
     *
     * @param channel the {@code Channel} to close
     */
    public static void closeNoExceptions(Channel channel)
    {
        // Delegate so the exception-swallowing policy lives in one place.
        MuxServerSocketChannelFactory.closeNoExceptions(channel);
    }
    /**
     * Finds the first open {@code MuxingServerSocketChannel} in the (global)
     * list of existing {@code MuxingServerSocketChannel}s which is bound to a
     * specific local {@link SocketAddress}. A channel bound to the wildcard
     * address with a matching port is considered a match as well. Closed
     * channels encountered during the scan are removed from the list.
     *
     * @param localAddr the local {@code SocketAddress} on which the bound
     * {@code MuxingServerSocketChannel} is to be found
     * @return the first open {@code MuxingServerSocketChannel} in the (global)
     * list of existing {@code MuxingServerSocketChannel}s which is bound to the
     * specified {@code localAddr} or {@code null}
     */
    private static MuxingServerSocketChannel findMuxingServerSocketChannel(
            SocketAddress localAddr)
    {
        MuxingServerSocketChannel channel = null;

        synchronized (muxingServerSocketChannels)
        {
            Iterator<MuxingServerSocketChannel> i
                = muxingServerSocketChannels.iterator();

            while (i.hasNext())
            {
                MuxingServerSocketChannel aChannel = i.next();

                if (aChannel.isOpen())
                {
                    SocketAddress aLocalAddr;

                    try
                    {
                        aLocalAddr = aChannel.getLocalAddress();
                    }
                    catch (ClosedChannelException cce)
                    {
                        // The channel reported itself open above but is
                        // actually closed: drop it from the (global) list.
                        i.remove();
                        aLocalAddr = null;
                    }
                    catch (IOException ioe)
                    {
                        // Failed to query the local address; treat the
                        // channel as a non-match but keep it in the list.
                        aLocalAddr = null;
                    }

                    boolean matches
                        = aLocalAddr != null && aLocalAddr.equals(localAddr);

                    // If not the same address, let's see if the cached one is
                    // an "anyLocalAddress" and if so let's consider it a match.
                    if (!matches
                            && aLocalAddr instanceof InetSocketAddress
                            && localAddr instanceof InetSocketAddress)
                    {
                        InetSocketAddress aLocalInetAddr
                            = (InetSocketAddress) aLocalAddr;
                        InetSocketAddress localInetAddr
                            = (InetSocketAddress) localAddr;

                        matches
                            = aLocalInetAddr.getAddress()
                                    .equals(ANY_LOCAL_ADDRESS)
                                && aLocalInetAddr.getPort()
                                    == localInetAddr.getPort();
                    }
                    if (matches)
                    {
                        channel = aChannel;
                        // The whole idea of using (1) a List for
                        // muxingServerSocketChannels instead of a Map and (2)
                        // an Iterator to loop over muxingServerSocketChannels
                        // is to aggressively clean up. Anyway, break as soon as
                        // possible in order to improve the execution speed and
                        // because there is an attempt to clean
                        // muxingServerSocketChannels up upon closing.
                        break;
                    }
                }
                else
                {
                    // Aggressively clean closed channels out of the list.
                    i.remove();
                }
            }
        }
        return channel;
    }
/**
* If {@link #acceptSelector} exists and is open, try to close it and do not
* throw an <tt>IOException</tt>.
*/
private static void maybeCloseAcceptSelector()
{
if (acceptSelector != null)
{
if (acceptSelector.isOpen())
{
try
{
acceptSelector.close();
}
catch (IOException ioe)
{
// I don't know what to do about any IOException during
// Selector#close() even if I log it.
}
}
acceptSelector = null;
}
}
    /**
     * Opens and binds a new {@code MuxServerSocketChannel} instance. If there
     * are other (existing) {@code MuxServerSocketChannel} open and bound on the
     * specified listening {@code endpoint}, the new instance will share it with
     * them.
     *
     * @param properties a {@code Map} of the values to be assigned to
     * properties of the underlying {@link ServerSocketChannel} which is to
     * actually listen on the specified {@code endpoint}. If the new instance is
     * not the first to open and bind the specified {@code endpoint}, the
     * {@code properties} and their respective values may not be used.
     * @param endpoint the IP and port the new instance is to bind to
     * @param backlog the requested maximum number of pending incoming
     * connections to be queued. If the new instance is not the first to open
     * and bind the specified {@code endpoint}, the value may not be used.
     * @param filter the {@code DatagramPacketFilter} to demultiplex (i.e.
     * recognize) the content meant for the new instance; must not be
     * {@code null}
     * @return a new {@code MuxServerSocketChannel} instance open and bound on
     * the specified listening {@code endpoint}
     * @throws IOException if an I/O error occurs
     */
    public static MuxServerSocketChannel openAndBind(
            Map<String,Object> properties,
            SocketAddress endpoint,
            int backlog,
            DatagramPacketFilter filter)
        throws IOException
    {
        // The restriction(s) on filter are imposed by MuxingServerSocketChannel
        // and MuxServerSocketChannel. Assert that they are satisfied as early
        // as possible though because it does not make sense to bind a
        // ServerSocketChannel and initialize a new MuxingServerSocketChannel
        // instance otherwise.
        Objects.requireNonNull(filter, "filter");

        MuxingServerSocketChannel muxingChannel;

        synchronized (muxingServerSocketChannels)
        {
            // Share an existing listening endpoint if one is already bound to
            // the specified endpoint; otherwise, open and bind a new one.
            muxingChannel = findMuxingServerSocketChannel(endpoint);
            if (muxingChannel == null)
            {
                ServerSocketChannel channel
                    = MuxServerSocketChannelFactory
                        .openAndBindServerSocketChannel(
                                properties,
                                endpoint,
                                backlog);

                muxingChannel = new MuxingServerSocketChannel(channel);
                addMuxingServerSocketChannel(muxingChannel);
            }
        }
        return muxingChannel.createMuxServerSocketChannel(filter);
    }
/**
* Runs in {@link #acceptThread} and waits for and accepts incoming network
* connections on all {@link #muxingServerSocketChannels}.
*/
private static void runInAcceptThread()
{
runInSelectorThread(
/* syncRoot */ muxingServerSocketChannels,
/* threadSupplier */ new Supplier<Thread>()
{
@Override
public Thread get()
{
return MuxingServerSocketChannel.acceptThread;
}
},
/* selectorSupplier */ new Supplier<Selector>()
{
@Override
public Selector get()
{
return MuxingServerSocketChannel.acceptSelector;
}
},
/* selectionKeyOps */ SelectionKey.OP_ACCEPT,
/* channels */ muxingServerSocketChannels,
/* predicate */ new BiPredicate<MuxingServerSocketChannel, SelectionKey>()
{
@Override
public boolean test(
MuxingServerSocketChannel ch,
SelectionKey sk)
{
try
{
// The idea is that all muxingServerSocketChannels
// are non-blocking.
ch.accept();
}
catch (IOException ioe)
{
// If ioe is a ClosedChannelException signalling
// that ch is closed, it will be handled at the end
// of the loop by removing ch from
// muxingServerSocketChannels.
}
return false;
}
});
}
    /**
     * Continually tests a {@link Predicate} on a set of
     * {@link SelectableChannel}s.
     *
     * @param syncRoot the {@code Object} to synchronize the access to
     * {@code threadSupplier}, {@code selectorSupplier}, {@code channels}, and
     * {@code predicate}. It should be notified whenever the values supplied by
     * {@code threadSupplier}, {@code selectorSupplier}, and {@code channels}
     * change.
     * @param threadSupplier the {@link Supplier} which is to supply the
     * {@code Thread} in which the method is supposed to be running. If the
     * returned value differs from the {@code Thread} in which the method is
     * actually running (i.e. {@link Thread#currentThread()}, the method
     * returns. In other words, {@code threadSupplier} is one of the ways to
     * break out of the loop implemented by the method. The
     * {@code threadSupplier} is called on while {@code syncRoot} is acquired.
     * @param selectorSupplier the {@code Supplier} which is to supply the
     * {@code Selector} on which the method is to await changes in the states of
     * {@code channels} in order to begin a subsequent loop iteration. If the
     * returned {@code Selector} is not open, the method returns. In other
     * words, {@code selectorSupplier} is another way to break out of the loop
     * implemented by the method. The {@code selectorSupplier} is called on
     * while {@code syncRoot} is acquired.
     * @param selectionKeyOps the {@link SelectionKey} operation-set bits which
     * identify the states of {@code channels} whose changes trigger new loop
     * iterations
     * @param channels the (set of) {@code SelectableChannel}s on each of which
     * {@code predicate} is to be continually tested. A loop iteration is
     * triggered when at least one of {@code channels} has a state identified by
     * {@code selectionKeyOps} changes.
     * @param predicate the {@code Predicate} which is to be continually tested
     * on {@code channels}. A loop iteration is triggered when at least one of
     * {@code channels} has a state identified by {@code selectionKeyOps}
     * changes. {@link BiPredicate#test(Object, Object)} is supplied with an
     * element of {@code channels} and its (automatically) associated
     * {@code SelectionKey} in the {@code Selector} returned by
     * {@code selectorSupplier}. The {@code SelectionKey} is provided in case,
     * for example, the implementation of {@code predicate} chooses to associate
     * additional state with the {@code SelectableChannel} (through
     * {@link SelectionKey#attach(Object)}) available throughout the whole loop.
     * A {@code true} return value causes the channel to be unregistered and
     * removed from {@code channels}.
     * @param <T> the element type of {@code channels}
     */
    private static <T extends SelectableChannel> void runInSelectorThread(
            Object syncRoot,
            Supplier<Thread> threadSupplier,
            Supplier<Selector> selectorSupplier,
            int selectionKeyOps,
            Iterable<T> channels,
            BiPredicate<T, SelectionKey> predicate)
    {
        // The timeout to use when invoking Selector#select(long) on sel (i.e.
        // the Selector supplied by selectorSupplier). Its purpose is twofold:
        // (1) to not wait too long in Selector#select() (hence
        // SELECTOR_SELECT_TIMEOUT) and (2) to weed out abandoned channels
        // (hence SOCKET_CHANNEL_READ_TIMEOUT).
        final int selSelectTimeout
            = Math.min(SELECTOR_SELECT_TIMEOUT, SOCKET_CHANNEL_READ_TIMEOUT);

        do
        {
            Selector sel;
            boolean select = false;

            synchronized (syncRoot)
            {
                // Exit condition 1: this thread is no longer the designated
                // worker thread.
                if (!Thread.currentThread().equals(threadSupplier.get()))
                    break;

                // Exit condition 2: the Selector is gone or closed.
                sel = selectorSupplier.get();
                if (sel == null || !sel.isOpen())
                    break;

                for (Iterator<T> i = channels.iterator(); i.hasNext();)
                {
                    T ch = i.next();
                    boolean remove = false;

                    if (ch.isOpen())
                    {
                        SelectionKey sk = ch.keyFor(sel);

                        if (sk == null)
                        {
                            // Make sure that all (SelectableChannels in)
                            // channels are registered with (the Selector) sel.
                            try
                            {
                                sk = ch.register(sel, selectionKeyOps);
                            }
                            catch (ClosedChannelException cce)
                            {
                                // The cce will be handled at the end of the
                                // loop by removing ch from channels.
                            }
                        }
                        if (sk != null && sk.isValid())
                        {
                            remove = predicate.test(ch, sk);
                            if (remove)
                                sk.cancel();
                        }
                    }
                    if (remove || !ch.isOpen())
                        i.remove();
                    else
                        select = true;
                }
                // We've invoked the predicate on all (SelectableChannels in)
                // channels.
                sel.selectedKeys().clear();

                // If there are no SelectableChannels in channels, we will wait
                // until there are.
                if (!select)
                {
                    // We're going to wait below and continue with the next
                    // iteration of the loop afterwards. Don't hold onto sel
                    // while waiting (because it's unnecessary).
                    sel = null;
                    try
                    {
                        // Releases syncRoot until notified (e.g. by
                        // addMuxingServerSocketChannel()).
                        syncRoot.wait();
                    }
                    catch (InterruptedException ie)
                    {
                        // I don't know that we care about the interrupted state
                        // of the current thread because the method
                        // runInSelectorThread() is (or at least should be)
                        // pretty much the whole execution of the current thread
                        // that could potentially care about the interrupted
                        // state and it doesn't (or at least shouldn't).
                    }
                    continue;
                }
            }
            // Wait for a new change in the state(s) of at least one element of
            // channels. (The value of the local variable select is guaranteed
            // to be true here.)
            try
            {
                // Even if no element of channels has its state(s) changed, do
                // wake up after selSelectTimeout milliseconds in order to weed
                // out abandoned channels.
                sel.select(selSelectTimeout);
            }
            catch (ClosedSelectorException cse)
            {
                break;
            }
            catch (IOException ioe)
            {
                // Well, we're selecting from multiple SelectableChannels so
                // we're not sure what the IOException signals here.
            }
        }
        while (true);
    }
    /**
     * Schedules a specific {@code MuxingServerSocketChannel} for acceptance of
     * incoming network connections in {@link #acceptThread}. Lazily starts the
     * (single, daemon) accept thread and opens {@link #acceptSelector} on the
     * first invocation.
     *
     * @param channel the {@code MuxingServerSocketChannel} to schedule for
     * acceptance of incoming network connections in {@code acceptThread}
     * @throws IOException if an I/O error occurs
     */
    private static void scheduleAccept(MuxingServerSocketChannel channel)
        throws IOException
    {
        synchronized (muxingServerSocketChannels)
        {
            if (acceptThread == null)
            {
                // acceptSelector
                maybeCloseAcceptSelector();
                try
                {
                    // Prefer a Selector from the channel's own provider;
                    // fall back to the default provider on failure.
                    acceptSelector = channel.provider().openSelector();
                }
                catch (IOException ioe)
                {
                    acceptSelector = Selector.open();
                }

                // acceptThread
                acceptThread
                    = new Thread()
                    {
                        @Override
                        public void run()
                        {
                            try
                            {
                                runInAcceptThread();
                            }
                            finally
                            {
                                // Clean up the class-level state, but only if
                                // this thread is still the designated accept
                                // thread (a replacement may have been started).
                                synchronized (muxingServerSocketChannels)
                                {
                                    if (Thread.currentThread().equals(
                                            acceptThread))
                                    {
                                        // acceptThread
                                        acceptThread = null;
                                        // acceptSelector
                                        maybeCloseAcceptSelector();
                                    }
                                }
                            }
                        }
                    };
                acceptThread.setDaemon(true);
                acceptThread.setName(
                        MuxingServerSocketChannel.class.getName()
                            + ".acceptThread");
                acceptThread.start();
            }
            else
            {
                // Notify acceptThread that a new MuxingServerSocketChannel
                // (e.g. channel) may have been added.
                Selector sel = MuxingServerSocketChannel.acceptSelector;

                if (sel != null)
                    sel.wakeup();
            }
        }
        // We might as well expedite the acceptance from
        // muxingServerSocketChannel i.e. not wait for acceptThread and
        // explicitly cause an accept iteration.
        channel.accept();
    }
    /**
     * The list of <tt>MuxServerSocketChannel</tt>s created by and delegating to
     * this instance. Guarded by {@link #syncRoot}.
     */
    private final List<MuxServerSocketChannel> muxServerSocketChannels
        = new ArrayList<>();

    /**
     * The list of {@code SocketChannel}s which have been accepted by this
     * {@code MuxingServerSocketChannel}, are being read from, and have not been
     * accepted by the {@link DatagramPacketFilter} of any
     * {@link MuxServerSocketChannel} yet. Guarded by {@link #syncRoot}.
     */
    private final Queue<SocketChannel> readQ = new LinkedList<>();

    /**
     * The {@code Selector} which waits for incoming packets on all
     * {@code SocketChannel}s in {@link #readQ}.
     */
    private final Selector readSelector;

    /**
     * The {@code Thread} which waits for incoming packets on and reads them
     * from all {@code SocketChannel}s in {@link #readQ}. Guarded by
     * {@link #syncRoot}.
     */
    private Thread readThread;

    /**
     * The <tt>Object</tt> which synchronizes the access to the state of this
     * <tt>MuxingServerSocketChannel</tt> such as
     * {@link #muxServerSocketChannels} and {@link #readQ}.
     */
    private final Object syncRoot = new Object();
    /**
     * Initializes a new {@code MuxingServerSocketChannel} instance which is to
     * share the listening endpoint of a specific {@link ServerSocketChannel}
     * among multiple {@code MuxServerSocketChannel}s.
     *
     * @param delegate the {@code ServerSocketChannel} for which the new
     * instance is to provide listening endpoint sharing; must not be
     * {@code null}
     * @throws IOException if an I/O error occurs
     */
    public MuxingServerSocketChannel(ServerSocketChannel delegate)
        throws IOException
    {
        super(Objects.requireNonNull(delegate, "delegate"));

        // If at least one MuxServerSocketChannel is configured as non-blocking,
        // then MuxingServerSocketChannel (i.e. delegate) has to be configured
        // as non-blocking as well.
        configureBlocking(false);

        readSelector = provider().openSelector();
    }
    /**
     * {@inheritDoc}
     *
     * May return {@code null} because the underlying channel is configured as
     * non-blocking (and because {@link #implAccept(SocketChannel)} may consume
     * the accepted channel).
     */
    @Override
    public SocketChannel accept()
        throws IOException
    {
        SocketChannel ch = super.accept();

        // Weeds out abandoned SocketChannels which were classified/filtered
        // into MuxServerSocketChannel but were not accepted (out of it) for a
        // long time. The accept() method of MuxingServerSocketChannel is a
        // suitable place to do that because it is periodically invoked (by the
        // runInAcceptThread() method) on a clock (in addition to network
        // activity, of course).
        closeAbandonedSocketChannels();

        return ch;
    }
    /**
     * Adds a specific {@code MuxServerSocketChannel} to the list of
     * {@code MuxServerSocketChannel}s created by and delegating to this
     * instance.
     *
     * @param channel the {@code MuxServerSocketChannel} to add
     */
    protected void addMuxServerSocketChannel(MuxServerSocketChannel channel)
    {
        synchronized (syncRoot)
        {
            muxServerSocketChannels.add(channel);
            // Wake up any thread waiting on syncRoot for the set of
            // MuxServerSocketChannels to change.
            syncRoot.notifyAll();

            // Wake readThread up in case a SocketChannel from readQ is accepted
            // by the filter of the newly-added MuxServerSocketChannel.
            scheduleRead(/* channel */ null);
        }
    }
/**
* Weed out {@code SocketChannel}s which were classified/filtered into
* {@code MuxServerSocketChannel} but were not accepted (out of it) for a
* long time.
*/
private void closeAbandonedSocketChannels()
{
synchronized (syncRoot)
{
Collection<MuxServerSocketChannel> chs = muxServerSocketChannels;
if (!chs.isEmpty())
{
long now = System.currentTimeMillis();
for (MuxServerSocketChannel ch : chs)
ch.closeAbandonedSocketChannels(now);
}
}
}
    /**
     * Initializes a new {@code MuxServerSocketChannel} instance which is to
     * delegate to this instance and is to demultiplex incoming network
     * connections and packets using a specific {@link DatagramPacketFilter}.
     *
     * @param filter the {@code DatagramPacketFilter} to be used by the new
     * {@code MuxServerSocketChannel} instance to demultiplex incoming network
     * connections and packets; must not be {@code null} and must not equal the
     * filter of any existing open {@code MuxServerSocketChannel} of this
     * instance
     * @return a new {@code MuxServerSocketChannel} instance which delegates to
     * this instance and demultiplexes incoming network connections and packets
     * using the specified {@code filter}
     * @throws IllegalArgumentException if {@code filter} equals the filter of
     * an existing open {@code MuxServerSocketChannel} of this instance
     */
    protected MuxServerSocketChannel createMuxServerSocketChannel(
            DatagramPacketFilter filter)
    {
        // A MuxServerSocketChannel with no filter does not make sense. It
        // cannot be a fallback because DatagramPacketFilters (i.e.
        // MuxServerSocketChannels) have no priorities. It cannot be a catch all
        // because a SocketChannel (i.e. Socket) may be accepted by a single
        // MuxServerSocketChannel only.
        Objects.requireNonNull(filter, "filter");

        MuxServerSocketChannel channel;

        synchronized (syncRoot)
        {
            for (Iterator<MuxServerSocketChannel> i
                        = muxServerSocketChannels.iterator();
                    i.hasNext();)
            {
                MuxServerSocketChannel aChannel = i.next();

                if (aChannel.isOpen())
                {
                    DatagramPacketFilter aFilter = aChannel.filter;

                    // The implementations of Object#equals(Object) should be
                    // symmetric but they are written by humans so there is room
                    // for errors.
                    if (filter.equals(aFilter) || aFilter.equals(filter))
                    {
                        // A SocketChannel (i.e. Socket) may be accepted by a
                        // single MuxServerSocketChannel only.
                        throw new IllegalArgumentException("filter");
                    }
                }
                else
                {
                    // Aggressively clean closed channels out of the list.
                    i.remove();
                }
            }

            channel = new MuxServerSocketChannel(this, filter);
            addMuxServerSocketChannel(channel);
        }

        // Notify subclasses outside the lock that a new channel was added.
        muxServerSocketChannelAdded(channel);

        return channel;
    }
    /**
     * Determines whether any of the {@code MuxServerSocketChannel}s created by
     * and delegating to this instance demultiplexes (i.e. recognizes) a
     * specific {@link SocketChannel} based on a specific {@link DatagramPacket}
     * read from it and will make it available for acceptance.
     *
     * @param p the {@code DatagramPacket} read from {@code channel} which is to
     * be analyzed by the {@code MuxServerSocketChannel}s created by and
     * delegating to this instance
     * @param channel the {@code SocketChannel} from which {@code p} was read
     * and which is to possibly be demultiplexed into a
     * {@code MuxServerSocketChannel}
     * @return {@code true} if one of the {@code MuxServerSocketChannel}s
     * created by and delegating to this instance demultiplexed the specified
     * {@code channel}; otherwise, {@code false}
     */
    private boolean filterAccept(DatagramPacket p, SocketChannel channel)
    {
        boolean b = false;

        for (Iterator<MuxServerSocketChannel> i
                    = muxServerSocketChannels.iterator();
                i.hasNext();)
        {
            MuxServerSocketChannel muxChannel = i.next();

            if (muxChannel.isOpen())
            {
                try
                {
                    b = muxChannel.filterAccept(p, channel);
                    // The first MuxServerSocketChannel to accept wins; a
                    // SocketChannel may be accepted by a single one only.
                    if (b)
                        break;
                }
                catch (Throwable t)
                {
                    // The implementation of DatagramPacketFilter is external to
                    // MuxingServerSocketChannel and we do not want the failure
                    // of one DatagramPacketFilter to kill the whole
                    // MuxingServerSocketChannel.
                    if (t instanceof InterruptedException)
                        Thread.currentThread().interrupt();
                    else if (t instanceof ThreadDeath)
                        throw (ThreadDeath) t;
                }
            }
            else
            {
                // Aggressively clean closed channels out of the list.
                i.remove();
            }
        }
        return b;
    }
    /**
     * {@inheritDoc}
     *
     * Queues a {@link SocketChannel} accepted by this instance for reading so
     * that it may later on be demultiplexed into a
     * {@code MuxServerSocketChannel}. As an optimization, when exactly one
     * open {@code MuxServerSocketChannel} exists, the accepted channel is
     * handed to it directly (bypassing {@link #readQ}).
     */
    @Override
    protected SocketChannel implAccept(SocketChannel accepted)
        throws IOException
    {
        synchronized (syncRoot)
        {
            if (accepted != null && accepted.isOpen())
            {
                accepted.configureBlocking(false);

                // If there is only one (open) MuxServerSocketChannel, it is
                // inefficient to read from it in a separate thread and then
                // either deliver the accepted to the MuxServerSocketChannel or
                // close the accepted. The situation is pretty much like no
                // functionality enabled by MuxServerSocketChannel is needed:
                // whoever has invoked accept() on this ServerSocketChannel is
                // going to read from the accepted and either figure out that it
                // is in an expected format or close it.
                MuxServerSocketChannel oneAndOnly = null;

                for (MuxServerSocketChannel ch : muxServerSocketChannels)
                {
                    if (ch.isOpen())
                    {
                        if (oneAndOnly == null)
                        {
                            oneAndOnly = ch;
                        }
                        else
                        {
                            // More than one open channel: the optimization
                            // does not apply.
                            oneAndOnly = null;
                            break;
                        }
                    }
                }
                if (oneAndOnly != null && oneAndOnly.qAccept(accepted))
                {
                    // It shouldn't matter much whether null or accepted is
                    // returned. It sounds reasonable to return null from the
                    // standpoint that accepted was classified/filtered into a
                    // MuxServerSocketChannel and, consequently, this
                    // MuxingServerSocketChannel no longer possesses it.
                    return null;
                }

                // There are multiple (open) MuxServerSocketChannels (or none
                // but then the situation is weird and it's more easily handled
                // as the situation with multiple) and this instance is to read
                // from accepted in order to determine where it's to be
                // classified/filtered.
                readQ.add(accepted);
                syncRoot.notifyAll();

                scheduleRead(accepted);
            }
        }
        return accepted;
    }
/**
* {@inheritDoc}
*
* Associates a {@link MuxingServerSocket} with this
* {@code MuxingServerSocketChannel}.
*/
@Override
protected MuxingServerSocket implSocket(ServerSocket socket)
throws IOException
{
return new MuxingServerSocket(socket, this);
}
/**
* Attempts to read from a specific {@link SocketChannel} into a specific
* {@link ByteBuffer} without throwing an {@link IOException} if the reading
* from the {@code channel} fails or there is insufficient room in
* {@code buf} to write into.
*
* @param channel the {@code SocketChannel} to read from
* @param buf the {@code ByteBuffer} to write into
* @return the number of {@code byte}s read from {@code channel} and written
* into {@code buf} or {@code -1} if {@code channel} has reached the end of
* its stream
*/
protected int maybeRead(SocketChannel channel, ByteBuffer buf)
{
int read;
if (buf.remaining() > 0)
{
try
{
read = channel.read(buf);
}
catch (IOException ioe)
{
// If ioe is a ClosedChannelException signalling that the
// specified channel is closed, it will be handled by the method
// caller (by removing channel from readQ).
read = 0;
}
}
else
{
read = 0;
}
return read;
}
    /**
     * Notifies this <tt>MuxingServerSocketChannel</tt> that a specific
     * <tt>MuxServerSocketChannel</tt> was added to
     * {@link #muxServerSocketChannels}.
     *
     * @param channel the added <tt>MuxServerSocketChannel</tt>
     */
    protected void muxServerSocketChannelAdded(MuxServerSocketChannel channel)
    {
        // Intentionally a no-op: an extension hook which subclasses may
        // override to react to the registration of a new
        // MuxServerSocketChannel.
    }
    /**
     * Runs in {@link #readThread} and reads from all {@link SocketChannel}s in
     * {@link #readQ} and serves them for demultiplexing to
     * {@link #muxServerSocketChannels}.
     */
    protected void runInReadThread()
    {
        // Delegate the select-and-read loop to the shared helper. The
        // suppliers re-read this.readThread/this.readSelector on every call,
        // so the loop always sees the current values of those fields.
        runInSelectorThread(
                /* syncRoot */ syncRoot,
                /* threadSupplier */ new Supplier<Thread>()
                {
                    @Override
                    public Thread get()
                    {
                        return readThread;
                    }
                },
                /* selectorSupplier */ new Supplier<Selector>()
                {
                    @Override
                    public Selector get()
                    {
                        return readSelector;
                    }
                },
                /* selectionKeyOps */ SelectionKey.OP_READ,
                /* channels */ readQ,
                /* predicate */ new BiPredicate<SocketChannel, SelectionKey>()
                {
                    @Override
                    public boolean test(SocketChannel ch, SelectionKey sk)
                    {
                        // Per-channel read/classify step; returns true once ch
                        // no longer needs to be tested.
                        return testRunInReadThreadPredicate(ch, sk);
                    }
                });
    }
    /**
     * Queues a specific {@link SocketChannel} to be read and demultiplexed into
     * a {@code MuxServerSocketChannel}.
     *
     * @param channel the {@code SocketChannel} to queue for reading and
     * demultiplexing
     */
    protected void scheduleRead(SocketChannel channel)
    {
        synchronized (syncRoot)
        {
            if (readThread == null)
            {
                // Lazily start the single daemon thread which reads from all
                // queued channels. It clears the readThread field on exit (if
                // still the current thread) so a later scheduleRead() can
                // start a replacement.
                readThread
                    = new Thread()
                    {
                        @Override
                        public void run()
                        {
                            try
                            {
                                runInReadThread();
                            }
                            finally
                            {
                                synchronized (syncRoot)
                                {
                                    if (Thread.currentThread().equals(
                                            readThread))
                                    {
                                        readThread = null;
                                    }
                                }
                            }
                        }
                    };
                readThread.setDaemon(true);
                readThread.setName(
                        MuxingServerSocketChannel.class.getName()
                            + ".readThread");
                readThread.start();
            }
            else
            {
                // Notify readThread that a new SocketChannel (e.g. channel) may
                // have been added: waking the selector makes the loop re-scan
                // readQ.
                Selector sel = this.readSelector;
                if (sel != null)
                    sel.wakeup();
            }
        }
    }
    /**
     * Implements {@link BiPredicate#test(Object, Object)} of the
     * {@code BiPredicate} utilized by {@link #runInReadThread()}. The method is
     * defined explicitly for the purposes of reducing excessive indentation and
     * bettering readability. Reads from {@code ch} and attempts to
     * classify/filter it into a {@link MuxServerSocketChannel} for acceptance.
     * If {@code ch} has not provided readable data within
     * {@link #SOCKET_CHANNEL_READ_TIMEOUT}, it is forcibly closed.
     *
     * @param ch the {@code SocketChannel} to read from and to classify/filter
     * into a {@code MuxServerSocketChannel} for acceptance
     * @param sk the {@code SelectionKey} associated with {@code ch} in the
     * {@code Selector} which awaits changes in the state(s) of {@code ch}
     * @return {@code true} if {@code ch} is to no longer be tested; otherwise,
     * {@code false}
     */
    private boolean testRunInReadThreadPredicate(
            SocketChannel ch,
            SelectionKey sk)
    {
        // Try to read from ch.
        DatagramBuffer db = (DatagramBuffer) sk.attachment();
        if (db == null)
        {
            // First time ch is tested: attach a buffer which accumulates the
            // initial bytes of ch across multiple reads.
            db = new DatagramBuffer(SOCKET_CHANNEL_READ_CAPACITY);
            sk.attach(db);
        }
        int read = maybeRead(ch, db.getByteBuffer());
        // Try to filter ch (into a MuxServerSocketChannel).
        if (ch.isOpen())
        {
            // Maintain a record of when the SocketChannel last provided
            // readable data in order to weed out abandoned ones.
            long now = System.currentTimeMillis();
            if (read > 0 || db.timestamp == -1)
                db.timestamp = now;
            // NOTE(review): a read of -1 (end-of-stream) does not close ch
            // here; such a channel is only reaped once
            // SOCKET_CHANNEL_READ_TIMEOUT elapses below — confirm this delay
            // is intended.
            DatagramPacket p = db.getDatagramPacket();
            int len = p.getLength();
            if (len > 0)
            {
                if (filterAccept(p, ch))
                {
                    // A MuxServerSocketChannel has accepted ch so this
                    // MuxingServerSocketChannel is no longer responsible for
                    // ch.
                    return true;
                }
                else if (len >= SOCKET_CHANNEL_READ_CAPACITY)
                {
                    // This MuxingServerSocketChannel has read from ch as much
                    // as it will ever read and no existing
                    // MuxServerSocketChannel has accepted ch. There is no point
                    // in waiting anymore.
                    closeNoExceptions(ch);
                    // Allow this MuxingServerSocketChannel to clean ch up.
                    return false;
                }
            }
            if (read <= 0 && now - db.timestamp >= SOCKET_CHANNEL_READ_TIMEOUT)
            {
                // It appears ch has been abandoned by the client.
                closeNoExceptions(ch);
                return false;
            }
        }
        return false;
    }
}
| |
/*
* Copyright 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.iosched.ui;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.database.ContentObserver;
import android.database.Cursor;
import android.database.DataSetObserver;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.ListFragment;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ListView;
import com.google.analytics.tracking.android.EasyTracker;
import com.google.android.apps.iosched.R;
import com.google.android.apps.iosched.provider.ScheduleContract;
import com.google.android.apps.iosched.ui.TracksAdapter.TracksQuery;
import com.google.android.apps.iosched.ui.tablet.SessionsSandboxMultiPaneActivity;
import com.google.android.apps.iosched.ui.tablet.TracksDropdownFragment;
import com.google.android.apps.iosched.util.UIUtils;
/**
 * A simple {@link ListFragment} that renders a list of tracks with available
 * sessions or sandbox companies (depending on {@link ExploreFragment#VIEW_TYPE}) using a
 * {@link TracksAdapter}.
 */
public class ExploreFragment extends ListFragment implements
        LoaderManager.LoaderCallbacks<Cursor> {
    // Adapter backing the track list; also drives the empty-view handling.
    private TracksAdapter mAdapter;
    // "Waiting for sync" placeholder, shown until the adapter first has data.
    private View mEmptyView;
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        // Inflate the list container and mount the "waiting for sync" layout
        // inside its empty-view slot.
        ViewGroup rootView = (ViewGroup) inflater.inflate(
                R.layout.fragment_list_with_empty_container_inset, container, false);
        mEmptyView = rootView.findViewById(android.R.id.empty);
        inflater.inflate(R.layout.empty_waiting_for_sync, (ViewGroup) mEmptyView, true);
        return rootView;
    }
    @Override
    public void onViewCreated(final View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        view.setBackgroundColor(Color.WHITE);
        final ListView listView = getListView();
        listView.setSelector(android.R.color.transparent);
        listView.setCacheColorHint(Color.WHITE);
        // The map header row is installed before the adapter is attached.
        addMapHeaderView();
        mAdapter = new TracksAdapter(getActivity(), false);
        setListAdapter(mAdapter);
        // Override default ListView empty-view handling: keep the placeholder
        // visible until the adapter first reports data, then detach the
        // observer so the placeholder is never brought back.
        listView.setEmptyView(null);
        mEmptyView.setVisibility(View.VISIBLE);
        mAdapter.registerDataSetObserver(new DataSetObserver() {
            @Override
            public void onChanged() {
                if (mAdapter.getCount() > 0) {
                    mEmptyView.setVisibility(View.GONE);
                    mAdapter.unregisterDataSetObserver(this);
                }
            }
        });
    }
    // Adds a header row containing a button that opens the conference-venue
    // map and disables the divider below it.
    private void addMapHeaderView() {
        ListView listView = getListView();
        final Context context = listView.getContext();
        View mapHeaderContainerView = LayoutInflater.from(context).inflate(
                R.layout.list_item_track_map, listView, false);
        View mapButton = mapHeaderContainerView.findViewById(R.id.map_button);
        mapButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Launch map of conference venue
                EasyTracker.getTracker().sendEvent(
                        "Explore Tab", "Click", "Map", 0L);
                startActivity(new Intent(context,
                        UIUtils.getMapActivityClass(getActivity())));
            }
        });
        listView.addHeaderView(mapHeaderContainerView);
        listView.setHeaderDividersEnabled(false);
    }
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // As of support library r12, calling initLoader for a fragment in a FragmentPagerAdapter
        // in the fragment's onCreate may cause the same LoaderManager to be dealt to multiple
        // fragments because their mIndex is -1 (haven't been added to the activity yet). Thus,
        // we do this in onActivityCreated.
        getLoaderManager().initLoader(TracksQuery._TOKEN, null, this);
    }
    // Restarts the loader whenever session data changes so the track list
    // stays in sync with the provider.
    private final ContentObserver mObserver = new ContentObserver(new Handler()) {
        @Override
        public void onChange(boolean selfChange) {
            // The fragment may already be detached when the observer fires.
            if (getActivity() == null) {
                return;
            }
            getLoaderManager().restartLoader(TracksQuery._TOKEN, null, ExploreFragment.this);
        }
    };
    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        // Observe session changes for the lifetime of the attachment;
        // unregistered in onDetach.
        activity.getContentResolver().registerContentObserver(
                ScheduleContract.Sessions.CONTENT_URI, true, mObserver);
    }
    @Override
    public void onDetach() {
        super.onDetach();
        getActivity().getContentResolver().unregisterContentObserver(mObserver);
    }
    /** {@inheritDoc} */
    @Override
    public void onListItemClick(ListView l, View v, int position, long id) {
        final Cursor cursor = (Cursor) mAdapter.getItem(position - 1); // - 1 to account for header
        // cursor may be null — presumably for the adapter's synthetic "all
        // tracks" item (see setHasAllItem in onLoadFinished); in that case the
        // ALL_TRACK_ID defaults below are used.
        String trackId = ScheduleContract.Tracks.ALL_TRACK_ID;
        int trackMeta = ScheduleContract.Tracks.TRACK_META_NONE;
        if (cursor != null) {
            trackId = cursor.getString(TracksAdapter.TracksQuery.TRACK_ID);
            trackMeta = cursor.getInt(TracksAdapter.TracksQuery.TRACK_META);
        }
        final Intent intent = new Intent(Intent.ACTION_VIEW);
        final Uri trackUri = ScheduleContract.Tracks.buildTrackUri(trackId);
        intent.setData(trackUri);
        // Preselect the view type for tracks that only have sandbox or
        // office-hours content.
        if (trackMeta == ScheduleContract.Tracks.TRACK_META_SANDBOX_OFFICE_HOURS_ONLY) {
            intent.putExtra(SessionsSandboxMultiPaneActivity.EXTRA_DEFAULT_VIEW_TYPE,
                    TracksDropdownFragment.VIEW_TYPE_SANDBOX);
        } else if (trackMeta == ScheduleContract.Tracks.TRACK_META_OFFICE_HOURS_ONLY) {
            intent.putExtra(SessionsSandboxMultiPaneActivity.EXTRA_DEFAULT_VIEW_TYPE,
                    TracksDropdownFragment.VIEW_TYPE_OFFICE_HOURS);
        }
        startActivity(intent);
    }
    @Override
    public Loader<Cursor> onCreateLoader(int id, Bundle data) {
        // The tracks URI may be supplied via fragment arguments; fall back to
        // the full tracks list.
        Intent intent = BaseActivity.fragmentArgumentsToIntent(getArguments());
        Uri tracksUri = intent.getData();
        if (tracksUri == null) {
            tracksUri = ScheduleContract.Tracks.CONTENT_URI;
        }
        // Filter our tracks query to only include those with valid results
        String[] projection = TracksAdapter.TracksQuery.PROJECTION;
        String selection = null;
        return new CursorLoader(getActivity(), tracksUri, projection, selection, null,
                ScheduleContract.Tracks.DEFAULT_SORT);
    }
    @Override
    public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
        if (getActivity() == null) {
            return;
        }
        mAdapter.setHasAllItem(true);
        mAdapter.swapCursor(cursor);
        if (cursor.getCount() > 0) {
            mEmptyView.setVisibility(View.GONE);
        }
    }
    @Override
    public void onLoaderReset(Loader<Cursor> loader) {
    }
}
| |
/*
* GRAL: GRAphing Library for Java(R)
*
* (C) Copyright 2009-2012 Erich Seifert <dev[at]erichseifert.de>,
* Michael Seifert <michael[at]erichseifert.de>
*
* This file is part of GRAL.
*
* GRAL is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* GRAL is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with GRAL. If not, see <http://www.gnu.org/licenses/>.
*/
package de.erichseifert.gral.io.data;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import java.util.regex.Pattern;
import de.erichseifert.gral.data.DataSource;
import de.erichseifert.gral.data.DataTable;
import de.erichseifert.gral.io.IOCapabilities;
import de.erichseifert.gral.util.Messages;
import de.erichseifert.gral.util.StatefulTokenizer;
import de.erichseifert.gral.util.StatefulTokenizer.Token;
/**
 * <p>Class that creates a {@code DataSource} from file contents which are
 * separated by a certain delimiter character. The delimiter is chosen based on
 * the file type but can also be set manually. By default the comma character
 * will be used as a delimiter for separating columns.</p>
 * <p>{@code CSVReader} instances should be obtained by the
 * {@link DataReaderFactory} rather than being created manually:</p>
 * <pre>
 * DataReaderFactory factory = DataReaderFactory.getInstance();
 * DataReader reader = factory.get("text/csv");
 * reader.read(new FileInputStream(filename), Integer.class, Double.class);
 * </pre>
 * @see <a href="http://tools.ietf.org/html/rfc4180">RFC 4180</a>
 */
public class CSVReader extends AbstractDataReader {
	/** Key for specifying a {@link Character} value that defines the
	delimiting character used to separate columns. */
	public static final String SEPARATOR_CHAR = "separator"; //$NON-NLS-1$

	static {
		addCapabilities(new IOCapabilities(
			"CSV", //$NON-NLS-1$
			Messages.getString("DataIO.csvDescription"), //$NON-NLS-1$
			"text/csv", //$NON-NLS-1$
			new String[] {"csv", "txt"} //$NON-NLS-1$ //$NON-NLS-2$
		));
		addCapabilities(new IOCapabilities(
			"TSV", //$NON-NLS-1$
			Messages.getString("DataIO.tsvDescription"), //$NON-NLS-1$
			"text/tab-separated-values", //$NON-NLS-1$
			new String[] {
				"tsv", "tab", "txt"} //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
		));
	}

	/**
	 * Token types for analyzing CSV or TSV input.
	 */
	private static enum CSVTokenType {
		/** Type for text tokens containing value content. */
		TEXT,
		/** Type for quotes that may wrap value content. */
		QUOTE,
		/** Type for row separators. */
		ROW,
		/** Type for column separators. */
		COLUMN,
	}

	/**
	 * Tokenizer that splits CSV or TSV content into tokens of type
	 * {@link CSVTokenType}, honoring quoted values.
	 */
	private static final class CSVTokenizer extends StatefulTokenizer {
		/**
		 * Initializes a new tokenizer instance with a grammar to analyze CSV
		 * or TSV content. The character that separates columns must be
		 * provided.
		 * @param separator Column separator character.
		 */
		public CSVTokenizer(char separator) {
			addJoinedType(CSVTokenType.TEXT);
			addIgnoredType(CSVTokenType.QUOTE);

			// Basic set of rules for analyzing CSV content
			putRules(
				new Rule("\n|\r\n|\r", CSVTokenType.ROW),
				new Rule("\\s*("+Pattern.quote(String.valueOf(separator))+")\\s*",
					CSVTokenType.COLUMN),
				new Rule("\"", CSVTokenType.QUOTE, "quoted"),
				new Rule(".", CSVTokenType.TEXT)
			);
			// Set of rules that is valid inside quoted content
			putRules("quoted",
				new Rule("(\")\"", CSVTokenType.TEXT),
				new Rule("\"", CSVTokenType.QUOTE, "#pop"),
				new Rule(".", CSVTokenType.TEXT)
			);
		}
	}

	/**
	 * Creates a new instance with the specified MIME type. The delimiter is
	 * set depending on the MIME type parameter. By default a comma is used as
	 * a delimiter.
	 * @param mimeType MIME type of the file format to be read.
	 */
	public CSVReader(String mimeType) {
		super(mimeType);
		if ("text/tab-separated-values".equals(mimeType)) { //$NON-NLS-1$
			setDefault(SEPARATOR_CHAR, '\t');
		} else {
			setDefault(SEPARATOR_CHAR, ',');
		}
	}

	/**
	 * Returns a DataSource that was imported.
	 * @param input Input to be read.
	 * @param types Number types for the columns of the DataSource.
	 * @return DataSource Imported data. Empty input yields an empty table.
	 * @throws IOException when the file format is not valid or when
	 *         experiencing an error during file operations.
	 */
	public DataSource read(InputStream input, Class<? extends Comparable<?>>... types)
			throws IOException {
		// Read all contents from the input stream. The "\Z" delimiter keeps
		// everything up to the final line terminator in a single token.
		Scanner scanner = new Scanner(input).useDelimiter("\\Z"); //$NON-NLS-1$
		// Guard against empty input: scanner.next() would throw
		// NoSuchElementException. An empty stream simply has no rows.
		if (!scanner.hasNext()) {
			return new DataTable(types);
		}
		String content = scanner.next();

		// Tokenize the string
		Character separator = getSetting(SEPARATOR_CHAR);
		CSVTokenizer tokenizer = new CSVTokenizer(separator);
		List<Token> tokens = tokenizer.tokenize(content);
		if (tokens.isEmpty()) {
			// Content consisted of nothing tokenizable; treat as no rows.
			return new DataTable(types);
		}

		// Add row token if there was no trailing line break, so the final row
		// is flushed like all preceding ones
		Token lastToken = tokens.get(tokens.size() - 1);
		if (lastToken.getType() != CSVTokenType.ROW) {
			Token eof = new Token(lastToken.getEnd(), lastToken.getEnd(), CSVTokenType.ROW, "");
			tokens.add(eof);
		}

		// Find methods for all column data types that can be used to convert
		// the text to the column data type
		Map<Class<? extends Comparable<?>>, Method> parseMethods =
			new HashMap<Class<? extends Comparable<?>>, Method>();
		for (Class<? extends Comparable<?>> type : types) {
			if (parseMethods.containsKey(type)) {
				continue;
			}
			Method parseMethod = getParseMethod(type);
			if (parseMethod != null) {
				parseMethods.put(type, parseMethod);
			}
		}

		// Process the tokens and fill the data table row by row
		DataTable data = new DataTable(types);
		List<Comparable<?>> row = new LinkedList<Comparable<?>>();
		int rowIndex = 0;
		int colIndex = 0;
		String cellContent = "";
		for (Token token : tokens) {
			if (token.getType() == CSVTokenType.TEXT) {
				// Store the token text
				cellContent = token.getContent();
			} else if (token.getType() == CSVTokenType.COLUMN ||
					token.getType() == CSVTokenType.ROW) {
				// Check for a valid number of columns
				if (colIndex >= types.length) {
					throw new IllegalArgumentException(MessageFormat.format(
						"Too many columns in line {0,number,integer}: got {1,number,integer}, but expected {2,number,integer}.", //$NON-NLS-1$
						rowIndex + 1, colIndex + 1, types.length));
				}

				// We need to add the cell to the row in both cases because
				// rows don't have a trailing column token
				Class<? extends Comparable<?>> colType = types[colIndex];
				Method parseMethod = parseMethods.get(colType);
				if (parseMethod == null) {
					// Fail with a clear message instead of a
					// NullPointerException when a column type has no suitable
					// parse method at all.
					throw new IOException(MessageFormat.format(
						"No parser found for data type {0} in column {1,number,integer}.", //$NON-NLS-1$
						colType.getSimpleName(), colIndex));
				}
				try {
					Comparable<?> cell = (Comparable<?>) parseMethod.invoke(
						null, cellContent);
					row.add(cell);
				} catch (IllegalArgumentException e) {
					// Preserve the cause so the failing parse is debuggable
					throw new RuntimeException(MessageFormat.format(
						"Could not invoke method for parsing data type {0} in column {1,number,integer}.", //$NON-NLS-1$
						types[colIndex].getSimpleName(), colIndex), e);
				} catch (IllegalAccessException e) {
					throw new RuntimeException(MessageFormat.format(
						"Could not access method for parsing data type {0} in column {1,number,integer}.", //$NON-NLS-1$
						types[colIndex].getSimpleName(), colIndex), e);
				} catch (InvocationTargetException e) {
					// Malformed cell content is an expected input error
					throw new IOException(MessageFormat.format(
						"Type mismatch in line {0,number,integer}, column {1,number,integer}: got \"{2}\", but expected {3} value.", //$NON-NLS-1$
						rowIndex + 1, colIndex + 1, cellContent, colType.getSimpleName()));
				}
				colIndex++;

				if (token.getType() == CSVTokenType.ROW) {
					// Check for a valid number of columns
					if (row.size() < types.length) {
						throw new IllegalArgumentException(MessageFormat.format(
							"Not enough columns in line {0,number,integer}: got {1,number,integer}, but expected {2,number,integer}.", //$NON-NLS-1$
							rowIndex + 1, row.size(), types.length));
					}

					// Add the row to the table
					data.add(row);
					rowIndex++;

					// Start a new row
					row.clear();
					colIndex = 0;
					cellContent = "";
				}
			}
		}

		return data;
	}

	/**
	 * Returns a method that can return a parsed value of the specified type.
	 * @param c Desired type.
	 * @return Method that parses a data type, or {@code null} if none exists.
	 */
	private static Method getParseMethod(Class<?> c) {
		Method parse = null;

		if (String.class.isAssignableFrom(c)) {
			// Strings need no parsing; use the identity-like valueOf
			try {
				parse = String.class.getMethod("valueOf", Object.class); //$NON-NLS-1$
			} catch (NoSuchMethodException e) {
				parse = null;
			}
		} else {
			// Look for a static parse*(String) method, e.g. Integer.parseInt
			for (Method m : c.getMethods()) {
				// Check the reflective modifier flags instead of searching the
				// textual signature, which could match unrelated content.
				boolean isStatic = Modifier.isStatic(m.getModifiers());
				if (!isStatic) {
					continue;
				}
				Class<?>[] types = m.getParameterTypes();
				boolean hasStringParameter =
					(types.length == 1) && String.class.equals(types[0]);
				if (!hasStringParameter) {
					continue;
				}
				boolean parseName = m.getName().startsWith("parse"); //$NON-NLS-1$
				if (!parseName) {
					continue;
				}
				parse = m;
			}
		}

		return parse;
	}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.execution;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import io.airlift.stats.TestingGcMonitor;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import io.prestosql.Session;
import io.prestosql.connector.ConnectorId;
import io.prestosql.cost.StatsAndCosts;
import io.prestosql.execution.NodeTaskMap.PartitionedSplitCountTracker;
import io.prestosql.execution.buffer.LazyOutputBuffer;
import io.prestosql.execution.buffer.OutputBuffer;
import io.prestosql.execution.buffer.OutputBuffers;
import io.prestosql.memory.MemoryPool;
import io.prestosql.memory.QueryContext;
import io.prestosql.memory.context.SimpleLocalMemoryContext;
import io.prestosql.metadata.Split;
import io.prestosql.metadata.TableHandle;
import io.prestosql.operator.TaskContext;
import io.prestosql.operator.TaskStats;
import io.prestosql.spi.Node;
import io.prestosql.spi.memory.MemoryPoolId;
import io.prestosql.spiller.SpillSpaceTracker;
import io.prestosql.sql.planner.Partitioning;
import io.prestosql.sql.planner.PartitioningScheme;
import io.prestosql.sql.planner.PlanFragment;
import io.prestosql.sql.planner.Symbol;
import io.prestosql.sql.planner.plan.PlanFragmentId;
import io.prestosql.sql.planner.plan.PlanNode;
import io.prestosql.sql.planner.plan.PlanNodeId;
import io.prestosql.sql.planner.plan.TableScanNode;
import io.prestosql.testing.TestingMetadata.TestingColumnHandle;
import io.prestosql.testing.TestingMetadata.TestingTableHandle;
import org.joda.time.DateTime;
import javax.annotation.concurrent.GuardedBy;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Stream;
import static com.google.common.util.concurrent.Futures.nonCancellationPropagating;
import static io.airlift.units.DataSize.Unit.BYTE;
import static io.airlift.units.DataSize.Unit.GIGABYTE;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static io.prestosql.SessionTestUtils.TEST_SESSION;
import static io.prestosql.execution.StateMachine.StateChangeListener;
import static io.prestosql.execution.buffer.OutputBuffers.BufferType.BROADCAST;
import static io.prestosql.execution.buffer.OutputBuffers.createInitialEmptyOutputBuffers;
import static io.prestosql.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext;
import static io.prestosql.operator.StageExecutionDescriptor.ungroupedExecution;
import static io.prestosql.spi.type.VarcharType.VARCHAR;
import static io.prestosql.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION;
import static io.prestosql.sql.planner.SystemPartitioningHandle.SOURCE_DISTRIBUTION;
import static io.prestosql.util.Failures.toFailures;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
public class MockRemoteTaskFactory
implements RemoteTaskFactory
{
private static final String TASK_INSTANCE_ID = "task-instance-id";
private final Executor executor;
private final ScheduledExecutorService scheduledExecutor;
public MockRemoteTaskFactory(Executor executor, ScheduledExecutorService scheduledExecutor)
{
this.executor = executor;
this.scheduledExecutor = scheduledExecutor;
}
    /**
     * Builds a single-column VARCHAR table-scan plan fragment and creates a
     * {@link MockRemoteTask} for it with all of the given {@code splits}
     * assigned to the scan node.
     */
    public MockRemoteTask createTableScanTask(TaskId taskId, Node newNode, List<Split> splits, PartitionedSplitCountTracker partitionedSplitCountTracker)
    {
        Symbol symbol = new Symbol("column");
        PlanNodeId sourceId = new PlanNodeId("sourceId");
        // Minimal fragment: a table scan over one testing column, single
        // distribution output.
        PlanFragment testFragment = new PlanFragment(
                new PlanFragmentId("test"),
                new TableScanNode(
                        sourceId,
                        new TableHandle(new ConnectorId("test"), new TestingTableHandle()),
                        ImmutableList.of(symbol),
                        ImmutableMap.of(symbol, new TestingColumnHandle("column"))),
                ImmutableMap.of(symbol, VARCHAR),
                SOURCE_DISTRIBUTION,
                ImmutableList.of(sourceId),
                new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), ImmutableList.of(symbol)),
                ungroupedExecution(),
                StatsAndCosts.empty(),
                Optional.empty());
        // All provided splits feed the scan node.
        ImmutableMultimap.Builder<PlanNodeId, Split> initialSplits = ImmutableMultimap.builder();
        for (Split sourceSplit : splits) {
            initialSplits.put(sourceId, sourceSplit);
        }
        return createRemoteTask(TEST_SESSION, taskId, newNode, testFragment, initialSplits.build(), OptionalInt.empty(), createInitialEmptyOutputBuffers(BROADCAST), partitionedSplitCountTracker, true);
    }
    /**
     * Creates a {@link MockRemoteTask}. The {@code session},
     * {@code outputBuffers}, and {@code summarizeTaskInfo} arguments are
     * accepted for interface compatibility but not used by the mock.
     */
    @Override
    public MockRemoteTask createRemoteTask(
            Session session,
            TaskId taskId,
            Node node,
            PlanFragment fragment,
            Multimap<PlanNodeId, Split> initialSplits,
            OptionalInt totalPartitions,
            OutputBuffers outputBuffers,
            PartitionedSplitCountTracker partitionedSplitCountTracker,
            boolean summarizeTaskInfo)
    {
        return new MockRemoteTask(taskId, fragment, node.getNodeIdentifier(), executor, scheduledExecutor, initialSplits, totalPartitions, partitionedSplitCountTracker);
    }
public static final class MockRemoteTask
implements RemoteTask
{
private final AtomicLong nextTaskInfoVersion = new AtomicLong(TaskStatus.STARTING_VERSION);
private final URI location;
private final TaskStateMachine taskStateMachine;
private final TaskContext taskContext;
private final OutputBuffer outputBuffer;
private final String nodeId;
private final PlanFragment fragment;
@GuardedBy("this")
private final Set<PlanNodeId> noMoreSplits = new HashSet<>();
@GuardedBy("this")
private final Multimap<PlanNodeId, Split> splits = HashMultimap.create();
@GuardedBy("this")
private int runningDrivers;
@GuardedBy("this")
private SettableFuture<?> whenSplitQueueHasSpace = SettableFuture.create();
private final PartitionedSplitCountTracker partitionedSplitCountTracker;
        /**
         * Creates a mock task with small fixed-size memory pools and a lazy
         * output buffer, seeds it with {@code initialSplits}, and reports the
         * initial partitioned split count to the tracker.
         */
        public MockRemoteTask(TaskId taskId,
                PlanFragment fragment,
                String nodeId,
                Executor executor,
                ScheduledExecutorService scheduledExecutor,
                Multimap<PlanNodeId, Split> initialSplits,
                OptionalInt totalPartitions,
                PartitionedSplitCountTracker partitionedSplitCountTracker)
        {
            this.taskStateMachine = new TaskStateMachine(requireNonNull(taskId, "taskId is null"), requireNonNull(executor, "executor is null"));
            // Tiny fixed-size pools: this is a test mock, not a real worker.
            MemoryPool memoryPool = new MemoryPool(new MemoryPoolId("test"), new DataSize(1, GIGABYTE));
            SpillSpaceTracker spillSpaceTracker = new SpillSpaceTracker(new DataSize(1, GIGABYTE));
            QueryContext queryContext = new QueryContext(taskId.getQueryId(),
                    new DataSize(1, MEGABYTE),
                    new DataSize(2, MEGABYTE),
                    memoryPool,
                    new TestingGcMonitor(),
                    executor,
                    scheduledExecutor,
                    new DataSize(1, MEGABYTE),
                    spillSpaceTracker);
            this.taskContext = queryContext.addTaskContext(taskStateMachine, TEST_SESSION, true, true, totalPartitions);
            this.location = URI.create("fake://task/" + taskId);
            this.outputBuffer = new LazyOutputBuffer(
                    taskId,
                    TASK_INSTANCE_ID,
                    executor,
                    new DataSize(1, BYTE),
                    () -> new SimpleLocalMemoryContext(newSimpleAggregatedMemoryContext(), "test"));
            this.fragment = requireNonNull(fragment, "fragment is null");
            this.nodeId = requireNonNull(nodeId, "nodeId is null");
            splits.putAll(initialSplits);
            this.partitionedSplitCountTracker = requireNonNull(partitionedSplitCountTracker, "partitionedSplitCountTracker is null");
            // Publish the initial split accounting right away.
            partitionedSplitCountTracker.setPartitionedSplitCount(getPartitionedSplitCount());
            updateSplitQueueSpace();
        }
        /**
         * Returns this task's identifier as tracked by the task state machine.
         */
        @Override
        public TaskId getTaskId()
        {
            return taskStateMachine.getTaskId();
        }
        /**
         * Returns the identifier of the node this mock task pretends to run on.
         */
        @Override
        public String getNodeId()
        {
            return nodeId;
        }
        /**
         * Builds a fresh {@link TaskInfo} snapshot. Every call increments the
         * task-info version counter; failure causes are attached only when the
         * task is in the FAILED state. Most counters are zeroed since the mock
         * performs no real work.
         */
        @Override
        public TaskInfo getTaskInfo()
        {
            TaskState state = taskStateMachine.getState();
            List<ExecutionFailureInfo> failures = ImmutableList.of();
            if (state == TaskState.FAILED) {
                failures = toFailures(taskStateMachine.getFailureCauses());
            }
            return new TaskInfo(
                    new TaskStatus(
                            taskStateMachine.getTaskId(),
                            TASK_INSTANCE_ID,
                            nextTaskInfoVersion.getAndIncrement(),
                            state,
                            location,
                            nodeId,
                            ImmutableSet.of(),
                            failures,
                            0,
                            0,
                            false,
                            new DataSize(0, BYTE),
                            new DataSize(0, BYTE),
                            new DataSize(0, BYTE),
                            0,
                            new Duration(0, MILLISECONDS)),
                    DateTime.now(),
                    outputBuffer.getInfo(),
                    ImmutableSet.of(),
                    taskContext.getTaskStats(),
                    true);
        }
        /**
         * Builds a {@link TaskStatus} snapshot from the current task context
         * statistics. Unlike {@link #getTaskInfo()}, this reads the version
         * counter without incrementing it.
         */
        @Override
        public TaskStatus getTaskStatus()
        {
            TaskStats stats = taskContext.getTaskStats();
            return new TaskStatus(taskStateMachine.getTaskId(),
                    TASK_INSTANCE_ID,
                    nextTaskInfoVersion.get(),
                    taskStateMachine.getState(),
                    location,
                    nodeId,
                    ImmutableSet.of(),
                    ImmutableList.of(),
                    stats.getQueuedPartitionedDrivers(),
                    stats.getRunningPartitionedDrivers(),
                    false,
                    stats.getPhysicalWrittenDataSize(),
                    stats.getUserMemoryReservation(),
                    stats.getSystemMemoryReservation(),
                    0,
                    new Duration(0, MILLISECONDS));
        }
private synchronized void updateSplitQueueSpace()
{
if (getQueuedPartitionedSplitCount() < 9) {
if (!whenSplitQueueHasSpace.isDone()) {
whenSplitQueueHasSpace.set(null);
}
}
else {
if (whenSplitQueueHasSpace.isDone()) {
whenSplitQueueHasSpace = SettableFuture.create();
}
}
}
public synchronized void finishSplits(int splits)
{
List<Map.Entry<PlanNodeId, Split>> toRemove = new ArrayList<>();
Iterator<Map.Entry<PlanNodeId, Split>> iterator = this.splits.entries().iterator();
while (toRemove.size() < splits && iterator.hasNext()) {
toRemove.add(iterator.next());
}
for (Map.Entry<PlanNodeId, Split> entry : toRemove) {
this.splits.remove(entry.getKey(), entry.getValue());
}
updateSplitQueueSpace();
}
        /**
         * Drops all splits, resets the running-driver count to zero, and
         * reports the new (zero) partitioned split count to the tracker.
         */
        public synchronized void clearSplits()
        {
            splits.clear();
            partitionedSplitCountTracker.setPartitionedSplitCount(getPartitionedSplitCount());
            runningDrivers = 0;
            updateSplitQueueSpace();
        }
public synchronized void startSplits(int maxRunning)
{
runningDrivers = splits.size();
runningDrivers = Math.min(runningDrivers, maxRunning);
updateSplitQueueSpace();
}
        /**
         * Starts the task: registers a listener that drops all splits once the
         * task reaches a terminal state.
         */
        @Override
        public void start()
        {
            taskStateMachine.addStateChangeListener(newValue -> {
                if (newValue.isDone()) {
                    clearSplits();
                }
            });
        }
        /**
         * Adds splits to this task and refreshes the split accounting.
         */
        @Override
        public void addSplits(Multimap<PlanNodeId, Split> splits)
        {
            synchronized (this) {
                this.splits.putAll(splits);
            }
            // NOTE(review): the tracker update and queue-space refresh run
            // outside the synchronized block, so they may observe concurrent
            // modifications — presumably acceptable for a test mock; confirm
            // if stricter accounting is ever required.
            partitionedSplitCountTracker.setPartitionedSplitCount(getPartitionedSplitCount());
            updateSplitQueueSpace();
        }
@Override
public synchronized void noMoreSplits(PlanNodeId sourceId)
{
    // Record that this source will produce no more splits. When every
    // partitioned and remote source node of the fragment has been declared
    // complete, the whole task is finished.
    noMoreSplits.add(sourceId);
    boolean allSourcesComplete = Stream.concat(fragment.getPartitionedSourceNodes().stream(), fragment.getRemoteSourceNodes().stream())
            .filter(Objects::nonNull)
            .map(PlanNode::getId)
            .allMatch(noMoreSplits::contains);
    if (allSourcesComplete) {
        taskStateMachine.finished();
    }
}
@Override
public void noMoreSplits(PlanNodeId sourceId, Lifespan lifespan)
{
    // Per-lifespan (grouped execution) completion is not supported by this
    // implementation; callers must use noMoreSplits(PlanNodeId) instead.
    throw new UnsupportedOperationException();
}
@Override
public void setOutputBuffers(OutputBuffers outputBuffers)
{
    // Pure delegation: forward the new buffer configuration to the output buffer.
    outputBuffer.setOutputBuffers(outputBuffers);
}
@Override
public void addStateChangeListener(StateChangeListener<TaskStatus> stateChangeListener)
{
    // Adapt TaskState transitions into TaskStatus callbacks: on every state
    // change, build a fresh status snapshot and hand it to the listener (the
    // raw new state value is discarded).
    taskStateMachine.addStateChangeListener(newValue -> stateChangeListener.stateChanged(getTaskStatus()));
}
@Override
public void addFinalTaskInfoListener(StateChangeListener<TaskInfo> stateChangeListener)
{
    // Fire the listener exactly once, when the task first reaches a done state.
    // The atomic guard covers the race between registering the listener and the
    // task already being done, which is why the listener is also invoked
    // immediately with the current state after registration.
    AtomicBoolean alreadyFired = new AtomicBoolean();
    StateChangeListener<TaskState> onDone = state -> {
        if (state.isDone() && alreadyFired.compareAndSet(false, true)) {
            stateChangeListener.stateChanged(getTaskInfo());
        }
    };
    taskStateMachine.addStateChangeListener(onDone);
    onDone.stateChanged(taskStateMachine.getState());
}
@Override
public synchronized ListenableFuture<?> whenSplitQueueHasSpace(int threshold)
{
    // The threshold parameter is ignored here; space is governed solely by the
    // fixed limit inside updateSplitQueueSpace(). The future is wrapped so a
    // caller cancelling it cannot cancel the shared underlying future.
    return nonCancellationPropagating(whenSplitQueueHasSpace);
}
@Override
public void cancel()
{
    // Delegate cancellation to the state machine; splits are cleaned up by the
    // done-state listener installed in start().
    taskStateMachine.cancel();
}
@Override
public void abort()
{
    // Abort the task, then eagerly drop any queued splits rather than relying
    // solely on the state-change listener.
    taskStateMachine.abort();
    clearSplits();
}
@Override
public int getPartitionedSplitCount()
{
    // A task in a terminal state reports zero splits regardless of what may
    // still be sitting in the multimap.
    if (taskStateMachine.getState().isDone()) {
        return 0;
    }
    synchronized (this) {
        // Sum the splits currently held for each partitioned source node.
        int total = 0;
        for (PlanNodeId partitionedSource : fragment.getPartitionedSources()) {
            total += splits.get(partitionedSource).size();
        }
        return total;
    }
}
@Override
public synchronized int getQueuedPartitionedSplitCount()
{
    // Queued = total partitioned splits minus those already marked running.
    // Terminal tasks report zero, mirroring getPartitionedSplitCount().
    if (taskStateMachine.getState().isDone()) {
        return 0;
    }
    return getPartitionedSplitCount() - runningDrivers;
}
}
}
| |
/*
* Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 6173675
* @summary Basic test of ThreadMXBean.getThreadAllocatedBytes
* @author Paul Hohensee
*/
import java.lang.management.*;
public class ThreadAllocatedMemory {
    // Platform ThreadMXBean cast to the com.sun.management extension, which
    // exposes the per-thread allocated-bytes API under test.
    private static com.sun.management.ThreadMXBean mbean =
        (com.sun.management.ThreadMXBean)ManagementFactory.getThreadMXBean();
    private static boolean testFailed = false;
    // done/done1 are two sequential barriers: worker threads wait on `obj`
    // until `done` flips (first measurement window closes), then again until
    // `done1` flips (they are released to exit).
    private static boolean done = false;
    private static boolean done1 = false;
    private static Object obj = new Object();
    private static final int NUM_THREADS = 10;
    private static Thread[] threads = new Thread[NUM_THREADS];
    // Per-thread allocation snapshot taken while the workers are blocked.
    private static long[] sizes = new long[NUM_THREADS];
    public static void main(String[] argv)
        throws Exception {
        // Skip silently on VMs that do not support allocated-memory measurement.
        if (!mbean.isThreadAllocatedMemorySupported()) {
            return;
        }
        // disable allocated memory measurement
        if (mbean.isThreadAllocatedMemoryEnabled()) {
            mbean.setThreadAllocatedMemoryEnabled(false);
        }
        if (mbean.isThreadAllocatedMemoryEnabled()) {
            throw new RuntimeException(
                "ThreadAllocatedMemory is expected to be disabled");
        }
        Thread curThread = Thread.currentThread();
        long id = curThread.getId();
        // While disabled, the API must return the -1 sentinel.
        long s = mbean.getThreadAllocatedBytes(id);
        if (s != -1) {
            throw new RuntimeException(
                "Invalid ThreadAllocatedBytes returned = " +
                s + " expected = -1");
        }
        // enable allocated memory measurement
        if (!mbean.isThreadAllocatedMemoryEnabled()) {
            mbean.setThreadAllocatedMemoryEnabled(true);
        }
        if (!mbean.isThreadAllocatedMemoryEnabled()) {
            throw new RuntimeException(
                "ThreadAllocatedMemory is expected to be enabled");
        }
        long size = mbean.getThreadAllocatedBytes(id);
        // implementation could have started measurement when
        // measurement was enabled, in which case size can be 0
        if (size < 0) {
            throw new RuntimeException(
                "Invalid allocated bytes returned = " + size);
        }
        // Allocate on the current thread; the counter must not decrease.
        doit();
        // Expected to be size1 >= size
        long size1 = mbean.getThreadAllocatedBytes(id);
        if (size1 < size) {
            throw new RuntimeException("Allocated bytes " + size1 +
                " expected >= " + size);
        }
        System.out.println(curThread.getName() +
            " Current thread allocated bytes = " + size +
            " allocated bytes = " + size1);
        // start threads, wait for them to block
        for (int i = 0; i < NUM_THREADS; i++) {
            threads[i] = new MyThread("MyThread-" + i);
            threads[i].start();
        }
        // threads block after doing some allocation
        waitUntilThreadBlocked();
        // Snapshot each worker's counter while it is parked in obj.wait().
        for (int i = 0; i < NUM_THREADS; i++) {
            sizes[i] = mbean.getThreadAllocatedBytes(threads[i].getId());
        }
        // let threads go and do some more allocation
        synchronized (obj) {
            done = true;
            obj.notifyAll();
        }
        // wait for threads to get going again. we don't care if we
        // catch them in mid-execution or if some of them haven't
        // restarted after we're done sleeping.
        goSleep(400);
        // Counters must be monotonically non-decreasing per thread.
        for (int i = 0; i < NUM_THREADS; i++) {
            long newSize = mbean.getThreadAllocatedBytes(threads[i].getId());
            if (sizes[i] > newSize) {
                throw new RuntimeException("TEST FAILED: " +
                    threads[i].getName() +
                    " previous allocated bytes = " + sizes[i] +
                    " > current allocated bytes = " + newSize);
            }
            System.out.println(threads[i].getName() +
                " Previous allocated bytes = " + sizes[i] +
                " Current allocated bytes = " + newSize);
        }
        // let threads exit
        synchronized (obj) {
            done1 = true;
            obj.notifyAll();
        }
        for (int i = 0; i < NUM_THREADS; i++) {
            try {
                threads[i].join();
            } catch (InterruptedException e) {
                System.out.println("Unexpected exception is thrown.");
                e.printStackTrace(System.out);
                testFailed = true;
                break;
            }
        }
        if (testFailed) {
            throw new RuntimeException("TEST FAILED");
        }
        System.out.println("Test passed");
    }
    // Sleep helper; propagates unexpected interrupts after logging them.
    private static void goSleep(long ms) throws Exception {
        try {
            Thread.sleep(ms);
        } catch (InterruptedException e) {
            System.out.println("Unexpected exception is thrown.");
            throw e;
        }
    }
    // Poll until all worker threads are parked in Object.wait() (state WAITING),
    // so that the allocation snapshot is taken at a quiescent point.
    private static void waitUntilThreadBlocked()
        throws Exception {
        int count = 0;
        while (count != NUM_THREADS) {
            goSleep(100);
            count = 0;
            for (int i = 0; i < NUM_THREADS; i++) {
                ThreadInfo info = mbean.getThreadInfo(threads[i].getId());
                if (info.getThreadState() == Thread.State.WAITING) {
                    count++;
                }
            }
        }
    }
    // Deliberately allocation-heavy: repeated string concatenation creates many
    // temporary objects so the allocated-bytes counter visibly advances.
    public static void doit() {
        String tmp = "";
        long hashCode = 0;
        for (int counter = 0; counter < 1000; counter++) {
            tmp += counter;
            hashCode = tmp.hashCode();
        }
        System.out.println(Thread.currentThread().getName() +
            " hashcode: " + hashCode);
    }
    // Worker: allocate, park on the first barrier, allocate again and verify
    // its own counter did not decrease, then park on the second barrier.
    static class MyThread extends Thread {
        public MyThread(String name) {
            super(name);
        }
        public void run() {
            ThreadAllocatedMemory.doit();
            synchronized (obj) {
                while (!done) {
                    try {
                        obj.wait();
                    } catch (InterruptedException e) {
                        System.out.println("Unexpected exception is thrown.");
                        e.printStackTrace(System.out);
                        testFailed = true;
                        break;
                    }
                }
            }
            long size1 = mbean.getThreadAllocatedBytes(getId());
            ThreadAllocatedMemory.doit();
            long size2 = mbean.getThreadAllocatedBytes(getId());
            System.out.println(getName() + ": " +
                "ThreadAllocatedBytes = " + size1 +
                " ThreadAllocatedBytes = " + size2);
            if (size1 > size2) {
                throw new RuntimeException("TEST FAILED: " + getName() +
                    " ThreadAllocatedBytes = " + size1 +
                    " > ThreadAllocatedBytes = " + size2);
            }
            synchronized (obj) {
                while (!done1) {
                    try {
                        obj.wait();
                    } catch (InterruptedException e) {
                        System.out.println("Unexpected exception is thrown.");
                        e.printStackTrace(System.out);
                        testFailed = true;
                        break;
                    }
                }
            }
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.Rewriteable;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.internal.AliasFilter;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.RemoteClusterAware;
import org.elasticsearch.transport.RemoteClusterService;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.function.BiFunction;
import java.util.function.LongSupplier;
import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH;
/**
 * Coordinating-node entry point for the _search API: resolves local and remote
 * (cross-cluster) indices, builds shard iterators and alias filters, and kicks
 * off the appropriate async search phase.
 */
public class TransportSearchAction extends HandledTransportAction<SearchRequest, SearchResponse> {
    /** The maximum number of shards for a single search request. */
    public static final Setting<Long> SHARD_COUNT_LIMIT_SETTING = Setting.longSetting(
        "action.search.shard_count.limit", Long.MAX_VALUE, 1L, Property.Dynamic, Property.NodeScope);
    private final ClusterService clusterService;
    private final SearchTransportService searchTransportService;
    private final RemoteClusterService remoteClusterService;
    private final SearchPhaseController searchPhaseController;
    private final SearchService searchService;
    @Inject
    public TransportSearchAction(Settings settings, ThreadPool threadPool, TransportService transportService, SearchService searchService,
                                 SearchTransportService searchTransportService, SearchPhaseController searchPhaseController,
                                 ClusterService clusterService, ActionFilters actionFilters,
                                 IndexNameExpressionResolver indexNameExpressionResolver) {
        super(settings, SearchAction.NAME, threadPool, transportService, actionFilters, SearchRequest::new, indexNameExpressionResolver);
        this.searchPhaseController = searchPhaseController;
        this.searchTransportService = searchTransportService;
        this.remoteClusterService = searchTransportService.getRemoteClusterService();
        // Registers the shard-level search transport handlers as a side effect.
        SearchTransportService.registerRequestHandler(transportService, searchService);
        this.clusterService = clusterService;
        this.searchService = searchService;
    }
    /**
     * Builds a map from index UUID to the alias filter applicable for this request,
     * raising a block exception for any READ-blocked index. Remote-cluster filters
     * (already keyed by UUID) are merged in last.
     */
    private Map<String, AliasFilter> buildPerIndexAliasFilter(SearchRequest request, ClusterState clusterState,
                                                              Index[] concreteIndices, Map<String, AliasFilter> remoteAliasMap) {
        final Map<String, AliasFilter> aliasFilterMap = new HashMap<>();
        for (Index index : concreteIndices) {
            clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index.getName());
            AliasFilter aliasFilter = searchService.buildAliasFilter(clusterState, index.getName(), request.indices());
            assert aliasFilter != null;
            aliasFilterMap.put(index.getUUID(), aliasFilter);
        }
        aliasFilterMap.putAll(remoteAliasMap);
        return aliasFilterMap;
    }
    /**
     * Resolves the index-boost expressions in the request source to concrete index
     * UUIDs. When an index matches multiple expressions, the first boost wins
     * (putIfAbsent preserves the order boosts were specified in).
     */
    private Map<String, Float> resolveIndexBoosts(SearchRequest searchRequest, ClusterState clusterState) {
        if (searchRequest.source() == null) {
            return Collections.emptyMap();
        }
        SearchSourceBuilder source = searchRequest.source();
        if (source.indexBoosts() == null) {
            return Collections.emptyMap();
        }
        Map<String, Float> concreteIndexBoosts = new HashMap<>();
        for (SearchSourceBuilder.IndexBoost ib : source.indexBoosts()) {
            Index[] concreteIndices =
                indexNameExpressionResolver.concreteIndices(clusterState, searchRequest.indicesOptions(), ib.getIndex());
            for (Index concreteIndex : concreteIndices) {
                concreteIndexBoosts.putIfAbsent(concreteIndex.getUUID(), ib.getBoost());
            }
        }
        return Collections.unmodifiableMap(concreteIndexBoosts);
    }
    /**
     * Search operations need two clocks. One clock is to fulfill real clock needs (e.g., resolving
     * "now" to an index name). Another clock is needed for measuring how long a search operation
     * took. These two uses are at odds with each other. There are many issues with using a real
     * clock for measuring how long an operation took (they often lack precision, they are subject
     * to moving backwards due to NTP and other such complexities, etc.). There are also issues with
     * using a relative clock for reporting real time. Thus, we simply separate these two uses.
     */
    static class SearchTimeProvider {
        private final long absoluteStartMillis;
        private final long relativeStartNanos;
        private final LongSupplier relativeCurrentNanosProvider;
        /**
         * Instantiates a new search time provider. The absolute start time is the real clock time
         * used for resolving index expressions that include dates. The relative start time is the
         * start of the search operation according to a relative clock. The total time the search
         * operation took can be measured against the provided relative clock and the relative start
         * time.
         *
         * @param absoluteStartMillis the absolute start time in milliseconds since the epoch
         * @param relativeStartNanos the relative start time in nanoseconds
         * @param relativeCurrentNanosProvider provides the current relative time
         */
        SearchTimeProvider(
                final long absoluteStartMillis,
                final long relativeStartNanos,
                final LongSupplier relativeCurrentNanosProvider) {
            this.absoluteStartMillis = absoluteStartMillis;
            this.relativeStartNanos = relativeStartNanos;
            this.relativeCurrentNanosProvider = relativeCurrentNanosProvider;
        }
        long getAbsoluteStartMillis() {
            return absoluteStartMillis;
        }
        long getRelativeStartNanos() {
            return relativeStartNanos;
        }
        long getRelativeCurrentNanos() {
            return relativeCurrentNanosProvider.getAsLong();
        }
    }
    @Override
    protected void doExecute(Task task, SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
        // Capture both clocks up front (see SearchTimeProvider for why two).
        final long absoluteStartMillis = System.currentTimeMillis();
        final long relativeStartNanos = System.nanoTime();
        final SearchTimeProvider timeProvider =
            new SearchTimeProvider(absoluteStartMillis, relativeStartNanos, System::nanoTime);
        // The request source is rewritten first (async); the search itself is
        // launched from this listener once the rewrite completes.
        ActionListener<SearchSourceBuilder> rewriteListener = ActionListener.wrap(source -> {
            if (source != searchRequest.source()) {
                // only set it if it changed - we don't allow null values to be set but it might be already null be we want to catch
                // situations when it possible due to a bug changes to null
                searchRequest.source(source);
            }
            final ClusterState clusterState = clusterService.state();
            // Split requested indices into local ones and per-remote-cluster groups.
            final Map<String, OriginalIndices> remoteClusterIndices = remoteClusterService.groupIndices(searchRequest.indicesOptions(),
                searchRequest.indices(), idx -> indexNameExpressionResolver.hasIndexOrAlias(idx, clusterState));
            OriginalIndices localIndices = remoteClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
            if (remoteClusterIndices.isEmpty()) {
                // Local-only search: no remote shard collection needed.
                executeSearch((SearchTask)task, timeProvider, searchRequest, localIndices, remoteClusterIndices, Collections.emptyList(),
                    (clusterName, nodeId) -> null, clusterState, Collections.emptyMap(), listener, clusterState.getNodes()
                        .getDataNodes().size(), SearchResponse.Clusters.EMPTY);
            } else {
                // Cross-cluster search: first ask each remote cluster for its
                // matching shards, then launch a combined local+remote search.
                remoteClusterService.collectSearchShards(searchRequest.indicesOptions(), searchRequest.preference(),
                    searchRequest.routing(), remoteClusterIndices, ActionListener.wrap((searchShardsResponses) -> {
                        List<SearchShardIterator> remoteShardIterators = new ArrayList<>();
                        Map<String, AliasFilter> remoteAliasFilters = new HashMap<>();
                        BiFunction<String, String, DiscoveryNode> clusterNodeLookup = processRemoteShards(searchShardsResponses,
                            remoteClusterIndices, remoteShardIterators, remoteAliasFilters);
                        int numNodesInvolved = searchShardsResponses.values().stream().mapToInt(r -> r.getNodes().length).sum()
                            + clusterState.getNodes().getDataNodes().size();
                        SearchResponse.Clusters clusters = buildClusters(localIndices, remoteClusterIndices, searchShardsResponses);
                        executeSearch((SearchTask) task, timeProvider, searchRequest, localIndices, remoteClusterIndices,
                            remoteShardIterators, clusterNodeLookup, clusterState, remoteAliasFilters, listener, numNodesInvolved,
                            clusters);
                    }, listener::onFailure));
            }
        }, listener::onFailure);
        if (searchRequest.source() == null) {
            // Nothing to rewrite; proceed directly with a null source.
            rewriteListener.onResponse(searchRequest.source());
        } else {
            Rewriteable.rewriteAndFetch(searchRequest.source(), searchService.getRewriteContext(timeProvider::getAbsoluteStartMillis),
                rewriteListener);
        }
    }
    /**
     * Summarizes how many clusters participate in this search and how many
     * responded. A remote cluster that returned the EMPTY sentinel response is
     * counted as skipped.
     */
    static SearchResponse.Clusters buildClusters(OriginalIndices localIndices, Map<String, OriginalIndices> remoteIndices,
                                                 Map<String, ClusterSearchShardsResponse> searchShardsResponses) {
        // 0 or 1 depending on whether any local indices are targeted.
        int localClusters = Math.min(localIndices.indices().length, 1);
        int totalClusters = remoteIndices.size() + localClusters;
        int successfulClusters = localClusters;
        for (ClusterSearchShardsResponse searchShardsResponse : searchShardsResponses.values()) {
            if (searchShardsResponse != ClusterSearchShardsResponse.EMPTY) {
                successfulClusters++;
            }
        }
        int skippedClusters = totalClusters - successfulClusters;
        return new SearchResponse.Clusters(totalClusters, successfulClusters, skippedClusters);
    }
    /**
     * Converts the per-remote-cluster shard responses into shard iterators and
     * alias filters (both filled into the provided output collections), and
     * returns a (clusterAlias, nodeId) -> DiscoveryNode lookup for routing.
     */
    static BiFunction<String, String, DiscoveryNode> processRemoteShards(Map<String, ClusterSearchShardsResponse> searchShardsResponses,
                                                                         Map<String, OriginalIndices> remoteIndicesByCluster,
                                                                         List<SearchShardIterator> remoteShardIterators,
                                                                         Map<String, AliasFilter> aliasFilterMap) {
        Map<String, Map<String, DiscoveryNode>> clusterToNode = new HashMap<>();
        for (Map.Entry<String, ClusterSearchShardsResponse> entry : searchShardsResponses.entrySet()) {
            String clusterAlias = entry.getKey();
            ClusterSearchShardsResponse searchShardsResponse = entry.getValue();
            HashMap<String, DiscoveryNode> idToDiscoveryNode = new HashMap<>();
            clusterToNode.put(clusterAlias, idToDiscoveryNode);
            for (DiscoveryNode remoteNode : searchShardsResponse.getNodes()) {
                idToDiscoveryNode.put(remoteNode.getId(), remoteNode);
            }
            final Map<String, AliasFilter> indicesAndFilters = searchShardsResponse.getIndicesAndFilters();
            for (ClusterSearchShardsGroup clusterSearchShardsGroup : searchShardsResponse.getGroups()) {
                //add the cluster name to the remote index names for indices disambiguation
                //this ends up in the hits returned with the search response
                ShardId shardId = clusterSearchShardsGroup.getShardId();
                final AliasFilter aliasFilter;
                if (indicesAndFilters == null) {
                    aliasFilter = AliasFilter.EMPTY;
                } else {
                    aliasFilter = indicesAndFilters.get(shardId.getIndexName());
                    assert aliasFilter != null : "alias filter must not be null for index: " + shardId.getIndex();
                }
                String[] aliases = aliasFilter.getAliases();
                String[] finalIndices = aliases.length == 0 ? new String[] {shardId.getIndexName()} : aliases;
                // here we have to map the filters to the UUID since from now on we use the uuid for the lookup
                aliasFilterMap.put(shardId.getIndex().getUUID(), aliasFilter);
                final OriginalIndices originalIndices = remoteIndicesByCluster.get(clusterAlias);
                assert originalIndices != null : "original indices are null for clusterAlias: " + clusterAlias;
                SearchShardIterator shardIterator = new SearchShardIterator(clusterAlias, shardId,
                    Arrays.asList(clusterSearchShardsGroup.getShards()), new OriginalIndices(finalIndices,
                    originalIndices.indicesOptions()));
                remoteShardIterators.add(shardIterator);
            }
        }
        return (clusterAlias, nodeId) -> {
            Map<String, DiscoveryNode> clusterNodes = clusterToNode.get(clusterAlias);
            if (clusterNodes == null) {
                throw new IllegalArgumentException("unknown remote cluster: " + clusterAlias);
            }
            return clusterNodes.get(nodeId);
        };
    }
    /**
     * Core fan-out: resolves concrete indices, alias filters, routing and boosts,
     * merges local and remote shard iterators, applies request-level defaults and
     * optimizations, and starts the async search action.
     */
    private void executeSearch(SearchTask task, SearchTimeProvider timeProvider, SearchRequest searchRequest, OriginalIndices localIndices,
                               Map<String, OriginalIndices> remoteClusterIndices, List<SearchShardIterator> remoteShardIterators,
                               BiFunction<String, String, DiscoveryNode> remoteConnections, ClusterState clusterState,
                               Map<String, AliasFilter> remoteAliasMap, ActionListener<SearchResponse> listener, int nodeCount,
                               SearchResponse.Clusters clusters) {
        clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ);
        // TODO: I think startTime() should become part of ActionRequest and that should be used both for index name
        // date math expressions and $now in scripts. This way all apis will deal with now in the same way instead
        // of just for the _search api
        final Index[] indices;
        if (localIndices.indices().length == 0 && remoteClusterIndices.isEmpty() == false) {
            indices = Index.EMPTY_ARRAY; // don't search on _all if only remote indices were specified
        } else {
            indices = indexNameExpressionResolver.concreteIndices(clusterState, searchRequest.indicesOptions(),
                timeProvider.getAbsoluteStartMillis(), localIndices.indices());
        }
        Map<String, AliasFilter> aliasFilter = buildPerIndexAliasFilter(searchRequest, clusterState, indices, remoteAliasMap);
        Map<String, Set<String>> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(),
            searchRequest.indices());
        String[] concreteIndices = new String[indices.length];
        for (int i = 0; i < indices.length; i++) {
            concreteIndices[i] = indices[i].getName();
        }
        Map<String, Long> nodeSearchCounts = searchTransportService.getPendingSearchRequests();
        GroupShardsIterator<ShardIterator> localShardsIterator = clusterService.operationRouting().searchShards(clusterState,
            concreteIndices, routingMap, searchRequest.preference(), searchService.getResponseCollectorService(), nodeSearchCounts);
        GroupShardsIterator<SearchShardIterator> shardIterators = mergeShardsIterators(localShardsIterator, localIndices,
            remoteShardIterators);
        failIfOverShardCountLimit(clusterService, shardIterators.size());
        Map<String, Float> concreteIndexBoosts = resolveIndexBoosts(searchRequest, clusterState);
        // optimize search type for cases where there is only one shard group to search on
        if (shardIterators.size() == 1) {
            // if we only have one group, then we always want Q_T_F, no need for DFS, and no need to do THEN since we hit one shard
            searchRequest.searchType(QUERY_THEN_FETCH);
        }
        if (searchRequest.allowPartialSearchResults() == null) {
            // No user preference defined in search request - apply cluster service default
            searchRequest.allowPartialSearchResults(searchService.defaultAllowPartialSearchResults());
        }
        if (searchRequest.isSuggestOnly()) {
            // disable request cache if we have only suggest
            searchRequest.requestCache(false);
            switch (searchRequest.searchType()) {
                case DFS_QUERY_THEN_FETCH:
                    // convert to Q_T_F if we have only suggest
                    searchRequest.searchType(QUERY_THEN_FETCH);
                    break;
            }
        }
        final DiscoveryNodes nodes = clusterState.nodes();
        // Resolve (clusterName, nodeId) to a transport connection; a null cluster
        // name means the local cluster.
        BiFunction<String, String, Transport.Connection> connectionLookup = (clusterName, nodeId) -> {
            final DiscoveryNode discoveryNode = clusterName == null ? nodes.get(nodeId) : remoteConnections.apply(clusterName, nodeId);
            if (discoveryNode == null) {
                throw new IllegalStateException("no node found for id: " + nodeId);
            }
            return searchTransportService.getConnection(clusterName, discoveryNode);
        };
        if (searchRequest.isMaxConcurrentShardRequestsSet() == false) {
            // we try to set a default of max concurrent shard requests based on
            // the node count but upper-bound it by 256 by default to keep it sane. A single
            // search request that fans out lots of shards should hit a cluster too hard while 256 is already a lot.
            // we multiply it by the default number of shards such that a single request in a cluster of 1 would hit all shards of a
            // default index.
            searchRequest.setMaxConcurrentShardRequests(Math.min(256, nodeCount
                * IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getDefault(Settings.EMPTY)));
        }
        boolean preFilterSearchShards = shouldPreFilterSearchShards(searchRequest, shardIterators);
        searchAsyncAction(task, searchRequest, shardIterators, timeProvider, connectionLookup, clusterState.version(),
            Collections.unmodifiableMap(aliasFilter), concreteIndexBoosts, listener, preFilterSearchShards, clusters).start();
    }
    /**
     * Pre-filtering (can-match) is only worthwhile for QUERY_THEN_FETCH when the
     * source can provably match nothing on some shards and enough shards are
     * targeted to exceed the configured pre-filter threshold.
     */
    private boolean shouldPreFilterSearchShards(SearchRequest searchRequest, GroupShardsIterator<SearchShardIterator> shardIterators) {
        SearchSourceBuilder source = searchRequest.source();
        return searchRequest.searchType() == QUERY_THEN_FETCH && // we can't do this for DFS it needs to fan out to all shards all the time
            SearchService.canRewriteToMatchNone(source) &&
            searchRequest.getPreFilterShardSize() < shardIterators.size();
    }
    /**
     * Wraps local shard iterators as SearchShardIterators (null cluster alias =
     * local) and combines them with the already-built remote iterators.
     */
    static GroupShardsIterator<SearchShardIterator> mergeShardsIterators(GroupShardsIterator<ShardIterator> localShardsIterator,
                                                                         OriginalIndices localIndices,
                                                                         List<SearchShardIterator> remoteShardIterators) {
        List<SearchShardIterator> shards = new ArrayList<>();
        shards.addAll(remoteShardIterators);
        for (ShardIterator shardIterator : localShardsIterator) {
            shards.add(new SearchShardIterator(null, shardIterator.shardId(), shardIterator.getShardRoutings(), localIndices));
        }
        return new GroupShardsIterator<>(shards);
    }
    @Override
    protected final void doExecute(SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
        // Task-less execution is intentionally unsupported; the task-accepting
        // overload above is the only entry point.
        throw new UnsupportedOperationException("the task parameter is required");
    }
    /**
     * Selects the async action for the request's search type. When pre-filtering
     * is enabled, wraps the real action in a can-match phase that first prunes
     * shards which cannot match, then recurses with preFilter=false.
     */
    private AbstractSearchAsyncAction searchAsyncAction(SearchTask task, SearchRequest searchRequest,
                                                        GroupShardsIterator<SearchShardIterator> shardIterators,
                                                        SearchTimeProvider timeProvider,
                                                        BiFunction<String, String, Transport.Connection> connectionLookup,
                                                        long clusterStateVersion, Map<String, AliasFilter> aliasFilter,
                                                        Map<String, Float> concreteIndexBoosts,
                                                        ActionListener<SearchResponse> listener, boolean preFilter,
                                                        SearchResponse.Clusters clusters) {
        Executor executor = threadPool.executor(ThreadPool.Names.SEARCH);
        if (preFilter) {
            return new CanMatchPreFilterSearchPhase(logger, searchTransportService, connectionLookup,
                aliasFilter, concreteIndexBoosts, executor, searchRequest, listener, shardIterators,
                timeProvider, clusterStateVersion, task, (iter) -> {
                AbstractSearchAsyncAction action = searchAsyncAction(task, searchRequest, iter, timeProvider, connectionLookup,
                    clusterStateVersion, aliasFilter, concreteIndexBoosts, listener, false, clusters);
                return new SearchPhase(action.getName()) {
                    @Override
                    public void run() throws IOException {
                        action.start();
                    }
                };
            }, clusters);
        } else {
            AbstractSearchAsyncAction searchAsyncAction;
            switch (searchRequest.searchType()) {
                case DFS_QUERY_THEN_FETCH:
                    searchAsyncAction = new SearchDfsQueryThenFetchAsyncAction(logger, searchTransportService, connectionLookup,
                        aliasFilter, concreteIndexBoosts, searchPhaseController, executor, searchRequest, listener, shardIterators,
                        timeProvider, clusterStateVersion, task, clusters);
                    break;
                case QUERY_AND_FETCH:
                case QUERY_THEN_FETCH:
                    searchAsyncAction = new SearchQueryThenFetchAsyncAction(logger, searchTransportService, connectionLookup,
                        aliasFilter, concreteIndexBoosts, searchPhaseController, executor, searchRequest, listener, shardIterators,
                        timeProvider, clusterStateVersion, task, clusters);
                    break;
                default:
                    throw new IllegalStateException("Unknown search type: [" + searchRequest.searchType() + "]");
            }
            return searchAsyncAction;
        }
    }
    // Enforces the dynamic action.search.shard_count.limit cluster setting.
    private static void failIfOverShardCountLimit(ClusterService clusterService, int shardCount) {
        final long shardCountLimit = clusterService.getClusterSettings().get(SHARD_COUNT_LIMIT_SETTING);
        if (shardCount > shardCountLimit) {
            throw new IllegalArgumentException("Trying to query " + shardCount + " shards, which is over the limit of "
                + shardCountLimit + ". This limit exists because querying many shards at the same time can make the "
                + "job of the coordinating node very CPU and/or memory intensive. It is usually a better idea to "
                + "have a smaller number of larger shards. Update [" + SHARD_COUNT_LIMIT_SETTING.getKey()
                + "] to a greater value if you really want to query that many shards at the same time.");
        }
    }
}
| |
/*
* This file is part of "lunisolar-magma".
*
* (C) Copyright 2014-2022 Lunisolar (http://lunisolar.eu/).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.lunisolar.magma.func.supplier;
import eu.lunisolar.magma.func.*; // NOSONAR
import javax.annotation.Nonnull; // NOSONAR
import javax.annotation.Nullable; // NOSONAR
import java.util.Objects;// NOSONAR
import eu.lunisolar.magma.basics.meta.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.type.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.domain.*; // NOSONAR
import eu.lunisolar.magma.func.action.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.bi.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.obj.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.tri.*; // NOSONAR
import eu.lunisolar.magma.func.function.*; // NOSONAR
import eu.lunisolar.magma.func.function.conversion.*; // NOSONAR
import eu.lunisolar.magma.func.function.from.*; // NOSONAR
import eu.lunisolar.magma.func.function.to.*; // NOSONAR
import eu.lunisolar.magma.func.operator.binary.*; // NOSONAR
import eu.lunisolar.magma.func.operator.ternary.*; // NOSONAR
import eu.lunisolar.magma.func.operator.unary.*; // NOSONAR
import eu.lunisolar.magma.func.predicate.*; // NOSONAR
import eu.lunisolar.magma.func.supplier.*; // NOSONAR
import org.testng.Assert;
import org.testng.annotations.*; //NOSONAR
import java.util.regex.Pattern; //NOSONAR
import java.text.ParseException; //NOSONAR
import eu.lunisolar.magma.basics.*; //NOSONAR
import eu.lunisolar.magma.basics.exceptions.*; //NOSONAR
import java.util.concurrent.atomic.AtomicInteger; //NOSONAR
import eu.lunisolar.magma.func.tuple.*; // NOSONAR
import java.util.function.*; // NOSONAR
/** The test obviously concentrate on the interface methods the function it self is very simple. */
public class LDblSupplierTest {
    // Messages reused by the exception-propagation assertions below.
    private static final String ORIGINAL_MESSAGE = "Original message";
    private static final String EXCEPTION_WAS_WRAPPED = "Exception was wrapped.";
    private static final String NO_EXCEPTION_WERE_THROWN = "No exception were thrown.";
    // Value produced by every supplier under test.
    private double testValue = 100d;
    // Subject under test: anonymous implementation of the throwing variant getAsDblX().
    private LDblSupplier sut = new LDblSupplier(){
        public double getAsDblX() {
            return testValue;
        }
    };
    // Plain JRE supplier used to exercise the wrap() factory.
    private DoubleSupplier jre = () -> testValue;
    // Supplier that always throws a checked exception (ParseException).
    private LDblSupplier sutAlwaysThrowing = LDblSupplier.dblSup(() -> {
        throw new ParseException(ORIGINAL_MESSAGE, 0);
    });
    // Supplier that always throws an unchecked exception.
    private LDblSupplier sutAlwaysThrowingUnchecked = LDblSupplier.dblSup(() -> {
        throw new IndexOutOfBoundsException(ORIGINAL_MESSAGE);
    });
    @Test
    public void testTheResult() throws Throwable {
        Assert.assertEquals(sut.getAsDbl(), testValue);
    }
    @Test
    public void testTupleCall() throws Throwable {
        // A supplier takes no arguments, so the "tuple" domain is the empty tuple.
        LTuple.Void domainObject = Tuple4U.tuple();
        Object result = sut.tupleGetAsDbl(domainObject);
        Assert.assertEquals(result, testValue);
    }
    @Test
    public void testNonNullGetAsDbl() throws Throwable {
        Assert.assertEquals(sut.nonNullGetAsDbl(), testValue);
    }
    @Test
    public void testNestingGetAsDblUnchecked() throws Throwable {
        // then
        // nesting must pass an already-unchecked exception through unchanged (no wrapping).
        try {
            sutAlwaysThrowingUnchecked.nestingGetAsDbl();
            Assert.fail(NO_EXCEPTION_WERE_THROWN);
        } catch (Exception e) {
            Assert.assertEquals(e.getClass(), IndexOutOfBoundsException.class);
            Assert.assertNull(e.getCause());
            Assert.assertEquals(e.getMessage(), ORIGINAL_MESSAGE);
        }
    }
    @Test
    public void testShovingGetAsDblUnchecked() throws Throwable {
        // then
        // shoving must likewise rethrow the unchecked exception as-is.
        try {
            sutAlwaysThrowingUnchecked.shovingGetAsDbl();
            Assert.fail(NO_EXCEPTION_WERE_THROWN);
        } catch (Exception e) {
            Assert.assertEquals(e.getClass(), IndexOutOfBoundsException.class);
            Assert.assertNull(e.getCause());
            Assert.assertEquals(e.getMessage(), ORIGINAL_MESSAGE);
        }
    }
    @Test
    public void testFunctionalInterfaceDescription() throws Throwable {
        Assert.assertEquals(sut.functionalInterfaceDescription(), "LDblSupplier: double getAsDbl()");
    }
    @Test
    public void testDblSupMethod() throws Throwable {
        Assert.assertTrue(LDblSupplier.dblSup(() -> testValue ) instanceof LDblSupplier);
    }
    @Test
    public void testWrapStdMethod() throws Throwable {
        Assert.assertTrue(LDblSupplier.wrap(jre) instanceof LDblSupplier);
    }
    // <editor-fold desc="then (functional)">
    // The testToXxxSupN methods below all follow the same pattern: compose the
    // supplier with a "then" function of a given result type, then verify that
    // both stages were called and the composed supplier returns the mapped value.
    @Test
    public void testToSup0() throws Throwable {
        final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
        final ThreadLocal<Boolean> thenFunctionCalled = ThreadLocal.withInitial(()-> false);
        //given (+ some assertions)
        LDblSupplier sutO = () -> {
            mainFunctionCalled.set(true);
            return 90d;
        };
        LDblFunction<Integer> thenFunction = p -> {
            thenFunctionCalled.set(true);
            // double
            Assert.assertEquals(p, (Object) 90d);
            // Integer
            return 100;
        };
        //when
        LSupplier<Integer> function = sutO.toSup(thenFunction);
        Integer finalValue = function.get();
        //then - finals
        Assert.assertEquals(finalValue, (Object) 100);
        Assert.assertTrue(mainFunctionCalled.get());
        Assert.assertTrue(thenFunctionCalled.get());
    }
    @Test
    public void testToByteSup1() throws Throwable {
        final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
        final ThreadLocal<Boolean> thenFunctionCalled = ThreadLocal.withInitial(()-> false);
        //given (+ some assertions)
        LDblSupplier sutO = () -> {
            mainFunctionCalled.set(true);
            return 90d;
        };
        LDblToByteFunction thenFunction = p -> {
            thenFunctionCalled.set(true);
            // double
            Assert.assertEquals(p, (Object) 90d);
            // byte
            return (byte)100;
        };
        //when
        LByteSupplier function = sutO.toByteSup(thenFunction);
        byte finalValue = function.getAsByte();
        //then - finals
        Assert.assertEquals(finalValue, (Object) (byte)100);
        Assert.assertTrue(mainFunctionCalled.get());
        Assert.assertTrue(thenFunctionCalled.get());
    }
    @Test
    public void testToSrtSup2() throws Throwable {
        final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
        final ThreadLocal<Boolean> thenFunctionCalled = ThreadLocal.withInitial(()-> false);
        //given (+ some assertions)
        LDblSupplier sutO = () -> {
            mainFunctionCalled.set(true);
            return 90d;
        };
        LDblToSrtFunction thenFunction = p -> {
            thenFunctionCalled.set(true);
            // double
            Assert.assertEquals(p, (Object) 90d);
            // short
            return (short)100;
        };
        //when
        LSrtSupplier function = sutO.toSrtSup(thenFunction);
        short finalValue = function.getAsSrt();
        //then - finals
        Assert.assertEquals(finalValue, (Object) (short)100);
        Assert.assertTrue(mainFunctionCalled.get());
        Assert.assertTrue(thenFunctionCalled.get());
    }
    @Test
    public void testToIntSup3() throws Throwable {
        final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
        final ThreadLocal<Boolean> thenFunctionCalled = ThreadLocal.withInitial(()-> false);
        //given (+ some assertions)
        LDblSupplier sutO = () -> {
            mainFunctionCalled.set(true);
            return 90d;
        };
        LDblToIntFunction thenFunction = p -> {
            thenFunctionCalled.set(true);
            // double
            Assert.assertEquals(p, (Object) 90d);
            // int
            return 100;
        };
        //when
        LIntSupplier function = sutO.toIntSup(thenFunction);
        int finalValue = function.getAsInt();
        //then - finals
        Assert.assertEquals(finalValue, (Object) 100);
        Assert.assertTrue(mainFunctionCalled.get());
        Assert.assertTrue(thenFunctionCalled.get());
    }
    @Test
    public void testToLongSup4() throws Throwable {
        final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
        final ThreadLocal<Boolean> thenFunctionCalled = ThreadLocal.withInitial(()-> false);
        //given (+ some assertions)
        LDblSupplier sutO = () -> {
            mainFunctionCalled.set(true);
            return 90d;
        };
        LDblToLongFunction thenFunction = p -> {
            thenFunctionCalled.set(true);
            // double
            Assert.assertEquals(p, (Object) 90d);
            // long
            return 100L;
        };
        //when
        LLongSupplier function = sutO.toLongSup(thenFunction);
        long finalValue = function.getAsLong();
        //then - finals
        Assert.assertEquals(finalValue, (Object) 100L);
        Assert.assertTrue(mainFunctionCalled.get());
        Assert.assertTrue(thenFunctionCalled.get());
    }
    @Test
    public void testToFltSup5() throws Throwable {
        final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
        final ThreadLocal<Boolean> thenFunctionCalled = ThreadLocal.withInitial(()-> false);
        //given (+ some assertions)
        LDblSupplier sutO = () -> {
            mainFunctionCalled.set(true);
            return 90d;
        };
        LDblToFltFunction thenFunction = p -> {
            thenFunctionCalled.set(true);
            // double
            Assert.assertEquals(p, (Object) 90d);
            // float
            return 100f;
        };
        //when
        LFltSupplier function = sutO.toFltSup(thenFunction);
        float finalValue = function.getAsFlt();
        //then - finals
        Assert.assertEquals(finalValue, (Object) 100f);
        Assert.assertTrue(mainFunctionCalled.get());
        Assert.assertTrue(thenFunctionCalled.get());
    }
    @Test
    public void testToDblSup6() throws Throwable {
        final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
        final ThreadLocal<Boolean> thenFunctionCalled = ThreadLocal.withInitial(()-> false);
        //given (+ some assertions)
        LDblSupplier sutO = () -> {
            mainFunctionCalled.set(true);
            return 90d;
        };
        LDblUnaryOperator thenFunction = p -> {
            thenFunctionCalled.set(true);
            // double
            Assert.assertEquals(p, (Object) 90d);
            // double
            return 100d;
        };
        //when
        LDblSupplier function = sutO.toDblSup(thenFunction);
        double finalValue = function.getAsDbl();
        //then - finals
        Assert.assertEquals(finalValue, (Object) 100d);
        Assert.assertTrue(mainFunctionCalled.get());
        Assert.assertTrue(thenFunctionCalled.get());
    }
    @Test
    public void testToCharSup7() throws Throwable {
        final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
        final ThreadLocal<Boolean> thenFunctionCalled = ThreadLocal.withInitial(()-> false);
        //given (+ some assertions)
        LDblSupplier sutO = () -> {
            mainFunctionCalled.set(true);
            return 90d;
        };
        LDblToCharFunction thenFunction = p -> {
            thenFunctionCalled.set(true);
            // double
            Assert.assertEquals(p, (Object) 90d);
            // char
            return '\u0100';
        };
        //when
        LCharSupplier function = sutO.toCharSup(thenFunction);
        char finalValue = function.getAsChar();
        //then - finals
        Assert.assertEquals(finalValue, (Object) '\u0100');
        Assert.assertTrue(mainFunctionCalled.get());
        Assert.assertTrue(thenFunctionCalled.get());
    }
    @Test
    public void testToBoolSup8() throws Throwable {
        final ThreadLocal<Boolean> mainFunctionCalled = ThreadLocal.withInitial(()-> false);
        final ThreadLocal<Boolean> thenFunctionCalled = ThreadLocal.withInitial(()-> false);
        //given (+ some assertions)
        LDblSupplier sutO = () -> {
            mainFunctionCalled.set(true);
            return 90d;
        };
        LDblPredicate thenFunction = p -> {
            thenFunctionCalled.set(true);
            // double
            Assert.assertEquals(p, (Object) 90d);
            // boolean
            return true;
        };
        //when
        LBoolSupplier function = sutO.toBoolSup(thenFunction);
        boolean finalValue = function.getAsBool();
        //then - finals
        Assert.assertEquals(finalValue, (Object) true);
        Assert.assertTrue(mainFunctionCalled.get());
        Assert.assertTrue(thenFunctionCalled.get());
    }
    // </editor-fold>
    @Test(expectedExceptions = RuntimeException.class)
    public void testShove() {
        // given
        LDblSupplier sutThrowing = LDblSupplier.dblSup(() -> {
            throw new UnsupportedOperationException();
        });
        // when
        // shoving propagates the unchecked exception; TestNG expects it above.
        sutThrowing.shovingGetAsDbl();
    }
    @Test
    public void testToString() throws Throwable {
        // Anonymous/lambda implementations print as "<owner class>$..." plus the description.
        Assert.assertTrue(sut.toString().startsWith(this.getClass().getName()+"$"));
        Assert.assertTrue(String.format("%s", sut).contains("LDblSupplier: double getAsDbl()"));
    }
    @Test
    public void isThrowing() {
        Assert.assertFalse(sut.isThrowing());
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.svn.branchConfig;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.svn.SvnUtil;
import org.jetbrains.idea.svn.SvnVcs;
import org.jetbrains.idea.svn.commandLine.SvnBindException;
import org.jetbrains.idea.svn.info.Info;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.internal.util.SVNPathUtil;
import org.tmatesoft.svn.core.internal.util.SVNURLUtil;
import java.io.File;
import java.util.*;
import static com.intellij.openapi.vfs.VfsUtilCore.virtualToIoFile;
import static com.intellij.util.containers.ContainerUtil.map;
/**
 * Mutable configuration of an SVN trunk URL plus a map of branch-parent URLs to
 * their cached branch items. Branch-parent keys are always stored with a
 * trailing slash (see {@link #ensureEndSlash}).
 */
public class SvnBranchConfigurationNew {
  private static final Logger LOG = Logger.getInstance("#org.jetbrains.idea.svn.branchConfig.SvnBranchConfigurationNew");

  private String myTrunkUrl;
  // need public for serialization
  public Map<String, InfoStorage<List<SvnBranchItem>>> myBranchMap;
  private boolean myUserinfoInUrl;

  public SvnBranchConfigurationNew() {
    myTrunkUrl = "";
    myBranchMap = new HashMap<>();
  }

  public boolean isUserinfoInUrl() {
    return myUserinfoInUrl;
  }

  public void setUserinfoInUrl(final boolean userinfoInUrl) {
    myUserinfoInUrl = userinfoInUrl;
  }

  public void setTrunkUrl(final String trunkUrl) {
    myTrunkUrl = trunkUrl;
  }

  public String getTrunkUrl() {
    return myTrunkUrl;
  }

  /** Returns the branch-parent URLs without trailing slashes, sorted alphabetically. */
  public List<String> getBranchUrls() {
    final ArrayList<String> result = new ArrayList<>(myBranchMap.keySet());
    List<String> cutList = map(result, SvnBranchConfigurationNew::cutEndSlash);
    Collections.sort(cutList);
    return cutList;
  }

  /**
   * Registers a branches list for the given branch-parent URL.
   * Does nothing (with a log entry) when the parent URL is already present —
   * use {@link #updateBranch} to refresh an existing entry.
   */
  public void addBranches(String branchParentName, final InfoStorage<List<SvnBranchItem>> items) {
    branchParentName = ensureEndSlash(branchParentName);
    InfoStorage<List<SvnBranchItem>> current = myBranchMap.get(branchParentName);
    if (current != null) {
      LOG.info("Branches list not added for '" + branchParentName + "'; this branch parent URL is already present.");
      return;
    }
    myBranchMap.put(branchParentName, items);
  }

  /** Normalizes a URL to end with exactly one trailing slash (trailing whitespace ignored for the check). */
  public static String ensureEndSlash(String name) {
    return name.trim().endsWith("/") ? name : name + "/";
  }

  /** Inverse of {@link #ensureEndSlash}: drops a single trailing slash if present. */
  private static String cutEndSlash(String name) {
    // endsWith("/") already implies the string is non-empty, so no length check is needed.
    return name.endsWith("/") ? name.substring(0, name.length() - 1) : name;
  }

  /**
   * Replaces the branches list for an already-registered branch-parent URL.
   * Does nothing (with a log entry) when the parent URL is unknown.
   */
  public void updateBranch(String branchParentName, final InfoStorage<List<SvnBranchItem>> items) {
    branchParentName = ensureEndSlash(branchParentName);
    final InfoStorage<List<SvnBranchItem>> current = myBranchMap.get(branchParentName);
    if (current == null) {
      LOG.info("Branches list not updated for '" + branchParentName + "'; since config has changed.");
      return;
    }
    current.accept(items);
  }

  public Map<String, InfoStorage<List<SvnBranchItem>>> getBranchMap() {
    return myBranchMap;
  }

  /**
   * Returns the cached branch items under the given branch-parent URL
   * (normalized to end with a slash). Returns an empty list when the URL is
   * unknown; the previous implementation threw NullPointerException in that case.
   */
  public List<SvnBranchItem> getBranches(String url) {
    url = ensureEndSlash(url);
    InfoStorage<List<SvnBranchItem>> storage = myBranchMap.get(url);
    return storage != null ? storage.getValue() : Collections.emptyList();
  }

  /** Deep-enough copy: branch item lists are copied, the items themselves are shared. */
  public SvnBranchConfigurationNew copy() {
    SvnBranchConfigurationNew result = new SvnBranchConfigurationNew();
    result.myUserinfoInUrl = myUserinfoInUrl;
    result.myTrunkUrl = myTrunkUrl;
    result.myBranchMap = new HashMap<>();
    for (Map.Entry<String, InfoStorage<List<SvnBranchItem>>> entry : myBranchMap.entrySet()) {
      final InfoStorage<List<SvnBranchItem>> infoStorage = entry.getValue();
      result.myBranchMap.put(entry.getKey(), new InfoStorage<>(
        new ArrayList<>(infoStorage.getValue()), infoStorage.getInfoReliability()));
    }
    return result;
  }

  /**
   * Finds the base (trunk or single-branch) URL that the given URL lives under,
   * without a trailing slash, or null when the URL is outside this configuration.
   */
  @Nullable
  public String getBaseUrl(String url) {
    if (myTrunkUrl != null) {
      if (SVNPathUtil.isAncestor(myTrunkUrl, url)) {
        return cutEndSlash(myTrunkUrl);
      }
    }
    for (String branchUrl : myBranchMap.keySet()) {
      if (SVNPathUtil.isAncestor(branchUrl, url)) {
        // Only the first path component under the branch parent names the branch itself.
        String relativePath = SVNPathUtil.getRelativePath(branchUrl, url);
        int secondSlash = relativePath.indexOf("/");
        return cutEndSlash(branchUrl + (secondSlash == -1 ? relativePath : relativePath.substring(0, secondSlash)));
      }
    }
    return null;
  }

  /** Last path component of {@link #getBaseUrl}, or null when the URL is outside this configuration. */
  @Nullable
  public String getBaseName(String url) {
    String baseUrl = getBaseUrl(url);
    if (baseUrl == null) {
      return null;
    }
    int lastSlash = baseUrl.lastIndexOf("/");
    return lastSlash == -1 ? baseUrl : baseUrl.substring(lastSlash + 1);
  }

  /** Part of the URL below its base URL (leading slash included), or null when outside this configuration. */
  @Nullable
  public String getRelativeUrl(String url) {
    String baseUrl = getBaseUrl(url);
    return baseUrl == null ? null : url.substring(baseUrl.length());
  }

  @Nullable
  public SVNURL getWorkingBranch(@NotNull SVNURL someUrl) throws SvnBindException {
    String baseUrl = getBaseUrl(someUrl.toString());
    return baseUrl == null ? null : SvnUtil.createUrl(baseUrl);
  }

  /** Feeds the trunk URL and every known branch URL to the listener until it accepts one. */
  private void iterateUrls(final UrlListener listener) throws SVNException {
    if (listener.accept(myTrunkUrl)) {
      return;
    }
    for (String branchUrl : myBranchMap.keySet()) {
      // use more exact comparison first (paths longer)
      final List<SvnBranchItem> children = myBranchMap.get(branchUrl).getValue();
      for (SvnBranchItem child : children) {
        if (listener.accept(child.getUrl())) {
          return;
        }
      }
    }
  }

  // to retrieve mappings between existing in the project working copies and their URLs
  @Nullable
  public Map<String, String> getUrl2FileMappings(final Project project, final VirtualFile root) {
    try {
      final BranchRootSearcher searcher = new BranchRootSearcher(SvnVcs.getInstance(project), root);
      iterateUrls(searcher);
      return searcher.getBranchesUnder();
    }
    catch (SVNException e) {
      // A resolution failure means no reliable mapping can be built.
      return null;
    }
  }

  public void removeBranch(String url) {
    url = ensureEndSlash(url);
    myBranchMap.remove(url);
  }

  /** Collects, for each visited URL under the working-copy root, its local file path. */
  private static class BranchRootSearcher implements UrlListener {
    private final VirtualFile myRoot;
    private final SVNURL myRootUrl;
    // url path to file path
    private final Map<String, String> myBranchesUnder;

    private BranchRootSearcher(final SvnVcs vcs, final VirtualFile root) throws SVNException {
      myRoot = root;
      myBranchesUnder = new HashMap<>();
      final Info info = vcs.getInfo(myRoot.getPath());
      myRootUrl = info != null ? info.getURL() : null;
    }

    public boolean accept(final String url) throws SVNException {
      if (myRootUrl != null) {
        final File baseDir = virtualToIoFile(myRoot);
        final String baseUrl = myRootUrl.getPath();
        final SVNURL branchUrl = SVNURL.parseURIEncoded(url);
        if (myRootUrl.equals(SVNURLUtil.getCommonURLAncestor(myRootUrl, branchUrl))) {
          final File file = SvnUtil.fileFromUrl(baseDir, baseUrl, branchUrl.getPath());
          myBranchesUnder.put(url, file.getAbsolutePath());
        }
      }
      return false; // iterate everything
    }

    public Map<String, String> getBranchesUnder() {
      return myBranchesUnder;
    }
  }

  private interface UrlListener {
    boolean accept(final String url) throws SVNException;
  }
}
| |
// ----------------------------------------------------------------------------
// Copyright 2007-2013, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2007/03/11 Martin D. Flynn
// -Initial release
// 2007/11/28 Martin D. Flynn
// -Integrated use of 'ReportColumn'
// 2009/09/23 Clifton Flynn, Martin D. Flynn
// -Added SOAP xml support
// ----------------------------------------------------------------------------
package org.opengts.war.report.presentation;
import java.io.*;
import org.opengts.util.*;
import org.opengts.war.tools.*;
import org.opengts.war.report.*;
/**
 * Renders one body row of a report in HTML, XML, CSV, or XLS form.
 * Column lookup and per-cell rendering are delegated to the owning
 * {@link ReportTable}'s {@link BodyColumnTemplate}s.
 */
public class BodyRowTemplate
{

    // ------------------------------------------------------------------------

    private final ReportTable reportTable;

    public BodyRowTemplate(ReportTable rptTable)
    {
        super();
        this.reportTable = rptTable;
    }

    // ------------------------------------------------------------------------

    /**
     * Selects the CSS class for a row: the row's own class if set, otherwise
     * the total/odd/even class derived from the row index.
     * rowIndex starts at '0'; &lt; 0 indicates totals row data.
     */
    private static String rowCssClass(DBDataRow dr, int rowIndex, boolean isTotal)
    {
        if (dr.hasCssClass()) {
            return dr.getCssClass();
        } else
        if (isTotal) {
            return (rowIndex <= 0)? ReportLayout.CSS_CLASS_TOTAL : ReportLayout.CSS_CLASS_TOTAL_2;
        } else
        if ((rowIndex & 1) == 0) {
            return ReportLayout.CSS_CLASS_ODD;
        } else {
            return ReportLayout.CSS_CLASS_EVEN;
        }
    }

    /** Per-cell CSS class: only set when the row carries its own custom class. */
    private static String cellCssClass(DBDataRow dr)
    {
        return dr.hasCssClass()? dr.getCssClass() : null;
    }

    // ------------------------------------------------------------------------

    /**
     * Writes this data row as an HTML &lt;tr&gt; element.
     * @throws ReportException if no report columns are defined
     */
    public void writeHTML(PrintWriter out, int level, int rowIndex, boolean isTotal, DBDataRow dr)
        throws ReportException
    {
        // rowIndex starts at '0'. < 0 indicates totals rowdata

        /* CSS class reference */
        String tdCssClass = cellCssClass(dr);
        String trCssClass = rowCssClass(dr, rowIndex, isTotal);

        /* data row */
        ReportColumn rptCols[] = dr.getReportColumns();
        if (ListTools.isEmpty(rptCols)) {
            throw new ReportException("No report columns defined");
        }

        /* table row start */
        out.print("<tr class=\"" + trCssClass + "\">\n");

        /* columns */
        DataRowTemplate drt = dr.getDataRowTemplate();
        for (int i = 0; i < rptCols.length; i++) {
            /* extract column name/arg */
            String colName = rptCols[i].getKey();
            int    colSpan = rptCols[i].getColSpan();
            /* get field value */
            DataColumnTemplate dct = drt.getColumnTemplate(colName);
            if (dct != null) {
                BodyColumnTemplate bct = this.reportTable.getBodyColumnTemplate(dct);
                String fldName = bct.getFieldName(); // same as column name
                Object fldVal  = dr.getDBValue(fldName, rowIndex, rptCols[i]); // HTML
                bct.writeHTML(out, level+1, rowIndex, isTotal, tdCssClass, colSpan, fldVal);
            } else {
                // unknown column: silently skipped (column defined in report but not in the row template)
                //Print.logError("BodyColumnTemplate not found: " + rptCols[i]);
            }
        }

        /* table row end */
        out.print("</tr>\n");

    }

    // ------------------------------------------------------------------------

    /**
     * Writes this data row as a "BodyRow" XML element (SOAP-aware).
     * @throws ReportException if no report columns are defined
     */
    public void writeXML(PrintWriter out, int level, int rowIndex, boolean isTotal, DBDataRow dr)
        throws ReportException
    {
        ReportData rd = dr.getReportData();
        boolean isSoapRequest = (rd != null)? rd.isSoapRequest() : false;
        String PFX1 = XMLTools.PREFIX(isSoapRequest, level * ReportTable.INDENT);
        // rowIndex starts at '0'

        /* CSS class reference */
        String tdCssClass = cellCssClass(dr);
        String trCssClass = rowCssClass(dr, rowIndex, isTotal);

        /* data row */
        ReportColumn rptCols[] = dr.getReportColumns();
        if (ListTools.isEmpty(rptCols)) {
            throw new ReportException("No report columns defined");
        }

        /* table row start */
        out.print(PFX1);
        out.print(XMLTools.startTAG(isSoapRequest,"BodyRow",
            XMLTools.ATTR("class",trCssClass),
            false,true));

        /* columns */
        DataRowTemplate drt = dr.getDataRowTemplate();
        for (int i = 0; i < rptCols.length; i++) {
            /* extract column name/arg */
            String colName = rptCols[i].getKey();
            int    colSpan = rptCols[i].getColSpan();
            /* get field value */
            DataColumnTemplate dct = drt.getColumnTemplate(colName);
            if (dct != null) {
                BodyColumnTemplate bct = this.reportTable.getBodyColumnTemplate(dct);
                String fldName = bct.getFieldName(); // same as column name
                Object fldVal  = dr.getDBValue(fldName, rowIndex, rptCols[i]); // XML
                bct.writeXML(out, level+1, rowIndex, isTotal, tdCssClass, colSpan, fldVal, isSoapRequest);
            } else {
                // unknown column: silently skipped
                //Print.logError("BodyColumnTemplate not found: " + rptCols[i]);
            }
        }

        /* table row end */
        out.print(PFX1);
        out.print(XMLTools.endTAG(isSoapRequest,"BodyRow",true));

    }

    // ------------------------------------------------------------------------

    /**
     * Writes this data row as one comma-separated line (newline terminated).
     * @throws ReportException if no report columns are defined
     */
    public void writeCSV(PrintWriter out, int level, int rowIndex, boolean isTotal, DBDataRow dr)
        throws ReportException
    {

        /* report columns (guard added for consistency with writeHTML/writeXML/writeXLS,
         * which previously made this method the only one to NPE on a null column array) */
        ReportColumn rptCols[] = dr.getReportColumns();
        if (ListTools.isEmpty(rptCols)) {
            throw new ReportException("No report columns defined");
        }

        /* columns */
        DataRowTemplate drt = dr.getDataRowTemplate();
        for (int i = 0; i < rptCols.length; i++) {
            /* extract column name/arg */
            String colName = rptCols[i].getKey();
            /* get field value */
            DataColumnTemplate dct = drt.getColumnTemplate(colName);
            if (dct != null) {
                if (i > 0) {
                    out.print(","); // CSV_SEPARATOR
                }
                BodyColumnTemplate bct = this.reportTable.getBodyColumnTemplate(dct);
                String fldName = bct.getFieldName();
                Object fldVal  = dr.getDBValue(fldName, rowIndex, rptCols[i]); // CSV
                String valStr  = (fldVal != null)? fldVal.toString() : "";
                bct.writeCSV(out, level+1, valStr);
            }
        }
        out.print("\n");

    }

    // ------------------------------------------------------------------------

    /**
     * Writes this data row into the spreadsheet and advances its row index.
     * @throws ReportException if no report columns are defined
     */
    public void writeXLS(ReportSpreadsheet rptRSS, int level, int rowIndex, DBDataRow dr)
        throws ReportException
    {

        /* report columns */
        ReportColumn rptCols[] = dr.getReportColumns();
        if (ListTools.isEmpty(rptCols)) {
            throw new ReportException("No report columns defined");
        }

        /* write columns */
        DBDataRow.RowType rowType = dr.getRowType();
        DataRowTemplate drt = dr.getDataRowTemplate(); // ie. "FieldLayout.FieldDataRow"
        for (int i = 0; i < rptCols.length; i++) {
            /* extract column name/arg */
            String colName = rptCols[i].getKey();
            int    colSpan = rptCols[i].getColSpan();
            /* get field value */
            DataColumnTemplate dct = drt.getColumnTemplate(colName);
            if (dct != null) {
                BodyColumnTemplate bct = this.reportTable.getBodyColumnTemplate(dct);
                String fldName = bct.getFieldName(); // same as column name
                Object fldVal  = dr.getDBValue(fldName, rowIndex, rptCols[i]); // XLS
                bct.writeXLS(rptRSS, level+1, rowType, fldVal);
            } else {
                // unknown column: silently skipped
                //Print.logError("BodyColumnTemplate not found: " + rptCols[i]);
            }
        }

        /* done with this row */
        rptRSS.incrementRowIndex();

    }

    // ------------------------------------------------------------------------

}
| |
package fotoshop;
import fotoshop.Filter.*;
import fotoshop.GUI.SequencePanel.SequenceConstructorPanel;
import fotoshop.GUI.SequencePanel.SequenceButton;
import fotoshop.resources.*;
import fotoshop.GUI.ImagePanel;
import fotoshop.GUI.GUI;
import fotoshop.*;
import fotoshop.Events.EventController;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.*;
import java.util.*;
import java.util.List;
import java.awt.*;
import static java.awt.Component.TOP_ALIGNMENT;
import java.awt.event.*;
import java.awt.image.BufferedImage;
import java.beans.PropertyChangeSupport;
import javax.swing.*;
import javax.swing.border.EmptyBorder;
/**
* Adapted from the fotoshop assignment example by Richard Jones
* @author Benjamin Nicholls, bn65@kent.ac.uk
*
* This class is the main processing class of the Fotoshop application.
* Fotoshop is a very simple image editing tool. Users can apply a number of
* filters to an image. That's all. It should really be extended to make it more
* useful!
*
* To edit an image, create an instance of this class and call the "edit"
* method.
*
* This main class creates and initialises all the others: it creates the parser
* and evaluates and executes the commands that the parser returns.
*
* @author Richard Jones
* @version 2013.09.10
*/
public class Editor {
// NOTE(review): 'properties' lists only one of the eight property names fired by
// this editor (see addController/refreshValues) and is not read anywhere in the
// visible code — appears stale; confirm before relying on it.
private final String[] properties = {"LoadImage"};
// Fires property-change events toward the registered EventController(s).
private PropertyChangeSupport pChange = new PropertyChangeSupport(this);
/**
 * EventController object that will pass any change in values to the GUI
 * @see #addController(fotoshop.Events.EventController)
 * @see fotoshop.Events.EventController
 */
// NOTE(review): 'controller' is never assigned in the visible code; listeners
// are registered directly on pChange in addController — confirm it is needed.
private EventController controller;
/**
 * The current stack of images instances being edited
 */
private ImageStack currentImg;
/**
 * The instance of ImageCache storing images currently being worked on
 */
private final ImageCache imageCache = new ImageCache();
/** filterMap contains the possible filters that may be applied to the
 * current image. Any attempt to apply a filter checks the name given
 * against the filters named in this map.
 * Must be initialised in the {@link #initFilters() initFilters} method.
 **/
private List<Filter> filterList = initFilters();
/**
 * Initialise the list of the possible filters that may be applied to the
 * image. Any new filters must also be added to this list through initFilters.
 * Any attempt to apply a filter checks the name given against the filters
 * named in this list.
 * @return A list of the available filters
 */
private List<Filter> initFilters(){
    // Internationalised filter names come from the ResourceManager and the
    // locale-specific properties files.
    List<Filter> available = new ArrayList<Filter>();
    Collections.addAll(available,
            new MonoFilter(ResourceManager.getFilter("MONO")),
            new RotateFilter(ResourceManager.getFilter("ROT90")),
            new BrightnessFilter("BrightnessFilter"));
    return available;
}
/**
* Retrieve the names of the Filter objects within the {@link #filterList filterList}
* @return List<String> of filter names
*/
public List<String> getFiltersNames(){
    // Collect the display name of every registered filter, in list order.
    List<String> names = new ArrayList<String>(filterList.size());
    for (int i = 0; i < filterList.size(); i++) {
        names.add(filterList.get(i).getName());
    }
    return names;
}
/**
* Assign the editors EventController for this editor. Add a propertyChangeListener
* to the {@link #pChange pChange} PropertyChangeSupport for each property that may change in this Editor
* @param c the EventController to add to the PropertyChangeSupport object
* @see fotoshop.Events.EventController
*/
public void addController(EventController c){
    // Every observable property this editor fires must be registered here;
    // registration order matches the original explicit call sequence.
    final String[] observed = {
        "LoadImage", "LoadName", "LoadStack", "LoadStackFocus",
        "LoadCache", "LoadCacheFocus", "activate", "refresh"
    };
    for (String property : observed) {
        pChange.addPropertyChangeListener(property, c);
    }
}
/**
* Given a command, edit (that is: execute) the command.
* Altered to make use of enum objects as suggested in "Objects First with
* Java" by Barnes and Kolling.
* @param command The command to be processed.
* @return true If the command ends the editing session, false otherwise.
*/
public boolean processCommand(Command command) {
    System.out.println("Command = " + command.getCommandWord().toString());
    // NOTE(review): wantToQuit is never set to true in this method, so editing
    // sessions cannot end from here — presumably a QUIT command word is handled
    // elsewhere (or is missing); confirm against CommandWord.
    boolean wantToQuit = false;
    CommandWord commandWord = command.getCommandWord();
    // Dispatch each recognised command word to its handler; UNKNOWN just
    // reports an (internationalised) error message.
    switch(commandWord){
        case UNKNOWN:
            System.out.println(ResourceManager.getEditorText("unknownCommand"));
            break;
        case OPEN:
            open(command);
            break;
        case SAVE:
            save(command);
            break;
        case APPLY:
            performFilter(command);
            break;
        case UNDO:
            restoreImage(command);
            break;
        case PUT:
            put(command);
            break;
        case GET:
            get(command);
            break;
    }
    return wantToQuit;
}
/**
* Main property firing method. Fires property changes for all the relevant
* properties that need to be updated in the GUI
*/
public void refreshValues(){
    // Push the current image, its filter stack, the cache contents, and the
    // focus/selection values to the GUI via property-change events.
    if(currentImg.get() != null)
        pChange.firePropertyChange("LoadImage", null, (BufferedImage)currentImg.get());
    if(currentImg.getInstanceList() != null){
        pChange.firePropertyChange("LoadStack", null, currentImg.getInstanceList());
        System.out.println("stackIndex " + currentImg.getIndex());
        // Old value -1 presumably forces the event to fire even when the index
        // is unchanged (PropertyChangeSupport suppresses equal old/new) — confirm.
        pChange.firePropertyChange("LoadStackFocus", -1, currentImg.getIndex());
    }
    pChange.firePropertyChange("LoadCache", null, imageCache.getCachedImages());
    if(currentImg.getName() != null){
        pChange.firePropertyChange("LoadName", null, currentImg.getName());
        pChange.firePropertyChange("LoadCacheFocus", null, currentImg.getName());
    }
    // "activate" (old value 0, new value true) enables GUI controls;
    // "refresh" signals listeners to repaint. Fired last, after all data events.
    pChange.firePropertyChange("activate", 0, true);
    pChange.firePropertyChange("refresh", null, null);
}
//----------------------------------
// Implementations of user commands:
//----------------------------------
/**
* "open" was entered. Open the file given as the second word of the command
* and use as the current image.
* @param command the command given.
*/
private void open(Command command) {
    // Guard: the command must carry a file to load.
    File file = command.getFile();
    if (file == null) {
        System.out.println("No file error");
        return;
    }
    // Load the image; on success it becomes a fresh editing stack.
    ColorImage loaded = FileManager.loadImage(file);
    if (loaded != null) {
        currentImg = new ImageStack(loaded.getName(), loaded);
        refreshValues();
    }
}
/**
* "save" was entered. Save the current image to the file given as the
* second word of the command.
* @param command the command given
*/
private void save(Command command) {
    // Nothing to save until an image has been opened.
    if (currentImg == null) {
        return;
    }
    FileManager.SaveImage(currentImg.get(), command.getFile());
}
/**
* "look" was entered. Report the status of the work bench.
* Lists the name of the current image, and a list of filters applied to
* the image
*/
/* private void look() {
System.out.println(ResourceManager.getEditorText("currentImage") +
currentName);
if(currentStack != null){
System.out.print(ResourceManager.getEditorText("currentFilters"));
List<ColorImage> stack = new ArrayList<ColorImage>();
if(!cache.contains(currentName))
stack = currentStack.getStack(currentName);
else
stack = cache.getStack(currentName);
for(ColorImage filteredImg : stack){
if(filterList.contains(filteredImg.getName()))
System.out.print(filteredImg.getName() + " ");
}
}
}*/
//------------------------------
// Image Cache commands
//-------------------------------
/**
* the "put" command finds an appropriate name from the command, or from the
* image name and performs {@link #putToCache(java.lang.String) putToCache}
* method.
* @param command Command that was called by the user
*/
private void put(Command command){
    // Prefer the name given on the command; fall back to the image's own name.
    String name = (command.getName() != null) ? command.getName() : currentImg.getName();
    putToCache(name);
    currentImg.setName(name);
    refreshValues();
}
/**
* The "get" command performs the {@link #getFromCache(java.lang.String) getFromCache}
* method adding the current image to the cache using the name supplied by the Command
* @param command Command that was called by the user
*/
private void get(Command command){
if (command.getName() == null) {
// if there is no second word, we don't what to add it to cache as...
System.out.println(ResourceManager.getEditorText("unknownSource"));
return ;
}
getFromCache(command.getName());
refreshValues();
}
/**
* putToCache passes the current image into the ImageCache object via the
* {@link ImageCache#put(fotoshop.ImageStack, java.lang.String) put} method.
* @param name String name to add the {@link #currentImg current image} to the cache as
*/
public void putToCache(String name){
if(currentImg == null){
System.out.print("No image to put to cache");
}
else {
ImageStack clone = currentImg.cloneStack();
imageCache.put(clone, name);
currentImg = clone;
refreshValues();
System.out.println(ResourceManager.getEditorText("cacheAdded") +
name);
}
}
/**
* getFromCache passes the current image into the ImageCache object via the
* {@link ImageCache#get(java.lang.String) get} method.
* @param name name of the stored image to retrieve from the cache
*/
public void getFromCache(String name){
ImageStack img = imageCache.get(name);
if(img != null){
currentImg = img;
System.out.println(name + ResourceManager.getEditorText("cacheRetrieved"));
// look();
}
else
System.out.print(ResourceManager.getEditorText("missingCache"));
}
/**
* Attempt to pass currentImage into a filter
* @param command The second word of the command entered is the filter name
* that should be searched for, in the filterMap, and performed.
*/
private void performFilter(Command command){
System.out.println("apply filter");
if(currentImg.get() != null && command.getName() != null){
ColorImage filteredImage;
for(Filter filter : filterList){
if(filter.getName() == command.getName()){
float f = command.getValue();
filteredImage = filter.applyFilter(new ColorImage(currentImg.get(), currentImg.getName()), f);
pChange.firePropertyChange("LoadImage", 0, (BufferedImage)filteredImage);
pChange.firePropertyChange("refresh", null, null);
if(command.isTrue()){
currentImg.add(filteredImage);
refreshValues();
}
}
}
}
else {
System.out.println(ResourceManager.getFilter("filterError"));
}
}
    /**
     * Rewinds {@link #currentImg} to the version at the index given by the
     * command's numeric value, then refreshes the displayed values.
     * <p>
     * NOTE(review): the return value of {@code currentImg.get(...)} is
     * discarded, so this only restores anything if {@code ImageStack.get(int)}
     * mutates the stack internally — confirm against the ImageStack class.
     *
     * @param command the command whose value selects the version to restore
     */
    private void restoreImage(Command command){
        currentImg.get(Math.round(command.getValue()));
        refreshValues();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.process.computer;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.structure.Direction;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Unit tests for {@code GraphFilter}: edge/vertex filter validation, the
 * {@code Legal} enum ordering, positive-label extraction, and edge legality
 * checks across directions and labels.
 *
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 */
public class GraphFilterTest {
    // Legal is declared in the order YES, MAYBE, NO, so compareTo() encodes
    // decreasing certainty of legality.
    @Test
    public void shouldHaveValidLegalEnumOrdering() {
        assertTrue(GraphFilter.Legal.YES.compareTo(GraphFilter.Legal.YES) == 0);
        assertTrue(GraphFilter.Legal.YES.compareTo(GraphFilter.Legal.NO) < 0);
        assertTrue(GraphFilter.Legal.YES.compareTo(GraphFilter.Legal.MAYBE) < 0);
        assertTrue(GraphFilter.Legal.MAYBE.compareTo(GraphFilter.Legal.NO) < 0);
    }
    // An edge filter may only inspect the vertex's own incident edges; hopping
    // to an adjacent vertex (inV().outE()) must be rejected.
    @Test
    public void shouldOnlyAllowEdgeFilterToTraverseLocalStarGraph() {
        GraphFilter graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.outE());
        try {
            graphFilter.setEdgeFilter(__.<Vertex>outE().inV().outE());
            fail("Should not allow traversals past the star graph");
        } catch (final IllegalArgumentException e) {
            // NOTE(review): arguments are (actual, expected) — reversed relative
            // to JUnit convention, though harmless for an equality check.
            assertEquals(e.getMessage(), GraphComputer.Exceptions.edgeFilterAccessesAdjacentVertices(__.<Vertex>outE().inV().outE()).getMessage());
        }
    }
    // A vertex filter may only inspect the vertex itself; touching incident
    // edges must be rejected.
    @Test
    public void shouldOnlyAllowVertexFilterToTraverseVertex() {
        GraphFilter graphFilter = new GraphFilter();
        graphFilter.setVertexFilter(__.hasLabel("person"));
        try {
            graphFilter.setVertexFilter(__.<Vertex>as("a").outE().<Vertex>select("a"));
            fail("Should not allow traversals to the incident edges");
        } catch (final IllegalArgumentException e) {
            // NOTE(review): arguments are (actual, expected), as above.
            assertEquals(e.getMessage(), GraphComputer.Exceptions.vertexFilterAccessesIncidentEdges(__.<Vertex>as("a").outE().<Vertex>select("a")).getMessage());
        }
    }
    // getLegallyPositiveEdgeLabels() contract exercised below:
    //   singleton(null) means "all labels may pass" for that direction;
    //   emptySet() means "no labels can pass".
    @Test
    public void shouldGetLegallyPositiveEdgeLabels() {
        GraphFilter graphFilter = new GraphFilter();
        assertFalse(graphFilter.hasEdgeFilter());
        assertEquals(Collections.singleton(null), graphFilter.getLegallyPositiveEdgeLabels(Direction.OUT));
        assertEquals(Collections.singleton(null), graphFilter.getLegallyPositiveEdgeLabels(Direction.IN));
        assertEquals(Collections.singleton(null), graphFilter.getLegallyPositiveEdgeLabels(Direction.BOTH));
        // outE with one label: only that label, only outgoing.
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.outE("created"));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(Collections.singleton("created"), graphFilter.getLegallyPositiveEdgeLabels(Direction.OUT));
        assertEquals(Collections.emptySet(), graphFilter.getLegallyPositiveEdgeLabels(Direction.IN));
        assertEquals(Collections.emptySet(), graphFilter.getLegallyPositiveEdgeLabels(Direction.BOTH));
        // unlabeled outE: any label, but only outgoing.
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.outE());
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(Collections.singleton(null), graphFilter.getLegallyPositiveEdgeLabels(Direction.OUT));
        assertEquals(Collections.emptySet(), graphFilter.getLegallyPositiveEdgeLabels(Direction.IN));
        assertEquals(Collections.emptySet(), graphFilter.getLegallyPositiveEdgeLabels(Direction.BOTH));
        // trailing has() step does not widen the label set.
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.<Vertex>outE("created").has("weight", 32));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(Collections.singleton("created"), graphFilter.getLegallyPositiveEdgeLabels(Direction.OUT));
        assertEquals(Collections.emptySet(), graphFilter.getLegallyPositiveEdgeLabels(Direction.IN));
        assertEquals(Collections.emptySet(), graphFilter.getLegallyPositiveEdgeLabels(Direction.BOTH));
        // leading identity() step defeats introspection: all labels/directions pass.
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.<Vertex>identity().outE("created"));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(Collections.singleton(null), graphFilter.getLegallyPositiveEdgeLabels(Direction.OUT));
        assertEquals(Collections.singleton(null), graphFilter.getLegallyPositiveEdgeLabels(Direction.IN));
        assertEquals(Collections.singleton(null), graphFilter.getLegallyPositiveEdgeLabels(Direction.BOTH));
        // unlabeled bothE: any label, any direction.
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.bothE());
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(Collections.singleton(null), graphFilter.getLegallyPositiveEdgeLabels(Direction.OUT));
        assertEquals(Collections.singleton(null), graphFilter.getLegallyPositiveEdgeLabels(Direction.IN));
        assertEquals(Collections.singleton(null), graphFilter.getLegallyPositiveEdgeLabels(Direction.BOTH));
        //
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.<Vertex>bothE().has("weight", 32));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(Collections.singleton(null), graphFilter.getLegallyPositiveEdgeLabels(Direction.OUT));
        assertEquals(Collections.singleton(null), graphFilter.getLegallyPositiveEdgeLabels(Direction.IN));
        assertEquals(Collections.singleton(null), graphFilter.getLegallyPositiveEdgeLabels(Direction.BOTH));
        // limit(0) filters everything out: no labels can be positive.
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.<Vertex>bothE().limit(0));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(Collections.emptySet(), graphFilter.getLegallyPositiveEdgeLabels(Direction.OUT));
        assertEquals(Collections.emptySet(), graphFilter.getLegallyPositiveEdgeLabels(Direction.IN));
        assertEquals(Collections.emptySet(), graphFilter.getLegallyPositiveEdgeLabels(Direction.BOTH));
        //
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.<Vertex>bothE("created").has("weight", 32));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(Collections.singleton("created"), graphFilter.getLegallyPositiveEdgeLabels(Direction.OUT));
        assertEquals(Collections.singleton("created"), graphFilter.getLegallyPositiveEdgeLabels(Direction.IN));
        assertEquals(Collections.singleton("created"), graphFilter.getLegallyPositiveEdgeLabels(Direction.BOTH));
        // union: labels resolve per direction; BOTH is only positive for
        // labels legal in both directions.
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.union(__.outE("created"), __.inE("likes")));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(Collections.singleton("created"), graphFilter.getLegallyPositiveEdgeLabels(Direction.OUT));
        assertEquals(Collections.singleton("likes"), graphFilter.getLegallyPositiveEdgeLabels(Direction.IN));
        assertEquals(Collections.emptySet(), graphFilter.getLegallyPositiveEdgeLabels(Direction.BOTH));
        //
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.union(__.outE("created"), __.inE("likes", "created")));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(Collections.singleton("created"), graphFilter.getLegallyPositiveEdgeLabels(Direction.OUT));
        assertEquals(new HashSet<>(Arrays.asList("likes", "created")), graphFilter.getLegallyPositiveEdgeLabels(Direction.IN));
        assertEquals(Collections.singleton("created"), graphFilter.getLegallyPositiveEdgeLabels(Direction.BOTH));
    }
    // checkEdgeLegality() contract exercised below:
    //   YES   = the filter provably keeps such edges,
    //   NO    = it provably drops them,
    //   MAYBE = it cannot be decided by introspection.
    @Test
    public void shouldHaveProperEdgeLegality() {
        GraphFilter graphFilter = new GraphFilter();
        assertFalse(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN, "created"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.BOTH, "likes"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT, "knows"));
        //
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.flatMap(v -> v.get().edges(Direction.OUT, "created"))); // lambdas can not be introspected
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.IN, "created"));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.BOTH, "likes"));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.OUT, "knows"));
        //
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.<Vertex>identity().bothE()); // there are no strategies for __.
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.IN, "created"));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.BOTH, "likes"));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.OUT, "knows"));
        // a has() step makes every answer undecidable (MAYBE).
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.<Vertex>bothE().has("weight", 32));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.IN, "created"));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.BOTH, "likes"));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.OUT, "knows"));
        // inE keeps IN maybe-legal, but OUT/BOTH are provably dropped.
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.<Vertex>inE().has("weight", 32));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.IN, "created"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.BOTH, "likes"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.OUT, "knows"));
        //
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.<Vertex>bothE().limit(0));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.OUT));
        //
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.outE());
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT, "knows"));
        // limit(10) can cut off edges, so OUT drops from YES to MAYBE.
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.<Vertex>outE().limit(10));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.OUT, "knows"));
        //
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.<Vertex>outE("knows").limit(10));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.OUT, "knows"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.OUT, "created"));
        //
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.bothE("created"));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.BOTH, "created"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN, "created"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.BOTH, "knows"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.OUT, "knows"));
        //
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.outE("knows", "likes"));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.BOTH, "likes"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.BOTH, "created"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT, "knows"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT, "likes"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.OUT, "created"));
        // union() merges the legality of each branch per direction/label.
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.union(__.inE("bought"), __.outE("created"), __.bothE("knows", "likes")));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT, "created"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT, "likes"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.IN, "created"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.OUT, "worksFor"));
        //
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.union(__.outE("created"), __.bothE("knows", "likes")));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT, "created"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT, "likes"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.IN, "created"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN, "likes"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.OUT, "worksFor"));
        // a has() on one branch makes only that branch's answers MAYBE.
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.union(__.inE("bought").has("weight", 32), __.outE("created"), __.bothE("knows", "likes")));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT, "created"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT, "likes"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.IN, "created"));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.IN, "bought"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.OUT, "worksFor"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN, "likes"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN, "knows"));
        // "bought" appears both filtered and unfiltered: YES wins over MAYBE.
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.union(__.inE("bought").has("weight", 32), __.outE("created"), __.bothE("knows", "likes", "bought")));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT, "created"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT, "likes"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.OUT, "blah"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.IN, "created"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN, "bought"));
        assertEquals(GraphFilter.Legal.NO, graphFilter.checkEdgeLegality(Direction.OUT, "worksFor"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN, "likes"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN, "knows"));
        // a limit(0) branch degrades its direction to MAYBE, not NO.
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.union(__.outE("created").limit(0), __.inE("created")));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.OUT, "created"));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.IN, "created"));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.BOTH, "created"));
        //
        graphFilter = new GraphFilter();
        graphFilter.setEdgeFilter(__.union(__.outE(), __.inE().limit(0)));
        assertTrue(graphFilter.hasEdgeFilter());
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.IN));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.BOTH));
        assertEquals(GraphFilter.Legal.YES, graphFilter.checkEdgeLegality(Direction.OUT, "created"));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.IN, "created"));
        assertEquals(GraphFilter.Legal.MAYBE, graphFilter.checkEdgeLegality(Direction.BOTH, "created"));
    }
}
| |
package org.libj.xquery.compiler;
import org.libj.xquery.lexer.TokenType;
import org.libj.xquery.lisp.Cons;
import org.libj.xquery.lisp.Fn;
import org.libj.xquery.namespace.*;
import org.libj.xquery.parser.*;
import org.libj.xquery.xml.XML;
import static org.libj.xquery.lexer.TokenType.*;
import static org.libj.xquery.compiler.Constants.*;
import static org.libj.xquery.lisp.Cons.*;
import java.util.ArrayList;
import java.util.Map;
public class Analysis {
    // Root of the parse tree being analysed.
    private Cons ast;
    // Declared types of externally-supplied variables, keyed by variable name.
    private Map<String, Class> externals;
    // Lexically scoped symbol table for bound variables.
    private Scope scope = new Scope();
    // Symbols referenced but never bound in any enclosing scope.
    private Scope freeScope = new Scope();
    // Resolver for function names and namespace URI prefixes.
    private Namespace namespace;
    // Next free local-variable slot (JVM-style local numbering).
    private int locals = _LOCAL_VAR_START;
    /**
     * Creates an analysis pass over the given parse tree.
     *
     * @param tree        the parse tree to analyse
     * @param vars        names of externally-supplied variables, in slot order
     * @param externals   declared types for external variables (entries may be
     *                    missing; such variables default to Object)
     * @param namespace   used to resolve functions and namespace prefixes
     * @param hasCallback whether a callback argument occupies a local slot;
     *                    when absent, slot numbering shifts down by one
     */
    public Analysis(Cons tree, String[] vars, Map<String, Class> externals, Namespace namespace, boolean hasCallback) {
        ast = tree;
        this.namespace = namespace;
        if (!hasCallback) {
            locals--;
        }
        // Pre-bind every external variable ("$name") to its own local slot.
        for (String var: vars) {
            Class<Object> t = externals.get(var);
            if (t == null) {
                t = Object.class;
            }
            Symbol sym = new Symbol("$"+var, locals++, t);
            scope.define(sym);
        }
        this.externals = externals;
    }
    /** Returns the number of local-variable slots allocated so far. */
    public int getLocals() {
        return locals;
    }
    /** Returns the variables that were referenced but never bound, keyed by name. */
    public Map<String, Symbol> getFreeVariables() {
        return freeScope.getSymbols();
    }
    // NOTE(review): name contains a typo ("Varialbes"); kept as-is because it
    // is private and other parts of this file may call it.
    private Map<String, Class> collectExternalVarialbes() {
        throw new RuntimeException("Not Implemented!");
    }
    /**
     * Runs the analysis over the whole tree and applies the XPath-caching
     * optimisation to the result.
     */
    public Cons walk() {
        return Optimizer.cacheXPath(walkExpr(ast));
    }
    /**
     * Dispatches on the AST node type and walks the matching sub-tree.
     *
     * @param expr the expression node to analyse
     * @return the analysed (type-annotated) expression tree
     * @throws RuntimeException for node types without an analysis rule
     */
    private Cons walkExpr(Cons expr) {
        switch (AST.getNodeType(expr)) {
            case FLOWER:
                return walkFlower(expr);
            case IF:
                return walkIf(expr);
            case ELEMENT:
                return walkElement(expr);
            case LIST:
                return walkList(expr);
            case VARIABLE:
                return walkVariable(expr);
            case NUMBER:
                return walkNumber(expr);
            case STRING:
                return walkString(expr);
            // All operator node types funnel through walkOp, which dispatches again.
            case PLUS: case MINUS: case MULTIPLY: case DIV: case NEGATIVE: case MOD:
            case EQ: case NE:
            case LT: case LE: case GT: case GE:
            case AND: case OR:
            case TO: case INDEX: case XPATH: case ATTR_AT:
                return walkOp(expr);
            case CALL:
                return walkCall(expr);
            default:
                throw new RuntimeException("Not Implemented! "+expr);
        }
    }
private Cons walkNumber(Cons expr) {
TokenType t = (TokenType) expr.first();
String text = (String) expr.second();
if (text.indexOf('.') == -1) {
int n = Integer.parseInt(text);
return list(new ConstantElement(t, n, int.class), n);
}
else {
double d = Double.parseDouble(text);
return list(new ConstantElement(t, d, double.class), d);
}
}
private Cons walkString(Cons expr) {
TokenType t = (TokenType) expr.first();
String text = (String) expr.second();
return list(new ConstantElement(t, text, String.class), text);
}
private Cons walkVariable(Cons expr) {
String variable = (String) expr.second();
if (!isFree(variable)) {
return list(new VariableElement(resolveType(variable), resolve(variable)));
}
else {
Symbol symbol = resolveFree(variable);
return list(new VariableElement(symbol.getType(), symbol.getIndex()));
}
}
    /**
     * Second-level dispatch for operator nodes: routes each operator token to
     * its specific analysis rule.
     *
     * @throws RuntimeException for operator types without a rule
     */
    private Cons walkOp(Cons expr) {
        TokenType type = AST.getNodeType(expr);
        switch (type) {
            case PLUS: case MINUS: case MULTIPLY: case DIV: case MOD:
                return walkBinaryArithmetic(expr);
            case NEGATIVE:
                return walkNegative(expr);
            // Comparisons walk like arithmetic but evaluate to boolean.
            case EQ: case NE:
            case LT: case LE: case GT: case GE:
                return walkComparison(expr);
            case AND: case OR:
                return walkLogic(expr);
            case TO:
                return walkTo(expr);
            case INDEX:
                return walkIndex(expr);
            case XPATH:
                return walkXPath(expr);
            case ATTR_AT:
                return walkAttrAt(expr);
            default:
                throw new RuntimeException("Not Implemented! "+toTypeName(type));
        }
    }
private Cons walkBinaryArithmetic(Cons expr) {
return unifyArithmetic(walkSub2(expr));
}
private Cons walkNegative(Cons expr) {
Cons v = walkExpr((Cons) expr.second());
expr = assoc1(expr, v);
expr = assocType(expr, AST.getEvalType(v));
return expr;
}
private Cons walkSub2(Cons expr) {
Cons left = walkExpr((Cons) expr.second());
Cons right = walkExpr((Cons) expr.third());
expr = assoc1(expr, left);
expr = assoc2(expr, right);
return expr;
}
private Cons walkLogic(Cons expr) {
Cons left = castTo(walkExpr((Cons) expr.second()), boolean.class);
Cons right = castTo(walkExpr((Cons) expr.third()), boolean.class);
expr = assoc1(expr, left);
expr = assoc2(expr, right);
expr = assocType(expr, boolean.class);
return expr;
}
private Cons walkComparison(Cons expr) {
return assocType(walkBinaryArithmetic(expr), boolean.class);
}
private Cons walkXPath(Cons expr) {
Cons object = (Cons) expr.second();
Cons xml = castTo(walkExpr(object), XML_INTERFACE_Class);
String path = (String) expr.third();
String ns = null;
int i = path.indexOf(':');
if (i != -1) {
String prefix = path.substring(0, i);
ns = ((URI)namespace.lookup(prefix)).getUri();
path = path.substring(i+1);
}
return list(new TypedElement(TokenType.XPATH, XML_INTERFACE_Class), xml, path, ns);
}
private Cons walkAttrAt(Cons expr) {
Cons object = (Cons) expr.second();
Cons xml = castTo(walkExpr(object), XML_INTERFACE_Class);
String attr = (String) expr.third();
return list(new TypedElement(TokenType.ATTR_AT, String.class), xml, attr);
}
private boolean isPureLets(Cons flower) {
for (Object x: ((Cons)flower.second()).rest()) {
Cons forlet = (Cons) x;
if (AST.getNodeType(forlet) != LET) {
return false;
}
}
return true;
}
    /**
     * Analyses an index expression (list[at]).
     * <p>
     * A flower with at least one FOR clause is rewritten into a FLOWERAT node,
     * so the generated code can stop iterating once the requested index is
     * reached; anything else (including pure-let flowers) is analysed as a
     * plain Object-typed index lookup.
     */
    private Cons walkIndex(Cons expr) {
        Cons list = (Cons) expr.second();
        Cons at = (Cons) expr.third();
        if (AST.getNodeType(list) == FLOWER && !isPureLets(list)) {
            // Specialised path: build (FLOWERAT flower index).
            Cons flower = walkFlower(list);
            Cons flowerAt = castTo(walkExpr(at), int.class);
            expr = list(FLOWERAT);
            expr = Cons.append(expr, flower);
            expr = Cons.append(expr, flowerAt);
            return expr;
        }
        else {
            // General path: box the collection, force the index to int.
            Cons left = castTo(walkExpr(list), Object.class);
            Cons right = castTo(walkExpr(at), int.class);
            expr = assoc1(expr, left);
            expr = assoc2(expr, right);
            expr = assocType(expr, Object.class);
            return expr;
        }
    }
private Cons walkTo(Cons expr) {
Cons left = castTo(walkExpr((Cons) expr.nth(1)), int.class);
Cons right = castTo(walkExpr((Cons) expr.nth(2)), int.class);
expr = assoc1(expr, left);
expr = assoc2(expr, right);
expr = assocType(expr, LIST_INTERFACE_CLASS);
return expr;
}
private Cons walkList(Cons expr) {
Cons ast = list(new TypedElement(AST.getTokenType(expr), LIST_INTERFACE_CLASS));
for (Object e: Cons.rest(expr)) {
ast = Cons.append(ast, castTo(walkExpr((Cons) e), Object.class));
}
return ast;
}
    /**
     * Analyses an if/then/else expression. When both branches evaluate to the
     * same type, the node keeps that type; otherwise both branches are boxed
     * to Object so the node has a single well-defined result type.
     */
    private Cons walkIf(Cons expr) {
        Cons condition = walkExpr((Cons) expr.nth(1));
        Cons thenValue = walkExpr((Cons) expr.nth(2));
        Cons elseValue = walkExpr((Cons) expr.nth(3));
        expr = assoc1(expr, castTo(condition, boolean.class));
        if (AST.getEvalType(elseValue) == AST.getEvalType(thenValue)) {
            // Branch types agree: keep the shared type.
            expr = assoc2(expr, thenValue);
            expr = assoc3(expr, elseValue);
            expr = assocType(expr, AST.getEvalType(elseValue));
            return expr;
        }
        else {
            // Branch types differ: unify by boxing both to Object.
            expr = assoc2(expr, castTo(thenValue, Object.class));
            expr = assoc3(expr, castTo(elseValue, Object.class));
            expr = assocType(expr, Object.class);
            return expr;
        }
    }
    /**
     * Analyses a FLWOR ("flower") expression: walks the chain of for/let
     * clauses plus body and where clause, then applies the where-clause
     * optimisation. The node evaluates to a list.
     */
    private Cons walkFlower(Cons expr) {
        Cons forlets = ((Cons) expr.nth(1)).rest();
        Cons body = (Cons) expr.nth(2);
        Cons where = (Cons) expr.nth(3);
        expr = list(new TypedElement(AST.getTokenType(expr), LIST_INTERFACE_CLASS));
        // Rebuild the node as (header . walked-forlet-chain).
        expr = Cons.cons(expr.first(), walkForlet(forlets, body, where));
        expr = Optimizer.optimizeWhere(expr);
        return expr;
    }
private Cons walkForlet(Cons forlets, Cons body, Cons where) {
if (forlets == null || Cons.isNil(forlets)) {
return walkFlowerWhereBody(body, where);
}
else {
switch (AST.getNodeType(((Cons) forlets.first()))) {
case LET:
return walkLet(forlets, body, where);
case FOR:
return walkFor(forlets, body, where);
default:
throw new RuntimeException("Wrong code!");
}
}
}
private Cons walkFlowerWhereBody(Cons body, Cons where) {
if (where != null && !Cons.isNil(where)) {
where = walkExpr(where);
}
body = walkExpr(body);
return tuple(null, body, where);
}
    /**
     * Analyses a LET clause: the bound value is walked first, the variable is
     * then defined in a fresh scope with the value's type, and the remaining
     * clauses are walked inside that scope before it is popped.
     * <p>
     * The order matters: define() allocates a local slot, so later clauses see
     * both the binding and the updated slot counter.
     */
    private Cons walkLet(Cons forlets, Cons body, Cons where) {
        pushScope();
        Cons expr = (Cons) forlets.first();
        Cons variableExpr = (Cons) expr.nth(1);
        String variableName = (String) variableExpr.second();
        Cons valueExpr = (Cons) expr.nth(2);
        valueExpr = walkExpr(valueExpr);
        Class valueType = AST.getEvalType(valueExpr);
        int index = define(variableName, valueType);
        VariableElement variable = new VariableElement(valueType, index);
        expr = assocType(expr, valueType);
        expr = assoc1(expr, variable);
        expr = assoc2(expr, valueExpr);
        // Walk the rest of the clause chain inside this scope, then prepend
        // this clause to the walked chain.
        Cons result = walkForlet(Cons.rest(forlets), body, where);
        result = cons(cons(expr, (Cons) result.first()), result.next());
        popScope();
        return result;
    }
private Cons walkFor(Cons forlets, Cons body, Cons where) {
if (AST.getNodeType((Cons) ((Cons) forlets.first()).nth(2)) == TO) {
return walkForRange(forlets, body, where);
}
else {
return walkForGeneral(forlets, body, where);
}
}
    /**
     * Analyses a FOR clause over an integer range (a to b): both bounds are
     * forced to int and the loop variable gets an int local slot. The clause
     * is rebuilt as a FORRANGE node so codegen can emit a primitive loop.
     */
    private Cons walkForRange(Cons forlets, Cons body, Cons where) {
        pushScope();
        Cons expr = (Cons) forlets.first();
        Cons variableExpr = (Cons) expr.nth(1);
        String variableName = (String) variableExpr.second();
        Cons rangeExpr = (Cons) expr.nth(2);
        Cons start = castTo(walkExpr(AST.nthAST(rangeExpr, 1)), int.class);
        Cons end = castTo(walkExpr(AST.nthAST(rangeExpr, 2)), int.class);
        int element = define(variableName, int.class);
        VariableElement variable = new VariableElement(int.class, element);
        expr = list(new TypedElement(FORRANGE, LIST_INTERFACE_CLASS), variable, list(start, end));
        // Remaining clauses are walked inside this scope, then this clause is
        // prepended to the walked chain.
        Cons result = walkForlet(Cons.rest(forlets), body, where);
        result = cons(cons(expr, (Cons)result.first()), result.next());
        popScope();
        return result;
    }
    /**
     * Analyses a FOR clause over an arbitrary collection: the collection is
     * boxed to Object and the loop variable gets an Object-typed local slot.
     */
    private Cons walkForGeneral(Cons forlets, Cons body, Cons where) {
        pushScope();
        Cons expr = (Cons) forlets.first();
        Cons variableExpr = AST.nthAST(expr, 1);
        String variableName = (String) variableExpr.second();
        Cons collectionExpr = AST.nthAST(expr, 2);
        collectionExpr = castTo(walkExpr(collectionExpr), Object.class);
        int element = define(variableName, Object.class);
        VariableElement variable = new VariableElement(Object.class, element);
        expr = assocType(expr, LIST_INTERFACE_CLASS);
        expr = assoc1(expr, variable);
        expr = assoc2(expr, collectionExpr);
        // Remaining clauses are walked inside this scope, then this clause is
        // prepended to the walked chain.
        Cons result = walkForlet(Cons.rest(forlets), body, where);
        result = cons(cons(expr, (Cons) result.first()), result.next());
        popScope();
        return result;
    }
// Walks an XML element constructor node: the head is retyped to XML,
// each attribute is walked via walkAttr, and the child content is walked
// as mixed text. Node layout: (head, name, attrs, content).
private Cons walkElement(Cons expr) {
    TypedElement t = new TypedElement(AST.getTokenType(expr), XML.class);
    Cons attrs = map(new Fn() {
        public Object call(Object x) {
            return walkAttr((Cons) x);
        }
    }, (Cons) expr.nth(2));
    Cons content = walkTexts((Cons) expr.nth(3));
    return list(t, expr.nth(1), attrs, content);
}
/**
 * Walks mixed XML content: plain string literals pass through untouched,
 * while embedded expressions are recursively walked.
 */
private Cons walkTexts(Cons value) {
    return map(new Fn() {
        public Object call(Object x) {
            return (x instanceof String) ? x : walkExpr((Cons) x);
        }
    }, value);
}
/**
 * Walks an attribute node by rewriting its value (element 1) through the
 * mixed-content walker.
 */
private Cons walkAttr(Cons attr) {
    return attr.assoc(1, walkTexts((Cons) attr.second()));
}
/**
 * Walks a function-call node: resolves the callee by name in the
 * namespace and dispatches to the walker matching its kind.
 *
 * @param expr the call node; element 1 is the function name
 * @return the walked call tree
 * @throws RuntimeException if the function is undefined or of an
 *         unsupported kind
 */
private Cons walkCall(Cons expr) {
    String functionName = (String) expr.second();
    Function fn = (Function) namespace.lookup(functionName);
    // Previously an unknown name fell through to "Not Implemented: null";
    // report it explicitly instead.
    if (fn == null) {
        throw new RuntimeException("Undefined function: " + functionName);
    }
    if (fn instanceof JavaFunction) {
        return walkFunction((JavaFunction) fn, expr);
    }
    if (fn instanceof OverloadedFunction) {
        return walkFunction((OverloadedFunction) fn, expr);
    }
    throw new RuntimeException("Not Implemented: " + fn);
}
/**
 * Rebuilds a call node headed by a {@link FunctionElement} for the
 * resolved target, casting each already-walked argument to the matching
 * parameter type. Instance methods consume the first argument as the
 * receiver; varargs targets pack trailing arguments into the component
 * type of the final array parameter.
 *
 * @param fn the resolved target function
 * @param expr ignored; immediately replaced by the rebuilt node
 * @param arguments the walked argument trees
 * @return the rebuilt, fully typed call tree
 * @throws RuntimeException on arity mismatch or a missing receiver
 */
private Cons walkFunctionArguments(JavaFunction fn, Cons expr, Cons arguments) {
    expr = new Cons(new FunctionElement(fn.getReturnType(), fn));
    if (fn.isMethod()) {
        // BUG FIX: the guard used to test Cons.size(expr), which is always 1
        // for the node just built above; it must verify a receiver argument
        // actually exists before consuming it.
        if (Cons.size(arguments) < 1) {
            throw new RuntimeException("Missing receiver argument for method: " + fn.getFunctionName());
        }
        expr = Cons.append(expr, castTo((Cons) arguments.first(), fn.getClassType()));
        arguments = Cons.rest(arguments);
    }
    Class<?>[] parameterTypes = fn.getParameterTypes();
    int parameterSize = parameterTypes.length;
    int argumentSize = arguments.size();
    if (!fn.isVarArgs()) {
        // Fixed arity: counts must match exactly.
        if (parameterSize != argumentSize) {
            throw new RuntimeException(
                String.format("Too %s arguments (%s:%s). Expected: %d, actual: %s",
                              argumentSize < parameterSize ? "few" : "many",
                              fn.getClassName().replace('/', '.'),
                              fn.getFunctionName(),
                              parameterSize,
                              argumentSize));
        }
        for (int i = 0; i < argumentSize; i++) {
            expr = Cons.append(expr, castTo(AST.nthAST(arguments, i), parameterTypes[i]));
        }
        return expr;
    }
    else {
        // Varargs: the last declared parameter is an array; everything past
        // the fixed parameters is cast to its component type.
        int normalParameterNumber = parameterSize - 1;
        int varParameterNumber = argumentSize - normalParameterNumber;
        if (varParameterNumber < 0) {
            throw new RuntimeException(
                String.format("Too few arguments (%s:%s). Expected at least: %d, actual: %s",
                              fn.getClassName().replace('/', '.'),
                              fn.getFunctionName(),
                              normalParameterNumber,
                              argumentSize));
        }
        for (int i = 0; i < normalParameterNumber; i++) {
            expr = Cons.append(expr, castTo(AST.nthAST(arguments, i), parameterTypes[i]));
        }
        Class elementType = parameterTypes[normalParameterNumber];
        if (!elementType.isArray()) {
            throw new RuntimeException("Varargs parameter is not an array: " + elementType);
        }
        elementType = elementType.getComponentType();
        for (int i = 0; i < varParameterNumber; i++) {
            expr = Cons.append(expr, castTo(AST.nthAST(arguments, normalParameterNumber + i), elementType));
        }
        return expr;
    }
}
/**
 * Walks each argument of a call to a plain Java function, then matches
 * the walked arguments against the function's parameter list.
 */
private Cons walkFunction(JavaFunction fn, Cons expr) {
    Cons walkedArgs = null;
    for (Object argument : Cons.rest(Cons.rest(expr))) {
        walkedArgs = Cons.append(walkedArgs, walkExpr((Cons) argument));
    }
    return walkFunctionArguments(fn, expr, walkedArgs == null ? Cons.nilList() : walkedArgs);
}
/**
 * Walks each argument of a call to an overloaded function, records the
 * inferred argument types, resolves the concrete overload from them, and
 * matches the walked arguments against that overload.
 */
private Cons walkFunction(OverloadedFunction dispatcher, Cons expr) {
    Cons arguments = Cons.rest(Cons.rest(expr));
    int argumentCount = arguments.size();
    Class[] argumentTypes = new Class[argumentCount];
    Cons walkedArgs = null;
    for (int i = 0; i < argumentCount; i++) {
        Cons walked = walkExpr(AST.nthAST(arguments, i));
        argumentTypes[i] = AST.getEvalType(walked);
        walkedArgs = Cons.append(walkedArgs, walked);
    }
    if (walkedArgs == null) {
        walkedArgs = Cons.nilList();
    }
    JavaFunction fn = dispatcher.resolveFunction(argumentTypes);
    return walkFunctionArguments(fn, walkedArgs, walkedArgs);
}
//////////////////////////////////////////////////
/// caster
//////////////////////////////////////////////////
// Unifies the operand types of a binary arithmetic node: records the
// result type on the node's head and inserts casts on whichever operand
// needs converting so both sides agree.
private Cons unifyArithmetic(Cons expr) {
    Cons leftTree = AST.nthAST(expr, 1);
    Cons rightTree = AST.nthAST(expr, 2);
    Class leftType = AST.getEvalType(leftTree);
    Class rightType = AST.getEvalType(rightTree);
    if (leftType == rightType) {
        // Boxed Integer on both sides is normalized to primitive int.
        if (leftType == Integer.class) {
            expr = assocType(expr, int.class);
            expr = assoc1(expr, castTo(leftTree, int.class));
            expr = assoc2(expr, castTo(rightTree, int.class));
            return expr;
        }
        // TODO: add more: double + double, long + long, etc.
        else {
            // Same type already: just record it as the result type.
            expr = assocType(expr, leftType);
            return expr;
        }
    }
    else if (leftType.isPrimitive() && rightType.isPrimitive()) {
        // convert primitive to primitive
        // (widen the narrower operand; only int/double/long pairs handled)
        if (leftType == int.class && rightType == double.class) {
            expr = assocType(expr, double.class);
            expr = assoc1(expr, castTo(leftTree, double.class));
            return expr;
        }
        else if (leftType == double.class && rightType == int.class) {
            expr = assocType(expr, double.class);
            expr = assoc2(expr, castTo(rightTree, double.class));
            return expr;
        }
        else if (leftType == int.class && rightType == long.class) {
            expr = assocType(expr, long.class);
            expr = assoc1(expr, castTo(leftTree, long.class));
            return expr;
        }
        else if (leftType == long.class && rightType == int.class) {
            expr = assocType(expr, long.class);
            expr = assoc2(expr, castTo(rightTree, long.class));
            return expr;
        }
        else {
            throw new RuntimeException("Not Implemented! "+leftType+" + "+rightType);
        }
    }
    else if (leftType == Object.class && rightType.isPrimitive()) {
        // Mixed Object/primitive: box the primitive side.
        expr = assocType(expr, Object.class);
        expr = assoc2(expr, castTo(rightTree, Object.class));
        return expr;
    }
    else if (leftType.isPrimitive() && rightType == Object.class) {
        expr = assocType(expr, Object.class);
        expr = assoc1(expr, castTo(leftTree, Object.class));
        return expr;
    }
    else if (leftType == XML_INTERFACE_Class || rightType == XML_INTERFACE_Class) {
        // XML operands are stringified and combined as strings.
        expr = assocType(expr, String.class);
        expr = assoc1(expr, castTo(leftTree, String.class));
        expr = assoc2(expr, castTo(rightTree, String.class));
        return expr;
    }
    else if (!leftType.isPrimitive() && !rightType.isPrimitive()) {
        // object -> object
        expr = assocType(expr, Object.class);
        expr = assoc1(expr, castTo(leftTree, Object.class));
        expr = assoc2(expr, castTo(rightTree, Object.class));
        return expr;
    }
    else {
        throw new RuntimeException("Not Implemented! "+leftType+" to "+rightType);
    }
}
/**
 * Casts {@code expr} to {@code target}, constant-folding when the
 * operand is a literal and emitting a runtime cast node otherwise.
 */
private Cons castTo(Cons expr, Class target) {
    return isConstant(expr) ? castConstant(expr, target) : castNonConstant(expr, target);
}
// True when the node's head is a ConstantElement, i.e. a literal value.
private boolean isConstant(Cons expr) {
    return expr.first() instanceof ConstantElement;
}
// Constant-folds a cast on a literal operand: Number->String,
// Integer->double and Double->int produce a fresh constant node;
// any other combination falls back to a runtime cast.
private Cons castConstant(Cons expr, Class target) {
    ConstantElement constant = (ConstantElement) expr.first();
    Object value = constant.getValue();
    if (value instanceof Number && target == String.class) {
        value = value.toString();
        return list(new ConstantElement(STRING, value, String.class), value);
    }
    else if ((value instanceof Integer) && target == double.class) {
        value = ((Integer)value).doubleValue();
        return list(new ConstantElement(NUMBER, value, double.class), value);
    }
    else if ((value instanceof Double) && target == int.class) {
        value = ((Double)value).intValue();
        return list(new ConstantElement(NUMBER, value, int.class), value);
    }
    // Not a foldable combination; emit a normal cast node instead.
    return castNonConstant(expr, target);
}
/**
 * Wraps {@code expr} in a {@code CastElement} when a runtime conversion
 * from its inferred type to {@code target} is needed; identity,
 * widening-reference, and reference-to-Object conversions pass through
 * unchanged.
 */
private Cons castNonConstant(Cons expr, Class target) {
    Class source = AST.getEvalType(expr);
    boolean bothReferences = !source.isPrimitive() && !target.isPrimitive();
    // No cast node needed when the types already match, the target is a
    // supertype, or this is a plain reference upcast to Object.
    if (source == target
            || target.isAssignableFrom(source)
            || (bothReferences && target == Object.class)) {
        return expr;
    }
    return Cons.list(new CastElement(source, target), expr);
}
//////////////////////////////////////////////////
/// scope
//////////////////////////////////////////////////
// Enters a new lexical scope nested inside the current one.
private void pushScope() {
    scope = new Scope(scope);
}
// Leaves the current lexical scope, restoring its enclosing scope.
// Unbalanced pops indicate a walker bug and fail fast.
private void popScope() {
    if (scope == null) {
        throw new CompilerException("Internal error: no more scope to pop");
    }
    scope = scope.getEnclosingScope();
}
/**
 * Allocates a local-variable slot for {@code name} in the current scope
 * and returns its index. Wide types (long/double) consume two
 * consecutive JVM slots.
 */
private int define(String name, Class type) {
    int slot = locals;
    locals += (type == double.class || type == long.class) ? 2 : 1;
    scope.define(new Symbol(name, slot, type));
    return slot;
}
/**
 * Registers {@code name} as a free (externally supplied) variable,
 * always reserving two slots so any type fits. The declared type is
 * looked up in {@code externals} by the name minus its first character,
 * falling back to Object when absent.
 */
private Symbol defineFree(String name) {
    int slot = locals;
    locals += 2;
    Class declared = externals.get(name.substring(1));
    Symbol sym = new Symbol(name, slot, declared != null ? declared : Object.class);
    freeScope.define(sym);
    return sym;
}
// A name is "free" when the local scope chain cannot resolve it.
private boolean isFree(String name) {
    return scope.resolve(name) == null;
}
/**
 * Resolves {@code name} in the current scope chain.
 *
 * @return the variable's slot index
 * @throws CompilerException if the variable is undefined
 */
private int resolve(String name) {
    Symbol symbol = scope.resolve(name);
    if (symbol != null) {
        return symbol.getIndex();
    }
    throw new CompilerException("Variable undefined: "+name);
}
/**
 * Resolves {@code name} in the current scope chain.
 *
 * @return the variable's declared type
 * @throws CompilerException if the variable is undefined
 */
private Class resolveType(String name) {
    Symbol symbol = scope.resolve(name);
    if (symbol != null) {
        return symbol.getType();
    }
    throw new CompilerException("Variable undefined: "+name);
}
/**
 * Looks up {@code name} among the free variables, defining it on first
 * use so repeated references share one Symbol.
 */
private Symbol resolveFree(String name) {
    Symbol existing = freeScope.resolve(name);
    return existing != null ? existing : defineFree(name);
}
//////////////////////////////////////////////////
/// utils
//////////////////////////////////////////////////
// Positional helpers delegating to Cons.assoc: produce `tree` with the
// element at the given index set to `x`.
private Cons assoc0(Cons tree, Object x) {
    return tree.assoc(0, x);
}
private Cons assoc1(Cons tree, Object x) {
    return tree.assoc(1, x);
}
private Cons assoc2(Cons tree, Object x) {
    return tree.assoc(2, x);
}
private Cons assoc3(Cons tree, Object x) {
    return tree.assoc(3, x);
}
/**
 * Replaces the head of {@code tree} with a {@code TypedElement} pairing
 * the node's existing token type with the given evaluation type. The
 * head may be either an Element (token type extracted) or a bare
 * TokenType.
 */
private Cons assocType(Cons tree, Class type) {
    Object head = tree.first();
    TokenType token = (head instanceof Element)
            ? ((Element) head).getTokenType()
            : (TokenType) head;
    return assoc0(tree, new TypedElement(token, type));
}
// Convenience: builds a three-element list of the given subtrees.
private Cons tuple(Cons x, Cons y, Cons z) {
    return Cons.list(x, y, z);
}
}
| |
/**
* Copyright 2012, Board of Regents of the University of
* Wisconsin System. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Board of Regents of the University of Wisconsin
* System licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*******************************************************************************
* Copyright 2007-2010 The Board of Regents of the University of Wisconsin System.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package edu.wisc.wisccal.shareurl.domain;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.apache.commons.lang.builder.ToStringStyle;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import edu.wisc.wisccal.shareurl.support.ProblematicRecurringEventSharePreference;
/**
* Bean to represent the set of preferences associated with a {@link Share}.
*
* @author Nicholas Blair
*/
public class SharePreferences implements Serializable {

    /** Separator placed between filter names in {@link #getFilterDisplay()}. */
    public static final String FILTER_DISPLAY_SEPARATOR = ", ";

    private static final Log LOG = LogFactory.getLog(SharePreferences.class);

    private static final long serialVersionUID = 53706L;

    // Backing set; never exposed directly, see getPreferences().
    private Set<ISharePreference> preferences = new HashSet<ISharePreference>();

    /**
     * Default constructor; starts with an empty preference set.
     */
    public SharePreferences() {
    }

    /**
     * @param preferences the initial set of preferences (stored by reference)
     */
    public SharePreferences(Set<ISharePreference> preferences) {
        this.preferences = preferences;
    }

    /**
     * Adds the preference; null arguments are ignored.
     *
     * @param pref the preference to add, may be null
     */
    public void addPreference(ISharePreference pref) {
        if(null != pref) {
            this.preferences.add(pref);
        }
    }

    /**
     * Removes the preference; null arguments are ignored.
     *
     * @param pref the preference to remove, may be null
     * @return true if the preference was successfully removed.
     */
    public boolean removePreference(ISharePreference pref) {
        if(null != pref) {
            return this.preferences.remove(pref);
        }
        return false;
    }

    /**
     * @return the {@link Set} of {@link ISharePreference}s that return true for {@link ISharePreference#participatesInFiltering()}.
     */
    public Set<ISharePreference> getFilterPreferences() {
        Set<ISharePreference> results = new HashSet<ISharePreference>();
        for(ISharePreference p: preferences) {
            if(p.participatesInFiltering()) {
                results.add(p);
            }
        }
        return results;
    }

    /**
     * Never null, but potentially empty.
     *
     * @return a copy of this instance's {@link Set} of {@link ISharePreference}s.
     */
    public Set<ISharePreference> getPreferences() {
        return new HashSet<ISharePreference>(preferences);
    }

    /**
     * @return the preferences with type {@link PropertyMatchPreference#PROPERTY_MATCH}; never null
     */
    public Set<ISharePreference> getPropertyMatchPreferences() {
        return getPreferencesByType(PropertyMatchPreference.PROPERTY_MATCH);
    }

    /**
     * @return all {@link CalendarMatchPreference}s in this instance; never null, possibly empty
     */
    public List<CalendarMatchPreference> getCalendarMatchPreferences() {
        Set<ISharePreference> sharePrefs = getPreferencesByType(CalendarMatchPreference.CALENDAR_MATCH);
        List<CalendarMatchPreference> calendarMatchPreferences = new ArrayList<CalendarMatchPreference>();
        for(ISharePreference p: sharePrefs) {
            if(p instanceof CalendarMatchPreference) {
                calendarMatchPreferences.add((CalendarMatchPreference) p);
            }
        }
        return calendarMatchPreferences;
    }

    /**
     * Never null, but potentially empty.
     *
     * @param type the preference type to match
     * @return a set of {@link ISharePreference}s in this instance with the specified type
     */
    public Set<ISharePreference> getPreferencesByType(String type) {
        Set<ISharePreference> result = new HashSet<ISharePreference>();
        for(ISharePreference p : this.preferences) {
            if(p.getType().equals(type)) {
                result.add(p);
            }
        }
        return result;
    }

    /**
     * Return the {@link ISharePreference} from this set of the specified
     * type if and only if 1 instance of that type exists.
     *
     * Returns null if 0 or more than 1 preference of the specified type exists.
     *
     * @param type the preference type to match
     * @return the single matching preference, or null
     */
    public ISharePreference getPreferenceByType(String type) {
        Set<ISharePreference> typeSet = getPreferencesByType(type);
        if(typeSet.size() == 1) {
            return typeSet.iterator().next();
        }
        return null;
    }

    /**
     * Short cut to determine if this object
     * has the FreeBusy preference.
     *
     * @return true if a FreeBusy preference is present
     */
    public boolean isFreeBusyOnly() {
        return containsAny(FreeBusyPreference.FREE_BUSY);
    }

    /**
     * @return true unless a NonRevocable preference is present
     */
    public boolean isRevocable() {
        return !containsAny(NonRevocablePreference.NON_REVOCABLE);
    }

    /**
     * @return the value of the IncludeSourceCalendar preference, or false if not set
     */
    public boolean isIncludeSourceCalendar() {
        // inspects the first matching preference only
        Set<ISharePreference> prefs = getPreferencesByType(IncludeSourceCalendarPreference.INCLUDE_SOURCE_CALENDAR);
        for(ISharePreference pref: prefs) {
            return Boolean.parseBoolean(pref.getValue());
        }
        // preference not present, default is false
        return false;
    }

    /**
     * Short cut to determine if this share has an
     * IncludeParticipants preference set to true.
     *
     * @return the value of the IncludeParticipants preference, or false if not set
     */
    public boolean isIncludeParticipants() {
        // inspects the first matching preference only
        Set<ISharePreference> prefs = getPreferencesByType(IncludeParticipantsPreference.INCLUDE_PARTICIPANTS);
        for(ISharePreference pref: prefs) {
            return Boolean.parseBoolean(pref.getValue());
        }
        // preference not present, default is false
        return false;
    }

    /**
     * Short cut to determine if this share has a CALENDAR_MATCH preference.
     *
     * @return true if a calendarMatch pref exists, false otherwise
     */
    public boolean isCalendarSelect() {
        return containsAny(CalendarMatchPreference.CALENDAR_MATCH);
    }

    /**
     * @return true if contains {@link GuessableSharePreference}
     */
    public boolean isGuessable() {
        return containsAny(GuessableSharePreference.GUESSABLE);
    }

    /**
     * @return true if a ProblematicRecurringEventSharePreference is present
     */
    public boolean containsProblemRecurringPreference() {
        return containsAny(ProblematicRecurringEventSharePreference.PROBLEM_RECURRENCE_SUPPORT);
    }

    /**
     * @param type the preference type to look for
     * @return true if this instance contains any preferences with the specified preference type
     */
    protected boolean containsAny(String type) {
        return getPreferencesByType(type).size() > 0;
    }

    /**
     * @return the number of preferences that participate in event filtering
     *         (property matches and access-classification matches)
     */
    public int getEventFilterCount() {
        int filteringPreferenceCount = 0;
        for(ISharePreference pref : preferences) {
            if(PropertyMatchPreference.PROPERTY_MATCH.equals(pref.getType()) || AccessClassificationMatchPreference.CLASS_ATTRIBUTE.equals(pref.getType())) {
                filteringPreferenceCount++;
            }
        }
        return filteringPreferenceCount;
    }

    /**
     * @return a human readable display of the filter type properties associated with these preferences
     */
    public String getFilterDisplay() {
        StringBuilder display = new StringBuilder();
        appendDisplayNames(display, getPreferencesByType(PropertyMatchPreference.PROPERTY_MATCH));
        Set<ISharePreference> classPrefs = getPreferencesByType(AccessClassificationMatchPreference.CLASS_ATTRIBUTE);
        // separate the two groups only when both contribute text
        if(classPrefs.size() != 0 && display.length() != 0) {
            display.append(FILTER_DISPLAY_SEPARATOR);
        }
        appendDisplayNames(display, classPrefs);
        return display.toString();
    }

    /**
     * Appends the display names of the preferences to the builder,
     * separated by {@link #FILTER_DISPLAY_SEPARATOR}.
     */
    private static void appendDisplayNames(StringBuilder display, Set<ISharePreference> prefs) {
        for(Iterator<ISharePreference> i = prefs.iterator(); i.hasNext();) {
            display.append(i.next().getDisplayName());
            if(i.hasNext()) {
                display.append(FILTER_DISPLAY_SEPARATOR);
            }
        }
    }

    /**
     * @return the values of all access-classification filter preferences; never null
     */
    public List<String> getClassificationFilters() {
        List<String> results = new ArrayList<String>();
        Set<ISharePreference> preferences = getPreferencesByType(AccessClassificationMatchPreference.CLASS_ATTRIBUTE);
        for(ISharePreference pref: preferences) {
            results.add(pref.getValue());
        }
        return results;
    }

    /**
     * @return {@link ContentFilter}s built from the CALENDAR_MATCH preferences,
     *         grouped by preference key; never null
     */
    public List<ContentFilter> getCalendarFilters() {
        Map<String, CalendarFilterImpl> map = new HashMap<String, CalendarFilterImpl>();
        Set<ISharePreference> filterPreferences = getPreferencesByType(CalendarMatchPreference.CALENDAR_MATCH);
        for(final ISharePreference pref: filterPreferences) {
            CalendarFilterImpl filter = map.get(pref.getKey());
            if(filter == null) {
                map.put(pref.getKey(), new CalendarFilterImpl(pref));
            } else {
                filter.addMatchValue(pref.getValue());
            }
        }
        return new ArrayList<ContentFilter>(map.values());
    }

    /**
     * @return {@link ContentFilter}s built from the PROPERTY_MATCH preferences,
     *         grouped by preference key; never null
     */
    public List<ContentFilter> getContentFilters() {
        Map<String, ContentFilterImpl> map = new HashMap<String, ContentFilterImpl>();
        Set<ISharePreference> filterPreferences = getPreferencesByType(PropertyMatchPreference.PROPERTY_MATCH);
        for(final ISharePreference pref: filterPreferences) {
            ContentFilterImpl filter = map.get(pref.getKey());
            if(filter == null) {
                map.put(pref.getKey(), new ContentFilterImpl(pref));
            } else {
                filter.addMatchValue(pref.getValue());
            }
        }
        return new ArrayList<ContentFilter>(map.values());
    }

    /* (non-Javadoc)
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        ToStringBuilder builder = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
        builder.append("preferences", preferences);
        return builder.toString();
    }

    /**
     * Invoke {@link ISharePreference#dispose()} on all preferences.
     */
    public void disposeAll() {
        for(ISharePreference p: preferences) {
            p.dispose();
        }
    }

    /**
     * Construct a concrete {@link ISharePreference} from the 3 string arguments.
     * Will return null if the preferenceType is unknown, or if the key is an unsupported value.
     *
     * @param preferenceType the value for {@link ISharePreference#getType()}
     * @param preferenceKey the value for {@link ISharePreference#getKey()}
     * @param preferenceValue the value for {@link ISharePreference#getValue()}
     * @return one of the {@link ISharePreference} implementations, or null
     */
    public static ISharePreference construct(String preferenceType, String preferenceKey, String preferenceValue) {
        if(FreeBusyPreference.FREE_BUSY.equals(preferenceType)) {
            return new FreeBusyPreference();
        } else if(GuessableSharePreference.GUESSABLE.equals(preferenceType)) {
            return new GuessableSharePreference();
        } else if(AccessClassificationMatchPreference.CLASS_ATTRIBUTE.equals(preferenceType)) {
            AccessClassification access;
            try {
                access = AccessClassification.valueOf(preferenceValue);
            } catch (IllegalArgumentException e) {
                // BUG FIX: valueOf throws for unknown values rather than
                // returning null; honor the documented "return null for
                // unsupported values" contract instead of propagating.
                LOG.warn("unknown AccessClassification value " + preferenceValue, e);
                access = null;
            }
            if(access == null) {
                return null;
            }
            return new AccessClassificationMatchPreference(access);
        } else if(PropertyMatchPreference.PROPERTY_MATCH.equals(preferenceType)) {
            return new PropertyMatchPreference(preferenceKey, preferenceValue);
        } else if(IncludeParticipantsPreference.INCLUDE_PARTICIPANTS.equals(preferenceType)) {
            return new IncludeParticipantsPreference(Boolean.parseBoolean(preferenceValue));
        } else if(CalendarMatchPreference.CALENDAR_MATCH.equals(preferenceType)) {
            LOG.trace("new CalendarMatchPreference(preferenceKey="+preferenceKey+", preferenceValue="+preferenceValue+")");
            return new CalendarMatchPreference(preferenceKey, preferenceValue);
        } else if(IncludeSourceCalendarPreference.INCLUDE_SOURCE_CALENDAR.equals(preferenceType)) {
            return new IncludeSourceCalendarPreference(Boolean.parseBoolean(preferenceValue));
        } else {
            LOG.warn("could not match any preference types for type=" + preferenceType + ", key=" + preferenceKey + ", value=" + preferenceValue + ", returning null");
            return null;
        }
    }

    /**
     * {@link ContentFilter} over a single property-match preference key,
     * accumulating every match value registered for that key.
     *
     * @author Nicholas Blair
     */
    static class ContentFilterImpl implements ContentFilter {
        private final ISharePreference preference;
        private final List<String> matchValues = new ArrayList<String>();
        /**
         * @param preference the first preference for this key; its value seeds the match list
         */
        ContentFilterImpl(ISharePreference preference) {
            this.preference = preference;
            addMatchValue(preference.getValue());
        }
        /*
         * (non-Javadoc)
         * @see edu.wisc.wisccal.shareurl.domain.ContentFilter#getPropertyName()
         */
        @Override
        public String getPropertyName() {
            return preference.getKey();
        }
        /*
         * (non-Javadoc)
         * @see edu.wisc.wisccal.shareurl.domain.ContentFilter#getMatchValue()
         */
        @Override
        public List<String> getMatchValues() {
            return matchValues;
        }
        /**
         * Adds another value to match against.
         *
         * @param value the value to add
         */
        public void addMatchValue(String value) {
            matchValues.add(value);
        }
    }

    /**
     * {@link ContentFilter} over a single calendar-match preference key,
     * accumulating every match value registered for that key.
     *
     * @author ctcudd
     */
    static class CalendarFilterImpl implements ContentFilter {
        private final ISharePreference preference;
        private final List<String> matchValues = new ArrayList<String>();
        /**
         * @param preference the first preference for this key; its value seeds the match list
         */
        CalendarFilterImpl(ISharePreference preference) {
            this.preference = preference;
            addMatchValue(preference.getValue());
        }
        @Override
        public String getPropertyName() {
            return preference.getKey();
        }
        @Override
        public List<String> getMatchValues() {
            return matchValues;
        }
        /**
         * Adds another value to match against.
         *
         * @param value the value to add
         */
        public void addMatchValue(String value) {
            matchValues.add(value);
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.benchmark.stress;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.node.Node;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicLong;
import static org.elasticsearch.client.Requests.searchRequest;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.node.NodeBuilder.nodeBuilder;
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
/**
*
*/
/**
 * Multi-node stress benchmark: starts {@code numberOfNodes} local nodes,
 * warms up with a single indexer, then runs concurrent searcher and
 * indexer threads against the cluster, timing the whole run.
 */
public class NodesStressTest {

    private Node[] nodes;

    private int numberOfNodes = 2;

    private Client[] clients;

    // Source of unique document ids across all indexer threads.
    private AtomicLong idGenerator = new AtomicLong();

    // Values of the "num" field cycle modulo this limit.
    private int fieldNumLimit = 50;

    private long searcherIterations = 10;

    private Searcher[] searcherThreads = new Searcher[1];

    private long indexIterations = 10;

    private Indexer[] indexThreads = new Indexer[1];

    // Pause after the run completes, before main() exits.
    private TimeValue sleepAfterDone = TimeValue.timeValueMillis(0);

    // Pause after the run completes, before clients/nodes are closed.
    private TimeValue sleepBeforeClose = TimeValue.timeValueMillis(0);

    // Counted down by each worker when it finishes.
    private CountDownLatch latch;

    // Workers rendezvous on barrier1, then barrier2 (the clock starts
    // between the two), before doing any work.
    private CyclicBarrier barrier1;

    private CyclicBarrier barrier2;

    public NodesStressTest() {
    }

    /** @param numberOfNodes how many local nodes to start */
    public NodesStressTest numberOfNodes(int numberOfNodes) {
        this.numberOfNodes = numberOfNodes;
        return this;
    }

    /** @param fieldNumLimit exclusive upper bound for "num" field values */
    public NodesStressTest fieldNumLimit(int fieldNumLimit) {
        this.fieldNumLimit = fieldNumLimit;
        return this;
    }

    /** @param searchIterations searches each searcher thread performs */
    public NodesStressTest searchIterations(int searchIterations) {
        this.searcherIterations = searchIterations;
        return this;
    }

    /** @param numberOfSearcherThreads number of concurrent searcher threads */
    public NodesStressTest searcherThreads(int numberOfSearcherThreads) {
        searcherThreads = new Searcher[numberOfSearcherThreads];
        return this;
    }

    /** @param indexIterations documents each indexer thread indexes */
    public NodesStressTest indexIterations(long indexIterations) {
        this.indexIterations = indexIterations;
        return this;
    }

    /** @param numberOfWriterThreads number of concurrent indexer threads */
    public NodesStressTest indexThreads(int numberOfWriterThreads) {
        indexThreads = new Indexer[numberOfWriterThreads];
        return this;
    }

    /** @param time pause before the process exits, after everything is closed */
    public NodesStressTest sleepAfterDone(TimeValue time) {
        this.sleepAfterDone = time;
        return this;
    }

    /** @param time pause between finishing the run and closing clients/nodes */
    public NodesStressTest sleepBeforeClose(TimeValue time) {
        this.sleepBeforeClose = time;
        return this;
    }

    /**
     * Starts the nodes and clients, runs a single-threaded warmup pass of
     * 10k documents, and prepares the worker threads and barriers for
     * {@link #start()}.
     *
     * @param settings extra node settings layered over the defaults
     */
    public NodesStressTest build(Settings settings) throws Exception {
        settings = settingsBuilder()
                .put(SETTING_NUMBER_OF_SHARDS, 5)
                .put(SETTING_NUMBER_OF_REPLICAS, 1)
                .put(settings)
                .build();

        nodes = new Node[numberOfNodes];
        clients = new Client[numberOfNodes];
        for (int i = 0; i < numberOfNodes; i++) {
            nodes[i] = nodeBuilder().settings(settingsBuilder().put(settings).put("name", "node" + i)).node();
            clients[i] = nodes[i].client();
        }

        for (int i = 0; i < searcherThreads.length; i++) {
            searcherThreads[i] = new Searcher(i);
        }
        for (int i = 0; i < indexThreads.length; i++) {
            indexThreads[i] = new Indexer(i);
        }

        // warmup: one indexer plus this thread rendezvous on size-2 barriers
        latch = new CountDownLatch(1);
        barrier1 = new CyclicBarrier(2);
        barrier2 = new CyclicBarrier(2);
        StopWatch stopWatch = new StopWatch().start();
        Indexer warmup = new Indexer(-1).max(10000);
        warmup.start();
        barrier1.await();
        barrier2.await();
        latch.await();
        stopWatch.stop();
        System.out.println("Done Warmup, took [" + stopWatch.totalTime() + "]");

        // re-arm latch and barriers for the real run (all workers + main)
        latch = new CountDownLatch(searcherThreads.length + indexThreads.length);
        barrier1 = new CyclicBarrier(searcherThreads.length + indexThreads.length + 1);
        barrier2 = new CyclicBarrier(searcherThreads.length + indexThreads.length + 1);
        return this;
    }

    /**
     * Runs the benchmark: releases all worker threads, waits for them to
     * finish, reports the elapsed time, then shuts everything down.
     */
    public void start() throws Exception {
        for (Thread t : searcherThreads) {
            t.start();
        }
        for (Thread t : indexThreads) {
            t.start();
        }
        barrier1.await();

        // the clock covers only the work between barrier2 and the latch
        StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        barrier2.await();

        latch.await();
        stopWatch.stop();

        System.out.println("Done, took [" + stopWatch.totalTime() + "]");
        System.out.println("Sleeping before close: " + sleepBeforeClose);
        Thread.sleep(sleepBeforeClose.millis());

        for (Client client : clients) {
            client.close();
        }
        for (Node node : nodes) {
            node.close();
        }

        // BUG FIX: this message previously printed sleepBeforeClose.
        System.out.println("Sleeping before exit: " + sleepAfterDone);
        Thread.sleep(sleepAfterDone.millis());
    }

    /** Worker that runs constant-score term queries round-robin over the clients. */
    class Searcher extends Thread {
        final int id;
        long counter = 0;
        long max = searcherIterations;

        Searcher(int id) {
            super("Searcher" + id);
            this.id = id;
        }

        @Override
        public void run() {
            try {
                barrier1.await();
                barrier2.await();
                for (; counter < max; counter++) {
                    Client client = client(counter);
                    QueryBuilder query = termQuery("num", counter % fieldNumLimit);
                    query = constantScoreQuery(query);
                    SearchResponse search = client.search(searchRequest()
                            .source(searchSource().query(query)))
                            .actionGet();
                }
            } catch (Exception e) {
                System.err.println("Failed to search:");
                e.printStackTrace();
            } finally {
                latch.countDown();
            }
        }
    }

    /** Worker that indexes documents with unique ids round-robin over the clients. */
    class Indexer extends Thread {
        final int id;
        long counter = 0;
        long max = indexIterations;

        Indexer(int id) {
            super("Indexer" + id);
            this.id = id;
        }

        /** Overrides the iteration count (used by the warmup pass). */
        Indexer max(int max) {
            this.max = max;
            return this;
        }

        @Override
        public void run() {
            try {
                barrier1.await();
                barrier2.await();
                for (; counter < max; counter++) {
                    Client client = client(counter);
                    long id = idGenerator.incrementAndGet();
                    client.index(Requests.indexRequest().index("test").type("type1").id(Long.toString(id))
                            .source(XContentFactory.jsonBuilder().startObject()
                                    .field("num", id % fieldNumLimit)
                                    .endObject()))
                            .actionGet();
                }
                System.out.println("Indexer [" + id + "]: Done");
            } catch (Exception e) {
                System.err.println("Failed to index:");
                e.printStackTrace();
            } finally {
                latch.countDown();
            }
        }
    }

    /** Round-robin client selection by iteration counter. */
    private Client client(long i) {
        return clients[((int) (i % clients.length))];
    }

    public static void main(String[] args) throws Exception {
        NodesStressTest test = new NodesStressTest()
                .numberOfNodes(2)
                .indexThreads(5)
                .indexIterations(10 * 1000)
                .searcherThreads(5)
                .searchIterations(10 * 1000)
                .sleepBeforeClose(TimeValue.timeValueMinutes(10))
                .sleepAfterDone(TimeValue.timeValueMinutes(10))
                .build(EMPTY_SETTINGS);
        test.start();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.testsuites;
import junit.framework.TestSuite;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunctionBackupFilterSelfTest;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunctionExcludeNeighborsSelfTest;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunctionFastPowerOfTwoHashSelfTest;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunctionStandardHashSelfTest;
import org.apache.ignite.internal.IgniteReflectionFactorySelfTest;
import org.apache.ignite.internal.processors.cache.CacheComparatorTest;
import org.apache.ignite.internal.processors.cache.CacheConcurrentReadThroughTest;
import org.apache.ignite.internal.processors.cache.CacheConfigurationLeakTest;
import org.apache.ignite.internal.processors.cache.CacheDhtLocalPartitionAfterRemoveSelfTest;
import org.apache.ignite.internal.processors.cache.CacheEnumOperationsSingleNodeTest;
import org.apache.ignite.internal.processors.cache.CacheEnumOperationsTest;
import org.apache.ignite.internal.processors.cache.CacheExchangeMessageDuplicatedStateTest;
import org.apache.ignite.internal.processors.cache.CacheGroupLocalConfigurationSelfTest;
import org.apache.ignite.internal.processors.cache.CacheDataRegionConfigurationTest;
import org.apache.ignite.internal.processors.cache.CacheOptimisticTransactionsWithFilterSingleServerTest;
import org.apache.ignite.internal.processors.cache.CacheOptimisticTransactionsWithFilterTest;
import org.apache.ignite.internal.processors.cache.CrossCacheTxNearEnabledRandomOperationsTest;
import org.apache.ignite.internal.processors.cache.CrossCacheTxRandomOperationsTest;
import org.apache.ignite.internal.processors.cache.GridCacheAtomicMessageCountSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheFinishPartitionsSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheOffheapUpdateSelfTest;
import org.apache.ignite.internal.processors.cache.GridCachePartitionedGetSelfTest;
import org.apache.ignite.internal.processors.cache.GridCachePartitionedProjectionAffinitySelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheVariableTopologySelfTest;
import org.apache.ignite.internal.processors.cache.IgniteAtomicCacheEntryProcessorNodeJoinTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheEntryProcessorNodeJoinTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheIncrementTxTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheNoSyncForGetTest;
import org.apache.ignite.internal.processors.cache.IgniteCachePartitionMapUpdateTest;
import org.apache.ignite.internal.processors.cache.IgniteClientCacheStartFailoverTest;
import org.apache.ignite.internal.processors.cache.IgniteDynamicCacheAndNodeStop;
import org.apache.ignite.internal.processors.cache.IgniteNearClientCacheCloseTest;
import org.apache.ignite.internal.processors.cache.IgniteOnePhaseCommitInvokeTest;
import org.apache.ignite.internal.processors.cache.IgniteOnePhaseCommitNearReadersTest;
import org.apache.ignite.internal.processors.cache.MemoryPolicyConfigValidationTest;
import org.apache.ignite.internal.processors.cache.NonAffinityCoordinatorDynamicStartStopTest;
import org.apache.ignite.internal.processors.cache.distributed.CacheLoadingConcurrentGridStartSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.CacheLoadingConcurrentGridStartSelfTestAllowOverwrite;
import org.apache.ignite.internal.processors.cache.distributed.CacheLockReleaseNodeLeaveTest;
import org.apache.ignite.internal.processors.cache.distributed.CachePartitionStateTest;
import org.apache.ignite.internal.processors.cache.distributed.CacheTxLoadingConcurrentGridStartSelfTestAllowOverwrite;
import org.apache.ignite.internal.processors.cache.distributed.GridCachePartitionNotLoadedEventSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.GridCachePartitionedNearDisabledTxMultiThreadedSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.GridCacheTransformEventSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.IgniteCacheClientNodeChangingTopologyTest;
import org.apache.ignite.internal.processors.cache.distributed.IgniteCacheClientNodePartitionsExchangeTest;
import org.apache.ignite.internal.processors.cache.distributed.IgniteCacheServerNodeConcurrentStart;
import org.apache.ignite.internal.processors.cache.distributed.dht.CachePartitionPartialCountersMapSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheColocatedOptimisticTransactionSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheColocatedPreloadRestartSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheColocatedPrimarySyncSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheColocatedTxSingleThreadedSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheDhtEntrySelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheDhtEvictionsDisabledSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheDhtMappingSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheDhtPreloadBigDataSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheDhtPreloadDelayedSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheDhtPreloadDisabledSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheDhtPreloadMultiThreadedSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheDhtPreloadOnheapSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheDhtPreloadPutGetSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheDhtPreloadSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheDhtPreloadStartStopSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheDhtPreloadUnloadSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCachePartitionedNearDisabledLockSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCachePartitionedTopologyChangeSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCachePartitionedUnloadEventsSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.IgniteCacheClearDuringRebalanceTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.IgniteCachePartitionedBackupNodeFailureRecoveryTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheAtomicNearEvictionEventSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheAtomicNearMultiNodeSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheAtomicNearReadersSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearClientHitTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearEvictionEventSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearJobExecutionSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearMultiGetSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearMultiNodeSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearOneNodeSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearPartitionedClearSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearPreloadRestartSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearPrimarySyncSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearReaderPreloadSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearReadersSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheNearTxForceKeyTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedAffinitySelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedAtomicGetAndTransformStoreSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedBasicApiTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedBasicOpSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedBasicStoreMultiNodeSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedBasicStoreSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedEventSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedExplicitLockNodeFailureSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedGetAndTransformStoreSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedLoadCacheSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedLockSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedMultiNodeLockSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedMultiNodeSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedMultiThreadedPutGetSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedNearDisabledBasicStoreMultiNodeSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedNodeFailureSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedPreloadLifecycleSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedTxMultiThreadedSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedTxSingleThreadedSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCachePartitionedTxTimeoutSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridCacheRendezvousAffinityClientSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearCacheStoreUpdateTest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridPartitionedBackupLoadSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.near.NearCacheSyncUpdateTest;
import org.apache.ignite.internal.processors.cache.distributed.near.NoneRebalanceModeSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.replicated.GridCacheReplicatedJobExecutionTest;
import org.apache.ignite.internal.processors.cache.local.GridCacheLocalAtomicBasicStoreSelfTest;
import org.apache.ignite.internal.processors.cache.local.GridCacheLocalAtomicGetAndTransformStoreSelfTest;
import org.apache.ignite.internal.processors.cache.local.GridCacheLocalBasicApiSelfTest;
import org.apache.ignite.internal.processors.cache.local.GridCacheLocalBasicStoreSelfTest;
import org.apache.ignite.internal.processors.cache.local.GridCacheLocalEventSelfTest;
import org.apache.ignite.internal.processors.cache.local.GridCacheLocalEvictionEventSelfTest;
import org.apache.ignite.internal.processors.cache.local.GridCacheLocalGetAndTransformStoreSelfTest;
import org.apache.ignite.internal.processors.cache.local.GridCacheLocalIsolatedNodesSelfTest;
import org.apache.ignite.internal.processors.cache.local.GridCacheLocalLoadAllSelfTest;
import org.apache.ignite.internal.processors.cache.local.GridCacheLocalLockSelfTest;
import org.apache.ignite.internal.processors.cache.local.GridCacheLocalMultithreadedSelfTest;
import org.apache.ignite.internal.processors.cache.local.GridCacheLocalTxMultiThreadedSelfTest;
import org.apache.ignite.internal.processors.cache.local.GridCacheLocalTxSingleThreadedSelfTest;
import org.apache.ignite.internal.processors.cache.local.GridCacheLocalTxTimeoutSelfTest;
import org.apache.ignite.internal.processors.cache.persistence.MemoryPolicyInitializationTest;
import org.apache.ignite.internal.processors.continuous.IgniteNoCustomEventsOnNodeStart;
/**
 * Test suite.
 *
 * Part 2 of the IgniteCache suites: local cache basics plus partitioned /
 * near cache operations, preloading and topology-change scenarios.
 */
public class IgniteCacheTestSuite2 extends TestSuite {
    /**
     * @return IgniteCache test suite.
     * @throws Exception Thrown in case of the failure.
     */
    public static TestSuite suite() throws Exception {
        TestSuite suite = new TestSuite("IgniteCache Test Suite part 2");

        // Every test class is registered through the same
        // addTest(new TestSuite(...)) form. The original mixed this with
        // addTestSuite(...), which junit.framework.TestSuite defines as the
        // exact same call; one uniform form keeps the list easy to scan and
        // to diff.

        // Local cache.
        suite.addTest(new TestSuite(GridCacheLocalBasicApiSelfTest.class));
        suite.addTest(new TestSuite(GridCacheLocalBasicStoreSelfTest.class));
        suite.addTest(new TestSuite(GridCacheLocalAtomicBasicStoreSelfTest.class));
        suite.addTest(new TestSuite(GridCacheLocalGetAndTransformStoreSelfTest.class));
        suite.addTest(new TestSuite(GridCacheLocalAtomicGetAndTransformStoreSelfTest.class));
        suite.addTest(new TestSuite(GridCacheLocalLoadAllSelfTest.class));
        suite.addTest(new TestSuite(GridCacheLocalLockSelfTest.class));
        suite.addTest(new TestSuite(GridCacheLocalMultithreadedSelfTest.class));
        suite.addTest(new TestSuite(GridCacheLocalTxSingleThreadedSelfTest.class));
        suite.addTest(new TestSuite(GridCacheLocalTxTimeoutSelfTest.class));
        suite.addTest(new TestSuite(GridCacheLocalEventSelfTest.class));
        suite.addTest(new TestSuite(GridCacheLocalEvictionEventSelfTest.class));
        suite.addTest(new TestSuite(GridCacheVariableTopologySelfTest.class));
        suite.addTest(new TestSuite(GridCacheLocalTxMultiThreadedSelfTest.class));
        suite.addTest(new TestSuite(GridCacheTransformEventSelfTest.class));
        suite.addTest(new TestSuite(GridCacheLocalIsolatedNodesSelfTest.class));

        // Partitioned cache.
        suite.addTest(new TestSuite(GridCachePartitionedGetSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedBasicApiTest.class));
        suite.addTest(new TestSuite(GridCacheNearMultiGetSelfTest.class));
        suite.addTest(new TestSuite(NoneRebalanceModeSelfTest.class));
        suite.addTest(new TestSuite(GridCacheNearJobExecutionSelfTest.class));
        suite.addTest(new TestSuite(GridCacheReplicatedJobExecutionTest.class));
        suite.addTest(new TestSuite(GridCacheNearOneNodeSelfTest.class));
        suite.addTest(new TestSuite(GridCacheNearMultiNodeSelfTest.class));
        suite.addTest(new TestSuite(GridCacheAtomicNearMultiNodeSelfTest.class));
        suite.addTest(new TestSuite(GridCacheNearReadersSelfTest.class));
        suite.addTest(new TestSuite(GridCacheNearReaderPreloadSelfTest.class));
        suite.addTest(new TestSuite(GridCacheAtomicNearReadersSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedAffinitySelfTest.class));
        suite.addTest(new TestSuite(RendezvousAffinityFunctionExcludeNeighborsSelfTest.class));
        suite.addTest(new TestSuite(RendezvousAffinityFunctionFastPowerOfTwoHashSelfTest.class));
        suite.addTest(new TestSuite(RendezvousAffinityFunctionStandardHashSelfTest.class));
        suite.addTest(new TestSuite(GridCacheRendezvousAffinityClientSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedProjectionAffinitySelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedBasicOpSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedBasicStoreSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedGetAndTransformStoreSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedAtomicGetAndTransformStoreSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedBasicStoreMultiNodeSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedNearDisabledBasicStoreMultiNodeSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedEventSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedLockSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedNearDisabledLockSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedMultiNodeLockSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedMultiNodeSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedMultiThreadedPutGetSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedNodeFailureSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedExplicitLockNodeFailureSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedTxSingleThreadedSelfTest.class));
        suite.addTest(new TestSuite(GridCacheColocatedTxSingleThreadedSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedTxTimeoutSelfTest.class));
        suite.addTest(new TestSuite(GridCacheFinishPartitionsSelfTest.class));
        suite.addTest(new TestSuite(GridCacheDhtEntrySelfTest.class));
        suite.addTest(new TestSuite(GridCacheDhtMappingSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedTxMultiThreadedSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedNearDisabledTxMultiThreadedSelfTest.class));
        suite.addTest(new TestSuite(GridCacheDhtPreloadSelfTest.class));
        suite.addTest(new TestSuite(GridCacheDhtPreloadOnheapSelfTest.class));
        suite.addTest(new TestSuite(GridCacheDhtPreloadBigDataSelfTest.class));
        suite.addTest(new TestSuite(GridCacheDhtPreloadPutGetSelfTest.class));
        suite.addTest(new TestSuite(GridCacheDhtPreloadDisabledSelfTest.class));
        suite.addTest(new TestSuite(GridCacheDhtPreloadMultiThreadedSelfTest.class));
        suite.addTest(new TestSuite(CacheDhtLocalPartitionAfterRemoveSelfTest.class));
        suite.addTest(new TestSuite(GridCacheColocatedPreloadRestartSelfTest.class));
        suite.addTest(new TestSuite(GridCacheNearPreloadRestartSelfTest.class));
        suite.addTest(new TestSuite(GridCacheDhtPreloadStartStopSelfTest.class));
        suite.addTest(new TestSuite(GridCacheDhtPreloadUnloadSelfTest.class));
        suite.addTest(new TestSuite(RendezvousAffinityFunctionBackupFilterSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedPreloadLifecycleSelfTest.class));
        suite.addTest(new TestSuite(CacheLoadingConcurrentGridStartSelfTest.class));
        suite.addTest(new TestSuite(CacheLoadingConcurrentGridStartSelfTestAllowOverwrite.class));
        suite.addTest(new TestSuite(CacheTxLoadingConcurrentGridStartSelfTestAllowOverwrite.class));
        suite.addTest(new TestSuite(GridCacheDhtPreloadDelayedSelfTest.class));
        suite.addTest(new TestSuite(GridPartitionedBackupLoadSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedLoadCacheSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionNotLoadedEventSelfTest.class));
        suite.addTest(new TestSuite(GridCacheDhtEvictionsDisabledSelfTest.class));
        suite.addTest(new TestSuite(GridCacheNearEvictionEventSelfTest.class));
        suite.addTest(new TestSuite(GridCacheAtomicNearEvictionEventSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedTopologyChangeSelfTest.class));
        suite.addTest(new TestSuite(GridCachePartitionedUnloadEventsSelfTest.class));
        suite.addTest(new TestSuite(GridCacheColocatedOptimisticTransactionSelfTest.class));
        suite.addTest(new TestSuite(GridCacheAtomicMessageCountSelfTest.class));
        suite.addTest(new TestSuite(GridCacheNearPartitionedClearSelfTest.class));
        suite.addTest(new TestSuite(GridCacheOffheapUpdateSelfTest.class));
        suite.addTest(new TestSuite(GridCacheNearClientHitTest.class));
        suite.addTest(new TestSuite(GridCacheNearPrimarySyncSelfTest.class));
        suite.addTest(new TestSuite(GridCacheColocatedPrimarySyncSelfTest.class));
        suite.addTest(new TestSuite(IgniteCachePartitionMapUpdateTest.class));
        suite.addTest(new TestSuite(IgniteCacheClientNodePartitionsExchangeTest.class));
        suite.addTest(new TestSuite(IgniteCacheClientNodeChangingTopologyTest.class));
        suite.addTest(new TestSuite(IgniteCacheServerNodeConcurrentStart.class));
        suite.addTest(new TestSuite(IgniteCacheEntryProcessorNodeJoinTest.class));
        suite.addTest(new TestSuite(IgniteAtomicCacheEntryProcessorNodeJoinTest.class));
        suite.addTest(new TestSuite(GridCacheNearTxForceKeyTest.class));
        suite.addTest(new TestSuite(CrossCacheTxRandomOperationsTest.class));
        suite.addTest(new TestSuite(CrossCacheTxNearEnabledRandomOperationsTest.class));
        suite.addTest(new TestSuite(IgniteDynamicCacheAndNodeStop.class));
        suite.addTest(new TestSuite(CacheLockReleaseNodeLeaveTest.class));
        suite.addTest(new TestSuite(NearCacheSyncUpdateTest.class));
        suite.addTest(new TestSuite(CacheConfigurationLeakTest.class));
        suite.addTest(new TestSuite(MemoryPolicyConfigValidationTest.class));
        suite.addTest(new TestSuite(MemoryPolicyInitializationTest.class));
        suite.addTest(new TestSuite(CacheDataRegionConfigurationTest.class));
        suite.addTest(new TestSuite(CacheGroupLocalConfigurationSelfTest.class));
        suite.addTest(new TestSuite(CacheEnumOperationsSingleNodeTest.class));
        suite.addTest(new TestSuite(CacheEnumOperationsTest.class));
        suite.addTest(new TestSuite(IgniteCacheIncrementTxTest.class));
        suite.addTest(new TestSuite(IgniteCachePartitionedBackupNodeFailureRecoveryTest.class));
        suite.addTest(new TestSuite(IgniteNoCustomEventsOnNodeStart.class));
        suite.addTest(new TestSuite(CacheExchangeMessageDuplicatedStateTest.class));
        suite.addTest(new TestSuite(CacheConcurrentReadThroughTest.class));
        suite.addTest(new TestSuite(GridNearCacheStoreUpdateTest.class));
        suite.addTest(new TestSuite(IgniteOnePhaseCommitInvokeTest.class));
        suite.addTest(new TestSuite(IgniteCacheNoSyncForGetTest.class));
        suite.addTest(new TestSuite(IgniteOnePhaseCommitNearReadersTest.class));
        suite.addTest(new TestSuite(IgniteNearClientCacheCloseTest.class));
        suite.addTest(new TestSuite(IgniteClientCacheStartFailoverTest.class));
        suite.addTest(new TestSuite(CacheOptimisticTransactionsWithFilterSingleServerTest.class));
        suite.addTest(new TestSuite(CacheOptimisticTransactionsWithFilterTest.class));
        suite.addTest(new TestSuite(NonAffinityCoordinatorDynamicStartStopTest.class));
        suite.addTest(new TestSuite(IgniteCacheClearDuringRebalanceTest.class));
        suite.addTest(new TestSuite(CachePartitionStateTest.class));
        suite.addTest(new TestSuite(CacheComparatorTest.class));
        suite.addTest(new TestSuite(CachePartitionPartialCountersMapSelfTest.class));
        suite.addTest(new TestSuite(IgniteReflectionFactorySelfTest.class));

        return suite;
    }
}
| |
package com.cloud.vm;
import com.cloud.dao.EntityManager;
import com.cloud.legacymodel.storage.VirtualMachineTemplate;
import com.cloud.legacymodel.to.DiskTO;
import com.cloud.legacymodel.user.Account;
import com.cloud.legacymodel.vm.BootloaderType;
import com.cloud.legacymodel.vm.VirtualMachine;
import com.cloud.model.enumeration.HypervisorType;
import com.cloud.model.enumeration.VirtualMachineType;
import com.cloud.offering.ServiceOffering;
import com.cloud.service.ServiceOfferingVO;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Implementation of VirtualMachineProfile.
 *
 * Aggregates a {@link VirtualMachine} with its template, service offering,
 * owner, nics, disks, boot arguments and deployment parameters. Owner,
 * offering and template are resolved lazily through the static
 * {@link EntityManager} (installed via {@link #init(EntityManager)}) when they
 * were not supplied up front.
 *
 * NOTE(review): the type-only constructor leaves {@code _vm} null; accessors
 * that delegate to {@code _vm} (getHostName, getId, ...) assume callers only
 * use them on profiles built around a real VM.
 */
public class VirtualMachineProfileImpl implements VirtualMachineProfile {
    // Shared lookup facility for lazy resolution; set once at bootstrap.
    static EntityManager s_entityMgr;

    // Backing VM; null when the profile was created from a type only.
    VirtualMachine _vm;
    // Lazily resolved from _vm when null (see getServiceOffering()).
    ServiceOffering _offering;
    // Lazily resolved from _vm when null (see getTemplate()).
    VirtualMachineTemplate _template;
    UserVmDetailVO _userVmDetails;
    // Deployment parameters; never null after construction.
    Map<Param, Object> _params;
    List<NicProfile> _nics = new ArrayList<>();
    List<DiskTO> _disks = new ArrayList<>();
    StringBuilder _bootArgs = new StringBuilder();
    // Lazily resolved from _vm's account id when null (see getOwner()).
    Account _owner;
    BootloaderType _bootloader;
    Float cpuOvercommitRatio = 1.0f;
    Float memoryOvercommitRatio = 1.0f;
    VirtualMachineType _type;
    List<String[]> vmData = null;
    String configDriveLabel = null;
    String configDriveIsoBaseLocation = "/tmp/";
    String configDriveIsoRootFolder = null;
    String configDriveIsoFile = null;

    /** Creates a profile for {@code vm} with no explicit template, offering, owner or params. */
    public VirtualMachineProfileImpl(final VirtualMachine vm) {
        this(vm, null, null, null, null);
    }

    /**
     * Full constructor.
     *
     * @param vm       Backing virtual machine (may be null).
     * @param template Template, or null to resolve lazily from {@code vm}.
     * @param offering Service offering, or null to resolve lazily from {@code vm}.
     * @param owner    Owning account, or null to resolve lazily from {@code vm}.
     * @param params   Deployment parameters; null is normalized to an empty map.
     */
    public VirtualMachineProfileImpl(final VirtualMachine vm, final VirtualMachineTemplate template, final ServiceOffering offering, final Account owner, final Map<Param,
            Object> params) {
        _vm = vm;
        _template = template;
        _offering = offering;
        _params = params;
        _owner = owner;
        if (_params == null) {
            _params = new HashMap<>();
        }
        if (vm != null) {
            _type = vm.getType();
        }
    }

    /** Creates a VM-less profile carrying only the machine type. */
    public VirtualMachineProfileImpl(final VirtualMachineType type) {
        _type = type;
    }

    /** Installs the entity manager used for lazy lookups; call once at startup. */
    static void init(final EntityManager entityMgr) {
        s_entityMgr = entityMgr;
    }

    @Override
    public String toString() {
        // Guard against the type-only constructor, which leaves _vm null and
        // previously made toString() throw an NPE (painful in log statements).
        return _vm != null ? _vm.toString() : "VirtualMachineProfile[type=" + _type + "]";
    }

    @Override
    public List<String[]> getVmData() {
        return vmData;
    }

    @Override
    public void setVmData(final List<String[]> vmData) {
        this.vmData = vmData;
    }

    @Override
    public String getConfigDriveLabel() {
        return configDriveLabel;
    }

    @Override
    public void setConfigDriveLabel(final String configDriveLabel) {
        this.configDriveLabel = configDriveLabel;
    }

    @Override
    public String getConfigDriveIsoRootFolder() {
        return configDriveIsoRootFolder;
    }

    @Override
    public void setConfigDriveIsoRootFolder(final String configDriveIsoRootFolder) {
        this.configDriveIsoRootFolder = configDriveIsoRootFolder;
    }

    @Override
    public String getConfigDriveIsoFile() {
        return configDriveIsoFile;
    }

    @Override
    public void setConfigDriveIsoFile(final String isoFile) {
        this.configDriveIsoFile = isoFile;
    }

    @Override
    public String getHostName() {
        return _vm.getHostName();
    }

    @Override
    public String getInstanceName() {
        return _vm.getInstanceName();
    }

    /** Returns the owning account, resolving it from the VM's account id on first use. */
    @Override
    public Account getOwner() {
        if (_owner == null) {
            _owner = s_entityMgr.findById(Account.class, _vm.getAccountId());
        }
        return _owner;
    }

    @Override
    public VirtualMachine getVirtualMachine() {
        return _vm;
    }

    /** Returns the service offering, resolving it from the VM's offering id on first use. */
    @Override
    public ServiceOffering getServiceOffering() {
        if (_offering == null) {
            _offering = s_entityMgr.findById(ServiceOffering.class, _vm.getServiceOfferingId());
        }
        return _offering;
    }

    @Override
    public Object getParameter(final Param name) {
        return _params.get(name);
    }

    @Override
    public HypervisorType getHypervisorType() {
        return _vm.getHypervisorType();
    }

    /**
     * Returns the template, resolving it on first use. Uses the
     * including-removed lookup so profiles of VMs whose template was deleted
     * still resolve.
     */
    @Override
    public VirtualMachineTemplate getTemplate() {
        if (_template == null && _vm != null) {
            _template = s_entityMgr.findByIdIncludingRemoved(VirtualMachineTemplate.class, _vm.getTemplateId());
        }
        return _template;
    }

    @Override
    public long getTemplateId() {
        return _vm.getTemplateId();
    }

    @Override
    public long getServiceOfferingId() {
        return _vm.getServiceOfferingId();
    }

    @Override
    public long getId() {
        return _vm.getId();
    }

    @Override
    public String getUuid() {
        return _vm.getUuid();
    }

    @Override
    public List<NicProfile> getNics() {
        return _nics;
    }

    public void setNics(final List<NicProfile> nics) {
        _nics = nics;
    }

    @Override
    public List<DiskTO> getDisks() {
        return _disks;
    }

    public void setDisks(final List<DiskTO> disks) {
        _disks = disks;
    }

    @Override
    public void addNic(final int index, final NicProfile nic) {
        _nics.add(index, nic);
    }

    @Override
    public void addDisk(final int index, final DiskTO disk) {
        _disks.add(index, disk);
    }

    @Override
    public StringBuilder getBootArgsBuilder() {
        return _bootArgs;
    }

    /** Appends each argument followed by a single space to the boot-args buffer. */
    @Override
    public void addBootArgs(final String... args) {
        for (final String arg : args) {
            _bootArgs.append(arg).append(" ");
        }
    }

    @Override
    public String getBootArgs() {
        return _bootArgs.toString();
    }

    @Override
    public void addNic(final NicProfile nic) {
        _nics.add(nic);
    }

    @Override
    public void addDisk(final DiskTO disk) {
        _disks.add(disk);
    }

    @Override
    public VirtualMachineType getType() {
        return _type;
    }

    @Override
    public void setParameter(final Param name, final Object value) {
        _params.put(name, value);
    }

    @Override
    public BootloaderType getBootLoaderType() {
        return _bootloader;
    }

    @Override
    public void setBootLoaderType(final BootloaderType bootLoader) {
        _bootloader = bootLoader;
    }

    @Override
    public Map<Param, Object> getParameters() {
        return _params;
    }

    @Override
    public Float getCpuOvercommitRatio() {
        return cpuOvercommitRatio;
    }

    public void setCpuOvercommitRatio(final Float cpuOvercommitRatio) {
        this.cpuOvercommitRatio = cpuOvercommitRatio;
    }

    @Override
    public Float getMemoryOvercommitRatio() {
        return memoryOvercommitRatio;
    }

    public void setMemoryOvercommitRatio(final Float memoryOvercommitRatio) {
        this.memoryOvercommitRatio = memoryOvercommitRatio;
    }

    public void setServiceOffering(final ServiceOfferingVO offering) {
        _offering = offering;
    }

    public String getConfigDriveIsoBaseLocation() {
        return configDriveIsoBaseLocation;
    }
}
| |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Eclipse Public License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.eclipse.org/org/documents/epl-v10.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.dvlib;
import com.android.annotations.Nullable;
import com.android.io.NonClosingInputStream;
import com.android.io.NonClosingInputStream.CloseBehavior;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.xml.sax.Attributes;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.helpers.DefaultHandler;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
public class DeviceSchema {
// ---- XSD ----
/**
* The latest version of the device XML Schema.
* Valid version numbers are between 1 and this number, included.
*/
public static final int NS_LATEST_VERSION = 2;
/** The XML namespace of the latest device XML. */
public static final String NS_DEVICES_URI = getSchemaUri(NS_LATEST_VERSION);
/** Base for the devices XSD URI, without the terminal version number. */
private static final String NS_DEVICES_URI_BASE = "http://schemas.android.com/sdk/devices/";
/** Regex pattern to find the terminal version number in an XSD URI. */
static final String NS_DEVICES_URI_PATTERN = NS_DEVICES_URI_BASE + "([0-9]+)"; //$NON-NLS-1$
// ----- XML ----
/**
* The "devices" element is the root element of this schema.
*
* It must contain one or more "device" elements that each define the
* hardware, software, and states for a given device.
*/
public static final String NODE_DEVICES = "devices";
/**
* A "device" element contains a "hardware" element, a "software" element
* for each API version it supports, and a "state" element for each possible
* state the device could be in.
*/
public static final String NODE_DEVICE = "device";
/**
 * The "hardware" element contains all of the hardware information for a
 * given device.
 */
public static final String NODE_HARDWARE = "hardware";
/**
 * The "software" element contains all of the software information for an
 * API version of the given device.
 */
public static final String NODE_SOFTWARE = "software";
/**
 * The "state" element contains all of the parameters for a given state of
 * the device. It's also capable of redefining hardware configurations if
 * they change based on state.
 */
public static final String NODE_STATE = "state";

// --- Hardware / software child elements ---
public static final String NODE_KEYBOARD = "keyboard";
public static final String NODE_TOUCH = "touch";
public static final String NODE_GL_EXTENSIONS = "gl-extensions";
public static final String NODE_GL_VERSION = "gl-version";
public static final String NODE_NETWORKING = "networking";
public static final String NODE_REMOVABLE_STORAGE = "removable-storage";
public static final String NODE_FLASH = "flash";
public static final String NODE_LIVE_WALLPAPER_SUPPORT = "live-wallpaper-support";
public static final String NODE_STATUS_BAR = "status-bar";
public static final String NODE_BUTTONS = "buttons";
public static final String NODE_CAMERA = "camera";
public static final String NODE_LOCATION = "location";
public static final String NODE_GPU = "gpu";
public static final String NODE_DOCK = "dock";
public static final String NODE_YDPI = "ydpi";
public static final String NODE_POWER_TYPE = "power-type";
public static final String NODE_Y_DIMENSION = "y-dimension";
public static final String NODE_SCREEN_RATIO = "screen-ratio";
public static final String NODE_NAV_STATE = "nav-state";
public static final String NODE_MIC = "mic";
public static final String NODE_RAM = "ram";
public static final String NODE_XDPI = "xdpi";
public static final String NODE_DIMENSIONS = "dimensions";
public static final String NODE_ABI = "abi";
public static final String NODE_MECHANISM = "mechanism";
public static final String NODE_MULTITOUCH = "multitouch";
public static final String NODE_NAV = "nav";
public static final String NODE_PIXEL_DENSITY = "pixel-density";
public static final String NODE_SCREEN_ORIENTATION = "screen-orientation";
public static final String NODE_AUTOFOCUS = "autofocus";
public static final String NODE_SCREEN_SIZE = "screen-size";
public static final String NODE_DESCRIPTION = "description";
public static final String NODE_BLUETOOTH_PROFILES = "bluetooth-profiles";
public static final String NODE_SCREEN = "screen";
public static final String NODE_SENSORS = "sensors";
public static final String NODE_DIAGONAL_LENGTH = "diagonal-length";
public static final String NODE_SCREEN_TYPE = "screen-type";
public static final String NODE_KEYBOARD_STATE = "keyboard-state";
public static final String NODE_X_DIMENSION = "x-dimension";
public static final String NODE_CPU = "cpu";
public static final String NODE_INTERNAL_STORAGE = "internal-storage";

// --- Metadata elements (icons, frame artwork and their offsets) ---
public static final String NODE_META = "meta";
public static final String NODE_ICONS = "icons";
public static final String NODE_SIXTY_FOUR = "sixty-four";
public static final String NODE_SIXTEEN = "sixteen";
public static final String NODE_FRAME = "frame";
public static final String NODE_PATH = "path";
public static final String NODE_PORTRAIT_X_OFFSET = "portrait-x-offset";
public static final String NODE_PORTRAIT_Y_OFFSET = "portrait-y-offset";
public static final String NODE_LANDSCAPE_X_OFFSET = "landscape-x-offset";
public static final String NODE_LANDSCAPE_Y_OFFSET = "landscape-y-offset";

// --- Identification elements ---
public static final String NODE_NAME = "name";
public static final String NODE_ID = "id";
public static final String NODE_API_LEVEL = "api-level";
public static final String NODE_MANUFACTURER = "manufacturer";
public static final String NODE_TAG_ID = "tag-id";

// --- Boot property elements (emulator boot-time properties) ---
public static final String NODE_BOOT_PROPS = "boot-props";
public static final String NODE_BOOT_PROP = "boot-prop";
public static final String NODE_PROP_NAME = "prop-name";
public static final String NODE_PROP_VALUE = "prop-value";
public static final String NODE_SKIN = "skin";

// --- XML attribute names ---
public static final String ATTR_DEFAULT = "default";
public static final String ATTR_UNIT = "unit";
public static final String ATTR_NAME = "name";
/**
 * Returns the URI of the SDK Repository schema for the given version number.
 *
 * @param version Between 1 and {@link #NS_LATEST_VERSION} included.
 */
public static String getSchemaUri(int version) {
    // The schema URI is the base namespace with the numeric version appended.
    final String format = NS_DEVICES_URI_BASE + "%d"; //$NON-NLS-1$
    return String.format(format, version);
}
/**
 * Returns a stream to the requested {@code device} XML Schema.
 *
 * @param version Between 1 and {@link #NS_LATEST_VERSION}, included.
 * @return An {@link InputStream} object for the local XSD file or
 *         null if there is no schema for the requested version.
 */
public static InputStream getXsdStream(int version) {
    assert version >= 1 && version <= NS_LATEST_VERSION
            : "Unexpected schema version " + version;

    // XSD resources are named "<root>-<version>.xsd", e.g. "devices-1.xsd".
    final String xsdName = String.format("%1$s-%2$d.xsd", NODE_DEVICES, version); //$NON-NLS-1$

    InputStream stream = null;
    try {
        stream = DeviceSchema.class.getResourceAsStream(xsdName);
    } catch (Exception ignore) {
        // Some implementations return null on failure while others throw an
        // exception; normalize both failure modes to a null return value.
    }
    return stream;
}
/**
 * Validates the input stream against the corresponding Devices XSD schema
 * and then does a sanity check on the content.
 *
 * @param deviceXml The XML InputStream to validate.
 *      The XML input stream must support the mark/reset() methods
 *      (that is, its {@link InputStream#markSupported()} must return true)
 *      and the mark must already be set to the beginning of the stream.
 * @param out The OutputStream to which error messages are written.
 * @param parent The parent directory of the input stream, used to resolve
 *      relative file paths referenced by the XML; may be null.
 * @return Whether the given input constitutes a valid devices file.
 */
public static boolean validate(InputStream deviceXml, OutputStream out, File parent) {
    PrintWriter writer = new PrintWriter(out);
    try {
        // Wrap the stream so that a close() issued by the parsers below
        // resets it instead of closing it (CloseBehavior.RESET), keeping
        // the caller's stream reusable.
        if (!(deviceXml instanceof NonClosingInputStream)) {
            deviceXml = new NonClosingInputStream(deviceXml);
            ((NonClosingInputStream) deviceXml).setCloseBehavior(CloseBehavior.RESET);
        }
        // Determine which XSD version the document declares in its root
        // element, and reject versions we have no schema for.
        int version = getXmlSchemaVersion(deviceXml);
        if (version < 1 || version > NS_LATEST_VERSION) {
            writer.println(String.format("Devices XSD version %1$d is out of valid range 1..%2$d",
                    version, NS_LATEST_VERSION));
            return false;
        }
        assert deviceXml.markSupported();
        // Check the input, both against the XSD schema discovered above and also
        // by using a custom validation handler which tests some properties
        // not encoded in the XSD (default states, referenced files, etc.).
        Schema schema = DeviceSchema.getSchema(version);
        SAXParserFactory factory = SAXParserFactory.newInstance();
        factory.setValidating(false);
        factory.setNamespaceAware(true);
        factory.setSchema(schema);
        DevicesValidationHandler devicesValidator = new DevicesValidationHandler(parent, writer);
        SAXParser parser = factory.newSAXParser();
        // Rewind before parsing: getXmlSchemaVersion() consumed the stream.
        deviceXml.reset();
        parser.parse(deviceXml, devicesValidator);
        return devicesValidator.isValidDevicesFile();
    } catch (SAXException e) {
        // Schema violations surface here; the message is the diagnostic.
        writer.println(e.getMessage());
        return false;
    } catch (ParserConfigurationException e) {
        writer.println("Error creating SAX parser:");
        writer.println(e.getMessage());
        return false;
    } catch (IOException e) {
        writer.println("Error reading file stream:");
        writer.println(e.getMessage());
        return false;
    } finally {
        // Flush but do not close: the caller owns the OutputStream.
        writer.flush();
    }
}
/**
 * Helper method that returns a validator for a specific version of the XSD.
 *
 * @param version Between 1 and {@link #NS_LATEST_VERSION}, included.
 * @return A {@link Schema} validator, or null if no XSD resource exists
 *         for the requested version.
 * @throws SAXException If the bundled XSD resource cannot be parsed.
 */
@Nullable
public static Schema getSchema(int version) throws SAXException {
    InputStream xsdStream = getXsdStream(version);
    if (xsdStream == null) {
        return null;
    }
    try {
        SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
        return factory.newSchema(new StreamSource(xsdStream));
    } finally {
        // newSchema() fully reads the source; close the resource stream to
        // avoid leaking the underlying handle (it was never closed before).
        try {
            xsdStream.close();
        } catch (IOException ignored) {
            // A failure closing an already-consumed resource stream is harmless.
        }
    }
}
/**
 * Manually parses the root element of the XML to extract the schema version
 * at the end of the xmlns:sdk="http://schemas.android.com/sdk/devices/$N"
 * declaration.
 *
 * @param xml An XML input stream that supports the mark/reset() methods
 *      (that is, its {@link InputStream#markSupported()} must return true)
 *      and whose mark has already been set to the beginning of the stream.
 * @return 1+ for a valid schema version
 *         or 0 if no schema could be found.
 */
public static int getXmlSchemaVersion(InputStream xml) {
    if (xml == null) {
        return 0;
    }

    // Parse the stream into a DOM document, deliberately WITHOUT namespace
    // support: the namespace URI is exactly what we are trying to discover,
    // so we inspect the raw xmlns attributes manually below.
    Document doc = null;
    try {
        assert xml.markSupported();
        xml.reset();
        // Wrap the stream so a close() from the parser resets it instead
        // of closing it, keeping it reusable by the caller.
        if (!(xml instanceof NonClosingInputStream)) {
            xml = new NonClosingInputStream(xml);
            ((NonClosingInputStream) xml).setCloseBehavior(CloseBehavior.RESET);
        }
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setIgnoringComments(false);
        factory.setValidating(false);
        factory.setNamespaceAware(false);
        DocumentBuilder builder = factory.newDocumentBuilder();
        // We don't want the default handler which prints errors to stderr:
        // swallow warnings, rethrow everything else so parsing aborts.
        builder.setErrorHandler(new ErrorHandler() {
            @Override
            public void warning(SAXParseException e) throws SAXException {
                // pass
            }
            @Override
            public void fatalError(SAXParseException e) throws SAXException {
                throw e;
            }
            @Override
            public void error(SAXParseException e) throws SAXException {
                throw e;
            }
        });
        doc = builder.parse(xml);
        // NOTE: the original code re-created a namespace-aware builder here
        // and never used it; that dead code has been removed.
    } catch (Exception e) {
        // Any failure (stream reset, builder creation, XML parse or I/O
        // error) simply means no schema version can be identified; fall
        // through and return 0 below.
    }
    if (doc == null) {
        return 0;
    }

    // Check the root element is an XML with at least the following properties:
    // <sdk:sdk-repository
    //    xmlns:sdk="http://schemas.android.com/sdk/devices/$N">
    //
    // Note that we don't have namespace support enabled, we just do it manually.
    Pattern nsPattern = Pattern.compile(NS_DEVICES_URI_PATTERN);
    String prefix = null;
    for (Node child = doc.getFirstChild(); child != null; child = child.getNextSibling()) {
        if (child.getNodeType() == Node.ELEMENT_NODE) {
            prefix = null;
            String name = child.getNodeName();
            // Split an optional "prefix:" off the element name.
            int pos = name.indexOf(':');
            if (pos > 0 && pos < name.length() - 1) {
                prefix = name.substring(0, pos);
                name = name.substring(pos + 1);
            }
            if (NODE_DEVICES.equals(name)) {
                // Look up the matching xmlns (or xmlns:prefix) attribute
                // and extract the version number from its URI.
                NamedNodeMap attrs = child.getAttributes();
                String xmlns = "xmlns"; //$NON-NLS-1$
                if (prefix != null) {
                    xmlns += ":" + prefix; //$NON-NLS-1$
                }
                Node attr = attrs.getNamedItem(xmlns);
                if (attr != null) {
                    String uri = attr.getNodeValue();
                    if (uri != null) {
                        Matcher m = nsPattern.matcher(uri);
                        if (m.matches()) {
                            String version = m.group(1);
                            try {
                                return Integer.parseInt(version);
                            } catch (NumberFormatException e) {
                                return 0;
                            }
                        }
                    }
                }
            }
        }
    }
    return 0;
}
/**
 * A DefaultHandler that parses only to validate the XML is actually a valid
 * devices config, since validation can't be entirely encoded in the devices
 * schema.
 */
private static class DevicesValidationHandler extends DefaultHandler {
    // True until the first validation error is reported.
    private boolean mValidDevicesFile = true;
    // Whether a default <state> was already seen for the current device.
    private boolean mDefaultSeen = false;
    // Name of the device currently being parsed, used in error messages.
    private String mDeviceName;
    // Parent directory used to resolve relative paths; possibly null.
    private final File mDirectory;
    // Destination for validation error messages.
    private final PrintWriter mWriter;
    // Accumulates the character data of the current element.
    private final StringBuilder mStringAccumulator = new StringBuilder();

    public DevicesValidationHandler(File directory, PrintWriter writer) {
        mDirectory = directory; // Possibly null
        mWriter = writer;
    }

    @Override
    public void startElement(String uri, String localName, String name, Attributes attributes)
            throws SAXException {
        if (NODE_DEVICE.equals(localName)) {
            // Reset for a new device
            mDefaultSeen = false;
        } else if (NODE_STATE.equals(localName)) {
            // Check if the state is set to be a default state
            String val = attributes.getValue(ATTR_DEFAULT);
            if (val != null && ("1".equals(val) || Boolean.parseBoolean(val))) {
                /*
                 * If it is, and we already have a default state for this
                 * device, then the device configuration is invalid.
                 * Otherwise, record that we've seen a default state for
                 * this device and continue.
                 */
                if (mDefaultSeen) {
                    validationError("More than one default state for device " + mDeviceName);
                } else {
                    mDefaultSeen = true;
                }
            }
        }
        mStringAccumulator.setLength(0);
    }

    @Override
    public void characters(char[] ch, int start, int length) {
        mStringAccumulator.append(ch, start, length);
    }

    @Override
    public void endElement(String uri, String localName, String name) throws SAXException {
        // If this is the end of a device node, make sure we have at least
        // one default state
        if (NODE_DEVICE.equals(localName) && !mDefaultSeen) {
            validationError("No default state for device " + mDeviceName);
        } else if (NODE_NAME.equals(localName)) {
            mDeviceName = mStringAccumulator.toString().trim();
        } else if (NODE_PATH.equals(localName) || NODE_SIXTY_FOUR.equals(localName)
                || NODE_SIXTEEN.equals(localName)) {
            if (mDirectory == null) {
                // There is no given parent directory, so this is not a
                // valid devices file
                validationError("No parent directory given, but relative paths exist.");
                return;
            }
            // This is going to break on any files that end with a space,
            // but that should be an incredibly rare corner case.
            String relativePath = mStringAccumulator.toString().trim();
            // new File(...) never returns null, so the former "f == null"
            // check was dead code and has been removed.
            File f = new File(mDirectory, relativePath);
            if (!f.isFile()) {
                validationError(relativePath + " is not a valid path.");
                return;
            }
            // Referenced artwork must be a PNG file.
            String fileName = f.getName();
            int extensionStart = fileName.lastIndexOf('.');
            if (extensionStart == -1 || !fileName.substring(extensionStart + 1).equals("png")) {
                validationError(relativePath + " is not a valid file type.");
            }
        }
    }

    @Override
    public void error(SAXParseException e) {
        validationError(e.getMessage());
    }

    @Override
    public void fatalError(SAXParseException e) {
        validationError(e.getMessage());
    }

    /** @return false if any validation error was reported during parsing. */
    public boolean isValidDevicesFile() {
        return mValidDevicesFile;
    }

    /** Records a validation failure and writes the reason to the writer. */
    private void validationError(String reason) {
        mWriter.println("Error: " + reason);
        mValidDevicesFile = false;
    }
}
}
| |
/*
* Licensed to IAESTE A.s.b.l. (IAESTE) under one or more contributor
* license agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership. The Authors
* (See the AUTHORS file distributed with this work) licenses this file to
* You under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.iaeste.iws.core.services;
import static net.iaeste.iws.common.utils.LogUtil.formatLogMessage;
import net.iaeste.iws.api.constants.IWSErrors;
import net.iaeste.iws.api.dtos.Address;
import net.iaeste.iws.api.dtos.Country;
import net.iaeste.iws.api.dtos.exchange.CSVProcessingErrors;
import net.iaeste.iws.api.dtos.exchange.Employer;
import net.iaeste.iws.api.dtos.exchange.Offer;
import net.iaeste.iws.api.enums.exchange.OfferFields;
import net.iaeste.iws.api.enums.exchange.OfferState;
import net.iaeste.iws.api.exceptions.IWSException;
import net.iaeste.iws.api.requests.exchange.OfferCSVUploadRequest;
import net.iaeste.iws.api.responses.exchange.OfferCSVUploadResponse;
import net.iaeste.iws.api.util.Verifications;
import net.iaeste.iws.common.configuration.Settings;
import net.iaeste.iws.core.transformers.CommonTransformer;
import net.iaeste.iws.core.transformers.ExchangeTransformer;
import net.iaeste.iws.persistence.AccessDao;
import net.iaeste.iws.persistence.Authentication;
import net.iaeste.iws.persistence.ExchangeDao;
import net.iaeste.iws.persistence.entities.GroupEntity;
import net.iaeste.iws.persistence.entities.exchange.EmployerEntity;
import net.iaeste.iws.persistence.entities.exchange.OfferEntity;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * CSV based upload of Exchange Offers. Parses an uploaded CSV file, converts
 * each record into an Offer, and either creates a new Offer or updates the
 * existing one that matches the record's Reference Number.
 *
 * @author  Kim Jensen / last $Author:$
 * @version $Revision:$ / $Date:$
 * @since   IWS 1.1
 */
public final class ExchangeCSVService extends CommonCSVService<ExchangeDao> {

    private static final Logger LOG = LoggerFactory.getLogger(ExchangeCSVService.class);

    /** Error key used for problems not related to a specific CSV field. */
    private static final String GENERAL_ERROR_KEY = "general";

    private final AccessDao accessDao;

    public ExchangeCSVService(final Settings settings, final ExchangeDao dao, final AccessDao accessDao) {
        super(settings, dao);
        this.accessDao = accessDao;
    }

    /**
     * Uploads a CSV file with Offers and processes each record found. The
     * per-Offer outcome (added, updated or error) and any validation errors
     * are collected by Reference Number and returned in the response.
     *
     * @param authentication User Authentication information
     * @param request        Upload request containing the raw CSV data
     * @return Response with the processing result and errors per RefNo
     * @throws IWSException if the CSV header is invalid or reading fails
     */
    public OfferCSVUploadResponse uploadOffers(final Authentication authentication, final OfferCSVUploadRequest request) {
        final Map<String, OfferCSVUploadResponse.ProcessingResult> processingResult = new HashMap<>();
        final OfferCSVUploadResponse response = new OfferCSVUploadResponse();
        final Map<String, CSVProcessingErrors> errors = new HashMap<>();

        try (Reader reader = new StringReader(request.getCsv());
             CSVParser parser = getDefaultCsvParser(reader, request.getDelimiter().getDescription())) {
            final Set<String> headers = readHeader(parser);
            final Set<String> expectedHeaders = new HashSet<>(createFirstRow(OfferFields.Type.UPLOAD));

            // All expected columns must be present; extra columns are tolerated.
            if (headers.containsAll(expectedHeaders)) {
                for (final CSVRecord record : parser.getRecords()) {
                    process(processingResult, errors, authentication, record);
                }
            } else {
                throw new IWSException(IWSErrors.PROCESSING_FAILURE, "Invalid CSV header");
            }
        } catch (IllegalArgumentException e) {
            throw new IWSException(IWSErrors.PROCESSING_FAILURE, "The header is invalid: " + e.getMessage() + '.', e);
        } catch (IOException e) {
            throw new IWSException(IWSErrors.PROCESSING_FAILURE, "CSV upload processing failed", e);
        }

        response.setProcessingResult(processingResult);
        response.setErrors(errors);

        return response;
    }

    /**
     * Reads the header columns from the parser.
     *
     * @param parser CSV Parser to read the header from
     * @return Set of header column names
     * @throws IWSException if the CSV has no valid header
     */
    private static Set<String> readHeader(final CSVParser parser) {
        final Map<String, Integer> map = parser.getHeaderMap();

        if (map == null) {
            throw new IWSException(IWSErrors.CSV_HEADER_ERROR, "The CSV did not have a valid header.");
        }

        return map.keySet();
    }

    /**
     * Builds an Offer Object from a CSV record. The Country is taken from the
     * user's Authentication, assigned to the Address, which in turn is
     * assigned to the Employer, which is assigned to the Offer. Conversion
     * problems are collected in the given error Map.
     *
     * @param authentication User Authentication information
     * @param errors         Map collecting field conversion errors
     * @param record         CSV record to convert
     * @return Offer built from the record (may be incomplete if errors occurred)
     */
    private static Offer extractOfferFromCSV(final Authentication authentication, final Map<String, String> errors, final CSVRecord record) {
        // Extract the Country from the Authentication Information
        final Country country = CommonTransformer.transform(authentication.getGroup().getCountry());

        // Read the Address from the CSV and assign the found Country to it
        final Address address = CommonTransformer.addressFromCsv(record, errors);
        address.setCountry(country);

        // Read the Employer from the CSV, and assign the transformed Address to it
        final Employer employer = ExchangeTransformer.employerFromCsv(record, errors);
        employer.setAddress(address);

        // Read the Offer from the CSV, and assign the transformed Employer to it
        final Offer offer = ExchangeTransformer.offerFromCsv(record, errors);
        offer.setEmployer(employer);

        // As all the Setters from the Offer have been invoked, all errors for
        // this Offer have already been caught. Invoking the validator would
        // only generate additional false error messages, since the validator
        // applies null checks to those values that failed the Setter checks
        // and thus were never set. Example: if the RefNo is wrong, the Setter
        // rejects it without setting it, and the Validator would then flag it
        // as null as well, which is incorrect.
        return offer;
    }

    /**
     * Processes a single CSV record: converts it to an Offer, and persists it
     * if the conversion was error free. Both conversion errors and processing
     * exceptions are recorded per RefNo in the given maps.
     *
     * @param processingResult Map collecting per-RefNo processing outcomes
     * @param errors           Map collecting per-RefNo validation errors
     * @param authentication   User Authentication information
     * @param record           CSV record to process
     */
    private void process(final Map<String, OfferCSVUploadResponse.ProcessingResult> processingResult, final Map<String, CSVProcessingErrors> errors, final Authentication authentication, final CSVRecord record) {
        final Map<String, String> conversionErrors = new HashMap<>(0);
        String refNo = "";

        try {
            refNo = record.get(OfferFields.REF_NO.getField());
            final Offer csvOffer = extractOfferFromCSV(authentication, conversionErrors, record);
            final CSVProcessingErrors validationErrors = new CSVProcessingErrors(conversionErrors);

            if (validationErrors.isEmpty()) {
                processingResult.put(refNo, processOffer(authentication, refNo, csvOffer));
            } else {
                LOG.warn(formatLogMessage(authentication, "CSV Offer with RefNo " + refNo + " has some Problems: " + conversionErrors));
                processingResult.put(refNo, OfferCSVUploadResponse.ProcessingResult.ERROR);
                errors.put(refNo, validationErrors);
            }
        } catch (IllegalArgumentException | IWSException e) {
            LOG.debug(e.getMessage(), e);
            LOG.warn(formatLogMessage(authentication, "CSV Offer with RefNo " + refNo + " has a Problem: " + e.getMessage()));
            processingResult.put(refNo, OfferCSVUploadResponse.ProcessingResult.ERROR);

            if (errors.containsKey(refNo)) {
                errors.get(refNo).put(GENERAL_ERROR_KEY, e.getMessage());
            } else {
                final CSVProcessingErrors generalError = new CSVProcessingErrors();
                generalError.put(GENERAL_ERROR_KEY, e.getMessage());
                // Preserve any field-level conversion errors gathered before
                // the exception was thrown.
                if (!conversionErrors.isEmpty()) {
                    generalError.putAll(conversionErrors);
                }
                errors.put(refNo, generalError);
            }
        }
    }

    /**
     * Persists a single Offer. If an Offer with the given RefNo already
     * exists, it is updated while retaining its current state; otherwise a
     * new Offer is created with state {@code NEW}.
     *
     * @param authentication User Authentication information
     * @param refNo          Reference Number of the Offer
     * @param csvOffer       Offer extracted from the CSV record
     * @return {@code UPDATED} or {@code ADDED} depending on the action taken
     */
    private OfferCSVUploadResponse.ProcessingResult processOffer(final Authentication authentication, final String refNo, final Offer csvOffer) {
        final OfferEntity existingEntity = dao.findOfferByRefNo(authentication, refNo);
        final OfferEntity newEntity = ExchangeTransformer.transform(csvOffer);
        final OfferCSVUploadResponse.ProcessingResult result;

        if (existingEntity != null) {
            permissionCheck(authentication, authentication.getGroup());

            // Keep the original Offer state
            newEntity.setStatus(existingEntity.getStatus());
            csvOffer.getEmployer().setEmployerId(existingEntity.getEmployer().getExternalId());
            final EmployerEntity employerEntity = process(authentication, csvOffer.getEmployer());
            existingEntity.setEmployer(employerEntity);

            newEntity.setExternalId(existingEntity.getExternalId());
            dao.persist(authentication, existingEntity, newEntity);
            LOG.info(formatLogMessage(authentication, "CSV Update of Offer with RefNo '%s' completed.", newEntity.getRefNo()));
            result = OfferCSVUploadResponse.ProcessingResult.UPDATED;
        } else {
            // First, we need an Employer for our new Offer. The process
            // method will either find an existing Employer or create a
            // new one.
            final EmployerEntity employer = process(authentication, csvOffer.getEmployer());
            // Add the Group to the Employer, otherwise our ref.no checks will fail
            employer.setGroup(authentication.getGroup());
            // Add the Employer to the Offer. Note: the original code invoked
            // this setter twice; the redundant second call has been removed.
            newEntity.setEmployer(employer);
            ExchangeService.verifyRefnoValidity(newEntity);
            newEntity.setExchangeYear(Verifications.calculateExchangeYear());
            // Set the Offer status to New
            newEntity.setStatus(OfferState.NEW);
            // Persist the Offer with history
            dao.persist(authentication, newEntity);
            LOG.info(formatLogMessage(authentication, "CSV Import of Offer with RefNo '%s' completed.", newEntity.getRefNo()));
            result = OfferCSVUploadResponse.ProcessingResult.ADDED;
        }

        return result;
    }

    /**
     * Processes an Employer from the CSV file. This is done by first trying to
     * lookup the Employer via the unique characteristics for an Employer - and
     * only if no existing record is found, will a new record be created. If
     * a record is found, the changes will be merged and potentially also
     * persisted.<br />
     * If more than one Employer is found, then an Identification Exception is
     * thrown.
     *
     * @param authentication The user's Authentication information
     * @param employer       The Employer to find / create
     * @return Employer Entity found or created
     */
    private EmployerEntity process(final Authentication authentication, final Employer employer) {
        // If the Employer provided has an Id set - then we need to update
        // the existing record, otherwise we will try to see if we can find a
        // similar Employer and update it. If we can neither find an Employer
        // by the Id, nor by the unique information - then we create a new one.
        EmployerEntity entity;

        if (employer.getEmployerId() != null) {
            // Id exists, so we simply find the Employer based on that.
            // The lookup is logged null-safely, matching the branch below
            // (previously this line could throw an NPE on a missed lookup).
            entity = dao.findEmployer(authentication, employer.getEmployerId());
            LOG.debug(formatLogMessage(authentication, "Employer lookup for Id '%s' gave '%s'.", employer.getEmployerId(), (entity != null) ? entity.getName() : "null"));
        } else {
            // No Id was set, so we're trying to find the Employer based on the
            // Unique information
            entity = dao.findUniqueEmployer(authentication, employer);
            LOG.debug(formatLogMessage(authentication, "Unique Employer for name '%s' gave '%s'.", employer.getName(), (entity != null) ? entity.getName() : "null"));
        }

        if (entity == null) {
            // Nothing found; create a brand new Employer under the user's
            // National Group.
            entity = ExchangeTransformer.transform(employer);
            final GroupEntity nationalGroup = accessDao.findNationalGroup(authentication.getUser());
            entity.setGroup(nationalGroup);
            processAddress(authentication, entity.getAddress());
            dao.persist(authentication, entity);
            LOG.info(formatLogMessage(authentication, "Have added the Employer '%s' for '%s'.", employer.getName(), authentication.getGroup().getGroupName()));
        } else {
            // Found a match; merge the CSV data into the existing record.
            final EmployerEntity updated = ExchangeTransformer.transform(employer);
            processAddress(authentication, entity.getAddress(), employer.getAddress());
            dao.persist(authentication, entity, updated);
            LOG.info(formatLogMessage(authentication, "Have updated the Employer '%s' for '%s'.", employer.getName(), authentication.getGroup().getGroupName()));
        }

        return entity;
    }

    /**
     * Creates an RFC 4180 compliant CSV Parser for the given input, using the
     * provided delimiter and reading the first record as the header.
     *
     * @param input     Reader with the raw CSV data
     * @param delimiter Column delimiter character
     * @return CSVParser for the input
     * @throws IWSException if the parser could not be created
     */
    private static CSVParser getDefaultCsvParser(final Reader input, final char delimiter) {
        try {
            return CSVFormat.RFC4180
                    .withDelimiter(delimiter)
                    .withHeader()
                    .parse(input);
        } catch (IOException e) {
            throw new IWSException(IWSErrors.PROCESSING_FAILURE, "Creating CSVParser failed", e);
        }
    }
}
| |
package apple.uikit;
import apple.NSObject;
import apple.coregraphics.struct.CGRect;
import apple.foundation.NSArray;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.ByValue;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NFloat;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
/**
 * Generated Multi-OS Engine (NatJ) binding for UIKit's {@code UIFocusHaloEffect},
 * a focus effect that draws a halo around a given shape (bezier path, rect, or
 * rounded rect). All {@code native} members forward to the Objective-C runtime
 * via the selector named in their {@code @Selector} annotation; do not edit by
 * hand, as this class is generated.
 */
@Generated
@Library("UIKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class UIFocusHaloEffect extends UIFocusEffect {
    static {
        // Register this binding class with the NatJ runtime on class load.
        NatJ.register();
    }

    @Generated
    protected UIFocusHaloEffect(Pointer peer) {
        super(peer);
    }

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native UIFocusHaloEffect alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native UIFocusHaloEffect allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    /**
     * Container view in which to place the effect. When not set, the container is determined automatically
     * from the focus item that provided this effect and the @c referenceView (if present).
     */
    @Generated
    @Selector("containerView")
    public native UIView containerView();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    @Generated
    @Selector("effect")
    public static native UIFocusHaloEffect effect();

    /**
     * Creates a halo with the given bezier path.
     */
    @Generated
    @Selector("effectWithPath:")
    public static native UIFocusHaloEffect effectWithPath(UIBezierPath bezierPath);

    /**
     * Creates a rectangular halo.
     */
    @Generated
    @Selector("effectWithRect:")
    public static native UIFocusHaloEffect effectWithRect(@ByValue CGRect rect);

    /**
     * Creates a rounded rect halo using the specified corner radius and corner curve.
     */
    @Generated
    @Selector("effectWithRoundedRect:cornerRadius:curve:")
    public static native UIFocusHaloEffect effectWithRoundedRectCornerRadiusCurve(@ByValue CGRect rect,
            @NFloat double cornerRadius, String curve);

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("init")
    public native UIFocusHaloEffect init();

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    // "new" is a Java keyword, hence the "_objc" suffix on this binding.
    @Generated
    @Owned
    @Selector("new")
    public static native UIFocusHaloEffect new_objc();

    /**
     * Position of the halo relative to the specified shape. Defaults to @c UIFocusHaloEffectPositionAutomatic.
     */
    @Generated
    @Selector("position")
    @NInt
    public native long position();

    /**
     * When set, the halo is placed above this view. If a @c containerView is also set, the @c referenceView must be a descendant
     * of the @c containerView. The system will ensure that the halo is in the container but visually above the @c referenceView.
     */
    @Generated
    @Selector("referenceView")
    public native UIView referenceView();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    /**
     * Container view in which to place the effect. When not set, the container is determined automatically
     * from the focus item that provided this effect and the @c referenceView (if present).
     */
    @Generated
    @Selector("setContainerView:")
    public native void setContainerView_unsafe(UIView value);

    /**
     * Container view in which to place the effect. When not set, the container is determined automatically
     * from the focus item that provided this effect and the @c referenceView (if present).
     * <p>
     * Safe wrapper around {@link #setContainerView_unsafe}: associates the new
     * value with this object and dissociates the previous one — presumably to
     * keep a Java-side reference alive for the native peer (NatJ convention;
     * verify against other generated bindings).
     */
    @Generated
    public void setContainerView(UIView value) {
        Object __old = containerView();
        if (value != null) {
            org.moe.natj.objc.ObjCRuntime.associateObjCObject(this, value);
        }
        setContainerView_unsafe(value);
        if (__old != null) {
            org.moe.natj.objc.ObjCRuntime.dissociateObjCObject(this, __old);
        }
    }

    /**
     * Position of the halo relative to the specified shape. Defaults to @c UIFocusHaloEffectPositionAutomatic.
     */
    @Generated
    @Selector("setPosition:")
    public native void setPosition(@NInt long value);

    /**
     * When set, the halo is placed above this view. If a @c containerView is also set, the @c referenceView must be a descendant
     * of the @c containerView. The system will ensure that the halo is in the container but visually above the @c referenceView.
     */
    @Generated
    @Selector("setReferenceView:")
    public native void setReferenceView_unsafe(UIView value);

    /**
     * When set, the halo is placed above this view. If a @c containerView is also set, the @c referenceView must be a descendant
     * of the @c containerView. The system will ensure that the halo is in the container but visually above the @c referenceView.
     * <p>
     * Safe wrapper around {@link #setReferenceView_unsafe}: associates the new
     * value with this object and dissociates the previous one (same pattern as
     * {@code setContainerView}).
     */
    @Generated
    public void setReferenceView(UIView value) {
        Object __old = referenceView();
        if (value != null) {
            org.moe.natj.objc.ObjCRuntime.associateObjCObject(this, value);
        }
        setReferenceView_unsafe(value);
        if (__old != null) {
            org.moe.natj.objc.ObjCRuntime.dissociateObjCObject(this, __old);
        }
    }

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();
}
| |
/*
* Copyright (c) 2006-2017 DMDirc Developers
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
* OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.dmdirc.tls;
import com.dmdirc.config.provider.AggregateConfigProvider;
import com.dmdirc.config.provider.ConfigProvider;
import com.dmdirc.events.ServerCertificateProblemEncounteredEvent;
import com.dmdirc.events.ServerCertificateProblemResolvedEvent;
import com.dmdirc.events.eventbus.EventBus;
import com.dmdirc.interfaces.Connection;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.security.InvalidAlgorithmParameterException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.cert.CertificateException;
import java.security.cert.PKIXParameters;
import java.security.cert.TrustAnchor;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Semaphore;
import java.util.stream.Collectors;
import javax.naming.InvalidNameException;
import javax.naming.ldap.LdapName;
import javax.naming.ldap.Rdn;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.X509TrustManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.dmdirc.util.LogUtils.USER_ERROR;
/**
* Manages storage and validation of certificates used when connecting to SSL servers.
*
* @since 0.6.3m1
*/
public class CertificateManager implements X509TrustManager {

    /** Class logger. */
    private static final Logger LOG = LoggerFactory.getLogger(CertificateManager.class);

    /** Connection that owns this manager. */
    private final Connection connection;

    /** The server name the user is trying to connect to. */
    private final String serverName;

    /** The configuration manager to use for settings. */
    private final AggregateConfigProvider config;

    /** The set of CAs from the global cacert file. */
    private final Set<X509Certificate> globalTrustedCAs = new HashSet<>();

    /** Used to synchronise the manager with the certificate dialog. */
    private final Semaphore actionSem = new Semaphore(0);

    /** The event bus to post errors to. */
    private final EventBus eventBus;

    /**
     * The action to perform. Written via {@link #setAction} (typically from a UI thread) and read
     * by the connecting thread once {@link #actionSem} is released.
     */
    private CertificateAction action;

    /** A list of problems encountered most recently. */
    private final List<CertificateException> problems = new ArrayList<>();

    /** The chain of certificates currently being validated. */
    private X509Certificate[] chain;

    /** The user settings to write to. */
    private final ConfigProvider userSettings;

    /** Locator to use to find a system keystore. */
    private final KeyStoreLocator keyStoreLocator;

    /** Checker to use for hostnames. */
    private final CertificateHostChecker hostChecker;

    /**
     * Creates a new certificate manager for a client connecting to the specified server.
     *
     * @param connection   The connection that owns this manager
     * @param serverName   The name the user used to connect to the server
     * @param config       The configuration manager to use
     * @param userSettings The user settings to write to.
     * @param eventBus     The event bus to post errors to
     */
    public CertificateManager(
            final Connection connection,
            final String serverName,
            final AggregateConfigProvider config,
            final ConfigProvider userSettings,
            final EventBus eventBus) {
        this.connection = connection;
        this.serverName = serverName;
        this.config = config;
        this.userSettings = userSettings;
        this.eventBus = eventBus;
        this.keyStoreLocator = new KeyStoreLocator();
        this.hostChecker = new CertificateHostChecker();
        loadTrustedCAs();
    }

    /**
     * Loads the trusted CA certificates from the Java cacerts store.
     *
     * <p>Failures are logged rather than rethrown: the manager still works, but no certificate
     * will then be considered trusted by a global CA.
     */
    private void loadTrustedCAs() {
        try {
            final KeyStore keyStore = keyStoreLocator.getKeyStore();
            if (keyStore != null) {
                // PKIXParameters is used only to extract the trust anchors (root CAs) from the store.
                final PKIXParameters params = new PKIXParameters(keyStore);
                globalTrustedCAs.addAll(params.getTrustAnchors().stream()
                        .map(TrustAnchor::getTrustedCert)
                        .collect(Collectors.toList()));
            }
        } catch (InvalidAlgorithmParameterException | KeyStoreException ex) {
            LOG.warn(USER_ERROR, "Unable to load trusted certificates", ex);
        }
    }

    /**
     * Retrieves a KeyManager[] for the client certificate specified in the configuration, if there
     * is one.
     *
     * @return A KeyManager to use for the SSL connection, or {@code null} if no client certificate
     *         is configured or it could not be loaded
     */
    public KeyManager[] getKeyManager() {
        if (config.hasOptionString("ssl", "clientcert.file")) {
            try (FileInputStream fis = new FileInputStream(config.getOption("ssl",
                    "clientcert.file"))) {
                // An absent password option means the PKCS#12 store is opened with a null password.
                final char[] pass;
                if (config.hasOptionString("ssl", "clientcert.pass")) {
                    pass = config.getOption("ssl", "clientcert.pass").toCharArray();
                } else {
                    pass = null;
                }
                final KeyStore ks = KeyStore.getInstance("pkcs12");
                ks.load(fis, pass);
                final KeyManagerFactory kmf = KeyManagerFactory.getInstance(
                        KeyManagerFactory.getDefaultAlgorithm());
                kmf.init(ks, pass);
                return kmf.getKeyManagers();
            } catch (FileNotFoundException ex) {
                LOG.warn(USER_ERROR, "Certificate file not found", ex);
            } catch (GeneralSecurityException | IOException ex) {
                LOG.warn(USER_ERROR, "Unable to get key manager", ex);
            }
        }
        return null;
    }

    /** {@inheritDoc} This manager only validates servers; client validation always fails. */
    @Override
    public void checkClientTrusted(final X509Certificate[] chain, final String authType)
            throws CertificateException {
        throw new CertificateException("Not supported.");
    }

    /**
     * Determines if the specified certificate is trusted by the user.
     *
     * @param certificate The certificate to be checked
     *
     * @return True if the certificate matches one in the trusted certificate store, or if the
     * certificate's details are marked as trusted in the DMDirc configuration file.
     */
    public TrustResult isTrusted(final X509Certificate certificate) {
        try {
            // Manual trust is recorded as the Base64 of the certificate's signature bytes.
            final String sig = Base64.getEncoder().encodeToString(certificate.getSignature());
            if (config.hasOptionString("ssl", "trusted") && config.getOptionList("ssl",
                    "trusted").contains(sig)) {
                return TrustResult.TRUSTED_MANUALLY;
            } else {
                // NOTE(review): CA matching compares the candidate's signature bytes and issuer DN
                // with each trusted CA cert, then verifies with the CA's public key. This is not a
                // full PKIX path validation — presumably intentional/historical behaviour; confirm.
                for (X509Certificate trustedCert : globalTrustedCAs) {
                    if (Arrays.equals(certificate.getSignature(), trustedCert.getSignature())
                            && certificate.getIssuerDN().getName()
                            .equals(trustedCert.getIssuerDN().getName())) {
                        certificate.verify(trustedCert.getPublicKey());
                        return TrustResult.TRUSTED_CA;
                    }
                }
            }
        } catch (GeneralSecurityException ex) {
            // Verification failed or the algorithm is unavailable: treat as untrusted.
            return TrustResult.UNTRUSTED_EXCEPTION;
        }
        return TrustResult.UNTRUSTED_GENERAL;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Collects any problems with the presented chain; if there are problems, publishes a
     * {@link ServerCertificateProblemEncounteredEvent} and blocks on {@link #actionSem} until
     * {@link #setAction} is called, then applies the user's chosen {@link CertificateAction}.
     */
    @Override
    public void checkServerTrusted(final X509Certificate[] chain, final String authType)
            throws CertificateException {
        this.chain = Arrays.copyOf(chain, chain.length);
        problems.clear();
        if (!hostChecker.isValidFor(chain[0], serverName)) {
            problems.add(new CertificateDoesntMatchHostException(
                    "Certificate was not issued to " + serverName));
        }
        // A manually-trusted issuer overrides all other problems (including host mismatch).
        if (checkIssuer(chain)) {
            problems.clear();
        }
        if (!problems.isEmpty()) {
            eventBus.publishAsync(new ServerCertificateProblemEncounteredEvent(connection, this,
                    Arrays.asList(chain), problems));
            try {
                // Block until the user resolves the problem via setAction().
                actionSem.acquire();
            } catch (InterruptedException ie) {
                throw new CertificateException("Thread aborted", ie);
            } finally {
                problems.clear();
                eventBus.publishAsync(new ServerCertificateProblemResolvedEvent(connection, this));
            }
            // NOTE(review): assumes setAction() always set a non-null action before releasing the
            // semaphore — confirm no code path releases it without setting an action.
            switch (action) {
                case DISCONNECT:
                    throw new CertificateException("Not trusted");
                case IGNORE_PERMANENTLY:
                    // Persist the certificate's signature so future connections trust it.
                    final List<String> list = new ArrayList<>(config
                            .getOptionList("ssl", "trusted"));
                    list.add(Base64.getEncoder().encodeToString(chain[0].getSignature()));
                    userSettings.setOption("ssl", "trusted", list);
                    break;
                case IGNORE_TEMPORARILY:
                    // Do nothing, continue connecting
                    break;
            }
        }
    }

    /**
     * Checks that some issuer in the certificate chain is trusted, either by the global CA list,
     * or manually by the user. Also records expiry problems for each certificate in the chain.
     *
     * @param chain The chain of certificates to check.
     * @return True if the certificate is trusted manually, false otherwise (i.e., trusted globally
     * OR untrusted).
     */
    private boolean checkIssuer(final X509Certificate... chain) {
        boolean manual = false;
        boolean verified = false;
        for (X509Certificate cert : chain) {
            final TrustResult trustResult = isTrusted(cert);
            // Check that the certificate is in-date
            try {
                cert.checkValidity();
            } catch (CertificateException ex) {
                problems.add(ex);
            }
            // Check that we trust an issuer
            verified |= trustResult.isTrusted();
            if (trustResult == TrustResult.TRUSTED_MANUALLY) {
                manual = true;
            }
        }
        if (!verified) {
            problems.add(new CertificateNotTrustedException("Issuer is not trusted"));
        }
        return manual;
    }

    /**
     * Gets the chain of certificates currently being validated, if any.
     *
     * <p>Note: returns the internal array, not a copy; callers must not modify it.
     *
     * @return The chain of certificates being validated
     */
    public X509Certificate[] getChain() {
        return chain;
    }

    /**
     * Gets the set of problems that were encountered with the last certificate.
     *
     * <p>Note: returns the live internal list; it is cleared when validation completes.
     *
     * @return The set of problems encountered, or any empty collection if there is no current
     * validation attempt ongoing.
     */
    public Collection<CertificateException> getProblems() {
        return problems;
    }

    /**
     * Sets the action to perform for the request that's in progress, and unblocks the connecting
     * thread waiting in {@link #checkServerTrusted}.
     *
     * @param action The action that's been selected
     */
    public void setAction(final CertificateAction action) {
        this.action = action;
        actionSem.release();
    }

    /**
     * Retrieves the name of the server to which the user is trying to connect.
     *
     * @return The name of the server that the user is trying to connect to
     */
    public String getServerName() {
        return serverName;
    }

    /**
     * Reads the fields from the subject's distinguished name in the specified certificate.
     *
     * @param cert The certificate to read
     *
     * @return A map of the fields in the certificate's subject's distinguished name (empty if the
     *         DN cannot be parsed)
     */
    public static Map<String, String> getDNFieldsFromCert(final X509Certificate cert) {
        final Map<String, String> res = new HashMap<>();
        try {
            final LdapName name = new LdapName(cert.getSubjectX500Principal().getName());
            for (Rdn rdn : name.getRdns()) {
                res.put(rdn.getType(), rdn.getValue().toString());
            }
        } catch (InvalidNameException ex) {
            // Malformed DN: nothing was parsed, so the empty map is returned.
        }
        return res;
    }

    /** {@inheritDoc} */
    @Override
    public X509Certificate[] getAcceptedIssuers() {
        return globalTrustedCAs.toArray(new X509Certificate[globalTrustedCAs.size()]);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.platform.cache;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import javax.cache.Cache;
import javax.cache.expiry.Duration;
import javax.cache.expiry.ExpiryPolicy;
import javax.cache.processor.EntryProcessorException;
import javax.cache.processor.EntryProcessorResult;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cache.CacheEntryProcessor;
import org.apache.ignite.cache.CacheMetrics;
import org.apache.ignite.cache.CachePartialUpdateException;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.query.Query;
import org.apache.ignite.cache.query.ScanQuery;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.cache.query.SqlQuery;
import org.apache.ignite.cache.query.TextQuery;
import org.apache.ignite.configuration.*;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.binary.BinaryRawReaderEx;
import org.apache.ignite.internal.binary.BinaryRawWriterEx;
import org.apache.ignite.internal.processors.cache.CacheOperationContext;
import org.apache.ignite.internal.processors.cache.CachePartialUpdateCheckedException;
import org.apache.ignite.internal.processors.cache.IgniteCacheProxy;
import org.apache.ignite.internal.processors.cache.query.QueryCursorEx;
import org.apache.ignite.internal.processors.platform.PlatformAbstractTarget;
import org.apache.ignite.internal.processors.platform.PlatformContext;
import org.apache.ignite.internal.processors.platform.PlatformNativeException;
import org.apache.ignite.internal.processors.platform.cache.query.PlatformContinuousQuery;
import org.apache.ignite.internal.processors.platform.cache.query.PlatformFieldsQueryCursor;
import org.apache.ignite.internal.processors.platform.cache.query.PlatformQueryCursor;
import org.apache.ignite.internal.processors.platform.utils.PlatformConfigurationUtils;
import org.apache.ignite.internal.processors.platform.utils.PlatformFutureUtils;
import org.apache.ignite.internal.processors.platform.utils.PlatformUtils;
import org.apache.ignite.internal.util.GridConcurrentFactory;
import org.apache.ignite.internal.util.future.IgniteFutureImpl;
import org.apache.ignite.internal.util.typedef.C1;
import org.apache.ignite.lang.IgniteFuture;
import org.jetbrains.annotations.Nullable;
/**
* Native cache wrapper implementation.
*/
@SuppressWarnings({"unchecked", "UnusedDeclaration", "TryFinallyCanBeTryWithResources"})
public class PlatformCache extends PlatformAbstractTarget {
    /** Op code: clear an entry by key. */
    public static final int OP_CLEAR = 1;

    /** Op code: clear a set of keys. */
    public static final int OP_CLEAR_ALL = 2;

    /** Op code: containsKey. */
    public static final int OP_CONTAINS_KEY = 3;

    /** Op code: containsKeys. */
    public static final int OP_CONTAINS_KEYS = 4;

    /** Op code: get. */
    public static final int OP_GET = 5;

    /** Op code: getAll. */
    public static final int OP_GET_ALL = 6;

    /** Op code: getAndPut. */
    public static final int OP_GET_AND_PUT = 7;

    /** Op code: getAndPutIfAbsent. */
    public static final int OP_GET_AND_PUT_IF_ABSENT = 8;

    /** Op code: getAndRemove. */
    public static final int OP_GET_AND_REMOVE = 9;

    /** Op code: getAndReplace. */
    public static final int OP_GET_AND_REPLACE = 10;

    /** Op code: get cache name. */
    public static final int OP_GET_NAME = 11;

    /** Op code: invoke entry processor. */
    public static final int OP_INVOKE = 12;

    /** Op code: invoke entry processor on a set of keys. */
    public static final int OP_INVOKE_ALL = 13;

    /** Op code: isLocalLocked. */
    public static final int OP_IS_LOCAL_LOCKED = 14;

    /** Op code: loadCache. */
    public static final int OP_LOAD_CACHE = 15;

    /** Op code: localEvict. */
    public static final int OP_LOC_EVICT = 16;

    /** Op code: localLoadCache. */
    public static final int OP_LOC_LOAD_CACHE = 17;

    /** Op code: localPromote. */
    public static final int OP_LOC_PROMOTE = 18;

    /** Op code: localClear. (Note: 19 is unused.) */
    public static final int OP_LOCAL_CLEAR = 20;

    /** Op code: localClearAll. */
    public static final int OP_LOCAL_CLEAR_ALL = 21;

    /** Op code: create a lock for a single key. */
    public static final int OP_LOCK = 22;

    /** Op code: create a lock for a collection of keys. */
    public static final int OP_LOCK_ALL = 23;

    /** Op code: write cache metrics. */
    public static final int OP_METRICS = 24;

    /** Op code: localPeek. */
    private static final int OP_PEEK = 25;

    /** Op code: put. */
    private static final int OP_PUT = 26;

    /** Op code: putAll. */
    private static final int OP_PUT_ALL = 27;

    /** Op code: putIfAbsent. */
    public static final int OP_PUT_IF_ABSENT = 28;

    /** Op code: start a continuous query. */
    public static final int OP_QRY_CONTINUOUS = 29;

    /** Op code: run a scan query. */
    public static final int OP_QRY_SCAN = 30;

    /** Op code: run a SQL query. */
    public static final int OP_QRY_SQL = 31;

    /** Op code: run a SQL fields query. */
    public static final int OP_QRY_SQL_FIELDS = 32;

    /** Op code: run a text query. */
    public static final int OP_QRY_TXT = 33;

    /** Op code: remove a set of keys. */
    public static final int OP_REMOVE_ALL = 34;

    /** Op code: remove(key, value), returning a boolean. */
    public static final int OP_REMOVE_BOOL = 35;

    /** Op code: remove(key). */
    public static final int OP_REMOVE_OBJ = 36;

    /** Op code: replace(key, value). */
    public static final int OP_REPLACE_2 = 37;

    /** Op code: replace(key, oldValue, newValue). */
    public static final int OP_REPLACE_3 = 38;

    /** Op code: write the cache configuration. */
    public static final int OP_GET_CONFIG = 39;

    /** Underlying JCache. */
    private final IgniteCacheProxy cache;

    /** Whether this cache is created with "keepBinary" flag on the other side. */
    private final boolean keepBinary;

    /** Future-result writer for getAll operations. */
    private static final GetAllWriter WRITER_GET_ALL = new GetAllWriter();

    /** Future-result writer for invoke operations. */
    private static final EntryProcessorInvokeWriter WRITER_INVOKE = new EntryProcessorInvokeWriter();

    /** Future-result writer for invokeAll operations. */
    private static final EntryProcessorInvokeAllWriter WRITER_INVOKE_ALL = new EntryProcessorInvokeAllWriter();

    /** Map with currently active locks. */
    private final ConcurrentMap<Long, Lock> lockMap = GridConcurrentFactory.newMap();

    /** Lock ID sequence. */
    private static final AtomicLong LOCK_ID_GEN = new AtomicLong();
    /**
     * Constructor.
     *
     * @param platformCtx Context.
     * @param cache Underlying cache; must be an {@link IgniteCacheProxy} (cast below).
     * @param keepBinary Keep binary flag.
     */
    public PlatformCache(PlatformContext platformCtx, IgniteCache cache, boolean keepBinary) {
        super(platformCtx);
        this.cache = (IgniteCacheProxy)cache;
        this.keepBinary = keepBinary;
    }
/**
* Gets cache with "skip-store" flag set.
*
* @return Cache with "skip-store" flag set.
*/
public PlatformCache withSkipStore() {
if (cache.delegate().skipStore())
return this;
return new PlatformCache(platformCtx, cache.withSkipStore(), keepBinary);
}
/**
* Gets cache with "keep binary" flag.
*
* @return Cache with "keep binary" flag set.
*/
public PlatformCache withKeepBinary() {
if (keepBinary)
return this;
return new PlatformCache(platformCtx, cache.withKeepBinary(), true);
}
/**
* Gets cache with provided expiry policy.
*
* @param create Create.
* @param update Update.
* @param access Access.
* @return Cache.
*/
public PlatformCache withExpiryPolicy(final long create, final long update, final long access) {
IgniteCache cache0 = cache.withExpiryPolicy(new InteropExpiryPolicy(create, update, access));
return new PlatformCache(platformCtx, cache0, keepBinary);
}
/**
* Gets cache with asynchronous mode enabled.
*
* @return Cache with asynchronous mode enabled.
*/
public PlatformCache withAsync() {
if (cache.isAsync())
return this;
return new PlatformCache(platformCtx, (IgniteCache)cache.withAsync(), keepBinary);
}
/**
* Gets cache with no-retries mode enabled.
*
* @return Cache with no-retries mode enabled.
*/
public PlatformCache withNoRetries() {
CacheOperationContext opCtx = cache.operationContext();
if (opCtx != null && opCtx.noRetries())
return this;
return new PlatformCache(platformCtx, cache.withNoRetries(), keepBinary);
}
    /**
     * {@inheritDoc}
     *
     * <p>Dispatches stream-in/long-out cache operations. Paired {@code readObjectDetached()}
     * arguments rely on Java's left-to-right argument evaluation: key is read first, then value.
     * Returns TRUE/FALSE encodings of boolean results; void operations return TRUE.
     */
    @Override protected long processInStreamOutLong(int type, BinaryRawReaderEx reader) throws IgniteCheckedException {
        switch (type) {
            case OP_PUT:
                cache.put(reader.readObjectDetached(), reader.readObjectDetached());

                return TRUE;

            case OP_REMOVE_BOOL:
                return cache.remove(reader.readObjectDetached(), reader.readObjectDetached()) ? TRUE : FALSE;

            case OP_REMOVE_ALL:
                cache.removeAll(PlatformUtils.readSet(reader));

                return TRUE;

            case OP_PUT_ALL:
                cache.putAll(PlatformUtils.readMap(reader));

                return TRUE;

            case OP_LOC_EVICT:
                cache.localEvict(PlatformUtils.readCollection(reader));

                return TRUE;

            case OP_CONTAINS_KEY:
                return cache.containsKey(reader.readObjectDetached()) ? TRUE : FALSE;

            case OP_CONTAINS_KEYS:
                return cache.containsKeys(PlatformUtils.readSet(reader)) ? TRUE : FALSE;

            case OP_LOC_PROMOTE: {
                cache.localPromote(PlatformUtils.readSet(reader));

                break;
            }

            case OP_REPLACE_3:
                // Reads key, old value, new value — in that order.
                return cache.replace(reader.readObjectDetached(), reader.readObjectDetached(),
                    reader.readObjectDetached()) ? TRUE : FALSE;

            case OP_LOC_LOAD_CACHE:
                loadCache0(reader, true);

                break;

            case OP_LOAD_CACHE:
                loadCache0(reader, false);

                break;

            case OP_CLEAR:
                cache.clear(reader.readObjectDetached());

                break;

            case OP_CLEAR_ALL:
                cache.clearAll(PlatformUtils.readSet(reader));

                break;

            case OP_LOCAL_CLEAR:
                cache.localClear(reader.readObjectDetached());

                break;

            case OP_LOCAL_CLEAR_ALL:
                cache.localClearAll(PlatformUtils.readSet(reader));

                break;

            case OP_PUT_IF_ABSENT: {
                return cache.putIfAbsent(reader.readObjectDetached(), reader.readObjectDetached()) ? TRUE : FALSE;
            }

            case OP_REPLACE_2: {
                return cache.replace(reader.readObjectDetached(), reader.readObjectDetached()) ? TRUE : FALSE;
            }

            case OP_REMOVE_OBJ: {
                return cache.remove(reader.readObjectDetached()) ? TRUE : FALSE;
            }

            case OP_IS_LOCAL_LOCKED:
                return cache.isLocalLocked(reader.readObjectDetached(), reader.readBoolean()) ? TRUE : FALSE;

            default:
                // Unknown op codes are delegated to the parent target.
                return super.processInStreamOutLong(type, reader);
        }

        // Reached by the break cases above (void operations).
        return TRUE;
    }
/**
* Loads cache via localLoadCache or loadCache.
*/
private void loadCache0(BinaryRawReaderEx reader, boolean loc) {
PlatformCacheEntryFilter filter = null;
Object pred = reader.readObjectDetached();
if (pred != null)
filter = platformCtx.createCacheEntryFilter(pred, 0);
Object[] args = reader.readObjectArray();
if (loc)
cache.localLoadCache(filter, args);
else
cache.loadCache(filter, args);
}
    /**
     * {@inheritDoc}
     *
     * <p>Dispatches stream-in/object-out operations — currently the query operations, which
     * return cursor or continuous-query handles to the platform side.
     */
    @Override protected Object processInStreamOutObject(int type, BinaryRawReaderEx reader)
        throws IgniteCheckedException {
        switch (type) {
            case OP_QRY_SQL:
                return runQuery(reader, readSqlQuery(reader));

            case OP_QRY_SQL_FIELDS:
                return runFieldsQuery(reader, readFieldsQuery(reader));

            case OP_QRY_TXT:
                return runQuery(reader, readTextQuery(reader));

            case OP_QRY_SCAN:
                return runQuery(reader, readScanQuery(reader));

            case OP_QRY_CONTINUOUS: {
                // Field order below mirrors the writer on the platform side — do not reorder.
                long ptr = reader.readLong();
                boolean loc = reader.readBoolean();
                boolean hasFilter = reader.readBoolean();
                Object filter = reader.readObjectDetached();
                int bufSize = reader.readInt();
                long timeInterval = reader.readLong();
                boolean autoUnsubscribe = reader.readBoolean();
                Query initQry = readInitialQuery(reader);

                PlatformContinuousQuery qry = platformCtx.createContinuousQuery(ptr, hasFilter, filter);

                qry.start(cache, loc, bufSize, timeInterval, autoUnsubscribe, initQry);

                return qry;
            }

            default:
                return super.processInStreamOutObject(type, reader);
        }
    }
/**
* Read arguments for SQL query.
*
* @param reader Reader.
* @return Arguments.
*/
@Nullable private Object[] readQueryArgs(BinaryRawReaderEx reader) {
int cnt = reader.readInt();
if (cnt > 0) {
Object[] args = new Object[cnt];
for (int i = 0; i < cnt; i++)
args[i] = reader.readObjectDetached();
return args;
}
else
return null;
}
    /**
     * {@inheritDoc}
     *
     * <p>Writes cache name, metrics, or configuration to the out-stream. The metric write order
     * below is the wire format — presumably mirrored by a reader on the platform side; confirm
     * before reordering or inserting fields.
     */
    @Override protected void processOutStream(int type, BinaryRawWriterEx writer) throws IgniteCheckedException {
        switch (type) {
            case OP_GET_NAME:
                writer.writeObject(cache.getName());

                break;

            case OP_METRICS:
                CacheMetrics metrics = cache.metrics();

                writer.writeLong(metrics.getCacheGets());
                writer.writeLong(metrics.getCachePuts());
                writer.writeLong(metrics.getCacheHits());
                writer.writeLong(metrics.getCacheMisses());
                writer.writeLong(metrics.getCacheTxCommits());
                writer.writeLong(metrics.getCacheTxRollbacks());
                writer.writeLong(metrics.getCacheEvictions());
                writer.writeLong(metrics.getCacheRemovals());
                writer.writeFloat(metrics.getAveragePutTime());
                writer.writeFloat(metrics.getAverageGetTime());
                writer.writeFloat(metrics.getAverageRemoveTime());
                writer.writeFloat(metrics.getAverageTxCommitTime());
                writer.writeFloat(metrics.getAverageTxRollbackTime());
                writer.writeString(metrics.name());
                writer.writeLong(metrics.getOverflowSize());
                writer.writeLong(metrics.getOffHeapEntriesCount());
                writer.writeLong(metrics.getOffHeapAllocatedSize());
                writer.writeInt(metrics.getSize());
                writer.writeInt(metrics.getKeySize());
                writer.writeBoolean(metrics.isEmpty());
                writer.writeInt(metrics.getDhtEvictQueueCurrentSize());
                writer.writeInt(metrics.getTxThreadMapSize());
                writer.writeInt(metrics.getTxXidMapSize());
                writer.writeInt(metrics.getTxCommitQueueSize());
                writer.writeInt(metrics.getTxPrepareQueueSize());
                writer.writeInt(metrics.getTxStartVersionCountsSize());
                writer.writeInt(metrics.getTxCommittedVersionsSize());
                writer.writeInt(metrics.getTxRolledbackVersionsSize());
                writer.writeInt(metrics.getTxDhtThreadMapSize());
                writer.writeInt(metrics.getTxDhtXidMapSize());
                writer.writeInt(metrics.getTxDhtCommitQueueSize());
                writer.writeInt(metrics.getTxDhtPrepareQueueSize());
                writer.writeInt(metrics.getTxDhtStartVersionCountsSize());
                writer.writeInt(metrics.getTxDhtCommittedVersionsSize());
                writer.writeInt(metrics.getTxDhtRolledbackVersionsSize());
                writer.writeBoolean(metrics.isWriteBehindEnabled());
                writer.writeInt(metrics.getWriteBehindFlushSize());
                writer.writeInt(metrics.getWriteBehindFlushThreadCount());
                writer.writeLong(metrics.getWriteBehindFlushFrequency());
                writer.writeInt(metrics.getWriteBehindStoreBatchSize());
                writer.writeInt(metrics.getWriteBehindTotalCriticalOverflowCount());
                writer.writeInt(metrics.getWriteBehindCriticalOverflowCount());
                writer.writeInt(metrics.getWriteBehindErrorRetryCount());
                writer.writeInt(metrics.getWriteBehindBufferSize());
                writer.writeString(metrics.getKeyType());
                writer.writeString(metrics.getValueType());
                writer.writeBoolean(metrics.isStoreByValue());
                writer.writeBoolean(metrics.isStatisticsEnabled());
                writer.writeBoolean(metrics.isManagementEnabled());
                writer.writeBoolean(metrics.isReadThrough());
                writer.writeBoolean(metrics.isWriteThrough());
                writer.writeFloat(metrics.getCacheHitPercentage());
                writer.writeFloat(metrics.getCacheMissPercentage());

                break;

            case OP_GET_CONFIG:
                CacheConfiguration ccfg = ((IgniteCache<Object, Object>)cache).
                    getConfiguration(CacheConfiguration.class);

                PlatformConfigurationUtils.writeCacheConfiguration(writer, ccfg);

                break;

            default:
                super.processOutStream(type, writer);
        }
    }
    /**
     * {@inheritDoc}
     *
     * <p>Dispatches stream-in/stream-out operations (get variants, peek, invoke, locks). Paired
     * {@code readObjectDetached()} arguments rely on Java's left-to-right evaluation: key first,
     * then value.
     */
    @SuppressWarnings({"IfMayBeConditional", "ConstantConditions"})
    @Override protected void processInStreamOutStream(int type, BinaryRawReaderEx reader, BinaryRawWriterEx writer)
        throws IgniteCheckedException {
        switch (type) {
            case OP_GET: {
                writer.writeObjectDetached(cache.get(reader.readObjectDetached()));

                break;
            }

            case OP_GET_AND_PUT: {
                writer.writeObjectDetached(cache.getAndPut(reader.readObjectDetached(), reader.readObjectDetached()));

                break;
            }

            case OP_GET_AND_REPLACE: {
                writer.writeObjectDetached(cache.getAndReplace(reader.readObjectDetached(),
                    reader.readObjectDetached()));

                break;
            }

            case OP_GET_AND_REMOVE: {
                writer.writeObjectDetached(cache.getAndRemove(reader.readObjectDetached()));

                break;
            }

            case OP_GET_AND_PUT_IF_ABSENT: {
                writer.writeObjectDetached(cache.getAndPutIfAbsent(reader.readObjectDetached(), reader.readObjectDetached()));

                break;
            }

            case OP_PEEK: {
                Object key = reader.readObjectDetached();

                CachePeekMode[] modes = PlatformUtils.decodeCachePeekModes(reader.readInt());

                writer.writeObjectDetached(cache.localPeek(key, modes));

                break;
            }

            case OP_GET_ALL: {
                Set keys = PlatformUtils.readSet(reader);

                Map entries = cache.getAll(keys);

                PlatformUtils.writeNullableMap(writer, entries);

                break;
            }

            case OP_INVOKE: {
                Object key = reader.readObjectDetached();

                CacheEntryProcessor proc = platformCtx.createCacheEntryProcessor(reader.readObjectDetached(), 0);

                try {
                    writer.writeObjectDetached(cache.invoke(key, proc));
                }
                catch (EntryProcessorException ex)
                {
                    // Native (platform-side) exceptions are marshalled back instead of rethrown,
                    // so the platform can rebuild the original exception.
                    if (ex.getCause() instanceof PlatformNativeException)
                        writer.writeObjectDetached(((PlatformNativeException)ex.getCause()).cause());
                    else
                        throw ex;
                }

                break;
            }

            case OP_INVOKE_ALL: {
                Set<Object> keys = PlatformUtils.readSet(reader);

                CacheEntryProcessor proc = platformCtx.createCacheEntryProcessor(reader.readObjectDetached(), 0);

                writeInvokeAllResult(writer, cache.invokeAll(keys, proc));

                break;
            }

            case OP_LOCK:
                // The lock is registered in lockMap; the returned id is used by enter/exit/closeLock.
                writer.writeLong(registerLock(cache.lock(reader.readObjectDetached())));

                break;

            case OP_LOCK_ALL:
                writer.writeLong(registerLock(cache.lockAll(PlatformUtils.readCollection(reader))));

                break;

            default:
                super.processInStreamOutStream(type, reader, writer);
        }
    }
    /**
     * {@inheritDoc}
     *
     * <p>Unwraps partial-update and entry-processor exceptions into platform-friendly forms.
     * The instanceof checks are ordered: the unchecked CachePartialUpdateException (whose cause
     * carries the checked details) is tested before the checked variant.
     */
    @Override public Exception convertException(Exception e) {
        if (e instanceof CachePartialUpdateException)
            return new PlatformCachePartialUpdateException((CachePartialUpdateCheckedException)e.getCause(),
                platformCtx, keepBinary);

        if (e instanceof CachePartialUpdateCheckedException)
            return new PlatformCachePartialUpdateException((CachePartialUpdateCheckedException)e, platformCtx, keepBinary);

        if (e.getCause() instanceof EntryProcessorException)
            return (EntryProcessorException) e.getCause();

        return super.convertException(e);
    }
/**
* Writes the result of InvokeAll cache method.
*
* @param writer Writer.
* @param results Results.
*/
private static void writeInvokeAllResult(BinaryRawWriterEx writer, Map<Object, EntryProcessorResult> results) {
if (results == null) {
writer.writeInt(-1);
return;
}
writer.writeInt(results.size());
for (Map.Entry<Object, EntryProcessorResult> entry : results.entrySet()) {
writer.writeObjectDetached(entry.getKey());
EntryProcessorResult procRes = entry.getValue();
try {
Object res = procRes.get();
writer.writeBoolean(false); // No exception
writer.writeObjectDetached(res);
}
catch (Exception ex) {
writer.writeBoolean(true); // Exception
writeError(writer, ex);
}
}
}
/**
* Writes an error to the writer either as a native exception, or as a couple of strings.
* @param writer Writer.
* @param ex Exception.
*/
private static void writeError(BinaryRawWriterEx writer, Exception ex) {
if (ex.getCause() instanceof PlatformNativeException)
writer.writeObjectDetached(((PlatformNativeException)ex.getCause()).cause());
else {
writer.writeObjectDetached(ex.getClass().getName());
writer.writeObjectDetached(ex.getMessage());
}
}
    /**
     * {@inheritDoc}
     *
     * <p>Assumes the cache is in async mode so {@code cache.future()} is an IgniteFutureImpl.
     */
    @Override protected IgniteInternalFuture currentFuture() throws IgniteCheckedException {
        return ((IgniteFutureImpl)cache.future()).internalFuture();
    }
/** <inheritDoc /> */
@Nullable @Override protected PlatformFutureUtils.Writer futureWriter(int opId) {
if (opId == OP_GET_ALL)
return WRITER_GET_ALL;
if (opId == OP_INVOKE)
return WRITER_INVOKE;
if (opId == OP_INVOKE_ALL)
return WRITER_INVOKE_ALL;
return null;
}
    /**
     * Clears the contents of the cache, without notifying listeners or CacheWriters.
     *
     * @throws IllegalStateException if the cache is closed.
     * @throws javax.cache.CacheException if there is a problem during the clear
     * @throws IgniteCheckedException In case of error.
     */
    public void clear() throws IgniteCheckedException {
        cache.clear();
    }
    /**
     * Removes all entries from the cache (listeners and writers are notified, unlike clear()).
     *
     * @throws org.apache.ignite.IgniteCheckedException In case of error.
     */
    public void removeAll() throws IgniteCheckedException {
        cache.removeAll();
    }
/**
* Read cache size.
*
* @param peekModes Encoded peek modes.
* @param loc Local mode flag.
* @return Size.
*/
public int size(int peekModes, boolean loc) {
CachePeekMode[] modes = PlatformUtils.decodeCachePeekModes(peekModes);
return loc ? cache.localSize(modes) : cache.size(modes);
}
/**
* Create cache iterator.
*
* @return Cache iterator.
*/
public PlatformCacheIterator iterator() {
Iterator<Cache.Entry> iter = cache.iterator();
return new PlatformCacheIterator(platformCtx, iter);
}
/**
* Create cache iterator over local entries.
*
* @param peekModes Peke modes.
* @return Cache iterator.
*/
public PlatformCacheIterator localIterator(int peekModes) {
CachePeekMode[] peekModes0 = PlatformUtils.decodeCachePeekModes(peekModes);
Iterator<Cache.Entry> iter = cache.localEntries(peekModes0).iterator();
return new PlatformCacheIterator(platformCtx, iter);
}
    /**
     * Enters a lock.
     *
     * @param id Lock id previously returned by registerLock().
     * @throws InterruptedException If the waiting thread is interrupted.
     */
    public void enterLock(long id) throws InterruptedException {
        lock(id).lockInterruptibly();
    }
    /**
     * Exits a lock.
     *
     * @param id Lock id previously returned by registerLock().
     */
    public void exitLock(long id) {
        lock(id).unlock();
    }
/**
* Attempts to enter a lock.
*
* @param id Lock id.
* @param timeout Timeout, in milliseconds. -1 for infinite timeout.
*/
public boolean tryEnterLock(long id, long timeout) throws InterruptedException {
return timeout == -1
? lock(id).tryLock()
: lock(id).tryLock(timeout, TimeUnit.MILLISECONDS);
}
    /**
     * Rebalances the cache and notifies the platform-side future when done.
     *
     * @param futId Future id registered on the platform side.
     */
    public void rebalance(long futId) {
        // The chain discards the rebalance result: the platform future only needs completion.
        PlatformFutureUtils.listen(platformCtx, cache.rebalance().chain(new C1<IgniteFuture, Object>() {
            @Override public Object apply(IgniteFuture fut) {
                return null;
            }
        }), futId, PlatformFutureUtils.TYP_OBJ, this);
    }
/**
* Unregister lock.
*
* @param id Lock id.
*/
public void closeLock(long id){
Lock lock = lockMap.remove(id);
assert lock != null : "Failed to unregister lock: " + id;
}
/**
* Get lock by id.
*
* @param id Id.
* @return Lock.
*/
private Lock lock(long id) {
Lock lock = lockMap.get(id);
assert lock != null : "Lock not found for ID: " + id;
return lock;
}
/**
* Registers a lock in a map.
*
* @param lock Lock to register.
* @return Registered lock id.
*/
private long registerLock(Lock lock) {
long id = LOCK_ID_GEN.incrementAndGet();
lockMap.put(id, lock);
return id;
}
/**
 * Runs specified query.
 *
 * @param reader Reader (currently unused; kept for signature parity with other handlers).
 * @param qry Query to execute.
 * @return Platform cursor over the query results.
 * @throws IgniteCheckedException If query execution fails.
 */
private PlatformQueryCursor runQuery(BinaryRawReaderEx reader, Query qry) throws IgniteCheckedException {
    try {
        QueryCursorEx cursor = (QueryCursorEx) cache.query(qry);

        // Fall back to the default page size when the query does not specify one.
        int pageSize = qry.getPageSize() > 0 ? qry.getPageSize() : Query.DFLT_PAGE_SIZE;

        return new PlatformQueryCursor(platformCtx, cursor, pageSize);
    }
    catch (Exception err) {
        throw PlatformUtils.unwrapQueryException(err);
    }
}
/**
 * Runs specified fields query.
 *
 * @param reader Reader (currently unused; kept for signature parity with other handlers).
 * @param qry Fields query to execute.
 * @return Platform cursor over the query result rows.
 * @throws IgniteCheckedException If query execution fails.
 */
private PlatformFieldsQueryCursor runFieldsQuery(BinaryRawReaderEx reader, Query qry)
    throws IgniteCheckedException {
    try {
        QueryCursorEx cursor = (QueryCursorEx) cache.query(qry);

        // Fall back to the default page size when the query does not specify one.
        int pageSize = qry.getPageSize() > 0 ? qry.getPageSize() : Query.DFLT_PAGE_SIZE;

        return new PlatformFieldsQueryCursor(platformCtx, cursor, pageSize);
    }
    catch (Exception err) {
        throw PlatformUtils.unwrapQueryException(err);
    }
}
/**
 * Reads the query of specified type.
 *
 * @param reader Reader positioned at the query type marker.
 * @return Deserialized query, or {@code null} when the type marker is -1.
 * @throws IgniteCheckedException If the type marker is not recognized.
 */
private Query readInitialQuery(BinaryRawReaderEx reader) throws IgniteCheckedException {
    int typ = reader.readInt();

    switch (typ) {
        case -1:
            return null;

        case OP_QRY_SCAN:
            return readScanQuery(reader);

        case OP_QRY_SQL:
            return readSqlQuery(reader);

        case OP_QRY_TXT:
            return readTextQuery(reader);

        default:
            throw new IgniteCheckedException("Unsupported query type: " + typ);
    }
}
/**
 * Reads sql query.
 *
 * @param reader Reader to deserialize the query from.
 * @return SQL query.
 */
private Query readSqlQuery(BinaryRawReaderEx reader) {
    // Field order must match the platform-side writer.
    boolean loc = reader.readBoolean();
    String sql = reader.readString();
    String typ = reader.readString();
    int pageSize = reader.readInt();
    Object[] args = readQueryArgs(reader);

    SqlQuery qry = new SqlQuery(typ, sql);

    qry.setPageSize(pageSize);
    qry.setArgs(args);
    qry.setLocal(loc);

    return qry;
}
/**
 * Reads fields query.
 *
 * @param reader Reader to deserialize the query from.
 * @return SQL fields query.
 */
private Query readFieldsQuery(BinaryRawReaderEx reader) {
    // Field order must match the platform-side writer.
    boolean loc = reader.readBoolean();
    String sql = reader.readString();
    int pageSize = reader.readInt();
    Object[] args = readQueryArgs(reader);

    SqlFieldsQuery qry = new SqlFieldsQuery(sql);

    qry.setPageSize(pageSize);
    qry.setArgs(args);
    qry.setLocal(loc);

    return qry;
}
/**
 * Reads text query.
 *
 * @param reader Reader to deserialize the query from.
 * @return Text query.
 */
private Query readTextQuery(BinaryRawReaderEx reader) {
    // Field order must match the platform-side writer.
    boolean loc = reader.readBoolean();
    String txt = reader.readString();
    String typ = reader.readString();
    int pageSize = reader.readInt();

    TextQuery qry = new TextQuery(typ, txt);

    qry.setPageSize(pageSize);
    qry.setLocal(loc);

    return qry;
}
/**
 * Reads scan query.
 *
 * @param reader Reader to deserialize the query from.
 * @return Scan query.
 */
private Query readScanQuery(BinaryRawReaderEx reader) {
    // Field order must match the platform-side writer.
    boolean loc = reader.readBoolean();
    int pageSize = reader.readInt();

    // An optional partition is encoded as a presence flag followed by the value.
    Integer part = reader.readBoolean() ? reader.readInt() : null;

    ScanQuery qry = new ScanQuery();

    qry.setPageSize(pageSize);
    qry.setPartition(part);

    // Optional platform-defined filter predicate.
    Object pred = reader.readObjectDetached();

    if (pred != null)
        qry.setFilter(platformCtx.createCacheEntryFilter(pred, 0));

    qry.setLocal(loc);

    return qry;
}
/**
 * Writes the result of a GetAll operation (a map of the requested entries).
 * NOTE(review): the previous javadoc ("Writes error with EntryProcessorException cause")
 * appeared to be copied from a sibling writer and did not describe this class.
 */
private static class GetAllWriter implements PlatformFutureUtils.Writer {
    /** {@inheritDoc} */
    @Override public void write(BinaryRawWriterEx writer, Object obj, Throwable err) {
        assert obj instanceof Map;
        PlatformUtils.writeNullableMap(writer, (Map) obj);
    }

    /** {@inheritDoc} */
    @Override public boolean canWrite(Object obj, Throwable err) {
        // Only successful results are handled here.
        return err == null;
    }
}
/**
 * Writes the result of an Invoke call: a success/failure flag followed by either the
 * resulting object or the error (with EntryProcessorException cause).
 */
private static class EntryProcessorInvokeWriter implements PlatformFutureUtils.Writer {
    /** {@inheritDoc} */
    @Override public void write(BinaryRawWriterEx writer, Object obj, Throwable err) {
        if (err == null) {
            writer.writeBoolean(false); // No error.
            writer.writeObjectDetached(obj);
        }
        else {
            writer.writeBoolean(true); // Error.
            writeError(writer, (Exception) err);
        }
    }

    /** {@inheritDoc} */
    @Override public boolean canWrite(Object obj, Throwable err) {
        // Handles both the success and the failure path, so it can always write.
        return true;
    }
}
/**
 * Writes results of InvokeAll method.
 */
private static class EntryProcessorInvokeAllWriter implements PlatformFutureUtils.Writer {
    /** {@inheritDoc} */
    @Override public void write(BinaryRawWriterEx writer, Object obj, Throwable err) {
        writeInvokeAllResult(writer, (Map)obj);
    }

    /** {@inheritDoc} */
    @Override public boolean canWrite(Object obj, Throwable err) {
        // Only a successful, non-null result map is handled by this writer.
        return obj != null && err == null;
    }
}
/**
 * Interop expiry policy.
 *
 * <p>Durations arrive from the platform side encoded as {@code long} values:
 * -2 = leave unchanged, -1 = eternal, 0 = zero, any positive value = milliseconds.
 */
private static class InteropExpiryPolicy implements ExpiryPolicy {
    /** Duration: unchanged. */
    private static final long DUR_UNCHANGED = -2;

    /** Duration: eternal. */
    private static final long DUR_ETERNAL = -1;

    /** Duration: zero. */
    private static final long DUR_ZERO = 0;

    /** Expiry for create. */
    private final Duration create;

    /** Expiry for update. */
    private final Duration update;

    /** Expiry for access. */
    private final Duration access;

    /**
     * Constructor.
     *
     * @param create Encoded expiry for create.
     * @param update Encoded expiry for update.
     * @param access Encoded expiry for access.
     */
    public InteropExpiryPolicy(long create, long update, long access) {
        this.create = convert(create);
        this.update = convert(update);
        this.access = convert(access);
    }

    /** {@inheritDoc} */
    @Override public Duration getExpiryForCreation() {
        return create;
    }

    /** {@inheritDoc} */
    @Override public Duration getExpiryForUpdate() {
        return update;
    }

    /** {@inheritDoc} */
    @Override public Duration getExpiryForAccess() {
        return access;
    }

    /**
     * Convert encoded duration to actual duration.
     *
     * @param encoded Encoded duration.
     * @return Actual duration, or {@code null} for "unchanged".
     */
    private static Duration convert(long encoded) {
        if (encoded == DUR_UNCHANGED)
            return null;

        if (encoded == DUR_ETERNAL)
            return Duration.ETERNAL;

        if (encoded == DUR_ZERO)
            return Duration.ZERO;

        assert encoded > 0;

        return new Duration(TimeUnit.MILLISECONDS, encoded);
    }
}
}
| |
package ca.uhn.fhir.rest.server;
import static org.junit.Assert.assertEquals;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicNameValuePair;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.hl7.fhir.dstu2016may.model.HumanName;
import org.hl7.fhir.dstu2016may.model.Patient;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.common.collect.Lists;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.annotation.OptionalParam;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.annotation.Sort;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.interceptor.InterceptorAdapter;
import ca.uhn.fhir.util.PortUtil;
import ca.uhn.fhir.util.TestUtil;
public class SearchPostDstu2_1Test {

    /** Interceptor that logs incoming request parameters; used to reproduce issue #411. */
    public class ParamLoggingInterceptor extends InterceptorAdapter {

        @Override
        public boolean incomingRequestPreProcessed(HttpServletRequest theRequest, HttpServletResponse theResponse) {
            ourLog.info("Params: {}", theRequest.getParameterMap());
            return true;
        }

    }

    private static CloseableHttpClient ourClient;
    private static FhirContext ourCtx = FhirContext.forDstu2_1();
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchPostDstu2_1Test.class);
    private static int ourPort;
    private static Server ourServer;
    // Values captured by DummyPatientResourceProvider.search() for assertion.
    private static String ourLastMethod;
    private static SortSpec ourLastSortSpec;
    private static StringAndListParam ourLastName;
    private static RestfulServer ourServlet;

    @Before
    public void before() {
        // Reset captured values and strip any interceptors registered by a previous test.
        ourLastMethod = null;
        ourLastSortSpec = null;
        ourLastName = null;
        for (IServerInterceptor next : new ArrayList<IServerInterceptor>(ourServlet.getInterceptors())) {
            ourServlet.unregisterInterceptor(next);
        }
    }

    /**
     * Posts a form-encoded name search ("name=Smith") to the given URL and verifies that the
     * search method captured the expected parameters and that the response carries the
     * expected content type.
     *
     * <p>Extracted from four near-identical test bodies (see #411); behavior is unchanged.
     *
     * @param theUrl full URL to POST to (may or may not carry a _format parameter)
     * @param theExpectedContentType content type expected on the response, without parameters
     */
    private void postNameSearchAndVerify(String theUrl, String theExpectedContentType) throws Exception {
        HttpPost httpPost = new HttpPost(theUrl);
        httpPost.addHeader("Cache-Control","no-cache");
        List<NameValuePair> parameters = Lists.newArrayList();
        parameters.add(new BasicNameValuePair("name", "Smith"));
        httpPost.setEntity(new UrlEncodedFormEntity(parameters));

        ourLog.info("Outgoing post: {}", httpPost);

        CloseableHttpResponse status = ourClient.execute(httpPost);
        try {
            String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
            ourLog.info(responseContent);

            assertEquals(200, status.getStatusLine().getStatusCode());
            assertEquals("search", ourLastMethod);
            assertEquals(null, ourLastSortSpec);
            assertEquals(1, ourLastName.getValuesAsQueryTokens().size());
            assertEquals(1, ourLastName.getValuesAsQueryTokens().get(0).getValuesAsQueryTokens().size());
            assertEquals("Smith", ourLastName.getValuesAsQueryTokens().get(0).getValuesAsQueryTokens().get(0).getValue());
            // Strip content-type parameters (e.g. ";charset=...") before comparing.
            assertEquals(theExpectedContentType, status.getEntity().getContentType().getValue().replaceAll(";.*", ""));
        } finally {
            IOUtils.closeQuietly(status.getEntity().getContent());
        }
    }

    /**
     * See #411
     */
    @Test
    public void testSearchWithMixedParamsNoInterceptorsYesParams() throws Exception {
        postNameSearchAndVerify(
            "http://localhost:" + ourPort + "/Patient/_search?_format=application/fhir+json",
            Constants.CT_FHIR_JSON_NEW);
    }

    /**
     * See #411
     */
    @Test
    public void testSearchWithMixedParamsNoInterceptorsNoParams() throws Exception {
        // Without an explicit _format the server default (XML) applies.
        postNameSearchAndVerify(
            "http://localhost:" + ourPort + "/Patient/_search",
            Constants.CT_FHIR_XML);
    }

    /**
     * See #411
     */
    @Test
    public void testSearchWithMixedParamsYesInterceptorsYesParams() throws Exception {
        // An interceptor that touches getParameterMap() must not consume the POST body.
        ourServlet.registerInterceptor(new ParamLoggingInterceptor());

        postNameSearchAndVerify(
            "http://localhost:" + ourPort + "/Patient/_search?_format=application/fhir+json",
            Constants.CT_FHIR_JSON_NEW);
    }

    /**
     * See #411
     */
    @Test
    public void testSearchWithMixedParamsYesInterceptorsNoParams() throws Exception {
        // An interceptor that touches getParameterMap() must not consume the POST body.
        ourServlet.registerInterceptor(new ParamLoggingInterceptor());

        postNameSearchAndVerify(
            "http://localhost:" + ourPort + "/Patient/_search",
            Constants.CT_FHIR_XML);
    }

    @AfterClass
    public static void afterClassClearContext() throws Exception {
        ourServer.stop();
        TestUtil.clearAllStaticFieldsForUnitTest();
    }

    @BeforeClass
    public static void beforeClass() throws Exception {
        // Stand up an embedded Jetty server hosting the RestfulServer under test.
        ourPort = PortUtil.findFreePort();
        ourServer = new Server(ourPort);

        DummyPatientResourceProvider patientProvider = new DummyPatientResourceProvider();

        ServletHandler proxyHandler = new ServletHandler();
        ourServlet = new RestfulServer(ourCtx);
        ourServlet.setDefaultResponseEncoding(EncodingEnum.XML);
        ourServlet.setPagingProvider(new FifoMemoryPagingProvider(10));
        ourServlet.setResourceProviders(patientProvider);
        ServletHolder servletHolder = new ServletHolder(ourServlet);
        proxyHandler.addServletWithMapping(servletHolder, "/*");
        ourServer.setHandler(proxyHandler);
        ourServer.start();

        PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
        HttpClientBuilder builder = HttpClientBuilder.create();
        builder.setConnectionManager(connectionManager);
        ourClient = builder.build();
    }

    /** Minimal provider that records its arguments and returns one canned Patient. */
    public static class DummyPatientResourceProvider implements IResourceProvider {

        @Override
        public Class<? extends IBaseResource> getResourceType() {
            return Patient.class;
        }

        //@formatter:off
        @SuppressWarnings("rawtypes")
        @Search()
        public List search(
                @Sort SortSpec theSortSpec,
                @OptionalParam(name=Patient.SP_NAME) StringAndListParam theName
                ) {
            ourLastMethod = "search";
            ourLastSortSpec = theSortSpec;
            ourLastName = theName;
            ArrayList<Patient> retVal = new ArrayList<Patient>();
            retVal.add((Patient) new Patient().addName(new HumanName().addFamily("FAMILY")).setId("foo"));
            return retVal;
        }
        //@formatter:on

    }

}
| |
/*
* Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.planner.consumer;
import com.google.common.collect.ImmutableList;
import io.crate.analyze.HavingClause;
import io.crate.analyze.OrderBy;
import io.crate.analyze.QueriedTable;
import io.crate.analyze.QueriedTableRelation;
import io.crate.analyze.relations.AnalyzedRelation;
import io.crate.analyze.relations.AnalyzedRelationVisitor;
import io.crate.analyze.relations.PlannedAnalyzedRelation;
import io.crate.analyze.relations.QueriedDocTable;
import io.crate.exceptions.VersionInvalidException;
import io.crate.metadata.Functions;
import io.crate.metadata.Routing;
import io.crate.metadata.doc.DocTableInfo;
import io.crate.planner.node.NoopPlannedAnalyzedRelation;
import io.crate.planner.node.dql.CollectPhase;
import io.crate.planner.node.dql.GroupByConsumer;
import io.crate.planner.node.dql.MergePhase;
import io.crate.planner.node.dql.NonDistributedGroupBy;
import io.crate.planner.projection.GroupProjection;
import io.crate.planner.projection.Projection;
import io.crate.planner.projection.builder.ProjectionBuilder;
import io.crate.planner.projection.builder.SplitPoints;
import io.crate.planner.symbol.Aggregation;
import io.crate.planner.symbol.Symbol;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Singleton;
import java.util.ArrayList;
import java.util.List;
@Singleton
public class NonDistributedGroupByConsumer implements Consumer {

    private final Visitor visitor;

    @Inject
    public NonDistributedGroupByConsumer(Functions functions) {
        this.visitor = new Visitor(functions);
    }

    /** Returns a plan for the relation, or {@code null} if this consumer is not applicable. */
    @Override
    public PlannedAnalyzedRelation consume(AnalyzedRelation relation, ConsumerContext context) {
        return visitor.process(relation, context);
    }

    private static class Visitor extends AnalyzedRelationVisitor<ConsumerContext, PlannedAnalyzedRelation> {

        private final Functions functions;

        public Visitor(Functions functions) {
            this.functions = functions;
        }

        @Override
        public PlannedAnalyzedRelation visitQueriedDocTable(QueriedDocTable table, ConsumerContext context) {
            // Only applicable to GROUP BY queries; otherwise let another consumer handle it.
            if (table.querySpec().groupBy() == null) {
                return null;
            }
            DocTableInfo tableInfo = table.tableRelation().tableInfo();

            // Version predicates cannot be used with GROUP BY; report and bail out.
            if (table.querySpec().where().hasVersions()) {
                context.validationException(new VersionInvalidException());
                return null;
            }

            Routing routing = context.plannerContext().allocateRouting(tableInfo, table.querySpec().where(), null);

            // More than one node involved -> requires a distributed group-by plan instead.
            if (routing.hasLocations() && routing.locations().size()>1) {
                return null;
            }

            GroupByConsumer.validateGroupBySymbols(table.tableRelation(), table.querySpec().groupBy());
            return nonDistributedGroupBy(table, routing, context);
        }

        @Override
        public PlannedAnalyzedRelation visitQueriedTable(QueriedTable table, ConsumerContext context) {
            // Only applicable to GROUP BY queries; otherwise let another consumer handle it.
            if (table.querySpec().groupBy() == null) {
                return null;
            }

            Routing routing = context.plannerContext().allocateRouting(table.tableRelation().tableInfo(), table.querySpec().where(), null);

            return nonDistributedGroupBy(table, routing, context);
        }

        @Override
        protected PlannedAnalyzedRelation visitAnalyzedRelation(AnalyzedRelation relation, ConsumerContext context) {
            // Any other relation kind is not handled by this consumer.
            return null;
        }

        /**
         * Group by on System Tables (never needs distribution)
         * or Group by on user tables (RowGranulariy.DOC) with only one node.
         *
         * produces:
         *
         * SELECT:
         * Collect ( GroupProjection ITER -> PARTIAL )
         * LocalMerge ( GroupProjection PARTIAL -> FINAL, [FilterProjection], TopN )
         *
         */
        private PlannedAnalyzedRelation nonDistributedGroupBy(QueriedTableRelation table, Routing routing, ConsumerContext context) {
            List<Symbol> groupBy = table.querySpec().groupBy();

            ProjectionBuilder projectionBuilder = new ProjectionBuilder(functions, table.querySpec());
            SplitPoints splitPoints = projectionBuilder.getSplitPoints();

            // mapper / collect
            // First aggregation step: partial aggregates computed where the data is collected.
            GroupProjection groupProjection = projectionBuilder.groupProjection(
                    splitPoints.leaves(),
                    table.querySpec().groupBy(),
                    splitPoints.aggregates(),
                    Aggregation.Step.ITER,
                    Aggregation.Step.PARTIAL);

            CollectPhase collectPhase = CollectPhase.forQueriedTable(
                    context.plannerContext(),
                    table,
                    splitPoints.leaves(),
                    ImmutableList.<Projection>of(groupProjection)
            );

            // handler
            // Collect outputs are the group keys followed by the partial aggregates.
            List<Symbol> collectOutputs = new ArrayList<>(
                    groupBy.size() +
                            splitPoints.aggregates().size());
            collectOutputs.addAll(groupBy);
            collectOutputs.addAll(splitPoints.aggregates());

            OrderBy orderBy = table.querySpec().orderBy();
            if (orderBy != null) {
                table.tableRelation().validateOrderBy(orderBy);
            }

            List<Projection> projections = new ArrayList<>();
            // Second aggregation step: finalize the partial aggregates on the handler.
            projections.add(projectionBuilder.groupProjection(
                    collectOutputs,
                    table.querySpec().groupBy(),
                    splitPoints.aggregates(),
                    Aggregation.Step.PARTIAL,
                    Aggregation.Step.FINAL
            ));

            // HAVING is applied after the final aggregation via a filter projection.
            HavingClause havingClause = table.querySpec().having();
            if (havingClause != null) {
                if (havingClause.noMatch()) {
                    // HAVING can never match -> plan a no-op.
                    return new NoopPlannedAnalyzedRelation(table, context.plannerContext().jobId());
                } else if (havingClause.hasQuery()){
                    projections.add(projectionBuilder.filterProjection(
                            collectOutputs,
                            havingClause.query()
                    ));
                }
            }

            /**
             * If this is not the rootRelation this is a subquery (e.g. Insert by Query),
             * so ordering and limiting is done by the rootRelation if required.
             *
             * If the querySpec outputs don't match the collectOutputs the query contains
             * aggregations or scalar functions which can only be resolved by a TopNProjection,
             * so a TopNProjection must be added.
             */
            boolean outputsMatch = table.querySpec().outputs().size() == collectOutputs.size() &&
                                   collectOutputs.containsAll(table.querySpec().outputs());
            if (context.rootRelation() == table || !outputsMatch){
                projections.add(projectionBuilder.topNProjection(
                        collectOutputs,
                        orderBy,
                        table.querySpec().offset(),
                        table.querySpec().limit(),
                        table.querySpec().outputs()
                ));
            }
            MergePhase localMergeNode = MergePhase.localMerge(
                    context.plannerContext().jobId(),
                    context.plannerContext().nextExecutionPhaseId(),
                    projections,
                    collectPhase);
            return new NonDistributedGroupBy(collectPhase, localMergeNode, context.plannerContext().jobId());
        }
    }
}
| |
/*
* Copyright 2021 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp.serialization;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.javascript.jscomp.colors.Color;
import com.google.javascript.jscomp.colors.ColorId;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map.Entry;
import java.util.function.Function;
import java.util.function.Predicate;
import javax.annotation.Nullable;
/**
* Serializes `Color`s and information about them into a `TypePool`.
*
* <p>Client code should call `addColor()` for each of the colors it wants to have included in the
* `TypePool`, then call `generateTypePool()` to get the result.
*/
class ColorSerializer {
  private final SerializationOptions serializationMode;

  /**
   * Used to find the index at which a `String` will appear in the `StringPoolProto` that will
   * eventually be serialized with the `TypePool` generated by this `ColorSerializer`.
   */
  private final Function<String, Integer> getStringPoolIndexFn;

  /** Only property names that pass this predicate will be serialized. */
  private final Predicate<String> propertyFilter;

  /**
   * Stores the `TypePointer` values assigned to `Color`s as they are added for serialization, so
   * they can be looked up and used for references between the output `TypeProto`s.
   */
  private final HashMap<ColorId, TypePointer> colorIdToTypePointer = new HashMap<>();

  /**
   * Stores the `Color`s to be serialized in the order they will be serialized.
   *
   * <p>This is used as a worklist when the `TypePool` is generated.
   */
  private final ArrayList<Color> colorsInSerializedOrder = new ArrayList<>();

  /**
   * Create a ColorSerializer.
   *
   * @param serializationMode determines what parts of the `TypePool` proto to fill in.
   * @param stringPoolIndexFn Used to request an integer index to encode in place of string value.
   *     The `TypePool` generated by this object will use these indices, so a `StringPoolProto` that
   *     contains the actual `String` values needs to be serialized along with it.
   * @param propertyFilter Property names for which this returns `false` will not be included in the
   *     generated `TypeProto`s.
   */
  ColorSerializer(
      SerializationOptions serializationMode,
      Function<String, Integer> stringPoolIndexFn,
      Predicate<String> propertyFilter) {
    this.serializationMode = serializationMode;
    this.getStringPoolIndexFn = stringPoolIndexFn;
    this.propertyFilter = propertyFilter;

    // We must pre-populate the type pointers with the few axiomatic colors that won't actually
    // be serialized. These types are required to get the first few offsets in the order they
    // are specified by `TypePointers.OFFSET_TO_AXIOMATIC_COLOR`.
    for (Color color : TypePointers.OFFSET_TO_AXIOMATIC_COLOR) {
      addColor(color);
    }
  }

  /**
   * Add a collection of `Color`s to the list of those that must be serialized.
   *
   * @param colors to be serialized
   * @return list of `TypePointer`s that will refer to the input `Color`s in the TypePool` that this
   *     object will create. The order will match the order of the input `Color`s.
   */
  ImmutableList<TypePointer> addColors(Collection<Color> colors) {
    final ImmutableList.Builder<TypePointer> builder = ImmutableList.builder();
    for (Color color : colors) {
      builder.add(addColor(color));
    }
    return builder.build();
  }

  /**
   * Add `color` to the list of those that must be serialized (if it wasn't already there) and
   * return the `TypePointer` value that will refer to it in the `TypePool` that this object will
   * create.
   */
  TypePointer addColor(Color color) {
    return colorIdToTypePointer.computeIfAbsent(
        color.getId(),
        (unusedKey) -> {
          // A color's pool offset is simply its position in `colorsInSerializedOrder`.
          final int index = colorsInSerializedOrder.size();
          colorsInSerializedOrder.add(color);
          return TypePointer.newBuilder().setPoolOffset(index).build();
        });
  }

  /**
   * Generate a `TypePool` proto built from the previously added `Color`s and the arguments supplied
   * to this method.
   *
   * @param getDisambiguationSupertypesFn Given a `Color` return a set of `Color`s it inherits from.
   * @param getMismatchSourceRefsFn May be `null` if this `serializationMode` is `SKIP_DEBUG_INFO`.
   *     Otherwise, this function must provide a set of all the source reference strings indicating
   *     code locations where the given `Color` encountered a type mismatch.
   * @return a new `TypePool` proto
   */
  TypePool generateTypePool(
      Function<Color, ImmutableSet<Color>> getDisambiguationSupertypesFn,
      @Nullable Function<Color, ImmutableSet<String>> getMismatchSourceRefsFn) {
    final TypePool.Builder typePoolBuilder = TypePool.newBuilder();

    // We use an indexed loop here for 2 reasons.
    // 1. We must skip serialization of the axiomatic colors that start our list of serialized
    //    colors.
    // 2. The logic in the loop may end up adding more colors to the list. It's often hard to tell
    //    what effect changing an iterable will have on an iteration that is in progress.
    for (int i = TypePointers.untrimOffset(0); i < colorsInSerializedOrder.size(); i++) {
      final Color color = colorsInSerializedOrder.get(i);
      final TypePointer typePointer = colorIdToTypePointer.get(color.getId());
      typePoolBuilder.addType(generateTypeProto(color));
      // Record one disambiguation edge per supertype; `addColor` here may append
      // previously-unseen supertypes to the worklist being iterated.
      for (Color supertype : getDisambiguationSupertypesFn.apply(color)) {
        typePoolBuilder
            .addDisambiguationEdgesBuilder()
            .setSubtype(typePointer)
            .setSupertype(addColor(supertype));
      }
    }

    if (serializationMode != SerializationOptions.SKIP_DEBUG_INFO) {
      checkNotNull(getMismatchSourceRefsFn);
      final TypePool.DebugInfo.Builder debugInfoBuilder = typePoolBuilder.getDebugInfoBuilder();
      // Construct a map from source reference string to type pointers,
      // because that's the way the Mismatch protos work.
      // Construct entries only for those colors that we have actually serialized in order to save
      // space.
      final LinkedHashMap<String, ArrayList<TypePointer>> srcRefToTypePointerList =
          new LinkedHashMap<>();
      for (Color color : colorsInSerializedOrder) {
        final TypePointer typePointer = colorIdToTypePointer.get(color.getId());
        for (String srcRef : getMismatchSourceRefsFn.apply(color)) {
          final ArrayList<TypePointer> typePointerList =
              srcRefToTypePointerList.computeIfAbsent(srcRef, (key) -> new ArrayList<>());
          typePointerList.add(typePointer);
        }
      }
      // Now use the map to build the Mismatch protos and put them into the debug info.
      for (Entry<String, ArrayList<TypePointer>> entry : srcRefToTypePointerList.entrySet()) {
        debugInfoBuilder
            .addMismatchBuilder()
            .setSourceRef(entry.getKey())
            .addAllInvolvedColor(entry.getValue());
      }
    }
    return typePoolBuilder.build();
  }

  /** Builds the `TypeProto` for a single color: either a union proto or an object proto. */
  private TypeProto generateTypeProto(Color color) {
    final TypeProto.Builder typeProtoBuilder = TypeProto.newBuilder();
    if (color.isUnion()) {
      typeProtoBuilder.getUnionBuilder().addAllUnionMember(addColors(color.getUnionElements()));
    } else {
      final ObjectTypeProto.Builder objectTypeProtoBuilder = typeProtoBuilder.getObjectBuilder();
      objectTypeProtoBuilder
          .setIsInvalidating(color.isInvalidating())
          .setUuid(color.getId().asByteString())
          .setPropertiesKeepOriginalName(color.getPropertiesKeepOriginalName())
          .addAllInstanceType(addColors(color.getInstanceColors()))
          .addAllPrototype(addColors(color.getPrototypes()))
          .setMarkedConstructor(color.isConstructor())
          .addAllOwnProperty(getOwnPropertyStringPoolOffsets(color))
          .setClosureAssert(color.isClosureAssert());
      if (serializationMode != SerializationOptions.SKIP_DEBUG_INFO) {
        final String compositeTypename = color.getDebugInfo().getCompositeTypename();
        if (!compositeTypename.isEmpty()) {
          // Color objects always have a DebugInfo field, but it will have an empty type
          // name when we don't actually have a type name to store.
          objectTypeProtoBuilder.getDebugInfoBuilder().addTypename(compositeTypename);
        }
      }
    }
    return typeProtoBuilder.build();
  }

  /** Maps each serializable own-property name of `color` to its string-pool index. */
  private ImmutableList<Integer> getOwnPropertyStringPoolOffsets(Color color) {
    final ImmutableList.Builder<Integer> builder = ImmutableList.builder();
    for (String ownProperty : color.getOwnProperties()) {
      // The client code may know that some properties are unused in the AST, so there's no need
      // to serialize them.
      if (propertyFilter.test(ownProperty)) {
        builder.add(getStringPoolIndexFn.apply(ownProperty));
      }
    }
    return builder.build();
  }
}
| |
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.calendar.impl;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.SQLException;
import java.time.Instant;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
import junit.framework.TestCase;
import lombok.extern.slf4j.Slf4j;
import org.sakaiproject.calendar.impl.DbCalendarService.DbStorage;
import org.sakaiproject.db.api.SqlReader;
import org.sakaiproject.db.api.SqlService;
import org.sakaiproject.entity.api.Entity;
import org.sakaiproject.entity.api.EntityManager;
import org.sakaiproject.entity.api.EntityProducer;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.exception.ServerOverloadException;
import org.sakaiproject.time.api.Time;
import org.sakaiproject.time.api.TimeBreakdown;
import org.sakaiproject.time.api.TimeRange;
import org.sakaiproject.time.api.TimeService;
/**
 * Exercises DbCalendarService's XML (de)serialization of calendar containers and
 * events against resource fixtures, using no-op stubs for the backing services.
 *
 * @author ieb
 */
@Slf4j
public class DbCalendarServiceSerializationTest extends TestCase
{
    private SqlService sqlService;

    // First container entity successfully deserialized; used as the parent when
    // deserializing event resources in the second phase of testSerialize().
    private Entity container;

    private EntityManager entityManager;

    private Map<String, Object> services;

    private TimeService timeService;

    public DbCalendarServiceSerializationTest(String name)
    {
        super(name);
    }

    /**
     * Builds no-op stub implementations of the services DbCalendarService requires.
     * The serialization path under test never touches the database, so every stub
     * simply returns a neutral value.
     *
     * @throws java.lang.Exception
     */
    protected void setUp() throws Exception
    {
        entityManager = new EntityManager() {
            public boolean checkReference(String ref) { return false; }
            public List getEntityProducers() { return new ArrayList(); }
            public Reference newReference(String refString) { return null; }
            public Reference newReference(Reference copyMe) { return null; }
            public List newReferenceList() { return new ArrayList(); }
            public List newReferenceList(List copyMe) { return new ArrayList(copyMe); }
            public void registerEntityProducer(EntityProducer manager, String referenceRoot) { }
        };

        sqlService = new SqlService() {
            public Connection borrowConnection() throws SQLException { return null; }
            public void dbCancel(Connection conn) { }
            public Long dbInsert(Connection callerConnection, String sql, Object[] fields, String autoColumn) { return null; }
            public Long dbInsert(Connection callerConnection, String sql, Object[] fields, String autoColumn, InputStream last, int lastLength) { return null; }
            public List dbRead(String sql) { return null; }
            public List dbRead(String sql, Object[] fields, SqlReader reader) { return null; }
            public List dbRead(Connection conn, String sql, Object[] fields, SqlReader reader) { return null; }
            public void dbReadBinary(String sql, Object[] fields, byte[] value) { }
            public void dbReadBinary(Connection conn, String sql, Object[] fields, byte[] value) { }
            public InputStream dbReadBinary(String sql, Object[] fields, boolean big) throws ServerOverloadException { return null; }
            public void dbReadBlobAndUpdate(String sql, byte[] content) { }
            public Connection dbReadLock(String sql, StringBuilder field) { return null; }
            public void dbUpdateCommit(String sql, Object[] fields, String var, Connection conn) { }
            public boolean dbWrite(String sql) { return false; }
            public boolean dbWrite(String sql, String var) { return false; }
            public boolean dbWrite(String sql, Object[] fields) { return false; }
            public boolean dbWrite(Connection connection, String sql, Object[] fields) { return false; }
            public boolean dbWrite(String sql, Object[] fields, String lastField) { return false; }
            public boolean dbWriteBatch(Connection connection, String sql, List<Object[]> fieldsList) { return false; }
            public boolean dbWriteBinary(String sql, Object[] fields, byte[] var, int offset, int len) { return false; }
            public boolean dbWriteFailQuiet(Connection connection, String sql, Object[] fields) { return false; }
            public void ddl(ClassLoader loader, String resource) { }
            public String getBooleanConstant(boolean value) { return null; }
            public GregorianCalendar getCal() { return null; }
            public Long getNextSequence(String tableName, Connection conn) { return null; }
            // Vendor string is consulted by the service; exercise the MySQL code path.
            public String getVendor() { return "mysql"; }
            public void returnConnection(Connection conn) { }
            public boolean transact(Runnable callback, String tag) { return false; }
            public Connection dbReadLock(String sql, SqlReader reader) { return null; }
            public int dbWriteCount(String sql, Object[] fields, String lastField, Connection callerConnection, boolean failQuiet) { return -1; }
            public int dbWriteCount(String sql, Object[] fields, String lastField, Connection callerConnection, int failQuiet) { return -1; }
        };

        // NOTE: the original anonymous TimeService also carried two copy-pasted
        // dbWriteCount(...) methods that belong to SqlService; they were unreachable
        // dead code and have been removed.
        timeService = new TimeService() {
            public boolean clearLocalTimeZone(String userId) { return false; }
            public boolean different(Time a, Time b) { return false; }
            public GregorianCalendar getCalendar(TimeZone zone, int year, int month, int day, int hour, int min, int sec, int ms) { return null; }
            public TimeZone getLocalTimeZone() { return null; }
            @Override
            public TimeZone getLocalTimeZone(String userId) { return null; }
            public Time newTime() { return null; }
            public Time newTime(long value) { return null; }
            public Time newTime(GregorianCalendar cal) { return null; }
            public TimeBreakdown newTimeBreakdown(int year, int month, int day, int hour, int minute, int second, int millisecond) { return null; }
            public Time newTimeGmt(String value) { return null; }
            public Time newTimeGmt(int year, int month, int day, int hour, int minute, int second, int millisecond) { return null; }
            public Time newTimeGmt(TimeBreakdown breakdown) { return null; }
            public Time newTimeLocal(int year, int month, int day, int hour, int minute, int second, int millisecond) { return null; }
            public Time newTimeLocal(TimeBreakdown breakdown) { return null; }
            public TimeRange newTimeRange(Time start, Time end, boolean startIncluded, boolean endIncluded) { return null; }
            public TimeRange newTimeRange(String value) { return null; }
            public TimeRange newTimeRange(Time startAndEnd) { return null; }
            public TimeRange newTimeRange(long start, long duration) { return null; }
            public TimeRange newTimeRange(Time start, Time end) { return null; }
            @Override
            public String dateFormat(Date date, Locale locale, int df) { return null; }
            @Override
            public String dateTimeFormat(Date date, Locale locale, int df) { return null; }
            @Override
            public String shortLocalizedTimestamp(Instant instant, TimeZone timezone, Locale locale) { return null; }
            @Override
            public String shortLocalizedTimestamp(Instant instant, Locale locale) { return null; }
            @Override
            public String shortLocalizedDate(LocalDate date, Locale locale) { return null; }
            @Override
            public String shortPreciseLocalizedTimestamp(Instant instant, TimeZone timezone, Locale locale) { return null; }
            @Override
            public String shortPreciseLocalizedTimestamp(Instant instant, Locale locale) { return null; }
            @Override
            public String timeFormat(Date date, Locale locale, int format) { return null; }
            @Override
            public String dayOfWeekFormat(Date date, Locale locale, int format) { return null; }
            @Override
            public Date parseISODateInUserTimezone(String dateString) { return null; }
        };

        services = new HashMap<String, Object>();
        services.put("sqlservice", sqlService);
        services.put("timeservice", timeService);
        services.put("entitymanager", entityManager);
    }

    /**
     * Nothing to clean up: the stubs hold no external resources.
     *
     * @throws java.lang.Exception
     */
    protected void tearDown() throws Exception
    {
    }

    /**
     * Sanity check: the service can be constructed without its collaborators.
     */
    public void testConstruct() {
        DbCalendarService dbCal = new DbCalendarService();
    }

    /**
     * Deserializes every container fixture (testSerialize_N) and then every event
     * fixture (testSerializeEvent_N), asserting each parses to a non-null entity.
     */
    public void testSerialize() throws IOException {
        DbCalendarService dbCal = new DbCalendarService();
        dbCal.setSqlService(sqlService);
        dbCal.setEntityManager(entityManager);
        dbCal.setServices(services);
        DbStorage s = (DbStorage) dbCal.newStorage();

        for (int i = 0; i < 100; i++) {
            String xml = readTestResource("testSerialize_" + i);
            if (xml == null) {
                break;
            }
            log.info("Xml is [" + xml + "]");
            Entity e = s.readContainerTest(xml);
            assertNotNull(e);
            // Remember the first parsed container so the event phase has a parent.
            // (Bug fix: the original condition was inverted — "container != null" —
            // so the field was never assigned and readResourceTest always got null.)
            if (container == null) {
                container = e;
            }
        }

        for (int i = 0; i < 100; i++) {
            String xml = readTestResource("testSerializeEvent_" + i);
            if (xml == null) {
                break;
            }
            log.info("Xml is [" + xml + "]");
            Entity e = s.readResourceTest(container, xml);
            assertNotNull(e);
        }
    }

    /**
     * Reads a classpath resource next to this class into a newline-joined string.
     *
     * @param name resource name relative to this class
     * @return the resource contents, or null if the resource does not exist
     * @throws IOException on read failure (the reader is closed either way)
     */
    private String readTestResource(String name) throws IOException {
        InputStream ins = this.getClass().getResourceAsStream(name);
        if (ins == null) {
            return null;
        }
        StringBuilder sb = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(ins))) {
            String line;
            while ((line = reader.readLine()) != null) {
                sb.append(line).append("\n");
            }
        }
        return sb.toString();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.java.typeutils.runtime;
import java.io.IOException;
import org.apache.avro.generic.GenericData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.util.Utf8;
import org.apache.flink.annotation.Internal;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.typeutils.runtime.kryo.Serializers;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.util.InstantiationUtil;
import com.esotericsoftware.kryo.Kryo;
import org.objenesis.strategy.StdInstantiatorStrategy;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
 * General purpose serialization. Currently using Apache Avro's Reflect-serializers for serialization and
 * Kryo for deep object copies. We want to change this to Kryo-only.
 *
 * @param <T> The type serialized.
 */
@Internal
public final class AvroSerializer<T> extends TypeSerializer<T> {

	private static final long serialVersionUID = 1L;

	/** The type handled by this serializer. */
	private final Class<T> type;

	/** The concrete class instantiated for new records. */
	private final Class<? extends T> typeToInstantiate;

	// Avro machinery is created lazily and is not serializable, hence transient.
	private transient ReflectDatumWriter<T> writer;
	private transient ReflectDatumReader<T> reader;

	private transient DataOutputEncoder encoder;
	private transient DataInputDecoder decoder;

	// Kryo is used only for deep object copies; also created lazily.
	private transient Kryo kryo;

	// Reused target object for stream-to-stream copies.
	private transient T deepCopyInstance;

	// --------------------------------------------------------------------------------------------

	public AvroSerializer(Class<T> type) {
		this(type, type);
	}

	public AvroSerializer(Class<T> type, Class<? extends T> typeToInstantiate) {
		this.type = checkNotNull(type);
		this.typeToInstantiate = checkNotNull(typeToInstantiate);

		InstantiationUtil.checkForInstantiation(typeToInstantiate);
	}

	// --------------------------------------------------------------------------------------------

	@Override
	public boolean isImmutableType() {
		return false;
	}

	@Override
	public AvroSerializer<T> duplicate() {
		return new AvroSerializer<T>(type, typeToInstantiate);
	}

	@Override
	public T createInstance() {
		return InstantiationUtil.instantiate(typeToInstantiate);
	}

	@Override
	public T copy(T from) {
		checkKryoInitialized();
		return KryoUtils.copy(from, kryo, this);
	}

	@Override
	public T copy(T from, T reuse) {
		checkKryoInitialized();
		return KryoUtils.copy(from, reuse, kryo, this);
	}

	@Override
	public int getLength() {
		// variable-length records
		return -1;
	}

	@Override
	public void serialize(T value, DataOutputView target) throws IOException {
		checkAvroInitialized();
		encoder.setOut(target);
		writer.write(value, encoder);
	}

	@Override
	public T deserialize(DataInputView source) throws IOException {
		checkAvroInitialized();
		decoder.setIn(source);
		return reader.read(null, decoder);
	}

	@Override
	public T deserialize(T reuse, DataInputView source) throws IOException {
		checkAvroInitialized();
		decoder.setIn(source);
		return reader.read(reuse, decoder);
	}

	@Override
	public void copy(DataInputView source, DataOutputView target) throws IOException {
		checkAvroInitialized();

		if (deepCopyInstance == null) {
			deepCopyInstance = InstantiationUtil.instantiate(type, Object.class);
		}

		decoder.setIn(source);
		encoder.setOut(target);

		T record = reader.read(deepCopyInstance, decoder);
		writer.write(record, encoder);
	}

	/** Creates the Avro reader/writer and the coders on first use. */
	private void checkAvroInitialized() {
		if (reader == null) {
			reader = new ReflectDatumReader<T>(type);
			writer = new ReflectDatumWriter<T>(type);
			encoder = new DataOutputEncoder();
			decoder = new DataInputDecoder();
		}
	}

	/** Creates and configures the Kryo instance for deep copies on first use. */
	private void checkKryoInitialized() {
		if (kryo == null) {
			kryo = new Kryo();

			Kryo.DefaultInstantiatorStrategy strategy = new Kryo.DefaultInstantiatorStrategy();
			strategy.setFallbackInstantiatorStrategy(new StdInstantiatorStrategy());
			kryo.setInstantiatorStrategy(strategy);

			// register Avro types.
			kryo.register(GenericData.Array.class, new Serializers.SpecificInstanceCollectionSerializerForArrayList());
			kryo.register(Utf8.class);
			kryo.register(GenericData.EnumSymbol.class);
			kryo.register(GenericData.Fixed.class);
			kryo.register(GenericData.StringType.class);
			kryo.setAsmEnabled(true);
			kryo.register(type);
		}
	}

	// --------------------------------------------------------------------------------------------

	@Override
	public int hashCode() {
		return 31 * type.hashCode() + typeToInstantiate.hashCode();
	}

	@Override
	public boolean equals(Object obj) {
		if (!(obj instanceof AvroSerializer)) {
			return false;
		}

		@SuppressWarnings("unchecked")
		AvroSerializer<T> other = (AvroSerializer<T>) obj;

		return other.canEqual(this)
				&& type == other.type
				&& typeToInstantiate == other.typeToInstantiate;
	}

	@Override
	public boolean canEqual(Object obj) {
		return obj instanceof AvroSerializer;
	}
}
| |
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.rest.resources;
import com.eclipsesource.json.Json;
import com.eclipsesource.json.JsonArray;
import com.eclipsesource.json.JsonObject;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.apache.commons.lang3.StringUtils;
import org.hamcrest.Description;
import org.hamcrest.Matchers;
import org.hamcrest.TypeSafeMatcher;
import org.junit.Before;
import org.junit.Test;
import org.onlab.osgi.ServiceDirectory;
import org.onlab.osgi.TestServiceDirectory;
import org.onosproject.cluster.NodeId;
import org.onosproject.cluster.RoleInfo;
import org.onosproject.codec.CodecService;
import org.onosproject.codec.impl.CodecManager;
import org.onosproject.mastership.MastershipAdminService;
import org.onosproject.mastership.MastershipService;
import org.onosproject.net.DefaultDevice;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.MastershipRole;
import org.onosproject.net.device.DeviceService;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Response;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.util.List;
import java.util.Set;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.onosproject.net.MastershipRole.MASTER;
/**
 * Unit tests for Mastership REST APIs.
 *
 * Each test wires EasyMock stand-ins for the mastership/device services into
 * the JAX-RS test harness (via ResourceTest) and hits the REST endpoints.
 */
public final class MastershipResourceTest extends ResourceTest {

    // EasyMock stand-ins for the services consumed by the mastership resource.
    private final MastershipService mockService = createMock(MastershipService.class);
    private final DeviceService mockDeviceService = createMock(DeviceService.class);
    private final MastershipAdminService mockAdminService =
            createMock(MastershipAdminService.class);

    // Fixed device identifiers reused across tests.
    private final DeviceId deviceId1 = DeviceId.deviceId("dev:1");
    private final DeviceId deviceId2 = DeviceId.deviceId("dev:2");
    private final DeviceId deviceId3 = DeviceId.deviceId("dev:3");

    // Minimal device returned by the mocked DeviceService where needed.
    final Device device1 = new DefaultDevice(null, deviceId1, Device.Type.OTHER,
                                             "", "", "", "", null);

    // Fixed controller-node identifiers.
    private final NodeId nodeId1 = NodeId.nodeId("node:1");
    private final NodeId nodeId2 = NodeId.nodeId("node:2");
    private final NodeId nodeId3 = NodeId.nodeId("node:3");

    private final MastershipRole role1 = MASTER;

    /**
     * Creates a mock role info which is comprised of one master and three backups.
     *
     * @return a mock role info instance
     */
    private RoleInfo createMockRoleInfo() {
        NodeId master = NodeId.nodeId("master");
        List<NodeId> backups = ImmutableList.of(nodeId1, nodeId2, nodeId3);

        return new RoleInfo(master, backups);
    }

    /**
     * Hamcrest matcher asserting that a JSON object encodes the given RoleInfo:
     * same master id, same backup count, and every backup id present.
     */
    private static final class RoleInfoJsonMatcher extends TypeSafeMatcher<JsonObject> {

        private final RoleInfo roleInfo;
        // Populated on mismatch so describeTo() can report why matching failed.
        private String reason = "";

        private RoleInfoJsonMatcher(RoleInfo roleInfo) {
            this.roleInfo = roleInfo;
        }

        @Override
        protected boolean matchesSafely(JsonObject jsonNode) {

            // check master node identifier
            String jsonNodeId = jsonNode.get("master") != null ?
                    jsonNode.get("master").asString() : null;
            String nodeId = roleInfo.master().id();
            if (!StringUtils.equals(jsonNodeId, nodeId)) {
                reason = "master's node id was " + jsonNodeId;
                return false;
            }

            // check backup nodes size
            final JsonArray jsonBackupNodeIds = jsonNode.get("backups").asArray();
            if (jsonBackupNodeIds.size() != roleInfo.backups().size()) {
                reason = "backup nodes size was " + jsonBackupNodeIds.size();
                return false;
            }

            // check backup nodes' identifier (order-insensitive containment check)
            for (NodeId backupNodeId : roleInfo.backups()) {
                boolean backupFound = false;
                for (int idx = 0; idx < jsonBackupNodeIds.size(); idx++) {
                    if (backupNodeId.id().equals(jsonBackupNodeIds.get(idx).asString())) {
                        backupFound = true;
                        break;
                    }
                }
                if (!backupFound) {
                    reason = "backup not found " + backupNodeId.id();
                    return false;
                }
            }

            return true;
        }

        @Override
        public void describeTo(Description description) {
            description.appendText(reason);
        }
    }

    /**
     * Factory to allocate a role info json matcher.
     *
     * @param roleInfo role info object we are looking for
     * @return matcher
     */
    private static RoleInfoJsonMatcher matchesRoleInfo(RoleInfo roleInfo) {
        return new RoleInfoJsonMatcher(roleInfo);
    }

    /**
     * Sets up the global values for all the tests.
     * Registers the mocks and a live codec manager in the service directory.
     */
    @Before
    public void setUpTest() {
        final CodecManager codecService = new CodecManager();
        codecService.activate();

        ServiceDirectory testDirectory =
                new TestServiceDirectory()
                        .add(MastershipService.class, mockService)
                        .add(MastershipAdminService.class, mockAdminService)
                        .add(DeviceService.class, mockDeviceService)
                        .add(CodecService.class, codecService);

        setServiceDirectory(testDirectory);
    }

    /**
     * Tests the result of the REST API GET when there are active master roles.
     */
    @Test
    public void testGetLocalRole() {
        expect(mockService.getLocalRole(anyObject())).andReturn(role1).anyTimes();
        replay(mockService);

        final WebTarget wt = target();
        final String response = wt.path("mastership/" + deviceId1.toString() +
                "/local").request().get(String.class);
        final JsonObject result = Json.parse(response).asObject();
        assertThat(result, notNullValue());

        assertThat(result.names(), hasSize(1));
        assertThat(result.names().get(0), is("role"));

        final String role = result.get("role").asString();
        assertThat(role, notNullValue());
        assertThat(role, is("MASTER"));
    }

    /**
     * Tests the result of the REST API GET when there is no active master.
     * Expects a 404 since no node is master for the device.
     */
    @Test
    public void testGetMasterForNull() {
        expect(mockService.getMasterFor(anyObject())).andReturn(null).anyTimes();
        replay(mockService);

        final WebTarget wt = target();
        final Response response = wt.path("mastership/" + deviceId1.toString() +
                "/master").request().get();
        assertEquals(404, response.getStatus());
    }

    /**
     * Tests the result of the REST API GET when there is active master.
     */
    @Test
    public void testGetMasterFor() {
        expect(mockService.getMasterFor(anyObject())).andReturn(nodeId1).anyTimes();
        replay(mockService);

        final WebTarget wt = target();
        final String response = wt.path("mastership/" + deviceId1.toString() +
                "/master").request().get(String.class);
        final JsonObject result = Json.parse(response).asObject();
        assertThat(result, notNullValue());

        assertThat(result.names(), hasSize(1));
        assertThat(result.names().get(0), is("nodeId"));

        final String node = result.get("nodeId").asString();
        assertThat(node, notNullValue());
        assertThat(node, is("node:1"));
    }

    /**
     * Tests the result of the REST API GET when there are no active nodes.
     * Expects a 404 since there is no role information for the device.
     */
    @Test
    public void testGetNodesForNull() {
        expect(mockService.getNodesFor(anyObject())).andReturn(null).anyTimes();
        replay(mockService);

        final WebTarget wt = target();
        final Response response = wt.path("mastership/" + deviceId1.toString() +
                "/role").request().get();
        assertEquals(404, response.getStatus());
    }

    /**
     * Tests the result of the REST API GET when there are active nodes.
     * The returned JSON must match the mocked RoleInfo (see RoleInfoJsonMatcher).
     */
    @Test
    public void testGetNodesFor() {
        RoleInfo mockRoleInfo = createMockRoleInfo();
        expect(mockService.getNodesFor(anyObject())).andReturn(mockRoleInfo).anyTimes();
        replay(mockService);

        final WebTarget wt = target();
        final String response = wt.path("mastership/" + deviceId1.toString() +
                "/role").request().get(String.class);
        final JsonObject result = Json.parse(response).asObject();
        assertThat(result, notNullValue());
        assertThat(result, matchesRoleInfo(mockRoleInfo));
    }

    /**
     * Tests the result of the REST API GET when there are active devices.
     */
    @Test
    public void testGetDevicesOf() {
        Set<DeviceId> deviceIds = ImmutableSet.of(deviceId1, deviceId2, deviceId3);
        expect(mockService.getDevicesOf(anyObject())).andReturn(deviceIds).anyTimes();
        replay(mockService);

        final WebTarget wt = target();
        final String response = wt.path("mastership/" + deviceId1.toString() +
                "/device").request().get(String.class);
        final JsonObject result = Json.parse(response).asObject();
        assertThat(result, notNullValue());

        assertThat(result.names(), hasSize(1));
        assertThat(result.names().get(0), is("deviceIds"));

        final JsonArray jsonDevices = result.get("deviceIds").asArray();
        assertThat(jsonDevices, notNullValue());
        assertThat(jsonDevices.size(), is(3));
    }

    /**
     * Tests the result of the REST API GET for requesting mastership role.
     * The device lookup is also mocked so the endpoint can resolve the device.
     */
    @Test
    public void testRequestRoleFor() {
        expect(mockService.requestRoleForSync(anyObject())).andReturn(role1).anyTimes();
        replay(mockService);

        expect(mockDeviceService.getDevice(deviceId1)).andReturn(device1);
        replay(mockDeviceService);

        final WebTarget wt = target();
        final String response = wt.path("mastership/" + deviceId1.toString() +
                "/request").request().get(String.class);
        final JsonObject result = Json.parse(response).asObject();
        assertThat(result, notNullValue());

        assertThat(result.names(), hasSize(1));
        assertThat(result.names().get(0), is("role"));

        final String role = result.get("role").asString();
        assertThat(role, notNullValue());
        assertThat(role, is("MASTER"));
    }

    /**
     * Tests the result of the REST API GET for relinquishing mastership role.
     * Expects 201 Created with a Location header pointing at the device.
     */
    @Test
    public void testRelinquishMastership() {
        mockService.relinquishMastershipSync(anyObject());
        expectLastCall();
        replay(mockService);

        final WebTarget wt = target();
        final Response response = wt.path("mastership/" + deviceId1.toString() +
                "/relinquish").request().get();
        assertThat(response.getStatus(), is(HttpURLConnection.HTTP_CREATED));
        String location = response.getLocation().toString();
        assertThat(location, Matchers.startsWith(deviceId1.toString()));
    }

    /**
     * Tests the result of the REST API PUT for setting role.
     * The request body comes from the put-set-roles.json fixture.
     */
    @Test
    public void testSetRole() {
        mockAdminService.setRoleSync(anyObject(), anyObject(), anyObject());
        expectLastCall();
        replay(mockAdminService);

        final WebTarget wt = target();
        final InputStream jsonStream = MetersResourceTest.class
                .getResourceAsStream("put-set-roles.json");
        final Response response = wt.path("mastership")
                .request().put(Entity.json(jsonStream));
        assertThat(response.getStatus(), is(HttpURLConnection.HTTP_OK));
    }

    /**
     * Tests the result of the REST API GET for balancing roles.
     */
    @Test
    public void testBalanceRoles() {
        mockAdminService.balanceRoles();
        expectLastCall();
        replay(mockAdminService);

        final WebTarget wt = target();
        final Response response = wt.path("mastership").request().get();
        assertThat(response.getStatus(), is(HttpURLConnection.HTTP_OK));
    }
}
| |
package com.atulgpt.www.timetrix;
import android.content.DialogInterface;
import android.content.Intent;
import android.media.Ringtone;
import android.media.RingtoneManager;
import android.net.Uri;
import android.os.Bundle;
import android.preference.CheckBoxPreference;
import android.preference.EditTextPreference;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceActivity;
import android.preference.PreferenceFragment;
import android.preference.PreferenceManager;
import android.preference.RingtonePreference;
import android.preference.SwitchPreference;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.util.Log;
import android.view.MenuItem;
import android.view.View;
import android.widget.EditText;
import android.widget.Toast;
import com.atulgpt.www.timetrix.adapters.DatabaseAdapter;
import com.atulgpt.www.timetrix.utils.GlobalData;
import com.atulgpt.www.timetrix.utils.SharedPrefsUtil;
/**
* A {@link PreferenceActivity} that presents a set of application settings. On
* handset devices, settings are presented as a single list. On tablets,
* settings are split by category, with category headers shown to the left of
* the list of settings.
* <p>
* See <a href="http://developer.android.com/design/patterns/settings.html">
* Android Design: SettingsActivity</a> for design guidelines and the <a
* href="http://developer.android.com/guide/topics/ui/settings.html">SettingsActivity
* API Guide</a> for more information on developing a SettingsActivity UI.
*/
public class SettingsPreferenceActivity extends AppCompatActivity {
    // Enables verbose logging of preference changes (see the summary listener below).
    private static final boolean DEBUG = true;
    // Log tag derived from the class name.
    private static final String TAG = SettingsPreferenceActivity.class.getSimpleName ();
/**
* A preference value change listener that updates the preference's summary
* to reflect its new value.
*/
private static Preference.OnPreferenceChangeListener sBindPreferenceSummaryToValueListener =
new Preference.OnPreferenceChangeListener () {
@Override
public boolean onPreferenceChange(Preference preference, Object value) {
String stringValue = value.toString ();
//Log.d (TAG, "onPreferenceChange: Preference name = "+PreferenceManager.getDefaultSharedPreferencesName (preference.getContext ()));
if (DEBUG)
Log.d (TAG, "onPreferenceChange: preference = " + preference + " stringValue = " + stringValue);
if (preference instanceof ListPreference) {
// For list preferences, look up the correct display value in
// the preference's 'entries' list.
ListPreference listPreference = (ListPreference) preference;
int index = listPreference.findIndexOfValue (stringValue);
// Set the summary to reflect the new value.
preference.setSummary (
index >= 0
? listPreference.getEntries ()[index]
: null);
} else if (preference instanceof RingtonePreference) {
// For ringtone preferences, look up the correct display value
// using RingtoneManager.
if (TextUtils.isEmpty (stringValue)) {
// Empty values correspond to 'silent' (no ringtone).
preference.setSummary (R.string.pref_ringtone_silent);
} else {
Ringtone ringtone = RingtoneManager.getRingtone (
preference.getContext (), Uri.parse (stringValue));
if (ringtone == null) {
// Clear the summary if there was a lookup error.
preference.setSummary (null);
} else {
// Set the summary to reflect the new ringtone display
// name.
String name = ringtone.getTitle (preference.getContext ());
preference.setSummary (name);
}
}
} else if (preference instanceof EditTextPreference) {
preference.setSummary (stringValue);
}
return true;
}
};
private int mSectionIndex = 0;
/**
* Binds a preference's summary to its value. More specifically, when the
* preference's value is changed, its summary (line of text below the
* preference title) is updated to reflect the value. The summary is also
* immediately updated upon calling this method. The exact display format is
* dependent on the type of preference.
*
* @see #sBindPreferenceSummaryToValueListener
*/
private static void bindPreferenceSummaryToValue(Preference preference) {
// Set the listener to watch for value changes.
preference.setOnPreferenceChangeListener (sBindPreferenceSummaryToValueListener);
// Trigger the listener immediately with the preference's
// current value.
if (preference instanceof CheckBoxPreference || preference instanceof SwitchPreference) {
sBindPreferenceSummaryToValueListener.onPreferenceChange (preference,
PreferenceManager
.getDefaultSharedPreferences (preference.getContext ())
.getBoolean (preference.getKey (), false));
} else if (preference instanceof EditTextPreference) {
sBindPreferenceSummaryToValueListener.onPreferenceChange (preference,
PreferenceManager
.getDefaultSharedPreferences (preference.getContext ())
.getString (preference.getKey (), ""));
}
}
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate (savedInstanceState);
        setupActionBar ();
        // Remember which section the caller came from so back/up navigation
        // can return to the same section of StartupPage.
        if (getIntent ().hasExtra (GlobalData.SECTION_INDEX)) {
            mSectionIndex = getIntent ().getIntExtra (GlobalData.SECTION_INDEX, 0);
        }
        // Guard with isValidFragment() (fragment-injection protection) before
        // attaching the general settings fragment.
        if (isValidFragment (GeneralPreferenceFragment.class.getName ()))
            getFragmentManager ().beginTransaction ().replace (R.id.settings_fragment_layout,
                    new GeneralPreferenceFragment ()).commit ();
    }
/**
* Set up the {@link android.app.ActionBar}, if the API is available.
*/
private void setupActionBar() {
//Setting custom action bar
setContentView (R.layout.activity_settings_prefrence);
Toolbar toolbar = (Toolbar) findViewById (R.id.toolbarSettingsPreference);
setSupportActionBar (toolbar);
android.support.v7.app.ActionBar actionBar = getSupportActionBar ();
if (actionBar != null) {
actionBar.setDisplayHomeAsUpEnabled (true);
} else Toast.makeText (this, "in2", Toast.LENGTH_SHORT).show ();
}
/**
* This method stops fragment injection in malicious applications.
* Make sure to deny any unknown fragments here.
*/
protected boolean isValidFragment(String fragmentName) {
return PreferenceFragment.class.getName ().equals (fragmentName)
|| GeneralPreferenceFragment.class.getName ().equals (fragmentName)
|| DataSyncPreferenceFragment.class.getName ().equals (fragmentName)
|| NotificationPreferenceFragment.class.getName ().equals (fragmentName);
}
/**
* This hook is called whenever an item in your options menu is selected.
* The default implementation simply returns false to have the normal
* processing happen (calling the item's Runnable or sending a message to
* its Handler as appropriate). You can use this method for any items
* for which you would like to do processing without those other
* facilities.
* <p>
* <p>Derived classes should call through to the base class for it to
* perform the default menu handling.</p>
*
* @param item The menu item that was selected.
* @return boolean Return false to allow normal menu processing to
* proceed, true to consume it here.
* @see #onCreateOptionsMenu
*/
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId () == android.R.id.home) {
startActivity (new Intent (SettingsPreferenceActivity.this, StartupPage.class)
.putExtra (GlobalData.SECTION_INDEX, mSectionIndex));
SettingsPreferenceActivity.this.finish ();
return true;
}
return super.onOptionsItemSelected (item);
}
/**
* Take care of popping the fragment back stack or finishing the activity
* as appropriate.
*/
@Override
public void onBackPressed() {
startActivity (new Intent (SettingsPreferenceActivity.this, StartupPage.class)
.putExtra (GlobalData.SECTION_INDEX, mSectionIndex));
SettingsPreferenceActivity.this.finish ();
}
    /**
     * This fragment shows general preferences only. It is used when the
     * activity is showing a two-pane settings UI.
     * <p>
     * Besides binding preference summaries, it wires two click handlers: the
     * "passSwitch" preference (password protection via a set-password dialog)
     * and the "resetData" preference (delete-all-data confirmation dialog).
     */
    public static class GeneralPreferenceFragment extends PreferenceFragment {
        @Override
        public void onCreate(final Bundle savedInstanceState) {
            super.onCreate (savedInstanceState);
            addPreferencesFromResource (R.xml.pref_settings_option);
            setHasOptionsMenu (true);
            // Bind the summaries of EditText/List/Dialog/Ringtone preferences
            // to their values. When their values change, their summaries are
            // updated to reflect the new value, per the Android Design
            // guidelines.
            bindPreferenceSummaryToValue (findPreference ("userNameEditText"));
            bindPreferenceSummaryToValue (findPreference ("emailEditText"));
            bindPreferenceSummaryToValue (findPreference ("notificationSwitch"));
            bindPreferenceSummaryToValue (findPreference ("cloudSwitch"));
            bindPreferenceSummaryToValue (findPreference ("passSwitch"));
            bindPreferenceSummaryToValue (findPreference ("resetData"));
            //bindPreferenceSummaryToValue (findPreference ("termsAndCondition"));
            // Password-protection toggle: turning it OFF disables the stored
            // password; turning it ON opens a dialog to set a new password and
            // only re-checks the switch once both entries match.
            Preference preference = findPreference ("passSwitch");
            preference.setOnPreferenceClickListener (new Preference.OnPreferenceClickListener () {
                @Override
                public boolean onPreferenceClick(Preference preference) {
                    final SwitchPreference switchPreference = (SwitchPreference) preference;
                    final SharedPrefsUtil sharedPrefsUtil = new SharedPrefsUtil (getActivity ());
                    if (!switchPreference.isChecked ()) {
                        // The click just unchecked the switch - drop the password.
                        sharedPrefsUtil.disablePassword ();
                        return false;
                    }
                    // Revert the automatic check; it is re-applied only after a
                    // password has been successfully set in the dialog below.
                    switchPreference.setChecked (false);
                    // NOTE(review): this condition is always true immediately
                    // after setChecked(false) above - presumably defensive; confirm.
                    if (!switchPreference.isChecked ()) {
                        AlertDialog.Builder builder = new AlertDialog.Builder (getActivity ()).setTitle ("Set Password").setMessage ("Set Password to secure the app");
                        builder.setView (R.layout.dialog_password_entry);
                        // Intentionally empty: the real handler is installed on
                        // the button after show() so validation failures don't
                        // auto-dismiss the dialog.
                        builder.setPositiveButton (R.string.done_str, new DialogInterface.OnClickListener () {
                            @Override
                            public void onClick(DialogInterface dialog, int which) {
                            }
                        });
                        builder.setNegativeButton (R.string.cancel_str, new DialogInterface.OnClickListener () {
                            @Override
                            public void onClick(DialogInterface dialog, int which) {
                                switchPreference.setChecked (false);
                                dialog.dismiss ();
                            }
                        });
                        final AlertDialog dialog = builder.create ();
                        dialog.show ();
                        // Replace the positive button's click listener so both
                        // password fields can be validated before dismissing.
                        dialog.getButton (AlertDialog.BUTTON_POSITIVE).setOnClickListener (new View.OnClickListener () {
                            @Override
                            public void onClick(View v) {
                                EditText passwordEditText = (EditText) dialog.findViewById (R.id.editText_password);
                                EditText confirmPasswordEditText = (EditText) dialog.findViewById (R.id.editText_confirm_password);
                                if (passwordEditText != null && confirmPasswordEditText != null) {
                                    String password = passwordEditText.getText ().toString ();
                                    String confirmPassword = confirmPasswordEditText.getText ().toString ();
                                    // Tracks which field (if any) is empty so the
                                    // error is attached to the right view.
                                    View view = null;
                                    if (password.trim ().isEmpty ()) {
                                        view = passwordEditText;
                                    } else if (confirmPassword.trim ().isEmpty ()) {
                                        view = confirmPasswordEditText;
                                    }
                                    if (view != null) {
                                        ((EditText) view).setError (getString (R.string.empty_field_not_allowed_str));
                                        view.requestFocus ();
                                    } else {
                                        if (password.equals (confirmPassword)) {
                                            // Persist the password and enable protection.
                                            sharedPrefsUtil.setUserPassAuth (confirmPassword);
                                            sharedPrefsUtil.enablePassword ();
                                            switchPreference.setChecked (true);
                                            dialog.dismiss ();
                                        } else {
                                            confirmPasswordEditText.setError (getString (R.string.password_didnt_match_str));
                                            confirmPasswordEditText.requestFocus ();
                                        }
                                    }
                                } else {
                                    // Dialog layout is missing the expected fields.
                                    dialog.dismiss ();
                                }
                            }
                        });
                    }
                    return false;
                }
            });
            // "Reset data": confirmation dialog that wipes the database and
            // restarts the flow from AddAnotherSection.
            Preference preference1 = findPreference ("resetData");
            preference1.setOnPreferenceClickListener (new Preference.OnPreferenceClickListener () {
                @Override
                public boolean onPreferenceClick(Preference preference) {
                    AlertDialog.Builder builder = new AlertDialog.Builder (getActivity ()).setTitle (getString (R.string.delete_all_the_data_str)).setMessage (getString (R.string.it_ll_delete_all_the_data_str));
                    builder.setPositiveButton (getString (R.string.yes_str), new DialogInterface.OnClickListener () {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            DatabaseAdapter databaseAdapter = new DatabaseAdapter (getActivity (), null);
                            databaseAdapter.deleteDatabase ();
                            Toast.makeText (getActivity (), "Complete data Deleted!", Toast.LENGTH_SHORT).show ();
                            startActivity (new Intent (getActivity (), AddAnotherSection.class).putExtra (GlobalData.ADD_ANOTHER_SEC_HOME, false));
                            getActivity ().finish ();
                            dialog.dismiss ();
                        }
                    });
                    builder.setNegativeButton (getString (R.string.no_str), new DialogInterface.OnClickListener () {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            dialog.dismiss ();
                        }
                    }).show ();
                    return false;
                }
            });
        }
    }
    /**
     * This fragment shows notification preferences only. It is used when the
     * activity is showing a two-pane settings UI.
     */
    public static class NotificationPreferenceFragment extends PreferenceFragment {
        @Override
        public void onCreate(Bundle savedInstanceState) {
            super.onCreate (savedInstanceState);
            addPreferencesFromResource (R.xml.pref_notification);
            setHasOptionsMenu (true);
            // Bind the ringtone preference's summary to its value so the
            // summary always shows the currently selected ringtone name
            // (or "silent"), per the Android Design guidelines.
            bindPreferenceSummaryToValue (findPreference ("notifications_new_message_ringtone"));
        }
    }
    /**
     * This fragment shows data and sync preferences only. It is used when the
     * activity is showing a two-pane settings UI.
     */
    public static class DataSyncPreferenceFragment extends PreferenceFragment {
        @Override
        public void onCreate(Bundle savedInstanceState) {
            super.onCreate (savedInstanceState);
            addPreferencesFromResource (R.xml.pref_data_sync);
            setHasOptionsMenu (true);
            // Bind the sync-frequency preference's summary to its value so the
            // summary reflects the stored setting immediately and after every
            // change, per the Android Design guidelines.
            bindPreferenceSummaryToValue (findPreference ("sync_frequency"));
        }
    }
}
| |
/*************************************************************************
* Compilation: javac BinarySearchST.java
* Execution: java BinarySearchST
* Dependencies: StdIn.java StdOut.java
* Data files: http://algs4.cs.princeton.edu/31elementary/tinyST.txt
*
* Symbol table implementation with binary search in an ordered array.
*
* % more tinyST.txt
* S E A R C H E X A M P L E
*
* % java BinarySearchST < tinyST.txt
* A 8
* C 4
* E 12
* H 5
* L 11
* M 9
* P 10
* R 3
* S 0
* X 7
*
*************************************************************************/
/**
 * Symbol table implementation with binary search in an ordered array.
 * Keys are kept sorted; {@code get}/{@code rank} are O(log N), while
 * {@code put}/{@code delete} are O(N) due to array shifting.
 */
public class BinarySearchST<Key extends Comparable<Key>, Value> {
    private static final int INIT_CAPACITY = 2;
    private Key[] keys;    // keys[0..N-1], kept in ascending order
    private Value[] vals;  // vals[i] is the value associated with keys[i]
    private int N = 0;     // number of key-value pairs

    /** Creates an empty symbol table with the default initial capacity. */
    public BinarySearchST() { this(INIT_CAPACITY); }

    /** Creates an empty symbol table with the given initial capacity. */
    @SuppressWarnings("unchecked")
    public BinarySearchST(int capacity) {
        // Safe: the arrays are only accessed through the Key/Value-typed fields.
        keys = (Key[]) new Comparable[capacity];
        vals = (Value[]) new Object[capacity];
    }

    /** Resizes the underlying arrays to the given capacity (capacity >= N). */
    @SuppressWarnings("unchecked")
    private void resize(int capacity) {
        assert capacity >= N;
        Key[] tempk = (Key[]) new Comparable[capacity];
        Value[] tempv = (Value[]) new Object[capacity];
        System.arraycopy(keys, 0, tempk, 0, N);
        System.arraycopy(vals, 0, tempv, 0, N);
        vals = tempv;
        keys = tempk;
    }

    /** Returns true if the given key is in the table. */
    public boolean contains(Key key) {
        return get(key) != null;
    }

    /** Returns the number of key-value pairs in the table. */
    public int size() {
        return N;
    }

    /** Returns true if the symbol table is empty. */
    public boolean isEmpty() {
        return size() == 0;
    }

    /** Returns the value associated with the given key, or null if no such key. */
    public Value get(Key key) {
        if (isEmpty()) return null;
        int i = rank(key);
        if (i < N && keys[i].compareTo(key) == 0) return vals[i];
        return null;
    }

    /** Returns the number of keys in the table strictly smaller than the given key. */
    public int rank(Key key) {
        int lo = 0, hi = N-1;
        while (lo <= hi) {
            int m = lo + (hi - lo) / 2;  // overflow-safe midpoint
            int cmp = key.compareTo(keys[m]);
            if (cmp < 0) hi = m - 1;
            else if (cmp > 0) lo = m + 1;
            else return m;
        }
        return lo;
    }

    /**
     * Inserts the key-value pair, updating the value if the key is already
     * present. A null value deletes the key, matching the convention of the
     * other symbol table implementations in this file.
     */
    public void put(Key key, Value val) {
        if (val == null) { delete(key); return; }
        int i = rank(key);
        // key is already in table
        if (i < N && keys[i].compareTo(key) == 0) {
            vals[i] = val;
            return;
        }
        // insert new key-value pair, shifting larger keys one slot right
        if (N == keys.length) resize(2*keys.length);
        for (int j = N; j > i; j--) {
            keys[j] = keys[j-1];
            vals[j] = vals[j-1];
        }
        keys[i] = key;
        vals[i] = val;
        N++;
        assert check();
    }

    /** Removes the key-value pair if present. */
    public void delete(Key key) {
        if (isEmpty()) return;
        // compute rank
        int i = rank(key);
        // key not in table
        if (i == N || keys[i].compareTo(key) != 0) {
            return;
        }
        for (int j = i; j < N-1; j++) {
            keys[j] = keys[j+1];
            vals[j] = vals[j+1];
        }
        N--;
        keys[N] = null; // to avoid loitering
        vals[N] = null;
        // halve the arrays when only one quarter full
        if (N > 0 && N == keys.length/4) resize(keys.length/2);
        assert check();
    }

    /** Deletes the minimum key and its associated value. */
    public void deleteMin() {
        // NoSuchElementException is the conventional exception for underflow;
        // it extends RuntimeException, so existing callers are unaffected.
        if (isEmpty()) throw new java.util.NoSuchElementException("Symbol table underflow error");
        delete(min());
    }

    /** Deletes the maximum key and its associated value. */
    public void deleteMax() {
        if (isEmpty()) throw new java.util.NoSuchElementException("Symbol table underflow error");
        delete(max());
    }

    /*****************************************************************************
     *  Ordered symbol table methods
     *****************************************************************************/

    /** Returns the smallest key, or null if the table is empty. */
    public Key min() {
        if (isEmpty()) return null;
        return keys[0];
    }

    /** Returns the largest key, or null if the table is empty. */
    public Key max() {
        if (isEmpty()) return null;
        return keys[N-1];
    }

    /** Returns the k-th smallest key (0-based), or null if k is out of range. */
    public Key select(int k) {
        if (k < 0 || k >= N) return null;
        return keys[k];
    }

    /** Returns the largest key less than or equal to the given key, or null if none. */
    public Key floor(Key key) {
        int i = rank(key);
        if (i < N && key.compareTo(keys[i]) == 0) return keys[i];
        if (i == 0) return null;
        else return keys[i-1];
    }

    /** Returns the smallest key greater than or equal to the given key, or null if none. */
    public Key ceiling(Key key) {
        int i = rank(key);
        if (i == N) return null;
        else return keys[i];
    }

    /** Returns the number of keys in the closed range [lo, hi]. */
    public int size(Key lo, Key hi) {
        if (lo.compareTo(hi) > 0) return 0;
        if (contains(hi)) return rank(hi) - rank(lo) + 1;
        else return rank(hi) - rank(lo);
    }

    /** Returns all keys in ascending order. */
    public Iterable<Key> keys() {
        return keys(min(), max());
    }

    /** Returns all keys in the closed range [lo, hi], in ascending order. */
    public Iterable<Key> keys(Key lo, Key hi) {
        Queue<Key> queue = new Queue<Key>();
        if (lo == null && hi == null) return queue;
        if (lo == null) throw new RuntimeException("lo is null in keys()");
        if (hi == null) throw new RuntimeException("hi is null in keys()");
        if (lo.compareTo(hi) > 0) return queue;
        // rank(hi) is loop-invariant; hoist it instead of repeating the
        // O(log N) binary search on every iteration.
        int hiRank = rank(hi);
        for (int i = rank(lo); i < hiRank; i++)
            queue.enqueue(keys[i]);
        if (contains(hi)) queue.enqueue(keys[hiRank]);
        return queue;
    }

    /*****************************************************************************
     *  Check internal invariants
     *****************************************************************************/
    private boolean check() {
        return isSorted() && rankCheck();
    }

    // are the items in the array in ascending order?
    private boolean isSorted() {
        for (int i = 1; i < size(); i++)
            if (keys[i].compareTo(keys[i-1]) < 0) return false;
        return true;
    }

    // check that rank(select(i)) == i and that select(rank(key)) round-trips
    private boolean rankCheck() {
        for (int i = 0; i < size(); i++)
            if (i != rank(select(i))) return false;
        for (int i = 0; i < size(); i++)
            if (keys[i].compareTo(select(rank(keys[i]))) != 0) return false;
        return true;
    }

    /*****************************************************************************
     *  Test client
     *****************************************************************************/
    public static void main(String[] args) {
        BinarySearchST<String, Integer> st = new BinarySearchST<String, Integer>();
        for (int i = 0; !StdIn.isEmpty(); i++) {
            String key = StdIn.readString();
            st.put(key, i);
        }
        for (String s : st.keys())
            StdOut.println(s + " " + st.get(s));
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.snapshots;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.Version;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.*;
import org.elasticsearch.cluster.metadata.RestoreMetaData.ShardRestoreStatus;
import org.elasticsearch.cluster.routing.RestoreSource;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.repositories.RepositoriesService;
import org.elasticsearch.repositories.Repository;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Maps.newHashMap;
import static org.elasticsearch.cluster.metadata.MetaDataIndexStateService.INDEX_CLOSED_BLOCK;
/**
* Service responsible for restoring snapshots
* <p/>
* Restore operation is performed in several stages.
* <p/>
* First {@link #restoreSnapshot(RestoreRequest, RestoreSnapshotListener)}
* method reads information about snapshot and metadata from repository. In update cluster state task it checks restore
* preconditions, restores global state if needed, creates {@link RestoreMetaData} record with list of shards that needs
* to be restored and adds this shard to the routing table using {@link RoutingTable.Builder#addAsRestore(IndexMetaData, RestoreSource)}
* method.
* <p/>
* Individual shards are getting restored as part of normal recovery process in
* {@link org.elasticsearch.index.gateway.IndexShardGatewayService#recover(boolean, org.elasticsearch.index.gateway.IndexShardGatewayService.RecoveryListener)}
* method, which detects that shard should be restored from snapshot rather than recovered from gateway by looking
* at the {@link org.elasticsearch.cluster.routing.ShardRouting#restoreSource()} property. If this property is not null
* {@code recover} method uses {@link org.elasticsearch.index.snapshots.IndexShardSnapshotAndRestoreService#restore(org.elasticsearch.index.gateway.RecoveryState)}
* method to start shard restore process.
* <p/>
* At the end of the successful restore process {@code IndexShardSnapshotAndRestoreService} calls {@link #indexShardRestoreCompleted(SnapshotId, ShardId)},
* which updates {@link RestoreMetaData} in cluster state or removes it when all shards are completed. In case of
* restore failure a normal recovery fail-over process kicks in.
*/
public class RestoreService extends AbstractComponent implements ClusterStateListener {
private final ClusterService clusterService;
private final RepositoriesService repositoriesService;
private final TransportService transportService;
private final AllocationService allocationService;
private final MetaDataCreateIndexService createIndexService;
private final CopyOnWriteArrayList<RestoreCompletionListener> listeners = new CopyOnWriteArrayList<>();
    /**
     * Constructs the restore service, registering it as the handler for shard
     * restore status updates and as a cluster state listener.
     */
    @Inject
    public RestoreService(Settings settings, ClusterService clusterService, RepositoriesService repositoriesService, TransportService transportService, AllocationService allocationService, MetaDataCreateIndexService createIndexService) {
        super(settings);
        this.clusterService = clusterService;
        this.repositoriesService = repositoriesService;
        this.transportService = transportService;
        this.allocationService = allocationService;
        this.createIndexService = createIndexService;
        // Shard-level restore status updates from data nodes arrive via this
        // transport action; the master applies them to the cluster state.
        transportService.registerHandler(UpdateRestoreStateRequestHandler.ACTION, new UpdateRestoreStateRequestHandler());
        // Listen for cluster state changes so in-progress restores can be
        // failed when their target indices are deleted (see clusterChanged).
        clusterService.add(this);
    }
    /**
     * Restores snapshot specified in the restore request.
     * <p>
     * Reads the snapshot and its metadata from the repository, then submits a
     * cluster state update task that registers the restore in
     * {@link RestoreMetaData}, adds the shards to be recovered to the routing
     * table and (optionally) restores global cluster state.
     *
     * @param request  restore request
     * @param listener restore listener
     */
    public void restoreSnapshot(final RestoreRequest request, final RestoreSnapshotListener listener) {
        try {
            // Read snapshot info and metadata from the repository
            Repository repository = repositoriesService.repository(request.repository());
            final SnapshotId snapshotId = new SnapshotId(request.repository(), request.name());
            final Snapshot snapshot = repository.readSnapshot(snapshotId);
            ImmutableList<String> filteredIndices = SnapshotUtils.filterIndices(snapshot.indices(), request.indices(), request.indicesOptions());
            final MetaData metaData = repository.readSnapshotMetaData(snapshotId, filteredIndices);
            // Make sure that we can restore from this snapshot
            if (!snapshot.state().restorable()) {
                throw new SnapshotRestoreException(snapshotId, "unsupported snapshot state [" + snapshot.state() + "]");
            }
            // A snapshot created by a newer version of Elasticsearch cannot be restored
            if (Version.CURRENT.before(snapshot.version())) {
                throw new SnapshotRestoreException(snapshotId, "incompatible snapshot version [" + snapshot.version() + "]");
            }
            // Find list of indices that we need to restore, applying the optional
            // rename pattern. The map goes renamed (target) name -> original (snapshot) name.
            final Map<String, String> renamedIndices = newHashMap();
            for (String index : filteredIndices) {
                String renamedIndex = index;
                if (request.renameReplacement() != null && request.renamePattern() != null) {
                    renamedIndex = index.replaceAll(request.renamePattern(), request.renameReplacement());
                }
                String previousIndex = renamedIndices.put(renamedIndex, index);
                if (previousIndex != null) {
                    // Two source indices mapping onto one target index would clobber each other
                    throw new SnapshotRestoreException(snapshotId, "indices [" + index + "] and [" + previousIndex + "] are renamed into the same index [" + renamedIndex + "]");
                }
            }
            // Now we can start the actual restore process by adding shards to be recovered in the cluster state
            // and updating cluster metadata (global and index) as needed
            clusterService.submitStateUpdateTask(request.cause(), new TimeoutClusterStateUpdateTask() {
                // Set only when there is nothing to restore; otherwise completion
                // is reported later via innerUpdateRestoreState()
                RestoreInfo restoreInfo = null;
                @Override
                public ClusterState execute(ClusterState currentState) {
                    // Check if another restore process is already running - cannot run two restore processes at the
                    // same time
                    RestoreMetaData restoreMetaData = currentState.metaData().custom(RestoreMetaData.TYPE);
                    if (restoreMetaData != null && !restoreMetaData.entries().isEmpty()) {
                        throw new ConcurrentSnapshotExecutionException(snapshotId, "Restore process is already running in this cluster");
                    }
                    // Updating cluster state
                    MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
                    ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
                    RoutingTable.Builder rtBuilder = RoutingTable.builder(currentState.routingTable());
                    if (!metaData.indices().isEmpty()) {
                        // We have some indices to restore
                        ImmutableMap.Builder<ShardId, RestoreMetaData.ShardRestoreStatus> shards = ImmutableMap.builder();
                        for (Map.Entry<String, String> indexEntry : renamedIndices.entrySet()) {
                            String index = indexEntry.getValue();
                            // Make sure that index was fully snapshotted - don't restore
                            if (failed(snapshot, index)) {
                                throw new SnapshotRestoreException(snapshotId, "index [" + index + "] wasn't fully snapshotted - cannot restore");
                            }
                            RestoreSource restoreSource = new RestoreSource(snapshotId, index);
                            String renamedIndex = indexEntry.getKey();
                            IndexMetaData snapshotIndexMetaData = metaData.index(index);
                            // Check that the index is closed or doesn't exist
                            IndexMetaData currentIndexMetaData = currentState.metaData().index(renamedIndex);
                            if (currentIndexMetaData == null) {
                                // Index doesn't exist - create it and start recovery
                                // Make sure that the index we are about to create has a validate name
                                createIndexService.validateIndexName(renamedIndex, currentState);
                                IndexMetaData.Builder indexMdBuilder = IndexMetaData.builder(snapshotIndexMetaData).state(IndexMetaData.State.OPEN).index(renamedIndex);
                                IndexMetaData updatedIndexMetaData = indexMdBuilder.build();
                                rtBuilder.addAsNewRestore(updatedIndexMetaData, restoreSource);
                                mdBuilder.put(updatedIndexMetaData, true);
                            } else {
                                // Index exist - checking that it's closed
                                if (currentIndexMetaData.state() != IndexMetaData.State.CLOSE) {
                                    // TODO: Enable restore for open indices
                                    throw new SnapshotRestoreException(snapshotId, "cannot restore index [" + renamedIndex + "] because it's open");
                                }
                                // Make sure that the number of shards is the same. That's the only thing that we cannot change
                                if (currentIndexMetaData.getNumberOfShards() != snapshotIndexMetaData.getNumberOfShards()) {
                                    throw new SnapshotRestoreException(snapshotId, "cannot restore index [" + renamedIndex + "] with [" + currentIndexMetaData.getNumberOfShards() +
                                            "] shard from snapshot with [" + snapshotIndexMetaData.getNumberOfShards() + "] shards");
                                }
                                // Index exists and it's closed - open it in metadata and start recovery
                                IndexMetaData.Builder indexMdBuilder = IndexMetaData.builder(snapshotIndexMetaData).state(IndexMetaData.State.OPEN);
                                // Bump the version so the restored metadata supersedes the existing closed index
                                indexMdBuilder.version(Math.max(snapshotIndexMetaData.version(), currentIndexMetaData.version() + 1));
                                IndexMetaData updatedIndexMetaData = indexMdBuilder.index(renamedIndex).build();
                                rtBuilder.addAsRestore(updatedIndexMetaData, restoreSource);
                                blocks.removeIndexBlock(renamedIndex, INDEX_CLOSED_BLOCK);
                                mdBuilder.put(updatedIndexMetaData, true);
                            }
                            // Seed an initial restore status for every shard of the index
                            for (int shard = 0; shard < snapshotIndexMetaData.getNumberOfShards(); shard++) {
                                shards.put(new ShardId(renamedIndex, shard), new RestoreMetaData.ShardRestoreStatus(clusterService.state().nodes().localNodeId()));
                            }
                        }
                        RestoreMetaData.Entry restoreEntry = new RestoreMetaData.Entry(snapshotId, RestoreMetaData.State.INIT, ImmutableList.copyOf(renamedIndices.keySet()), shards.build());
                        mdBuilder.putCustom(RestoreMetaData.TYPE, new RestoreMetaData(restoreEntry));
                    }
                    // Restore global state if needed
                    if (request.includeGlobalState()) {
                        if (metaData.persistentSettings() != null) {
                            mdBuilder.persistentSettings(metaData.persistentSettings());
                        }
                        if (metaData.templates() != null) {
                            // TODO: Should all existing templates be deleted first?
                            for (ObjectCursor<IndexTemplateMetaData> cursor : metaData.templates().values()) {
                                mdBuilder.put(cursor.value);
                            }
                        }
                        if (metaData.customs() != null) {
                            for (ObjectObjectCursor<String, MetaData.Custom> cursor : metaData.customs()) {
                                if (!RepositoriesMetaData.TYPE.equals(cursor.key)) {
                                    // Don't restore repositories while we are working with them
                                    // TODO: Should we restore them at the end?
                                    mdBuilder.putCustom(cursor.key, cursor.value);
                                }
                            }
                        }
                    }
                    if (metaData.indices().isEmpty()) {
                        // We don't have any indices to restore - we are done
                        restoreInfo = new RestoreInfo(request.name(), ImmutableList.<String>of(), 0, 0);
                    }
                    // Reroute so the newly added restore shards get allocated
                    ClusterState updatedState = ClusterState.builder(currentState).metaData(mdBuilder).blocks(blocks).routingTable(rtBuilder).build();
                    RoutingAllocation.Result routingResult = allocationService.reroute(ClusterState.builder(updatedState).routingTable(rtBuilder).build());
                    return ClusterState.builder(updatedState).routingResult(routingResult).build();
                }
                @Override
                public void onFailure(String source, Throwable t) {
                    logger.warn("[{}] failed to restore snapshot", t, snapshotId);
                    listener.onFailure(t);
                }
                @Override
                public TimeValue timeout() {
                    return request.masterNodeTimeout();
                }
                @Override
                public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                    // restoreInfo is non-null only for an empty restore; otherwise this
                    // response merely acknowledges that the restore has started
                    listener.onResponse(restoreInfo);
                }
            });
        } catch (Throwable e) {
            logger.warn("[{}][{}] failed to restore snapshot", e, request.repository(), request.name());
            listener.onFailure(e);
        }
    }
    /**
     * This method is used by {@link org.elasticsearch.index.snapshots.IndexShardSnapshotAndRestoreService} to notify
     * {@code RestoreService} about shard restore completion.
     *
     * @param snapshotId snapshot id
     * @param shardId    shard id
     */
    public void indexShardRestoreCompleted(SnapshotId snapshotId, ShardId shardId) {
        logger.trace("[{}] successfully restored shard [{}]", snapshotId, shardId);
        UpdateIndexShardRestoreStatusRequest request = new UpdateIndexShardRestoreStatusRequest(snapshotId, shardId,
                new ShardRestoreStatus(clusterService.state().nodes().localNodeId(), RestoreMetaData.State.SUCCESS));
        if (clusterService.state().nodes().localNodeMaster()) {
            // We are the master - apply the status update to the cluster state directly
            innerUpdateRestoreState(request);
        } else {
            // Forward the status update to the current master, which owns the cluster state
            transportService.sendRequest(clusterService.state().nodes().masterNode(),
                    UpdateRestoreStateRequestHandler.ACTION, request, EmptyTransportResponseHandler.INSTANCE_SAME);
        }
    }
    /**
     * Updates shard restore record in the cluster state.
     * <p>
     * Runs on the master. When the update makes every shard of the matching
     * restore entry complete, the entry is dropped from {@link RestoreMetaData}
     * and the registered {@link RestoreCompletionListener}s are notified.
     *
     * @param request update shard status request
     */
    private void innerUpdateRestoreState(final UpdateIndexShardRestoreStatusRequest request) {
        clusterService.submitStateUpdateTask("update snapshot state", new ProcessedClusterStateUpdateTask() {
            // True when all shards of the matching entry have reached a completed state
            private boolean completed = true;
            // Set when the restore finished; triggers listener notification afterwards
            private RestoreInfo restoreInfo = null;
            @Override
            public ClusterState execute(ClusterState currentState) {
                MetaData metaData = currentState.metaData();
                MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
                RestoreMetaData restore = metaData.custom(RestoreMetaData.TYPE);
                if (restore != null) {
                    boolean changed = false;
                    ArrayList<RestoreMetaData.Entry> entries = newArrayList();
                    for (RestoreMetaData.Entry entry : restore.entries()) {
                        if (entry.snapshotId().equals(request.snapshotId())) {
                            // Apply the shard status update to a mutable copy of the shard map
                            HashMap<ShardId, ShardRestoreStatus> shards = newHashMap(entry.shards());
                            logger.trace("[{}] Updating shard [{}] with status [{}]", request.snapshotId(), request.shardId(), request.status().state());
                            shards.put(request.shardId(), request.status());
                            for (RestoreMetaData.ShardRestoreStatus status : shards.values()) {
                                if (!status.state().completed()) {
                                    completed = false;
                                    break;
                                }
                            }
                            if (!completed) {
                                // Still in progress - keep the entry (now STARTED) with updated shards
                                entries.add(new RestoreMetaData.Entry(entry.snapshotId(), RestoreMetaData.State.STARTED, entry.indices(), ImmutableMap.copyOf(shards)));
                            } else {
                                // All shards are done - drop the entry and build the summary
                                logger.info("restore [{}] is done", request.snapshotId());
                                int failedShards = 0;
                                for (RestoreMetaData.ShardRestoreStatus status : shards.values()) {
                                    if (status.state() == RestoreMetaData.State.FAILURE) {
                                        failedShards++;
                                    }
                                }
                                restoreInfo = new RestoreInfo(entry.snapshotId().getSnapshot(), entry.indices(), shards.size(), shards.size() - failedShards);
                            }
                            changed = true;
                        } else {
                            // Unrelated restore entry - keep it as is
                            entries.add(entry);
                        }
                    }
                    if (changed) {
                        restore = new RestoreMetaData(entries.toArray(new RestoreMetaData.Entry[entries.size()]));
                        mdBuilder.putCustom(RestoreMetaData.TYPE, restore);
                        return ClusterState.builder(currentState).metaData(mdBuilder).build();
                    }
                }
                return currentState;
            }
            @Override
            public void onFailure(String source, Throwable t) {
                logger.warn("[{}][{}] failed to update snapshot status to [{}]", t, request.snapshotId(), request.shardId(), request.status());
            }
            @Override
            public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                if (restoreInfo != null) {
                    // Notify completion listeners once the updated state is applied
                    for (RestoreCompletionListener listener : listeners) {
                        try {
                            listener.onRestoreCompletion(request.snapshotId, restoreInfo);
                        } catch (Throwable e) {
                            logger.warn("failed to update snapshot status for [{}]", e, listener);
                        }
                    }
                }
            }
        });
    }
    /**
     * Checks if any of the deleted indices are still recovering and fails recovery on the shards of these indices
     *
     * @param event cluster changed event
     */
    private void processDeletedIndices(ClusterChangedEvent event) {
        MetaData metaData = event.state().metaData();
        RestoreMetaData restore = metaData.custom(RestoreMetaData.TYPE);
        if (restore == null) {
            // Not restoring - nothing to do
            return;
        }
        if (!event.indicesDeleted().isEmpty()) {
            // Some indices were deleted, let's make sure all indices that we are restoring still exist
            for (RestoreMetaData.Entry entry : restore.entries()) {
                // Lazily allocated - the common case is that nothing needs failing
                List<ShardId> shardsToFail = null;
                for (ImmutableMap.Entry<ShardId, ShardRestoreStatus> shard : entry.shards().entrySet()) {
                    if (!shard.getValue().state().completed()) {
                        if (!event.state().metaData().hasIndex(shard.getKey().getIndex())) {
                            // The shard's target index vanished while the restore was in flight
                            if (shardsToFail == null) {
                                shardsToFail = newArrayList();
                            }
                            shardsToFail.add(shard.getKey());
                        }
                    }
                }
                if (shardsToFail != null) {
                    for (ShardId shardId : shardsToFail) {
                        logger.trace("[{}] failing running shard restore [{}]", entry.snapshotId(), shardId);
                        innerUpdateRestoreState(new UpdateIndexShardRestoreStatusRequest(entry.snapshotId(), shardId, new ShardRestoreStatus(null, RestoreMetaData.State.FAILURE, "index was deleted")));
                    }
                }
            }
        }
    }
private boolean failed(Snapshot snapshot, String index) {
for (SnapshotShardFailure failure : snapshot.shardFailures()) {
if (index.equals(failure.index())) {
return true;
}
}
return false;
}
/**
 * @return true if the given snapshot recorded a failure for this specific shard of the index
 */
private boolean failed(Snapshot snapshot, String index, int shard) {
    for (SnapshotShardFailure shardFailure : snapshot.shardFailures()) {
        if (index.equals(shardFailure.index()) && shard == shardFailure.shardId()) {
            return true;
        }
    }
    return false;
}
/**
 * Adds a restore completion listener.
 * <p/>
 * The listener is called for every snapshot whose restore finishes in the cluster;
 * it is the listener's responsibility to filter for the snapshots it cares about.
 *
 * @param listener restore completion listener
 */
public void addListener(RestoreCompletionListener listener) {
    listeners.add(listener);
}
/**
 * Removes a previously registered restore completion listener.
 *
 * @param listener restore completion listener
 */
public void removeListener(RestoreCompletionListener listener) {
    listeners.remove(listener);
}
@Override
public void clusterChanged(ClusterChangedEvent event) {
    try {
        // Only the elected master reconciles restore state with index deletions.
        if (!event.localNodeMaster()) {
            return;
        }
        processDeletedIndices(event);
    } catch (Throwable t) {
        logger.warn("Failed to update restore state ", t);
    }
}
/**
 * Checks if a repository is currently in use by one of the snapshots.
 *
 * @param clusterState cluster state
 * @param repository   repository id
 * @return true if the repository is currently in use by a running restore
 */
public static boolean isRepositoryInUse(ClusterState clusterState, String repository) {
    RestoreMetaData restoreMetaData = clusterState.metaData().custom(RestoreMetaData.TYPE);
    if (restoreMetaData == null) {
        return false;
    }
    for (RestoreMetaData.Entry entry : restoreMetaData.entries()) {
        if (repository.equals(entry.snapshotId().getRepository())) {
            return true;
        }
    }
    return false;
}
/**
 * Restore snapshot request: a fluent parameter holder describing which snapshot
 * to restore, from where, and how.
 */
public static class RestoreRequest {
    private String cause;                 // why the restore was started (for logging/auditing)
    private String name;                  // snapshot name
    private String repository;            // repository the snapshot lives in
    private String[] indices;             // indices to restore
    private String renamePattern;         // pattern applied to index names on restore
    private String renameReplacement;     // replacement for renamePattern matches
    private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpen();
    private Settings settings;            // repository-specific restore settings
    private TimeValue masterNodeTimeout;  // timeout for starting the restore on the master
    private boolean includeGlobalState = false;

    /**
     * Constructs a new restore request.
     *
     * @param cause      cause for restoring the snapshot
     * @param repository repository name
     * @param name       snapshot name
     */
    public RestoreRequest(String cause, String repository, String name) {
        this.cause = cause;
        this.name = name;
        this.repository = repository;
    }

    /** Sets the list of indices to restore and returns this request. */
    public RestoreRequest indices(String[] indices) {
        this.indices = indices;
        return this;
    }

    /** Sets the indices option flags and returns this request. */
    public RestoreRequest indicesOptions(IndicesOptions indicesOptions) {
        this.indicesOptions = indicesOptions;
        return this;
    }

    /** If true, the global cluster state is restored as part of the restore operation. */
    public RestoreRequest includeGlobalState(boolean includeGlobalState) {
        this.includeGlobalState = includeGlobalState;
        return this;
    }

    /** Sets repository-specific restore settings and returns this request. */
    public RestoreRequest settings(Settings settings) {
        this.settings = settings;
        return this;
    }

    /**
     * Sets the master node timeout.
     * <p/>
     * This timeout only affects the start of the restore process; once the restore
     * has started it has no further effect on its duration.
     */
    public RestoreRequest masterNodeTimeout(TimeValue masterNodeTimeout) {
        this.masterNodeTimeout = masterNodeTimeout;
        return this;
    }

    /** Sets the index rename pattern and returns this request. */
    public RestoreRequest renamePattern(String renamePattern) {
        this.renamePattern = renamePattern;
        return this;
    }

    /** Sets the index rename replacement and returns this request. */
    public RestoreRequest renameReplacement(String renameReplacement) {
        this.renameReplacement = renameReplacement;
        return this;
    }

    /** @return the cause of the restore operation */
    public String cause() {
        return cause;
    }

    /** @return the snapshot name */
    public String name() {
        return name;
    }

    /** @return the repository name */
    public String repository() {
        return repository;
    }

    /** @return the list of indices to be restored */
    public String[] indices() {
        return indices;
    }

    /** @return the indices option flags */
    public IndicesOptions indicesOptions() {
        return indicesOptions;
    }

    /** @return the index rename pattern */
    public String renamePattern() {
        return renamePattern;
    }

    /** @return the index rename replacement */
    public String renameReplacement() {
        return renameReplacement;
    }

    /** @return repository-specific restore settings */
    public Settings settings() {
        return settings;
    }

    /** @return true if the global cluster state should be restored during this operation */
    public boolean includeGlobalState() {
        return includeGlobalState;
    }

    /** @return the master node timeout */
    public TimeValue masterNodeTimeout() {
        return masterNodeTimeout;
    }
}
/**
 * This listener is called as soon as restore operation starts in the cluster.
 * <p/>
 * To receive notifications about when operation ends in the cluster use {@link RestoreCompletionListener}
 */
public static interface RestoreSnapshotListener {
    /**
     * Called when restore operation successfully starts in the cluster. A non-null value of the
     * {@code restoreInfo} parameter means that the restore operation didn't involve any shards
     * and therefore has already completed.
     *
     * @param restoreInfo if restore operation finished, contains information about restore operation, null otherwise
     */
    void onResponse(RestoreInfo restoreInfo);

    /**
     * Called when restore operation failed to start
     *
     * @param t exception that prevented the restore operation from starting
     */
    void onFailure(Throwable t);
}
/**
 * This listener is called every time a snapshot restore completes in the cluster.
 * It receives every completion; implementations must filter for the snapshot they care about.
 */
public static interface RestoreCompletionListener {
    /**
     * Called for every snapshot restore that is completed in the cluster
     *
     * @param snapshotId  snapshot id
     * @param restoreInfo restore completion information
     */
    void onRestoreCompletion(SnapshotId snapshotId, RestoreInfo restoreInfo);
}
/**
 * Internal class that is used to send notifications about finished shard restore operations to master node
 */
private static class UpdateIndexShardRestoreStatusRequest extends TransportRequest {
    private SnapshotId snapshotId;
    private ShardId shardId;
    private ShardRestoreStatus status;

    // Deserialization constructor: fields are populated by readFrom().
    private UpdateIndexShardRestoreStatusRequest() {
    }

    private UpdateIndexShardRestoreStatusRequest(SnapshotId snapshotId, ShardId shardId, ShardRestoreStatus status) {
        this.snapshotId = snapshotId;
        this.shardId = shardId;
        this.status = status;
    }

    // Wire format: snapshotId, shardId, status — readFrom and writeTo must stay in lockstep.
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        snapshotId = SnapshotId.readSnapshotId(in);
        shardId = ShardId.readShardId(in);
        status = ShardRestoreStatus.readShardRestoreStatus(in);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        snapshotId.writeTo(out);
        shardId.writeTo(out);
        status.writeTo(out);
    }

    public SnapshotId snapshotId() {
        return snapshotId;
    }

    public ShardId shardId() {
        return shardId;
    }

    public ShardRestoreStatus status() {
        return status;
    }
}
/**
 * Transport handler that receives shard restore status updates from data nodes
 * and applies them to the cluster state on the master node.
 */
private class UpdateRestoreStateRequestHandler extends BaseTransportRequestHandler<UpdateIndexShardRestoreStatusRequest> {
    static final String ACTION = "cluster/snapshot/update_restore";

    @Override
    public UpdateIndexShardRestoreStatusRequest newInstance() {
        return new UpdateIndexShardRestoreStatusRequest();
    }

    @Override
    public void messageReceived(UpdateIndexShardRestoreStatusRequest request, final TransportChannel channel) throws Exception {
        innerUpdateRestoreState(request);
        // Ack with an empty response once the update has been submitted.
        channel.sendResponse(TransportResponse.Empty.INSTANCE);
    }

    @Override
    public String executor() {
        // Runs on the transport thread; the actual state update is handled elsewhere.
        return ThreadPool.Names.SAME;
    }
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.util.gotoByName;
import com.intellij.Patches;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.find.findUsages.PsiElement2UsageTargetAdapter;
import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.actions.CopyReferenceAction;
import com.intellij.ide.actions.GotoFileAction;
import com.intellij.ide.actions.WindowAction;
import com.intellij.ide.ui.UISettings;
import com.intellij.ide.ui.laf.darcula.ui.DarculaTextBorder;
import com.intellij.ide.ui.laf.darcula.ui.DarculaTextFieldUI;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.MnemonicHelper;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.fileTypes.UnknownFileType;
import com.intellij.openapi.fileTypes.ex.FileTypeManagerEx;
import com.intellij.openapi.keymap.Keymap;
import com.intellij.openapi.keymap.KeymapManager;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.progress.util.ProgressIndicatorUtils;
import com.intellij.openapi.progress.util.ReadTask;
import com.intellij.openapi.progress.util.TooManyUsagesStatus;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.*;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.psi.PsiElement;
import com.intellij.psi.codeStyle.MinusculeMatcher;
import com.intellij.psi.codeStyle.NameUtil;
import com.intellij.psi.statistics.StatisticsInfo;
import com.intellij.psi.statistics.StatisticsManager;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.*;
import com.intellij.ui.components.JBList;
import com.intellij.ui.popup.AbstractPopup;
import com.intellij.ui.popup.PopupOwner;
import com.intellij.ui.popup.PopupPositionManager;
import com.intellij.ui.popup.PopupUpdateProcessor;
import com.intellij.usageView.UsageInfo;
import com.intellij.usages.*;
import com.intellij.usages.impl.UsageViewManagerImpl;
import com.intellij.util.Alarm;
import com.intellij.util.Consumer;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.indexing.FileBasedIndex;
import com.intellij.util.text.Matcher;
import com.intellij.util.text.MatcherHolder;
import com.intellij.util.ui.AsyncProcessIcon;
import com.intellij.util.ui.EmptyIcon;
import com.intellij.util.ui.GraphicsUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.CompoundBorder;
import javax.swing.border.EmptyBorder;
import javax.swing.event.DocumentEvent;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.text.AttributeSet;
import javax.swing.text.BadLocationException;
import javax.swing.text.DefaultEditorKit;
import javax.swing.text.PlainDocument;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import java.util.List;
public abstract class ChooseByNameBase {
    private static final Logger LOG = Logger.getInstance("#com.intellij.ide.util.gotoByName.ChooseByNameBase");

    protected final Project myProject;
    protected final ChooseByNameModel myModel;
    protected ChooseByNameItemProvider myProvider;
    protected final String myInitialText;

    // Whether matching may start in the middle of a name (registry "ide.goto.middle.matching" AND model opt-in).
    private boolean mySearchInAnyPlace = false;

    protected Component myPreviouslyFocusedComponent;
    private boolean myInitialized;

    protected final JPanelProvider myTextFieldPanel = new JPanelProvider();// Located in the layered pane
    protected final MyTextField myTextField = new MyTextField();
    // Card stack shown next to the prompt: checkbox / "not found" / "searching" etc.
    private final CardLayout myCard = new CardLayout();
    private final JPanel myCardContainer = new JPanel(myCard);
    protected JCheckBox myCheckBox;
    /**
     * the tool area of the popup, it is just after card box
     */
    private JComponent myToolArea;

    protected JScrollPane myListScrollPane; // Located in the layered pane
    private final MyListModel<Object> myListModel = new MyListModel<Object>();
    protected final JList myList = new JBList(myListModel);
    // (pattern, caret) pairs; presumably back/forward navigation state — TODO confirm against usages.
    private final List<Pair<String, Integer>> myHistory = ContainerUtil.newArrayList();
    private final List<Pair<String, Integer>> myFuture = ContainerUtil.newArrayList();

    protected ChooseByNamePopupComponent.Callback myActionListener;
    protected final Alarm myAlarm = new Alarm();
    private final ListUpdater myListUpdater = new ListUpdater();
    private boolean myDisposedFlag = false;

    // "OK" action deferred until the background element calculation finishes (see postponeCloseWhenListReady).
    private ActionCallback myPostponedOkAction;

    // Cached name arrays indexed by checkbox state (0 = unchecked, 1 = checked); the array is its own lock.
    private final String[][] myNames = new String[2][];
    private volatile CalcElementsThread myCalcElementsThread;

    private static int VISIBLE_LIST_SIZE_LIMIT = 10;
    private int myListSizeIncreasing = 30;    // how many more items to show when "..." is expanded
    private int myMaximumListSizeLimit = 30;  // current cap on displayed results

    // Card ids for myCardContainer.
    @NonNls private static final String NOT_FOUND_IN_PROJECT_CARD = "syslib";
    @NonNls private static final String NOT_FOUND_CARD = "nfound";
    @NonNls private static final String CHECK_BOX_CARD = "chkbox";
    @NonNls private static final String SEARCHING_CARD = "searching";

    private final int myRebuildDelay;  // debounce (ms) before rebuilding the list after typing
    private final Alarm myHideAlarm = new Alarm();
    private boolean myShowListAfterCompletionKeyStroke = false;

    protected JBPopup myTextPopup;
    protected JBPopup myDropdownPopup;

    private boolean myClosedByShiftEnter = false;
    protected final int myInitialIndex;
    private String myFindUsagesTitle;
    private ShortcutSet myCheckBoxShortcut;
    protected boolean myInitIsDone;
    static final boolean ourLoadNamesEachTime = FileBasedIndex.ourEnableTracingOfKeyHashToVirtualFileMapping;
    private boolean myFixLostTyping = true;
    private boolean myAlwaysHasMore = false;
public boolean checkDisposed() {
    // If we were disposed while an OK action was still pending, reject it so
    // any type-ahead waiting on it is released.
    boolean disposed = myDisposedFlag;
    if (disposed && myPostponedOkAction != null && !myPostponedOkAction.isProcessed()) {
        myPostponedOkAction.setRejected();
    }
    return disposed;
}
public void setDisposed(boolean disposedFlag) {
    myDisposedFlag = disposedFlag;
    if (!disposedFlag) {
        return;
    }
    // Drop the cached name arrays for both checkbox states so they can be GCed.
    setNamesSync(true, null);
    setNamesSync(false, null);
}
private void setNamesSync(boolean checkboxState, @Nullable String[] value) {
    // myNames doubles as its own lock; slot 1 holds the checked-state names.
    synchronized (myNames) {
        int slot = checkboxState ? 1 : 0;
        myNames[slot] = value;
    }
}
/**
 * @param initialText initial text which will be in the lookup text field
 */
protected ChooseByNameBase(Project project, @NotNull ChooseByNameModel model, String initialText, PsiElement context) {
    this(project, model, new DefaultChooseByNameItemProvider(context), initialText, 0);
}

@SuppressWarnings("UnusedDeclaration") // Used in MPS
protected ChooseByNameBase(Project project,
                           @NotNull ChooseByNameModel model,
                           @NotNull ChooseByNameItemProvider provider,
                           String initialText) {
    this(project, model, provider, initialText, 0);
}

/**
 * @param initialText  initial text which will be in the lookup text field
 * @param initialIndex index of the list element to preselect on the first rebuild
 */
protected ChooseByNameBase(Project project,
                           @NotNull ChooseByNameModel model,
                           @NotNull ChooseByNameItemProvider provider,
                           String initialText,
                           final int initialIndex) {
    myProject = project;
    myModel = model;
    myInitialText = initialText;
    myProvider = provider;
    myInitialIndex = initialIndex;
    // Middle matching requires both the registry flag and the model's opt-in.
    mySearchInAnyPlace = Registry.is("ide.goto.middle.matching") && model.useMiddleMatching();
    myRebuildDelay = Registry.intValue("ide.goto.rebuild.delay");
    myTextField.setText(myInitialText);
    myInitIsDone = true;
}
// When true, the list is shown after a completion keystroke.
public void setShowListAfterCompletionKeyStroke(boolean showListAfterCompletionKeyStroke) {
    myShowListAfterCompletionKeyStroke = showListAfterCompletionKeyStroke;
}

/** @return true if matching may start anywhere inside a name, not only at its beginning */
public boolean isSearchInAnyPlace() {
    return mySearchInAnyPlace;
}

public void setSearchInAnyPlace(boolean searchInAnyPlace) {
    mySearchInAnyPlace = searchInAnyPlace;
}

/** @return true if the popup was closed with Shift+Enter */
public boolean isClosedByShiftEnter() {
    return myClosedByShiftEnter;
}

/** Shift+Enter close is interpreted as "open in the current window". */
public boolean isOpenInCurrentWindowRequested() {
    return isClosedByShiftEnter();
}
/**
 * Set tool area. The method may be called only before invoke.
 *
 * @param toolArea a tool area component
 */
public void setToolArea(JComponent toolArea) {
    if (myToolArea != null) {
        throw new IllegalStateException("Tool area is modifiable only before invoke()");
    }
    myToolArea = toolArea;
}

// Overrides the title used by the "show find usages" toolbar action, if set.
public void setFindUsagesTitle(@Nullable String findUsagesTitle) {
    myFindUsagesTitle = findUsagesTitle;
}

/** Builds the popup UI and wires the given callback; see {@link #initUI}. */
public void invoke(final ChooseByNamePopupComponent.Callback callback,
                   final ModalityState modalityState,
                   boolean allowMultipleSelection) {
    initUI(callback, modalityState, allowMultipleSelection);
}

@NotNull
public ChooseByNameModel getModel() {
    return myModel;
}
/**
 * Panel hosting the search field. Also acts as a {@link DataProvider} for actions
 * invoked while the popup is active, and tracks the auxiliary hint popup
 * (documentation/usages) shown next to the chooser.
 */
public class JPanelProvider extends JPanel implements DataProvider {
    private JBPopup myHint = null;            // currently registered hint popup, if any
    private boolean myFocusRequested = false; // set by requestFocus(), consumed by focusRequested()

    JPanelProvider() {
    }

    @Override
    public Object getData(String dataId) {
        if (PlatformDataKeys.HELP_ID.is(dataId)) {
            return myModel.getHelpId();
        }
        // While a background calculation is running the chosen element is unstable — expose nothing.
        if (myCalcElementsThread != null) {
            return null;
        }
        if (CommonDataKeys.PSI_ELEMENT.is(dataId)) {
            Object element = getChosenElement();
            if (element instanceof PsiElement) {
                return element;
            }
            // Non-PSI items may themselves know how to supply the PSI element.
            if (element instanceof DataProvider) {
                return ((DataProvider)element).getData(dataId);
            }
        }
        else if (LangDataKeys.PSI_ELEMENT_ARRAY.is(dataId)) {
            final List<Object> chosenElements = getChosenElements();
            if (chosenElements != null) {
                // Keep only the PSI elements among the (possibly mixed) selection.
                List<PsiElement> result = new ArrayList<PsiElement>(chosenElements.size());
                for (Object element : chosenElements) {
                    if (element instanceof PsiElement) {
                        result.add((PsiElement)element);
                    }
                }
                return PsiUtilCore.toPsiElementArray(result);
            }
        }
        else if (PlatformDataKeys.DOMINANT_HINT_AREA_RECTANGLE.is(dataId)) {
            return getBounds();
        }
        return null;
    }

    /** Registers a newly shown hint popup, cancelling any previous visible one. */
    public void registerHint(JBPopup h) {
        if (myHint != null && myHint.isVisible() && myHint != h) {
            myHint.cancel();
        }
        myHint = h;
    }

    /** @return whether {@link #requestFocus()} was called since the last query; resets the flag. */
    public boolean focusRequested() {
        boolean focusRequested = myFocusRequested;
        myFocusRequested = false;
        return focusRequested;
    }

    @Override
    public void requestFocus() {
        // Intentionally does not transfer focus; only records that focus was requested
        // so hideHint() can tell an internal focus change from a real focus loss.
        myFocusRequested = true;
    }

    public void unregisterHint() {
        myHint = null;
    }

    public void hideHint() {
        if (myHint != null) {
            myHint.cancel();
        }
    }

    @Nullable
    public JBPopup getHint() {
        return myHint;
    }

    /** Pushes the given element into the visible hint popup, if it supports updates. */
    public void updateHint(PsiElement element) {
        if (myHint == null || !myHint.isVisible()) return;
        final PopupUpdateProcessor updateProcessor = myHint.getUserData(PopupUpdateProcessor.class);
        if (updateProcessor != null) {
            updateProcessor.updatePopup(element);
        }
    }

    /** Moves the visible hint popup to the best position relative to the chooser. */
    public void repositionHint() {
        if (myHint == null || !myHint.isVisible()) return;
        PopupPositionManager.positionPopupInBestPosition(myHint, null, null);
    }
}
/**
 * Builds the whole chooser UI (prompt, text field, cards, toolbar, result list)
 * and wires all listeners. Called once from {@link #invoke}.
 *
 * @param modalityState - if not null rebuilds list in given {@link ModalityState}
 */
protected void initUI(final ChooseByNamePopupComponent.Callback callback,
                      final ModalityState modalityState,
                      final boolean allowMultipleSelection) {
    myPreviouslyFocusedComponent = WindowManagerEx.getInstanceEx().getFocusedComponent(myProject);
    myActionListener = callback;
    myTextFieldPanel.setLayout(new BoxLayout(myTextFieldPanel, BoxLayout.Y_AXIS));

    // --- top row: prompt label on the left, cards + toolbar on the right ---
    final JPanel hBox = new JPanel();
    hBox.setLayout(new BoxLayout(hBox, BoxLayout.X_AXIS));
    JPanel caption2Tools = new JPanel(new BorderLayout());
    if (myModel.getPromptText() != null) {
        JLabel label = new JLabel(myModel.getPromptText());
        if (UIUtil.isUnderAquaLookAndFeel()) {
            label.setBorder(new CompoundBorder(new EmptyBorder(0, 9, 0, 0), label.getBorder()));
        }
        label.setFont(UIUtil.getLabelFont().deriveFont(Font.BOLD));
        caption2Tools.add(label, BorderLayout.WEST);
    }
    caption2Tools.add(hBox, BorderLayout.EAST);
    myCardContainer.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 4)); // space between checkbox and filter/show all in view buttons

    // --- checkbox card (label optionally includes the toggle shortcut text) ---
    final String checkBoxName = myModel.getCheckBoxName();
    myCheckBox = new JCheckBox(checkBoxName != null ? checkBoxName +
                                                      (myCheckBoxShortcut != null ? " (" +
                                                                                    KeymapUtil
                                                                                      .getShortcutsText(myCheckBoxShortcut.getShortcuts()) +
                                                                                    ")" : "") : "");
    myCheckBox.setAlignmentX(SwingConstants.RIGHT);
    if (!SystemInfo.isMac) {
        myCheckBox.setBorder(null);
    }
    myCheckBox.setSelected(myModel.loadInitialCheckBoxState());
    if (checkBoxName == null) {
        myCheckBox.setVisible(false);
    }

    // --- status cards: checkbox / "not in project" / "no matches" / "searching" ---
    addCard(myCheckBox, CHECK_BOX_CARD);
    addCard(new HintLabel(myModel.getNotInMessage()), NOT_FOUND_IN_PROJECT_CARD);
    addCard(new HintLabel(IdeBundle.message("label.choosebyname.no.matches.found")), NOT_FOUND_CARD);
    JPanel searching = new JPanel(new BorderLayout(5, 0));
    searching.add(new AsyncProcessIcon("searching"), BorderLayout.WEST);
    searching.add(new HintLabel(IdeBundle.message("label.choosebyname.searching")), BorderLayout.CENTER);
    addCard(searching, SEARCHING_CARD);
    myCard.show(myCardContainer, CHECK_BOX_CARD);
    if (isCheckboxVisible()) {
        hBox.add(myCardContainer);
    }

    // --- toolbar with the "show find usages" action; splits results into
    //     prefix matches and non-prefix matches at the NON_PREFIX_SEPARATOR marker ---
    final DefaultActionGroup group = new DefaultActionGroup();
    group.add(new ShowFindUsagesAction() {
        @Override
        public PsiElement[][] getElements() {
            final Object[] objects = myListModel.toArray();
            final List<PsiElement> prefixMatchElements = new ArrayList<PsiElement>(objects.length);
            final List<PsiElement> nonPrefixMatchElements = new ArrayList<PsiElement>(objects.length);
            List<PsiElement> curElements = prefixMatchElements;
            for (Object object : objects) {
                if (object instanceof PsiElement) {
                    curElements.add((PsiElement)object);
                }
                else if (object instanceof DataProvider) {
                    final PsiElement psi = CommonDataKeys.PSI_ELEMENT.getData((DataProvider)object);
                    if (psi != null) {
                        curElements.add(psi);
                    }
                }
                else if (object == NON_PREFIX_SEPARATOR) {
                    // Everything after the separator goes to the non-prefix bucket.
                    curElements = nonPrefixMatchElements;
                }
            }
            return new PsiElement[][]{PsiUtilCore.toPsiElementArray(prefixMatchElements),
              PsiUtilCore.toPsiElementArray(nonPrefixMatchElements)};
        }
    });
    final ActionToolbar actionToolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, group, true);
    actionToolbar.setLayoutPolicy(ActionToolbar.NOWRAP_LAYOUT_POLICY);
    final JComponent toolbarComponent = actionToolbar.getComponent();
    toolbarComponent.setBorder(null);
    if (myToolArea == null) {
        myToolArea = new JLabel(EmptyIcon.create(1, 24));
    }
    hBox.add(myToolArea);
    hBox.add(toolbarComponent);
    myTextFieldPanel.add(caption2Tools);

    // --- Ctrl+C: copy the selected text if any, otherwise copy a reference to the chosen element ---
    final ActionMap actionMap = new ActionMap();
    actionMap.setParent(myTextField.getActionMap());
    actionMap.put(DefaultEditorKit.copyAction, new AbstractAction() {
        @Override
        public void actionPerformed(@NotNull ActionEvent e) {
            if (myTextField.getSelectedText() != null) {
                actionMap.getParent().get(DefaultEditorKit.copyAction).actionPerformed(e);
                return;
            }
            final Object chosenElement = getChosenElement();
            if (chosenElement instanceof PsiElement) {
                CopyReferenceAction.doCopy((PsiElement)chosenElement, myProject);
            }
        }
    });
    myTextField.setActionMap(actionMap);
    myTextFieldPanel.add(myTextField);

    // --- use the editor font (smaller in presentation mode) for field and list ---
    EditorColorsScheme scheme = EditorColorsManager.getInstance().getGlobalScheme();
    boolean presentationMode = UISettings.getInstance().PRESENTATION_MODE;
    int size = presentationMode ? UISettings.getInstance().PRESENTATION_MODE_FONT_SIZE - 4 : scheme.getEditorFontSize();
    Font editorFont = new Font(scheme.getEditorFontName(), Font.PLAIN, size);
    myTextField.setFont(editorFont);

    // --- optional shortcut toggling the checkbox while the text field has focus ---
    if (checkBoxName != null) {
        if (myCheckBox != null && myCheckBoxShortcut != null) {
            new AnAction("change goto check box", null, null) {
                @Override
                public void actionPerformed(@NotNull AnActionEvent e) {
                    myCheckBox.setSelected(!myCheckBox.isSelected());
                }
            }.registerCustomShortcutSet(myCheckBoxShortcut, myTextField);
        }
    }

    // --- close the popup on focus loss, except for a set of whitelisted focus transfers ---
    if (isCloseByFocusLost()) {
        myTextField.addFocusListener(new FocusAdapter() {
            @Override
            public void focusLost(@NotNull final FocusEvent e) {
                cancelListUpdater(); // cancel thread as early as possible
                myHideAlarm.addRequest(new Runnable() {
                    @Override
                    public void run() {
                        JBPopup popup = JBPopupFactory.getInstance().getChildFocusedPopup(e.getComponent());
                        if (popup != null) {
                            // Focus went to a child popup: only hide once that popup closes with OK.
                            popup.addListener(new JBPopupListener.Adapter() {
                                @Override
                                public void onClosed(@NotNull LightweightWindowEvent event) {
                                    if (event.isOk()) {
                                        hideHint();
                                    }
                                }
                            });
                        }
                        else {
                            Component oppositeComponent = e.getOppositeComponent();
                            if (oppositeComponent == myCheckBox) {
                                IdeFocusManager.getInstance(myProject).requestFocus(myTextField, true);
                                return;
                            }
                            if (oppositeComponent != null && !(oppositeComponent instanceof JFrame) &&
                                myList.isShowing() &&
                                (oppositeComponent == myList || SwingUtilities.isDescendingFrom(myList, oppositeComponent))) {
                                IdeFocusManager.getInstance(myProject).requestFocus(myTextField, true);// Otherwise me may skip some KeyEvents
                                return;
                            }
                            if (oppositeComponent != null && myProject != null && !myProject.isDisposed()) {
                                ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(myProject);
                                ToolWindow toolWindow = toolWindowManager.getToolWindow(toolWindowManager.getActiveToolWindowId());
                                if (toolWindow != null) {
                                    JComponent toolWindowComponent = toolWindow.getComponent();
                                    if (SwingUtilities.isDescendingFrom(oppositeComponent, toolWindowComponent)) {
                                        return; // Allow toolwindows to gain focus (used by QuickDoc shown in a toolwindow)
                                    }
                                }
                            }
                            EventQueue queue = Toolkit.getDefaultToolkit().getSystemEventQueue();
                            if (queue instanceof IdeEventQueue) {
                                if (!((IdeEventQueue)queue).wasRootRecentlyClicked(oppositeComponent)) {
                                    // Focus loss without a recent click (e.g. programmatic): reclaim focus.
                                    Component root = SwingUtilities.getRoot(myTextField);
                                    if (root != null && root.isShowing()) {
                                        IdeFocusManager.getInstance(myProject).requestFocus(myTextField, true);
                                        return;
                                    }
                                }
                            }
                            hideHint();
                        }
                    }
                }, 5);
            }
        });
    }

    // --- checkbox toggles re-filter the list ---
    if (myCheckBox != null) {
        myCheckBox.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(@NotNull ItemEvent e) {
                rebuildList(false);
            }
        });
        myCheckBox.setFocusable(false);
    }

    // --- typing re-filters the list and drops any pending OK ---
    myTextField.getDocument().addDocumentListener(new DocumentAdapter() {
        @Override
        protected void textChanged(DocumentEvent e) {
            clearPostponedOkAction(false);
            rebuildList(false);
        }
    });

    // --- keyboard navigation in the result list from the text field ---
    final Set<KeyStroke> upShortcuts = getShortcuts(IdeActions.ACTION_EDITOR_MOVE_CARET_UP);
    final Set<KeyStroke> downShortcuts = getShortcuts(IdeActions.ACTION_EDITOR_MOVE_CARET_DOWN);
    myTextField.addKeyListener(new KeyAdapter() {
        @Override
        public void keyPressed(@NotNull KeyEvent e) {
            if (e.getKeyCode() == KeyEvent.VK_ENTER && (e.getModifiers() & InputEvent.SHIFT_MASK) != 0) {
                myClosedByShiftEnter = true;
                close(true);
            }
            if (!myListScrollPane.isVisible()) {
                return;
            }
            final int keyCode;
            // Add support for user-defined 'caret up/down' shortcuts.
            KeyStroke stroke = KeyStroke.getKeyStrokeForEvent(e);
            if (upShortcuts.contains(stroke)) {
                keyCode = KeyEvent.VK_UP;
            }
            else if (downShortcuts.contains(stroke)) {
                keyCode = KeyEvent.VK_DOWN;
            }
            else {
                keyCode = e.getKeyCode();
            }
            switch (keyCode) {
                case KeyEvent.VK_DOWN:
                    ListScrollingUtil.moveDown(myList, e.getModifiersEx());
                    break;
                case KeyEvent.VK_UP:
                    ListScrollingUtil.moveUp(myList, e.getModifiersEx());
                    break;
                case KeyEvent.VK_PAGE_UP:
                    ListScrollingUtil.movePageUp(myList);
                    break;
                case KeyEvent.VK_PAGE_DOWN:
                    ListScrollingUtil.movePageDown(myList);
                    break;
                case KeyEvent.VK_TAB:
                    close(true);
                    break;
                case KeyEvent.VK_ENTER:
                    // Enter on the "..." item expands the list instead of closing.
                    if (myList.getSelectedValue() == EXTRA_ELEM) {
                        myMaximumListSizeLimit += myListSizeIncreasing;
                        rebuildList(myList.getSelectedIndex(), myRebuildDelay, ModalityState.current(), null);
                        e.consume();
                    }
                    break;
            }
            // Never leave the selection on the separator row; step past it.
            if (myList.getSelectedValue() == NON_PREFIX_SEPARATOR) {
                if (keyCode == KeyEvent.VK_UP || keyCode == KeyEvent.VK_PAGE_UP) {
                    ListScrollingUtil.moveUp(myList, e.getModifiersEx());
                }
                else {
                    ListScrollingUtil.moveDown(myList, e.getModifiersEx());
                }
            }
        }
    });

    myTextField.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(@NotNull ActionEvent actionEvent) {
            doClose(true);
        }
    });

    // --- result list setup ---
    myList.setFocusable(false);
    myList.setSelectionMode(allowMultipleSelection ? ListSelectionModel.MULTIPLE_INTERVAL_SELECTION :
                            ListSelectionModel.SINGLE_SELECTION);
    new ClickListener() {
        @Override
        public boolean onClick(@NotNull MouseEvent e, int clickCount) {
            if (!myTextField.hasFocus()) {
                IdeFocusManager.getInstance(myProject).requestFocus(myTextField, true);
            }
            if (clickCount == 2) {
                int selectedIndex = myList.getSelectedIndex();
                Rectangle selectedCellBounds = myList.getCellBounds(selectedIndex, selectedIndex);
                if (selectedCellBounds != null && selectedCellBounds.contains(e.getPoint())) { // Otherwise it was reselected in the selection listener
                    if (myList.getSelectedValue() == EXTRA_ELEM) {
                        // Double-click on "..." expands the list.
                        myMaximumListSizeLimit += myListSizeIncreasing;
                        rebuildList(selectedIndex, myRebuildDelay, ModalityState.current(), null);
                    }
                    else {
                        doClose(true);
                    }
                }
                return true;
            }
            return false;
        }
    }.installOn(myList);
    myList.setCellRenderer(myModel.getListCellRenderer());
    myList.setFont(editorFont);
    myList.addListSelectionListener(new ListSelectionListener() {
        private int myPreviousSelectionIndex = 0;
        @Override
        public void valueChanged(@NotNull ListSelectionEvent e) {
            if (myList.getSelectedValue() != NON_PREFIX_SEPARATOR) {
                myPreviousSelectionIndex = myList.getSelectedIndex();
                chosenElementMightChange();
                updateDocumentation();
            }
            else if (allowMultipleSelection) {
                // Keep the separator unselectable by restoring the previous selection.
                myList.setSelectedIndex(myPreviousSelectionIndex);
            }
        }
    });

    myListScrollPane = ScrollPaneFactory.createScrollPane(myList);
    myListScrollPane.setViewportBorder(new EmptyBorder(0, 0, 0, 0));
    myTextFieldPanel.setBorder(new EmptyBorder(2, 2, 2, 2));
    showTextFieldPanel();
    myInitialized = true;
    if (modalityState != null) {
        rebuildList(myInitialIndex, 0, modalityState, null);
    }
}
// Wraps the component so it is right-aligned inside its card, then adds it
// to the card container under the given id.
private void addCard(JComponent comp, String cardId) {
    JPanel holder = new JPanel(new BorderLayout());
    holder.add(comp, BorderLayout.EAST);
    myCardContainer.add(holder, cardId);
}
// Shortcut that toggles the filter checkbox; consulted when the UI is built in initUI().
public void setCheckBoxShortcut(ShortcutSet shortcutSet) {
    myCheckBoxShortcut = shortcutSet;
}
/**
 * Collects the first keystroke of every keyboard shortcut bound to the given
 * action in the active keymap.
 */
@NotNull
private static Set<KeyStroke> getShortcuts(@NotNull String actionId) {
    Set<KeyStroke> strokes = new HashSet<KeyStroke>();
    Keymap keymap = KeymapManager.getInstance().getActiveKeymap();
    Shortcut[] shortcuts = keymap.getShortcuts(actionId);
    if (shortcuts == null) {
        return strokes;
    }
    for (Shortcut shortcut : shortcuts) {
        if (shortcut instanceof KeyboardShortcut) {
            strokes.add(((KeyboardShortcut)shortcut).getFirstKeyStroke());
        }
    }
    return strokes;
}
// Closes the chooser and its hint, unless the panel itself just requested focus
// (which indicates an internal focus transfer, not a real dismissal).
private void hideHint() {
    if (!myTextFieldPanel.focusRequested()) {
        doClose(false);
        myTextFieldPanel.hideHint();
    }
}

/**
 * Default rebuild list. It uses {@link #myRebuildDelay} and current modality state.
 */
public void rebuildList(boolean initial) {
    // TODO this method is public, because the chooser does not listen for the model.
    rebuildList(initial ? myInitialIndex : 0, myRebuildDelay, ModalityState.current(), null);
}
/**
 * Refreshes the documentation hint (when one is showing) for the currently chosen
 * element. Elements that are {@link DataProvider}s are asked for their underlying
 * PSI element first.
 */
private void updateDocumentation() {
  final JBPopup hint = myTextFieldPanel.getHint();
  final Object element = getChosenElement();
  if (hint == null) {
    return;
  }
  if (element instanceof PsiElement) {
    myTextFieldPanel.updateHint((PsiElement)element);
    return;
  }
  if (element instanceof DataProvider) {
    final Object data = ((DataProvider)element).getData(CommonDataKeys.PSI_ELEMENT.getName());
    if (data instanceof PsiElement) {
      myTextFieldPanel.updateHint((PsiElement)data);
    }
  }
}
/**
 * Hook for subclasses to pre-process the search pattern before matching.
 * The default implementation returns the pattern unchanged.
 */
public String transformPattern(String pattern) {
  return pattern;
}
/**
 * Closes the popup. The early-outs are order-sensitive: a disposed popup must not be
 * touched, a subclass may veto the close, and an OK-close may be postponed until the
 * element list has finished loading (see {@link #postponeCloseWhenListReady}).
 *
 * @param ok whether the popup is closed with a chosen element (OK) or cancelled
 */
protected void doClose(final boolean ok) {
  if (checkDisposed()) return;
  if (closeForbidden(ok)) return;
  if (postponeCloseWhenListReady(ok)) return;
  cancelListUpdater();
  close(ok);
  clearPostponedOkAction(ok);
  myListModel.clear();
}
/**
 * Hook allowing subclasses to veto closing the popup. The default never vetoes.
 */
protected boolean closeForbidden(boolean ok) {
  return false;
}
/**
 * Cancels the background element-calculation thread (if any) and all pending
 * list-update commands. Must be called on the EDT.
 */
protected void cancelListUpdater() {
  ApplicationManager.getApplication().assertIsDispatchThread();
  if (checkDisposed()) return;
  final CalcElementsThread calcElementsThread = myCalcElementsThread;
  if (calcElementsThread != null) {
    calcElementsThread.cancel();
    // Flush UI state as if the (cancelled) calculation had finished with no results.
    backgroundCalculationFinished(Collections.emptyList(), 0);
  }
  myListUpdater.cancelAll();
}
/**
 * If an OK-close arrives while elements are still being calculated, postpones the close
 * until the list is ready so the user's typed-ahead confirmation is not lost.
 *
 * @return {@code true} if the close was postponed and the caller should not proceed
 */
private boolean postponeCloseWhenListReady(boolean ok) {
  if (!isToFixLostTyping()) return false;
  final String text = getTrimmedText();
  if (ok && myCalcElementsThread != null && !text.isEmpty()) {
    myPostponedOkAction = new ActionCallback();
    // Buffer keystrokes until the postponed OK action completes (done or rejected).
    IdeFocusManager.getInstance(myProject).typeAheadUntil(myPostponedOkAction);
    return true;
  }
  return false;
}
/** Returns the search field's current text, never {@code null}, with surrounding whitespace removed. */
@NotNull public String getTrimmedText() {
  String raw = myTextField.getText();
  return StringUtil.notNullize(raw).trim();
}
/** Enables or disables the "fix lost typing" (type-ahead) behavior for this chooser. */
public void setFixLostTyping(boolean fixLostTyping) {
  this.myFixLostTyping = fixLostTyping;
}
/**
 * Whether lost-typing fixing is active: requires both the per-chooser flag and the
 * registry option. The flag is checked first, preserving the original short-circuit.
 */
protected boolean isToFixLostTyping() {
  if (!myFixLostTyping) {
    return false;
  }
  return Registry.is("actionSystem.fixLostTyping");
}
/**
 * Loads (and caches) the names for the given scope if not cached yet.
 * Synchronized so only one thread populates the cache at a time.
 *
 * @param checkboxState {@code true} for the "everywhere" scope, {@code false} for project-only
 */
@NotNull
private synchronized String[] ensureNamesLoaded(boolean checkboxState) {
  String[] cached = getNamesSync(checkboxState);
  if (cached != null) return cached;
  if (checkboxState &&
      myModel instanceof ContributorsBasedGotoByModel &&
      ((ContributorsBasedGotoByModel)myModel).sameNamesForProjectAndLibraries() &&
      getNamesSync(false) != null) {
    // there is no way in indices to have different keys for project symbols vs libraries, we always have same ones
    String[] allNames = getNamesSync(false);
    setNamesSync(true, allNames);
    return allNames;
  }
  String[] result = myModel.getNames(checkboxState);
  //noinspection ConstantConditions
  assert result != null : "Model "+myModel+ "("+myModel.getClass()+") returned null names";
  setNamesSync(checkboxState, result);
  return result;
}
/**
 * Returns the cached names for the given scope, loading them when necessary.
 * <p>
 * Fix: the method is declared {@code @NotNull} but previously returned
 * {@code getNamesSync(...)} directly, which is {@code null} until the names have been
 * loaded — violating the contract and risking an NPE in callers. We now fall back to
 * {@link #ensureNamesLoaded} when the cache is empty.
 */
@NotNull
public String[] getNames(boolean checkboxState) {
  if (ourLoadNamesEachTime) {
    setNamesSync(checkboxState, null);
    return ensureNamesLoaded(checkboxState);
  }
  String[] cached = getNamesSync(checkboxState);
  return cached != null ? cached : ensureNamesLoaded(checkboxState);
}
/**
 * Reads the cached names array for the given scope under the {@code myNames} lock.
 * Index 1 holds the "everywhere" names, index 0 the project-only ones.
 * May return {@code null} if the names have not been loaded yet.
 */
private String[] getNamesSync(boolean checkboxState) {
  synchronized (myNames) {
    return myNames[checkboxState ? 1 : 0];
  }
}
/**
 * Hook for subclasses to post-filter the calculated elements.
 * The default implementation returns them unchanged.
 */
@NotNull
protected Set<Object> filter(@NotNull Set<Object> elements) {
  return elements;
}
/** Whether the scope ("include non-project items") checkbox should be shown. */
protected abstract boolean isCheckboxVisible();
/** Whether the element list may be shown when the search pattern is empty. */
protected abstract boolean isShowListForEmptyPattern();
/** Whether the popup should close when it loses focus. */
protected abstract boolean isCloseByFocusLost();
/**
 * Creates and shows the popup containing the search text field panel, positioned in the
 * upper third of the suggested parent window's layered pane, and wires up its lifecycle
 * (cancel callback, disposal, daemon suspension).
 */
protected void showTextFieldPanel() {
  final JLayeredPane layeredPane = getLayeredPane();
  final Dimension preferredTextFieldPanelSize = myTextFieldPanel.getPreferredSize();
  final int x = (layeredPane.getWidth() - preferredTextFieldPanelSize.width) / 2;
  final int paneHeight = layeredPane.getHeight();
  final int y = paneHeight / 3 - preferredTextFieldPanelSize.height / 2;
  // Fit as many list rows as the remaining vertical space allows, but at least 10.
  VISIBLE_LIST_SIZE_LIMIT = Math.max
    (10, (paneHeight - (y + preferredTextFieldPanelSize.height)) / (preferredTextFieldPanelSize.height / 2) - 1);
  ComponentPopupBuilder builder = JBPopupFactory.getInstance().createComponentPopupBuilder(myTextFieldPanel, myTextField);
  builder.setLocateWithinScreenBounds(false);
  builder.setCancelCallback(new Computable<Boolean>() {
    @Override
    public Boolean compute() {
      // Popup cancelled externally: drop the reference and close as "not OK".
      myTextPopup = null;
      close(false);
      return Boolean.TRUE;
    }
  }).setFocusable(true).setRequestFocus(true).setModalContext(false).setCancelOnClickOutside(false);
  Point point = new Point(x, y);
  SwingUtilities.convertPointToScreen(point, layeredPane);
  Rectangle bounds = new Rectangle(point, new Dimension(preferredTextFieldPanelSize.width + 20, preferredTextFieldPanelSize.height));
  myTextPopup = builder.createPopup();
  myTextPopup.setSize(bounds.getSize());
  myTextPopup.setLocation(bounds.getLocation());
  MnemonicHelper.init(myTextFieldPanel);
  if (myProject != null && !myProject.isDefault()) {
    // Suspend timed background highlighting passes while the popup is showing.
    DaemonCodeAnalyzer.getInstance(myProject).disableUpdateByTimer(myTextPopup);
  }
  Disposer.register(myTextPopup, new Disposable() {
    @Override
    public void dispose() {
      cancelListUpdater();
    }
  });
  myTextPopup.show(layeredPane);
  if (myTextPopup instanceof AbstractPopup) {
    Window window = ((AbstractPopup)myTextPopup).getPopupWindow();
    if (window instanceof JDialog) {
      // Keep window-management actions from acting on the popup's dialog.
      ((JDialog)window).getRootPane().putClientProperty(WindowAction.NO_WINDOW_ACTIONS, Boolean.TRUE);
    }
  }
}
/**
 * Finds the layered pane of the frame or dialog that should host the popup.
 *
 * @throws IllegalStateException if no suitable parent window can be found
 */
private JLayeredPane getLayeredPane() {
  final Window window = WindowManager.getInstance().suggestParentWindow(myProject);
  Component parent = UIUtil.findUltimateParent(window);
  if (parent instanceof JFrame) {
    return ((JFrame)parent).getLayeredPane();
  }
  if (parent instanceof JDialog) {
    return ((JDialog)parent).getLayeredPane();
  }
  throw new IllegalStateException("cannot find parent window: project=" + myProject +
                                  (myProject != null ? "; open=" + myProject.isOpen() : "") +
                                  "; window=" + window);
}
/**
 * Rebuilds the element list for the current pattern.
 *
 * @param pos           index to select after rebuilding; {@code <= 0} picks the statistically best match
 * @param delay         milliseconds to wait before rebuilding (debounce); {@code 0} rebuilds immediately
 * @param modalityState modality state in which the background result is delivered
 * @param postRunnable  optional action to run on the EDT after the list is updated
 */
protected void rebuildList(final int pos,
                           final int delay,
                           @NotNull final ModalityState modalityState,
                           @Nullable final Runnable postRunnable) {
  ApplicationManager.getApplication().assertIsDispatchThread();
  if (!myInitialized) {
    return;
  }
  myAlarm.cancelAllRequests();
  if (delay > 0) {
    // Debounce: re-enter with zero delay once the timeout elapses.
    myAlarm.addRequest(new Runnable() {
      @Override
      public void run() {
        rebuildList(pos, 0, modalityState, postRunnable);
      }
    }, delay, ModalityState.stateForComponent(myTextField));
    return;
  }
  myListUpdater.cancelAll();
  final CalcElementsThread calcElementsThread = myCalcElementsThread;
  if (calcElementsThread != null) {
    calcElementsThread.cancel();
  }
  final String text = getTrimmedText();
  if (!canShowListForEmptyPattern() && text.isEmpty()) {
    // Empty pattern and not allowed to show anything: reset the UI to the checkbox card.
    myListModel.clear();
    hideList();
    myTextFieldPanel.hideHint();
    myCard.show(myCardContainer, CHECK_BOX_CARD);
    return;
  }
  // Push the current pattern matcher into the cell renderer so matched fragments get highlighted.
  ListCellRenderer cellRenderer = myList.getCellRenderer();
  if (cellRenderer instanceof ExpandedItemListCellRendererWrapper) {
    cellRenderer = ((ExpandedItemListCellRendererWrapper)cellRenderer).getWrappee();
  }
  if (cellRenderer instanceof MatcherHolder) {
    final String pattern = transformPattern(text);
    final Matcher matcher = buildPatternMatcher(isSearchInAnyPlace() ? "*" + pattern : pattern);
    ((MatcherHolder)cellRenderer).setPatternMatcher(matcher);
  }
  scheduleCalcElements(text, myCheckBox.isSelected(), modalityState, new Consumer<Set<?>>() {
    @Override
    public void consume(Set<?> elements) {
      ApplicationManager.getApplication().assertIsDispatchThread();
      backgroundCalculationFinished(elements, pos);
      if (postRunnable != null) {
        postRunnable.run();
      }
    }
  });
}
/**
 * Applies the result of a finished (or cancelled) background calculation to the list.
 * Runs on the EDT.
 */
private void backgroundCalculationFinished(Collection<?> result, int toSelect) {
  myCalcElementsThread = null;
  setElementsToList(toSelect, result);
  myList.repaint();
  chosenElementMightChange();
  if (result.isEmpty()) {
    myTextFieldPanel.hideHint();
  }
}
/**
 * Starts a background computation of the elements matching {@code text} and reports
 * the result to {@code callback} on the EDT in the given modality state.
 */
public void scheduleCalcElements(String text,
                                 boolean checkboxState,
                                 ModalityState modalityState,
                                 Consumer<Set<?>> callback) {
  new CalcElementsThread(text, checkboxState, callback, modalityState, false).scheduleThread();
}
/** Whether the list may be shown right after a completion keystroke even for an empty pattern. */
private boolean isShowListAfterCompletionKeyStroke() {
  return myShowListAfterCompletionKeyStroke;
}
/**
 * Replaces the list contents with {@code elements}, applying an incremental diff where
 * possible to avoid flicker, and selects position {@code pos}.
 */
private void setElementsToList(int pos, @NotNull Collection<?> elements) {
  myListUpdater.cancelAll();
  if (checkDisposed()) return;
  if (elements.isEmpty()) {
    myListModel.clear();
    // Red text signals "nothing found" for the current pattern.
    myTextField.setForeground(JBColor.red);
    myListUpdater.cancelAll();
    hideList();
    clearPostponedOkAction(false);
    return;
  }
  Object[] oldElements = myListModel.toArray();
  Object[] newElements = elements.toArray();
  List<ModelDiff.Cmd> commands = ModelDiff.createDiffCmds(myListModel, oldElements, newElements);
  if (commands == null) {
    myListUpdater.doPostponedOkIfNeeded();
    return; // Nothing changed
  }
  myTextField.setForeground(UIUtil.getTextFieldForeground());
  if (commands.isEmpty()) {
    // Model already up to date; only fix the selection and the visible row count.
    if (pos <= 0) {
      pos = detectBestStatisticalPosition();
    }
    ListScrollingUtil.selectItem(myList, Math.min(pos, myListModel.size() - 1));
    myList.setVisibleRowCount(Math.min(VISIBLE_LIST_SIZE_LIMIT, myList.getModel().getSize()));
    showList();
    myTextFieldPanel.repositionHint();
  }
  else {
    showList();
    // Apply the diff incrementally on the EDT in small time slices (see ListUpdater).
    myListUpdater.appendToModel(commands, pos);
  }
}
/**
 * Picks the list index to pre-select: the element with the best matching degree,
 * ties broken by historical usage statistics. Returns 0 when the model defines its
 * own ordering (implements {@link Comparator}).
 */
private int detectBestStatisticalPosition() {
  if (myModel instanceof Comparator) {
    return 0;
  }
  int best = 0;
  int bestPosition = 0;
  int bestMatch = Integer.MIN_VALUE;
  final int count = myListModel.getSize();
  Matcher matcher = buildPatternMatcher(transformPattern(getTrimmedText()));
  final String statContext = statisticsContext();
  for (int i = 0; i < count; i++) {
    final Object modelElement = myListModel.getElementAt(i);
    // Sentinel rows ("..." and the non-prefix separator) are never candidates.
    String text = EXTRA_ELEM.equals(modelElement) || NON_PREFIX_SEPARATOR.equals(modelElement) ? null : myModel.getFullName(modelElement);
    if (text != null) {
      String shortName = myModel.getElementName(modelElement);
      int match = shortName != null && matcher instanceof MinusculeMatcher
                  ? ((MinusculeMatcher)matcher).matchingDegree(shortName) : Integer.MIN_VALUE;
      int stats = StatisticsManager.getInstance().getUseCount(new StatisticsInfo(statContext, text));
      if (match > bestMatch || match == bestMatch && stats > best) {
        best = stats;
        bestPosition = i;
        bestMatch = match;
      }
    }
  }
  // Never land on the separator row itself; prefer the element right after it.
  if (bestPosition < count - 1 && myListModel.getElementAt(bestPosition) == NON_PREFIX_SEPARATOR) {
    bestPosition++;
  }
  return bestPosition;
}
/**
 * Key prefix under which usage statistics for this chooser state are recorded;
 * includes the prompt, the scope checkbox state, and the current pattern.
 */
@NotNull
@NonNls
protected String statisticsContext() {
  return "choose_by_name#" + myModel.getPromptText() + "#" + myCheckBox.isSelected() + "#" + getTrimmedText();
}
/**
 * List model supporting incremental diff-based updates (see {@link ModelDiff}).
 * Out-of-range insertions append; out-of-range removals are silently ignored.
 */
private static class MyListModel<T> extends DefaultListModel implements ModelDiff.Model<T> {
  @Override
  public void addToModel(int idx, T element) {
    if (idx >= size()) {
      addElement(element);
    }
    else {
      add(idx, element);
    }
  }
  @Override
  public void removeRangeFromModel(int start, int end) {
    if (size() == 0 || start >= size()) {
      return;
    }
    removeRange(start, Math.min(end, size() - 1));
  }
}
/**
 * Applies queued {@link ModelDiff.Cmd} list mutations on the EDT in small time slices
 * (at most {@code MAX_BLOCKING_TIME} ms per slice) so a large update does not freeze the UI.
 */
private class ListUpdater {
  private final Alarm myAlarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD);
  // Delay between slices, and the max time one slice may block the EDT, in ms.
  private static final int DELAY = 10;
  private static final int MAX_BLOCKING_TIME = 30;
  private final List<ModelDiff.Cmd> myCommands = Collections.synchronizedList(new ArrayList<ModelDiff.Cmd>());
  public void cancelAll() {
    myCommands.clear();
    myAlarm.cancelAllRequests();
  }
  public void appendToModel(@NotNull List<ModelDiff.Cmd> commands, final int selectionPos) {
    myAlarm.cancelAllRequests();
    myCommands.addAll(commands);
    if (myCommands.isEmpty() || checkDisposed()) {
      return;
    }
    myAlarm.addRequest(new Runnable() {
      @Override
      public void run() {
        if (checkDisposed()) {
          return;
        }
        // Apply as many commands as fit in one time slice, then reschedule for the rest.
        final long startTime = System.currentTimeMillis();
        while (!myCommands.isEmpty() && System.currentTimeMillis() - startTime < MAX_BLOCKING_TIME) {
          final ModelDiff.Cmd cmd = myCommands.remove(0);
          cmd.apply();
        }
        myList.setVisibleRowCount(Math.min(VISIBLE_LIST_SIZE_LIMIT, myList.getModel().getSize()));
        if (!myCommands.isEmpty()) {
          myAlarm.addRequest(this, DELAY);
        }
        else {
          doPostponedOkIfNeeded();
        }
        if (!checkDisposed()) {
          showList();
          myTextFieldPanel.repositionHint();
          if (!myListModel.isEmpty()) {
            int pos = selectionPos <= 0 ? detectBestStatisticalPosition() : selectionPos;
            ListScrollingUtil.selectItem(myList, Math.min(pos, myListModel.size() - 1));
          }
        }
      }
    }, DELAY);
  }
  private void doPostponedOkIfNeeded() {
    // If an OK-close was postponed while the list was loading, finish it now.
    if (myPostponedOkAction != null) {
      if (getChosenElement() != null) {
        doClose(true);
      }
      clearPostponedOkAction(checkDisposed());
    }
  }
}
/**
 * Completes (success) or rejects the postponed OK callback — releasing any pending
 * type-ahead — and then forgets it.
 */
private void clearPostponedOkAction(boolean success) {
  final ActionCallback callback = myPostponedOkAction;
  if (callback != null) {
    if (success) {
      callback.setDone();
    }
    else {
      callback.setRejected();
    }
  }
  myPostponedOkAction = null;
}
/** Whether an OK-close is currently postponed waiting for the list to finish loading. */
public boolean hasPostponedAction() {
  return myPostponedOkAction != null;
}
/** Makes the element list visible. */
protected abstract void showList();
/** Hides the element list. */
protected abstract void hideList();
/** Actually closes the popup; {@code isOk} tells whether an element was chosen. */
protected abstract void close(boolean isOk);
/**
 * Returns the single chosen element, or {@code null} when nothing — or more than one
 * element — is selected.
 */
@Nullable
public Object getChosenElement() {
  final List<Object> chosen = getChosenElements();
  if (chosen == null || chosen.size() != 1) {
    return null;
  }
  return chosen.get(0);
}
/**
 * Returns the selected list values, excluding the sentinel rows
 * ({@link #EXTRA_ELEM} and {@link #NON_PREFIX_SEPARATOR}).
 */
protected List<Object> getChosenElements() {
  final Condition<Object> isRealElement = new Condition<Object>() {
    @Override
    public boolean value(Object element) {
      return element != EXTRA_ELEM && element != NON_PREFIX_SEPARATOR;
    }
  };
  return ContainerUtil.filter(myList.getSelectedValues(), isRealElement);
}
/** Notification hook invoked when the chosen element may have changed; the default does nothing. */
protected void chosenElementMightChange() {
}
/**
 * The search text field. Handles the code-completion keystroke (fills in the common
 * prefix of matching names), back/forward history navigation, and paste detection
 * (multi-character document insertions are routed to {@link #handlePaste}).
 */
protected final class MyTextField extends JTextField implements PopupOwner, TypeSafeDataProvider {
  private final KeyStroke myCompletionKeyStroke;
  private final KeyStroke forwardStroke;
  private final KeyStroke backStroke;
  // Set once the user presses the completion shortcut; see lastKeyStrokeIsCompletion().
  private boolean completionKeyStrokeHappened = false;
  private MyTextField() {
    super(40);
    if (!UIUtil.isUnderGTKLookAndFeel()) {
      if (!(getUI() instanceof DarculaTextFieldUI)) {
        setUI(DarculaTextFieldUI.createUI(this));
      }
      setBorder(new DarculaTextBorder());
    }
    enableEvents(AWTEvent.KEY_EVENT_MASK);
    myCompletionKeyStroke = getShortcut(IdeActions.ACTION_CODE_COMPLETION);
    forwardStroke = getShortcut(IdeActions.ACTION_GOTO_FORWARD);
    backStroke = getShortcut(IdeActions.ACTION_GOTO_BACK);
    setFocusTraversalKeysEnabled(false);
    putClientProperty("JTextField.variant", "search");
    setDocument(new PlainDocument() {
      @Override
      public void insertString(int offs, String str, AttributeSet a) throws BadLocationException {
        super.insertString(offs, str, a);
        // A multi-character insertion is treated as a paste.
        if (str != null && str.length() > 1) {
          handlePaste(str);
        }
      }
    });
  }
  /** Returns the first keyboard stroke bound to the given action, or {@code null} if none. */
  @Nullable
  private KeyStroke getShortcut(String actionCodeCompletion) {
    final Shortcut[] shortcuts = KeymapManager.getInstance().getActiveKeymap().getShortcuts(actionCodeCompletion);
    for (final Shortcut shortcut : shortcuts) {
      if (shortcut instanceof KeyboardShortcut) {
        return ((KeyboardShortcut)shortcut).getFirstKeyStroke();
      }
    }
    return null;
  }
  @Override
  public void calcData(final DataKey key, @NotNull final DataSink sink) {
    // Expose the visible popups so position-adjusting actions can find them.
    if (LangDataKeys.POSITION_ADJUSTER_POPUP.equals(key)) {
      if (myDropdownPopup != null && myDropdownPopup.isVisible()) {
        sink.put(key, myDropdownPopup);
      }
    }
    else if (LangDataKeys.PARENT_POPUP.equals(key)) {
      if (myTextPopup != null && myTextPopup.isVisible()) {
        sink.put(key, myTextPopup);
      }
    }
  }
  @Override
  protected void processKeyEvent(@NotNull KeyEvent e) {
    final KeyStroke keyStroke = KeyStroke.getKeyStrokeForEvent(e);
    // Completion shortcut: remember the state, rebuild, then fill in the common prefix.
    if (myCompletionKeyStroke != null && keyStroke.equals(myCompletionKeyStroke)) {
      completionKeyStrokeHappened = true;
      e.consume();
      final String pattern = getTrimmedText();
      final int oldPos = myList.getSelectedIndex();
      myHistory.add(Pair.create(pattern, oldPos));
      final Runnable postRunnable = new Runnable() {
        @Override
        public void run() {
          fillInCommonPrefix(pattern);
        }
      };
      rebuildList(0, 0, ModalityState.current(), postRunnable);
      return;
    }
    // "Back": pop the previous pattern from history, push the current one onto future.
    if (backStroke != null && keyStroke.equals(backStroke)) {
      e.consume();
      if (!myHistory.isEmpty()) {
        final String oldText = getTrimmedText();
        final int oldPos = myList.getSelectedIndex();
        final Pair<String, Integer> last = myHistory.remove(myHistory.size() - 1);
        myTextField.setText(last.first);
        myFuture.add(Pair.create(oldText, oldPos));
        rebuildList(0, 0, ModalityState.current(), null);
      }
      return;
    }
    // "Forward": the mirror operation of "Back".
    if (forwardStroke != null && keyStroke.equals(forwardStroke)) {
      e.consume();
      if (!myFuture.isEmpty()) {
        final String oldText = getTrimmedText();
        final int oldPos = myList.getSelectedIndex();
        final Pair<String, Integer> next = myFuture.remove(myFuture.size() - 1);
        myTextField.setText(next.first);
        myHistory.add(Pair.create(oldText, oldPos));
        rebuildList(0, 0, ModalityState.current(), null);
      }
      return;
    }
    int position = myTextField.getCaretPosition();
    int code = keyStroke.getKeyCode();
    int modifiers = keyStroke.getModifiers();
    try {
      super.processKeyEvent(e);
    }
    catch (NullPointerException e1) {
      // Swallow only the known JDK bug; rethrow on JDKs where it is fixed.
      if (!Patches.SUN_BUG_ID_6322854) {
        throw e1;
      }
    }
    finally {
      // Up/Down moves the list selection; keep the caret where it was in the text field.
      if ((code == KeyEvent.VK_UP || code == KeyEvent.VK_DOWN) && modifiers == 0) {
        myTextField.setCaretPosition(position);
      }
    }
  }
  /**
   * Replaces the pattern with the longest common prefix of all matching names,
   * recording the old pattern in the navigation history.
   * NOTE(review): uses locale-sensitive String.toLowerCase(); behavior in locales with
   * non-trivial case mapping (e.g. Turkish) is unverified here.
   */
  private void fillInCommonPrefix(@NotNull final String pattern) {
    if (StringUtil.isEmpty(pattern) && !canShowListForEmptyPattern()) {
      return;
    }
    final List<String> list = myProvider.filterNames(ChooseByNameBase.this, getNames(myCheckBox.isSelected()), pattern);
    if (isComplexPattern(pattern)) return; //TODO: support '*'
    final String oldText = getTrimmedText();
    final int oldPos = myList.getSelectedIndex();
    String commonPrefix = null;
    if (!list.isEmpty()) {
      // Compute the case-insensitive common prefix of all matching names.
      for (String name : list) {
        final String string = name.toLowerCase();
        if (commonPrefix == null) {
          commonPrefix = string;
        }
        else {
          while (!commonPrefix.isEmpty()) {
            if (string.startsWith(commonPrefix)) {
              break;
            }
            commonPrefix = commonPrefix.substring(0, commonPrefix.length() - 1);
          }
          if (commonPrefix.isEmpty()) break;
        }
      }
      // Restore the original casing from the first name; fall back to lowercase when names disagree.
      commonPrefix = list.get(0).substring(0, commonPrefix.length());
      for (int i = 1; i < list.size(); i++) {
        final String string = list.get(i).substring(0, commonPrefix.length());
        if (!string.equals(commonPrefix)) {
          commonPrefix = commonPrefix.toLowerCase();
          break;
        }
      }
    }
    if (commonPrefix == null) commonPrefix = "";
    if (!StringUtil.startsWithIgnoreCase(commonPrefix, pattern)) {
      commonPrefix = pattern;
    }
    final String newPattern = commonPrefix;
    myHistory.add(Pair.create(oldText, oldPos));
    myTextField.setText(newPattern);
    myTextField.setCaretPosition(newPattern.length());
    rebuildList(false);
  }
  /** A pattern is "complex" if it contains a wildcard or any of the model's separators. */
  private boolean isComplexPattern(@NotNull final String pattern) {
    if (pattern.indexOf('*') >= 0) return true;
    for (String s : myModel.getSeparators()) {
      if (pattern.contains(s)) return true;
    }
    return false;
  }
  @Override
  @Nullable
  public Point getBestPopupPosition() {
    return new Point(myTextFieldPanel.getWidth(), getHeight());
  }
  @Override
  protected void paintComponent(@NotNull final Graphics g) {
    GraphicsUtil.setupAntialiasing(g);
    super.paintComponent(g);
  }
  /** Whether the completion shortcut has been pressed at least once in this field. */
  public boolean isCompletionKeyStroke() {
    return completionKeyStrokeHappened;
  }
}
/** Returns the item provider used to filter and supply elements for this chooser. */
public ChooseByNameItemProvider getProvider() {
  return myProvider;
}
/**
 * Called when a multi-character string is inserted into the search field (a paste).
 * If a file name is pasted into "Goto Class", redirects the user to "Goto File".
 */
protected void handlePaste(String str) {
  if (!myInitIsDone) return;
  if (myModel instanceof GotoClassModel2 && isFileName(str)) {
    //noinspection SSBasedInspection
    SwingUtilities.invokeLater(new Runnable() {
      @Override
      public void run() {
        final GotoFileAction gotoFile = new GotoFileAction();
        AnActionEvent event = new AnActionEvent(null,
                                                DataManager.getInstance().getDataContext(myTextField),
                                                ActionPlaces.UNKNOWN,
                                                gotoFile.getTemplatePresentation(),
                                                ActionManager.getInstance(),
                                                0);
        event.setInjectedContext(gotoFile.isInInjectedContext());
        gotoFile.actionPerformed(event);
      }
    });
  }
}
/**
 * Heuristic: treats the string as a file name when it has an extension known to the
 * file-type manager. A trailing ":line" suffix after the extension is ignored.
 */
private static boolean isFileName(String name) {
  final int dot = name.lastIndexOf('.');
  if (dot <= 0) {
    return false;
  }
  String ext = name.substring(dot + 1);
  final int colon = ext.indexOf(':');
  if (colon >= 0) {
    ext = ext.substring(0, colon);
  }
  return FileTypeManagerEx.getInstanceEx().getFileTypeByExtension(ext) != UnknownFileType.INSTANCE;
}
/** Sentinel list item indicating more results exist than are shown. */
public static final String EXTRA_ELEM = "...";
/** Sentinel list item separating prefix matches from non-prefix matches. */
public static final String NON_PREFIX_SEPARATOR = "non-prefix matches:";
/**
 * Builds the horizontal-separator component rendered for {@link #NON_PREFIX_SEPARATOR}.
 */
public static Component renderNonPrefixSeparatorComponent(Color backgroundColor) {
  final JPanel result = new JPanel(new BorderLayout());
  result.add(new JSeparator(SwingConstants.HORIZONTAL), BorderLayout.CENTER);
  if (!UIUtil.isUnderAquaBasedLookAndFeel()) {
    result.setBorder(new EmptyBorder(3, 0, 2, 0));
  }
  result.setBackground(backgroundColor);
  return result;
}
/**
 * Background task that computes the elements matching the current pattern under a
 * read action with write-action priority, then delivers them to the callback on the
 * EDT. If nothing is found in the project scope, the search is transparently expanded
 * to the "everywhere" scope.
 */
private class CalcElementsThread implements ReadTask {
  private final String myPattern;
  private volatile boolean myCheckboxState;
  // True once the search has been expanded from project scope to "everywhere".
  private volatile boolean myScopeExpanded;
  private final Consumer<Set<?>> myCallback;
  private final ModalityState myModalityState;
  private final ProgressIndicator myProgress = new ProgressIndicatorBase();
  CalcElementsThread(String pattern,
                     boolean checkboxState,
                     Consumer<Set<?>> callback,
                     @NotNull ModalityState modalityState,
                     boolean scopeExpanded) {
    myPattern = pattern;
    myCheckboxState = checkboxState;
    myCallback = callback;
    myModalityState = modalityState;
    myScopeExpanded = scopeExpanded;
  }
  private final Alarm myShowCardAlarm = new Alarm();
  /** Registers this task as the active calculation and schedules it. EDT only. */
  void scheduleThread() {
    ApplicationManager.getApplication().assertIsDispatchThread();
    myCalcElementsThread = this;
    // Delay the "Searching..." card so it does not flash for fast searches.
    showCard(SEARCHING_CARD, 200);
    ProgressIndicatorUtils.scheduleWithWriteActionPriority(myProgress, this);
  }
  @Override
  public void computeInReadAction(@NotNull ProgressIndicator indicator) {
    if (myProject != null && myProject.isDisposed()) return;
    final Set<Object> elements = new LinkedHashSet<Object>();
    if (!ourLoadNamesEachTime) ensureNamesLoaded(myCheckboxState);
    addElementsByPattern(myPattern, elements, myProgress, myCheckboxState);
    if (myProgress.isCanceled()) {
      myShowCardAlarm.cancelAllRequests();
      return;
    }
    // Nothing found in project scope: retry with the "everywhere" scope.
    if (elements.isEmpty() && !myCheckboxState) {
      myScopeExpanded = true;
      myCheckboxState = true;
      if (!ourLoadNamesEachTime) ensureNamesLoaded(true);
      addElementsByPattern(myPattern, elements, myProgress, true);
    }
    final String cardToShow = elements.isEmpty() ? NOT_FOUND_CARD : myScopeExpanded ? NOT_FOUND_IN_PROJECT_CARD : CHECK_BOX_CARD;
    showCard(cardToShow, 0);
    // EdtSortingModel results must be filtered on the EDT, others here in the background.
    final boolean edt = myModel instanceof EdtSortingModel;
    final Set<Object> filtered = !edt ? filter(elements) : Collections.emptySet();
    ApplicationManager.getApplication().invokeLater(new Runnable() {
      @Override
      public void run() {
        if (!checkDisposed() && !myProgress.isCanceled()) {
          CalcElementsThread currentBgProcess = myCalcElementsThread;
          LOG.assertTrue(currentBgProcess == CalcElementsThread.this, currentBgProcess);
          myCallback.consume(edt ? filter(elements) : filtered);
        }
      }
    }, myModalityState);
  }
  @Override
  public void onCanceled(@NotNull ProgressIndicator indicator) {
    // Preempted by a write action: restart with the same parameters.
    LOG.assertTrue(myCalcElementsThread == this, myCalcElementsThread);
    new CalcElementsThread(myPattern, myCheckboxState, myCallback, myModalityState, myScopeExpanded).scheduleThread();
  }
  /** Feeds matching elements into {@code elements} until the provider is exhausted or the list overflows. */
  private void addElementsByPattern(@NotNull String pattern,
                                    @NotNull final Set<Object> elements,
                                    @NotNull final ProgressIndicator indicator,
                                    boolean everywhere) {
    long start = System.currentTimeMillis();
    myProvider.filterElements(
      ChooseByNameBase.this, pattern, everywhere,
      indicator,
      new Processor<Object>() {
        @Override
        public boolean process(Object o) {
          if (indicator.isCanceled()) return false;
          elements.add(o);
          if (isOverflow(elements)) {
            elements.add(EXTRA_ELEM);
            return false;
          }
          return true;
        }
      }
    );
    if (myAlwaysHasMore) {
      elements.add(EXTRA_ELEM);
    }
    if (ContributorsBasedGotoByModel.LOG.isDebugEnabled()) {
      long end = System.currentTimeMillis();
      ContributorsBasedGotoByModel.LOG.debug("addElementsByPattern("+pattern+"): "+(end-start)+"ms; "+elements.size()+" elements");
    }
  }
  /** Shows the given status card after {@code delay} ms unless the search has been cancelled. */
  private void showCard(final String card, final int delay) {
    if (ApplicationManager.getApplication().isUnitTestMode()) return;
    myShowCardAlarm.cancelAllRequests();
    myShowCardAlarm.addRequest(new Runnable() {
      @Override
      public void run() {
        if (!myProgress.isCanceled()) {
          myCard.show(myCardContainer, card);
        }
      }
    }, delay, myModalityState);
  }
  protected boolean isOverflow(@NotNull Set<Object> elementsArray) {
    return elementsArray.size() >= myMaximumListSizeLimit;
  }
  /** Cancels the underlying progress indicator. EDT only. */
  private void cancel() {
    ApplicationManager.getApplication().assertIsDispatchThread();
    myProgress.cancel();
  }
}
/**
 * The list may be shown for an empty pattern either when the chooser is configured to
 * do so, or right after an explicit completion keystroke.
 */
public boolean canShowListForEmptyPattern() {
  if (isShowListForEmptyPattern()) {
    return true;
  }
  return isShowListAfterCompletionKeyStroke() && lastKeyStrokeIsCompletion();
}
/** Whether the completion shortcut has been pressed in the search field. */
protected boolean lastKeyStrokeIsCompletion() {
  return myTextField.isCompletionKeyStroke();
}
/**
 * Builds a name matcher for the pattern. Matching honors case only when the pattern
 * contains an uppercase character (i.e. differs from its lowercase form).
 */
private static Matcher buildPatternMatcher(@NotNull String pattern) {
  return NameUtil.buildMatcher(pattern, 0, true, true, pattern.toLowerCase().equals(pattern));
}
/** Right-aligned dark-gray label used for inline hints. */
private static class HintLabel extends JLabel {
  private HintLabel(String text) {
    super(text, RIGHT);
    setForeground(Color.DARK_GRAY);
  }
}
/** Returns the maximum number of elements shown in the list before "..." is appended. */
public int getMaximumListSizeLimit() {
  return myMaximumListSizeLimit;
}
/** Sets the maximum number of elements shown in the list. */
public void setMaximumListSizeLimit(final int maximumListSizeLimit) {
  myMaximumListSizeLimit = maximumListSizeLimit;
}
/** Sets the increment by which the list size grows when more items are requested. */
public void setListSizeIncreasing(final int listSizeIncreasing) {
  myListSizeIncreasing = listSizeIncreasing;
}
/** Whether the "..." item is always appended regardless of overflow (see {@link #setAlwaysHasMore}). */
public boolean isAlwaysHasMore() {
  return myAlwaysHasMore;
}
/**
 * Display <tt>...</tt> item at the end of the list regardless of whether it was filled up or not.
 * This option can be useful in cases when it can't be known beforehand
 * whether the next call to {@link ChooseByNameItemProvider} will yield new items.
 */
public void setAlwaysHasMore(boolean enabled) {
  myAlwaysHasMore = enabled;
}
/** Title/description of the "show all results in the usage view" action. */
private static final String ACTION_NAME = "Show All in View";
/**
 * Action that dumps all matches (including those not shown due to the list size limit)
 * into a usage view. If the visible list overflowed ("..." present), a modal background
 * search re-collects the full result set first.
 */
private abstract class ShowFindUsagesAction extends AnAction {
  public ShowFindUsagesAction() {
    super(ACTION_NAME, ACTION_NAME, AllIcons.General.AutohideOff);
  }
  @Override
  public void actionPerformed(@NotNull final AnActionEvent e) {
    cancelListUpdater();
    final UsageViewPresentation presentation = new UsageViewPresentation();
    final String text = getTrimmedText();
    final String prefixPattern = myFindUsagesTitle + " \'" + text + "\'";
    final String nonPrefixPattern = myFindUsagesTitle + " \'*" + text + "*\'";
    presentation.setCodeUsagesString(prefixPattern);
    presentation.setUsagesInGeneratedCodeString(prefixPattern + " in generated code");
    presentation.setDynamicUsagesString(nonPrefixPattern);
    presentation.setTabName(prefixPattern);
    presentation.setTabText(prefixPattern);
    // NOTE(review): prefixPattern.toLowerCase() wrapped in StringUtil.toLowerCase is a
    // redundant double lowercasing; harmless but likely unintended.
    presentation.setTargetsNodeText("Unsorted " + StringUtil.toLowerCase(prefixPattern.toLowerCase()));
    // elements[0] = prefix matches, elements[1] = non-prefix matches (subclass-provided).
    final Object[][] elements = getElements();
    final List<PsiElement> targets = new ArrayList<PsiElement>();
    final List<Usage> usages = new ArrayList<Usage>();
    fillUsages(Arrays.asList(elements[0]), usages, targets, false);
    fillUsages(Arrays.asList(elements[1]), usages, targets, true);
    if (myListModel.contains(EXTRA_ELEM)) { //start searching for the rest
      final boolean everywhere = myCheckBox.isSelected();
      final Set<Object> prefixMatchElementsArray = new LinkedHashSet<Object>();
      final Set<Object> nonPrefixMatchElementsArray = new LinkedHashSet<Object>();
      hideHint();
      ProgressManager.getInstance().run(new Task.Modal(myProject, prefixPattern, true) {
        private ChooseByNameBase.CalcElementsThread myCalcUsagesThread;
        @Override
        public void run(@NotNull final ProgressIndicator indicator) {
          ensureNamesLoaded(everywhere);
          indicator.setIndeterminate(true);
          final TooManyUsagesStatus tooManyUsagesStatus = TooManyUsagesStatus.createFor(indicator);
          // Overflow is disabled (always returns false) so the full result set is collected;
          // the user is warned when the usage count grows past the configured limit.
          myCalcUsagesThread = new CalcElementsThread(text, everywhere, null, ModalityState.NON_MODAL, false) {
            @Override
            protected boolean isOverflow(@NotNull Set<Object> elementsArray) {
              tooManyUsagesStatus.pauseProcessingIfTooManyUsages();
              if (elementsArray.size() > UsageLimitUtil.USAGES_LIMIT - myMaximumListSizeLimit && tooManyUsagesStatus.switchTooManyUsagesStatus()) {
                int usageCount = elementsArray.size() + myMaximumListSizeLimit;
                UsageViewManagerImpl.showTooManyUsagesWarning(getProject(), tooManyUsagesStatus, indicator, presentation, usageCount, null);
              }
              return false;
            }
          };
          ApplicationManager.getApplication().runReadAction(new Runnable() {
            @Override
            public void run() {
              // First pass: prefix matches only; second pass (if enabled): any-place matches.
              boolean anyPlace = isSearchInAnyPlace();
              setSearchInAnyPlace(false);
              myCalcUsagesThread.addElementsByPattern(text, prefixMatchElementsArray, indicator, everywhere);
              setSearchInAnyPlace(anyPlace);
              if (anyPlace && !indicator.isCanceled()) {
                myCalcUsagesThread.addElementsByPattern(text, nonPrefixMatchElementsArray, indicator, everywhere);
                nonPrefixMatchElementsArray.removeAll(prefixMatchElementsArray);
              }
              indicator.setText("Prepare...");
              fillUsages(prefixMatchElementsArray, usages, targets, false);
              fillUsages(nonPrefixMatchElementsArray, usages, targets, true);
            }
          });
        }
        @Override
        public void onSuccess() {
          showUsageView(targets, usages, presentation);
        }
        @Override
        public void onCancel() {
          myCalcUsagesThread.cancel();
        }
      });
    }
    else {
      hideHint();
      showUsageView(targets, usages, presentation);
    }
  }
  /**
   * Converts PSI elements into usages (elements with a text range) or targets (without).
   * Non-prefix matches are flagged as "dynamic" usages via {@code separateGroup}.
   */
  private void fillUsages(Collection<Object> matchElementsArray,
                          List<Usage> usages,
                          List<PsiElement> targets,
                          final boolean separateGroup) {
    for (Object o : matchElementsArray) {
      if (o instanceof PsiElement) {
        PsiElement element = (PsiElement)o;
        if (element.getTextRange() != null) {
          usages.add(new UsageInfo2UsageAdapter(new UsageInfo(element) {
            @Override
            public boolean isDynamicUsage() {
              return separateGroup || super.isDynamicUsage();
            }
          }));
        }
        else {
          targets.add(element);
        }
      }
    }
  }
  private void showUsageView(@NotNull List<PsiElement> targets,
                             @NotNull List<Usage> usages,
                             @NotNull UsageViewPresentation presentation) {
    UsageTarget[] usageTargets = targets.isEmpty() ? UsageTarget.EMPTY_ARRAY :
                                 PsiElement2UsageTargetAdapter.convert(PsiUtilCore.toPsiElementArray(targets));
    UsageViewManager.getInstance(myProject).showUsages(usageTargets, usages.toArray(new Usage[usages.size()]), presentation);
  }
  @Override
  public void update(@NotNull AnActionEvent e) {
    // Hidden without a title or project; enabled only when there is at least one match.
    if (myFindUsagesTitle == null || myProject == null) {
      e.getPresentation().setVisible(false);
      return;
    }
    final Object[][] elements = getElements();
    e.getPresentation().setEnabled(elements != null && elements[0].length + elements[1].length > 0);
  }
  /** Returns the current matches: [0] = prefix matches, [1] = non-prefix matches. */
  public abstract Object[][] getElements();
}
/** Returns the chooser's search text field. */
public JTextField getTextField() {
  return myTextField;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.gen;
import com.facebook.presto.bytecode.BytecodeBlock;
import com.facebook.presto.bytecode.BytecodeNode;
import com.facebook.presto.bytecode.ClassDefinition;
import com.facebook.presto.bytecode.FieldDefinition;
import com.facebook.presto.bytecode.MethodDefinition;
import com.facebook.presto.bytecode.Parameter;
import com.facebook.presto.bytecode.Scope;
import com.facebook.presto.bytecode.Variable;
import com.facebook.presto.bytecode.control.IfStatement;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.operator.JoinFilterFunction;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.sql.relational.CallExpression;
import com.facebook.presto.sql.relational.RowExpression;
import com.facebook.presto.sql.relational.RowExpressionVisitor;
import com.google.common.base.Throwables;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.primitives.Primitives;
import com.google.inject.Inject;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static com.facebook.presto.bytecode.Access.FINAL;
import static com.facebook.presto.bytecode.Access.PRIVATE;
import static com.facebook.presto.bytecode.Access.PUBLIC;
import static com.facebook.presto.bytecode.Access.a;
import static com.facebook.presto.bytecode.CompilerUtils.defineClass;
import static com.facebook.presto.bytecode.CompilerUtils.makeClassName;
import static com.facebook.presto.bytecode.Parameter.arg;
import static com.facebook.presto.bytecode.ParameterizedType.type;
import static com.facebook.presto.bytecode.expression.BytecodeExpressions.constantFalse;
import static com.facebook.presto.sql.gen.BytecodeUtils.invoke;
import static com.facebook.presto.sql.gen.TryCodeGenerator.defineTryMethod;
import static com.google.common.base.MoreObjects.toStringHelper;
import static java.util.Objects.requireNonNull;
/**
 * Compiles a join filter {@link RowExpression} into a bytecode-generated
 * {@link JoinFilterFunction} implementation. Compiled classes are cached per
 * (filter, leftBlocksSize) pair so repeated plans reuse the same generated class.
 */
public class JoinFilterFunctionCompiler
{
    private final Metadata metadata;

    // Cache compiled classes so identical (filter, leftBlocksSize) pairs compile only once.
    private final LoadingCache<JoinFilterCacheKey, Class<? extends JoinFilterFunction>> joinFilterFunctions = CacheBuilder.newBuilder().maximumSize(1000).build(
            new CacheLoader<JoinFilterCacheKey, Class<? extends JoinFilterFunction>>()
            {
                @Override
                public Class<? extends JoinFilterFunction> load(JoinFilterCacheKey key)
                        throws Exception
                {
                    return compileFilterFunctionInternal(key.getFilter(), key.getLeftBlocksSize());
                }
            });

    @Inject
    public JoinFilterFunctionCompiler(Metadata metadata)
    {
        this.metadata = metadata;
    }

    /**
     * Returns a factory producing {@link JoinFilterFunction} instances for the given filter.
     * The generated class is fetched from (or compiled into) the cache; each factory call
     * reflectively instantiates it with the supplied session.
     */
    public JoinFilterFunctionFactory compileJoinFilterFunction(RowExpression filter, int leftBlocksSize)
    {
        Class<? extends JoinFilterFunction> joinFilterFunction = joinFilterFunctions.getUnchecked(new JoinFilterCacheKey(filter, leftBlocksSize));
        return (session) -> {
            try {
                return joinFilterFunction.getConstructor(ConnectorSession.class).newInstance(session);
            }
            catch (ReflectiveOperationException e) {
                // Throwables.propagate is deprecated; e is always a checked exception here,
                // so wrapping in RuntimeException is exactly what propagate did.
                throw new RuntimeException(e);
            }
        };
    }

    /**
     * Generates and defines the bytecode class implementing {@code filterExpression}.
     * Called by the cache loader on a miss.
     */
    private Class<? extends JoinFilterFunction> compileFilterFunctionInternal(RowExpression filterExpression, int leftBlocksSize)
    {
        ClassDefinition classDefinition = new ClassDefinition(
                a(PUBLIC, FINAL),
                makeClassName("JoinFilterFunction"),
                type(Object.class),
                type(JoinFilterFunction.class));
        CallSiteBinder callSiteBinder = new CallSiteBinder();
        // No need to allocate a fresh compiler: generateMethods only reads 'metadata',
        // which would be identical on the new instance.
        generateMethods(classDefinition, callSiteBinder, filterExpression, leftBlocksSize);
        //
        // toString method
        //
        generateToString(
                classDefinition,
                callSiteBinder,
                toStringHelper(classDefinition.getType().getJavaClassName())
                        .add("filter", filterExpression)
                        .add("leftBlocksSize", leftBlocksSize)
                        .toString());
        return defineClass(classDefinition, JoinFilterFunction.class, callSiteBinder.getBindings(), getClass().getClassLoader());
    }

    // Emits the session field, constructor, and filter() method onto the class being built.
    private void generateMethods(ClassDefinition classDefinition, CallSiteBinder callSiteBinder, RowExpression filter, int leftBlocksSize)
    {
        CachedInstanceBinder cachedInstanceBinder = new CachedInstanceBinder(classDefinition, callSiteBinder);
        FieldDefinition sessionField = classDefinition.declareField(a(PRIVATE, FINAL), "session", ConnectorSession.class);
        generateConstructor(classDefinition, sessionField);
        generateFilterMethod(classDefinition, callSiteBinder, cachedInstanceBinder, filter, leftBlocksSize, sessionField);
    }

    // Constructor: super(); this.session = session;
    private void generateConstructor(ClassDefinition classDefinition, FieldDefinition sessionField)
    {
        Parameter sessionParameter = arg("session", ConnectorSession.class);
        MethodDefinition constructorDefinition = classDefinition.declareConstructor(a(PUBLIC), sessionParameter);
        BytecodeBlock body = constructorDefinition.getBody();
        Variable thisVariable = constructorDefinition.getThis();
        body.comment("super();")
                .append(thisVariable)
                .invokeConstructor(Object.class);
        body.append(thisVariable.setField(sessionField, sessionParameter));
        body.ret();
    }

    /**
     * Emits boolean filter(int leftPosition, Block[] leftBlocks, int rightPosition, Block[] rightBlocks).
     * A SQL NULL filter result is treated as false (rows that evaluate to NULL do not join).
     */
    private void generateFilterMethod(ClassDefinition classDefinition, CallSiteBinder callSiteBinder, CachedInstanceBinder cachedInstanceBinder, RowExpression filter, int leftBlocksSize, FieldDefinition sessionField)
    {
        Map<CallExpression, MethodDefinition> tryMethodMap = generateTryMethods(classDefinition, callSiteBinder, cachedInstanceBinder, leftBlocksSize, filter);
        // int leftPosition, Block[] leftBlocks, int rightPosition, Block[] rightBlocks
        Parameter leftPosition = arg("leftPosition", int.class);
        Parameter leftBlocks = arg("leftBlocks", Block[].class);
        Parameter rightPosition = arg("rightPosition", int.class);
        Parameter rightBlocks = arg("rightBlocks", Block[].class);
        MethodDefinition method = classDefinition.declareMethod(
                a(PUBLIC),
                "filter",
                type(boolean.class),
                ImmutableList.<Parameter>builder()
                        .add(leftPosition)
                        .add(leftBlocks)
                        .add(rightPosition)
                        .add(rightBlocks)
                        .build());
        method.comment("filter: %s", filter.toString());
        BytecodeBlock body = method.getBody();
        Scope scope = method.getScope();
        Variable wasNullVariable = scope.declareVariable("wasNull", body, constantFalse());
        scope.declareVariable("session", body, method.getThis().getField(sessionField));
        BytecodeExpressionVisitor visitor = new BytecodeExpressionVisitor(
                callSiteBinder,
                cachedInstanceBinder,
                fieldReferenceCompiler(callSiteBinder, leftPosition, leftBlocks, rightPosition, rightBlocks, leftBlocksSize, wasNullVariable),
                metadata.getFunctionRegistry(),
                tryMethodMap);
        BytecodeNode visitorBody = filter.accept(visitor, scope);
        Variable result = scope.declareVariable(boolean.class, "result");
        body.append(visitorBody)
                .putVariable(result)
                .append(new IfStatement()
                        .condition(wasNullVariable)
                        .ifTrue(constantFalse().ret())
                        .ifFalse(result.ret()));
    }

    /**
     * Pre-generates one helper method per TRY expression in the filter (pre-order), so
     * nested TRYs can reference the methods of the TRYs they contain.
     */
    private Map<CallExpression, MethodDefinition> generateTryMethods(
            ClassDefinition containerClassDefinition,
            CallSiteBinder callSiteBinder,
            CachedInstanceBinder cachedInstanceBinder,
            int leftBlocksSize,
            RowExpression filter)
    {
        TryExpressionExtractor tryExtractor = new TryExpressionExtractor();
        filter.accept(tryExtractor, null);
        List<CallExpression> tryExpressions = tryExtractor.getTryExpressionsPreOrder();
        ImmutableMap.Builder<CallExpression, MethodDefinition> tryMethodMap = ImmutableMap.builder();
        int methodId = 0;
        for (CallExpression tryExpression : tryExpressions) {
            Parameter session = arg("session", ConnectorSession.class);
            Parameter leftPosition = arg("leftPosition", int.class);
            Parameter leftBlocks = arg("leftBlocks", Block[].class);
            Parameter rightPosition = arg("rightPosition", int.class);
            Parameter rightBlocks = arg("rightBlocks", Block[].class);
            Parameter wasNullVariable = arg("wasNull", boolean.class);
            BytecodeExpressionVisitor innerExpressionVisitor = new BytecodeExpressionVisitor(
                    callSiteBinder,
                    cachedInstanceBinder,
                    fieldReferenceCompiler(callSiteBinder, leftPosition, leftBlocks, rightPosition, rightBlocks, leftBlocksSize, wasNullVariable),
                    metadata.getFunctionRegistry(),
                    tryMethodMap.build());
            List<Parameter> inputParameters = ImmutableList.<Parameter>builder()
                    .add(session)
                    .add(leftPosition)
                    .add(leftBlocks)
                    .add(rightPosition)
                    .add(rightBlocks)
                    .add(wasNullVariable)
                    .build();
            MethodDefinition tryMethod = defineTryMethod(
                    innerExpressionVisitor,
                    containerClassDefinition,
                    "try_" + methodId,
                    inputParameters,
                    Primitives.wrap(tryExpression.getType().getJavaType()),
                    tryExpression,
                    callSiteBinder);
            tryMethodMap.put(tryExpression, tryMethod);
            methodId++;
        }
        return tryMethodMap.build();
    }

    private static void generateToString(ClassDefinition classDefinition, CallSiteBinder callSiteBinder, String string)
    {
        // bind constant via invokedynamic to avoid constant pool issues due to large strings
        classDefinition.declareMethod(a(PUBLIC), "toString", type(String.class))
                .getBody()
                .append(invoke(callSiteBinder.bind(string, String.class), "toString"))
                .retObject();
    }

    /** Factory for per-session instances of a compiled filter class. */
    @FunctionalInterface
    public interface JoinFilterFunctionFactory
    {
        JoinFilterFunction create(ConnectorSession session);
    }

    /**
     * Resolves input field references: fields below leftBlocksSize come from the left
     * (probe) blocks/position, the rest from the right (build) side, re-indexed from zero.
     */
    private static RowExpressionVisitor<Scope, BytecodeNode> fieldReferenceCompiler(
            final CallSiteBinder callSiteBinder,
            final Variable leftPosition,
            final Variable leftBlocks,
            final Variable rightPosition,
            final Variable rightBlocks,
            final int leftBlocksSize,
            final Variable wasNullVariable)
    {
        return new InputReferenceCompiler(
                (scope, field) -> field < leftBlocksSize ? leftBlocks.getElement(field) : rightBlocks.getElement(field - leftBlocksSize),
                (scope, field) -> field < leftBlocksSize ? leftPosition : rightPosition,
                wasNullVariable,
                callSiteBinder);
    }

    /** Cache key: the filter expression plus the probe-side block count. */
    private static final class JoinFilterCacheKey
    {
        private final RowExpression filter;
        private final int leftBlocksSize;

        public JoinFilterCacheKey(RowExpression filter, int leftBlocksSize)
        {
            this.filter = requireNonNull(filter, "filter can not be null");
            this.leftBlocksSize = leftBlocksSize;
        }

        public RowExpression getFilter()
        {
            return filter;
        }

        public int getLeftBlocksSize()
        {
            return leftBlocksSize;
        }

        @Override
        public boolean equals(Object o)
        {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            JoinFilterCacheKey that = (JoinFilterCacheKey) o;
            return leftBlocksSize == that.leftBlocksSize &&
                    Objects.equals(filter, that.filter);
        }

        @Override
        public int hashCode()
        {
            return Objects.hash(filter, leftBlocksSize);
        }

        @Override
        public String toString()
        {
            return toStringHelper(this)
                    .add("filter", filter)
                    .add("leftBlocksSize", leftBlocksSize)
                    .toString();
        }
    }
}
| |
package com.linkedin.thirdeye.query;
import com.google.common.base.Joiner;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;
import com.linkedin.thirdeye.api.*;
import com.linkedin.thirdeye.impl.StarTreeQueryImpl;
import com.linkedin.thirdeye.impl.storage.IndexMetadata;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
/**
 * Executes a ThirdEye query against the star trees managed by a {@link StarTreeManager}:
 * selects the minimal set of immutable and real-time tree segments covering the query
 * time range, fans metric-series fetches out to an executor, merges the per-tree results,
 * and applies the query's functions before returning.
 */
public class ThirdEyeQueryExecutor {
  private static final Logger LOGGER = LoggerFactory.getLogger(ThirdEyeQueryExecutor.class);
  private static final Joiner OR_JOINER = Joiner.on(" OR ");
  private static final ThirdEyeFunction TO_MILLIS = new ThirdEyeUnitConversionFunction(1, TimeUnit.MILLISECONDS);

  private final ExecutorService executorService;
  private final StarTreeManager starTreeManager;

  // Coarser granularities sort first so the greedy segment selection prefers them.
  private static final Map<String, Integer> timeGranularitySortOrder = new HashMap<>();
  static {
    timeGranularitySortOrder.put("MONTHLY", 0);
    timeGranularitySortOrder.put("WEEKLY", 1);
    timeGranularitySortOrder.put("DAILY", 2);
    timeGranularitySortOrder.put("HOURLY", 3);
  }

  public ThirdEyeQueryExecutor(ExecutorService executorService, StarTreeManager starTreeManager) {
    this.executorService = executorService;
    this.starTreeManager = starTreeManager;
  }

  /** Parses {@code sql} and executes the resulting query. */
  public ThirdEyeQueryResult executeQuery(String sql) throws Exception {
    return executeQuery(new ThirdEyeQueryParser(sql).getQuery());
  }

  /**
   * Executes the query end-to-end.
   *
   * @throws IllegalArgumentException if the collection is unknown, or a group-by column
   *         also appears as a fixed dimension value
   */
  public ThirdEyeQueryResult executeQuery(final ThirdEyeQuery query) throws Exception {
    // Fixed log format: original said "query{}" which ran the label into the value.
    LOGGER.info("START Execution for query_id: {} query: {}", query.hashCode(), query);
    ThirdEyeQueryResult result = new ThirdEyeQueryResult();
    final StarTreeConfig config = starTreeManager.getConfig(query.getCollection());
    if (config == null) {
      throw new IllegalArgumentException("No collection " + query.getCollection());
    }
    final List<String> dimensionNames = new ArrayList<>(config.getDimensions().size());
    for (DimensionSpec dimensionSpec : config.getDimensions()) {
      dimensionNames.add(dimensionSpec.getName());
    }
    result.setDimensions(dimensionNames);
    // Offset for moving average: extend the query start back by the largest window so the
    // first output points have full history; also note the aggregation window, if any.
    long startOffset = 0;
    long collectionWindowMillis = 0;
    for (ThirdEyeFunction function : query.getFunctions()) {
      if (function instanceof ThirdEyeMovingAverageFunction) {
        ThirdEyeMovingAverageFunction movingAverageFunction = (ThirdEyeMovingAverageFunction) function;
        TimeGranularity window = movingAverageFunction.getWindow();
        long windowMillis = TimeUnit.MILLISECONDS.convert(window.getSize(), window.getUnit());
        if (windowMillis > startOffset) {
          startOffset = windowMillis;
        }
      } else if (function instanceof ThirdEyeAggregateFunction) {
        ThirdEyeAggregateFunction aggregateFunction = (ThirdEyeAggregateFunction) function;
        TimeGranularity window = aggregateFunction.getWindow();
        collectionWindowMillis = TimeUnit.MILLISECONDS.convert(window.getSize(), window.getUnit());
      }
    }
    // Time, converted from wall-clock millis to collection time units
    DateTime queryStartInMillis = new DateTime(query.getStart().getMillis() - startOffset);
    long queryStartTime = dateTimeToCollectionTime(config, queryStartInMillis);
    long queryEndTime = dateTimeToCollectionTime(config, query.getEnd());
    final TimeRange inputQueryTimeRange = new TimeRange(queryStartInMillis.getMillis(), query.getEnd().getMillis());
    // Partition trees into immutable segments and real-time ("KAFKA") segments
    Map<UUID, IndexMetadata> immutableMetadataMap = new HashMap<>();
    Map<UUID, IndexMetadata> realTimeMetadataMap = new HashMap<>();
    for (StarTree starTree : starTreeManager.getStarTrees(config.getCollection()).values()) {
      UUID treeId = starTree.getRoot().getId();
      IndexMetadata indexMetadata = starTreeManager.getIndexMetadata(treeId);
      if ("KAFKA".equals(indexMetadata.getTimeGranularity())) {
        realTimeMetadataMap.put(treeId, indexMetadata);
      } else {
        immutableMetadataMap.put(treeId, indexMetadata);
      }
    }
    LOGGER.info("Selecting trees to query for queryTimeRange:{}", inputQueryTimeRange);
    List<UUID> treeIdsToQuery = selectTreesToQuery(immutableMetadataMap, inputQueryTimeRange);
    // Align to aggregation boundary
    if (collectionWindowMillis > 0) {
      long collectionWindow = dateTimeToCollectionTime(config, new DateTime(collectionWindowMillis));
      queryStartTime = (queryStartTime / collectionWindow) * collectionWindow;
      queryEndTime = (queryEndTime / collectionWindow + 1) * collectionWindow; // include everything in that window
    }
    final TimeRange queryTimeRange = new TimeRange(queryStartTime, queryEndTime);
    // Time ranges that should be queried for each tree
    final Map<UUID, TimeRange> timeRangesToQuery = new HashMap<>();
    for (UUID treeId : treeIdsToQuery) {
      // Whole query time for each of the immutable trees
      timeRangesToQuery.put(treeId, queryTimeRange);
    }
    // Determine max data time from the most recent tree that's being queried from immutable segments
    Long maxImmutableTimeMillis = null;
    for (UUID treeId : treeIdsToQuery) {
      IndexMetadata indexMetadata = immutableMetadataMap.get(treeId);
      if (maxImmutableTimeMillis == null || maxImmutableTimeMillis < indexMetadata.getMaxDataTimeMillis()) {
        maxImmutableTimeMillis = indexMetadata.getMaxDataTimeMillis();
      }
    }
    // Get the starting collection time we should use for real-time segments: either the
    // whole range (no immutable data) or just the tail not covered by immutable segments.
    Long realTimeStartTime = null;
    Long realTimeStartTimeMillis = null;
    if (maxImmutableTimeMillis == null) {
      realTimeStartTime = queryTimeRange.getStart();
      realTimeStartTimeMillis = queryStartInMillis.getMillis();
    } else if (maxImmutableTimeMillis < query.getEnd().getMillis()) {
      realTimeStartTime = dateTimeToCollectionTime(config, new DateTime(maxImmutableTimeMillis));
      realTimeStartTimeMillis = maxImmutableTimeMillis;
      long collectionWindow = dateTimeToCollectionTime(config, new DateTime(collectionWindowMillis));
      if (collectionWindow > 0) {
        realTimeStartTime = (realTimeStartTime / collectionWindow) * collectionWindow;
      }
    }
    // Get the real time trees we need to query
    List<UUID> realTimeTreeIdsToQuery = new ArrayList<>();
    if (realTimeStartTime != null) {
      TimeRange mutableTimeRange = new TimeRange(realTimeStartTime, queryTimeRange.getEnd());
      TimeRange mutableTimeRangeMillis = new TimeRange(realTimeStartTimeMillis, query.getEnd().getMillis());
      realTimeTreeIdsToQuery.addAll(selectTreesToQuery(realTimeMetadataMap, mutableTimeRangeMillis));
      // Also add in-memory tree (though it may not match)
      StarTree mutableTree = starTreeManager.getMutableStarTree(config.getCollection());
      realTimeTreeIdsToQuery.add(mutableTree.getRoot().getId());
      // Only query the back-end of the time range for these trees
      for (UUID treeId : realTimeTreeIdsToQuery) {
        timeRangesToQuery.put(treeId, mutableTimeRange);
      }
    }
    // For all group by dimensions, expand the group-by column into one fixed value per
    // distinct dimension value found in the queried trees.
    if (!query.getGroupByColumns().isEmpty()) {
      for (final String groupByColumn : query.getGroupByColumns()) {
        if (query.getDimensionValues().containsKey(groupByColumn)) {
          throw new IllegalArgumentException("Cannot fix dimension value in group by: " + groupByColumn);
        }
        final Set<Future<Set<String>>> dimensionSetFutures = new HashSet<>();
        final List<StarTree> starTrees = new ArrayList<>(starTreeManager.getStarTrees(config.getCollection()).values());
        StarTree mutableTree = starTreeManager.getMutableStarTree(config.getCollection());
        if (mutableTree != null) {
          starTrees.add(mutableTree);
        }
        for (final StarTree starTree : starTrees) {
          UUID treeId = starTree.getRoot().getId();
          if (!treeIdsToQuery.contains(treeId) && !realTimeTreeIdsToQuery.contains(treeId)) {
            continue;
          }
          dimensionSetFutures.add(executorService.submit(new Callable<Set<String>>() {
            @Override
            public Set<String> call() throws Exception {
              // TODO: Support multiple values per dimension
              Multimap<String, String> values = query.getDimensionValues();
              Map<String, String> singleValues = new HashMap<>(values.size());
              for (Map.Entry<String, String> entry : query.getDimensionValues().entries()) {
                if (singleValues.containsKey(entry.getKey())) {
                  throw new IllegalArgumentException("Multiple values currently not supported: " + values);
                }
                singleValues.put(entry.getKey(), entry.getValue());
              }
              return starTree.getDimensionValues(groupByColumn, singleValues);
            }
          }));
        }
        Set<String> dimensionSet = new HashSet<>();
        for (Future<Set<String>> future : dimensionSetFutures) {
          dimensionSet.addAll(future.get());
        }
        dimensionSet.remove(StarTreeConstants.STAR); // never represent this one
        for (String dimensionValue : dimensionSet) {
          query.addDimensionValue(groupByColumn, dimensionValue);
        }
      }
    }
    // Dimensions
    List<DimensionKey> dimensionKeys = new ArrayList<>();
    for (String[] combination : query.getDimensionCombinations(config.getDimensions())) {
      dimensionKeys.add(new DimensionKey(combination));
    }
    // Metrics: submit one time-series fetch per (tree, dimension key)
    Map<StarTree, Multimap<DimensionKey, Future<MetricTimeSeries>>> timeSeriesFutures = new HashMap<>();
    final List<StarTree> starTrees = new ArrayList<>(starTreeManager.getStarTrees(config.getCollection()).values());
    StarTree mutableTree = starTreeManager.getMutableStarTree(config.getCollection());
    if (mutableTree != null) {
      starTrees.add(mutableTree);
    }
    for (final StarTree starTree : starTrees) {
      final UUID treeId = starTree.getRoot().getId();
      if (!treeIdsToQuery.contains(treeId) && !realTimeTreeIdsToQuery.contains(treeId)) {
        continue;
      }
      Multimap<DimensionKey, Future<MetricTimeSeries>> singleKeyResultMap = LinkedListMultimap.create();
      timeSeriesFutures.put(starTree, singleKeyResultMap);
      for (final DimensionKey dimensionKey : dimensionKeys) {
        DimensionKey flattenedKey = flattenDisjunctions(config, query, dimensionKey);
        timeSeriesFutures.get(starTree).put(flattenedKey, executorService.submit(new Callable<MetricTimeSeries>() {
          @Override
          public MetricTimeSeries call() throws Exception {
            TimeRange timeRange = timeRangesToQuery.get(treeId);
            return starTree.getTimeSeries(new StarTreeQueryImpl(config, dimensionKey, timeRange));
          }
        }));
      }
    }
    // Merge results across trees, aggregating series that share a dimension key
    Map<DimensionKey, MetricTimeSeries> mergedResults = new HashMap<>();
    for (Multimap<DimensionKey, Future<MetricTimeSeries>> resultMap : timeSeriesFutures.values()) {
      for (Map.Entry<DimensionKey, Collection<Future<MetricTimeSeries>>> entry : resultMap.asMap().entrySet()) {
        for (Future<MetricTimeSeries> seriesFuture : entry.getValue()) {
          MetricTimeSeries additionalSeries = seriesFuture.get();
          MetricTimeSeries currentSeries = mergedResults.get(entry.getKey());
          if (currentSeries == null) {
            currentSeries = new MetricTimeSeries(additionalSeries.getSchema());
            mergedResults.put(entry.getKey(), currentSeries);
          }
          currentSeries.aggregate(additionalSeries);
        }
      }
    }
    // Aggregate across all trees and apply functions
    for (Map.Entry<DimensionKey, MetricTimeSeries> entry : mergedResults.entrySet()) {
      MetricTimeSeries timeSeries = entry.getValue();
      // Compute aggregate functions
      for (ThirdEyeFunction function : query.getFunctions()) {
        timeSeries = function.apply(config, query, timeSeries);
      }
      // Add derived metrics
      for (ThirdEyeFunction function : query.getDerivedMetrics()) {
        timeSeries = function.apply(config, query, timeSeries);
      }
      // Convert to milliseconds
      timeSeries = TO_MILLIS.apply(config, query, timeSeries);
      result.addData(entry.getKey(), timeSeries);
      result.setMetrics(timeSeries.getSchema().getNames()); // multiple calls should be idempotent
    }
    LOGGER.info("END Execution for query_id: {} ", query.hashCode());
    return result;
  }

  /**
   * Selects the trees whose data times are non-disjoint with the query time range.
   *
   * <p>
   * This uses a greedy selection algorithm, in which we prefer trees with a
   * more coarse granularity (e.g. MONTHLY over HOURLY)
   * </p>
   */
  public List<UUID> selectTreesToQuery(final Map<UUID, IndexMetadata> treeMetadataMap,
      final TimeRange queryTimeRange) {
    List<UUID> treeIds = new ArrayList<>();
    // Determine which trees we need to query
    for (UUID treeId : treeMetadataMap.keySet()) {
      IndexMetadata indexMetadata = treeMetadataMap.get(treeId);
      TimeRange treeTimeRange =
          new TimeRange(indexMetadata.getMinDataTimeMillis(), indexMetadata.getMaxDataTimeMillis());
      if (!queryTimeRange.isDisjoint(treeTimeRange)) {
        treeIds.add(treeId);
      }
    }
    Comparator<? super UUID> comparator = new Comparator<UUID>() {
      @Override
      public int compare(UUID treeId1, UUID treeId2) {
        IndexMetadata indexMetadata1 = treeMetadataMap.get(treeId1);
        IndexMetadata indexMetadata2 = treeMetadataMap.get(treeId2);
        Long startTime1 = indexMetadata1.getStartTimeMillis();
        Long startTime2 = indexMetadata2.getStartTimeMillis();
        int ret = startTime1.compareTo(startTime2);
        if (ret == 0) {
          // Granularities not in the table (e.g. "KAFKA" real-time segments) previously
          // produced a null and an NPE here; treat them as finest (sort last) instead.
          Integer timeGranularity1 =
              timeGranularitySortOrder.getOrDefault(indexMetadata1.getTimeGranularity().toUpperCase(), Integer.MAX_VALUE);
          Integer timeGranularity2 =
              timeGranularitySortOrder.getOrDefault(indexMetadata2.getTimeGranularity().toUpperCase(), Integer.MAX_VALUE);
          ret = timeGranularity1.compareTo(timeGranularity2);
        }
        return ret;
      }
    };
    // We will have segments at multiple granularities hourly, daily, weekly, monthly.
    // Find the minimum number of trees to query
    // We use a greedy algorithm that sorts the tree with startTime, Granularity (monthly appears
    // first followed by weekly daily and hourly)
    Collections.sort(treeIds, comparator);
    List<UUID> treeIdsToQuery = new ArrayList<>();
    // Select the disjointed trees after filtering
    TimeRange lastSelectedRange = null;
    for (UUID treeId : treeIds) {
      IndexMetadata indexMetadata = treeMetadataMap.get(treeId);
      TimeRange treeRange = new TimeRange(indexMetadata.getStartTimeMillis(), indexMetadata.getEndTimeMillis());
      if (lastSelectedRange == null || lastSelectedRange.getEnd() <= treeRange.getStart()) { // range end is exclusive for tree
        lastSelectedRange = treeRange;
        treeIdsToQuery.add(treeId);
        LOGGER.info("Selecting treeId:{} with TimeRange:{}", treeId, treeRange);
      }
    }
    return treeIdsToQuery;
  }

  /** Converts wall-clock millis to the collection's bucketed time units. */
  private static long dateTimeToCollectionTime(StarTreeConfig config, DateTime dateTime) {
    TimeGranularity bucket = config.getTime().getBucket();
    return bucket.getUnit().convert(dateTime.getMillis(), TimeUnit.MILLISECONDS) / bucket.getSize();
  }

  /** Replaces dimensions in all queries involving disjunctions such that they can be grouped */
  private static DimensionKey flattenDisjunctions(StarTreeConfig config,
      ThirdEyeQuery query,
      DimensionKey dimensionKey) {
    String[] flattenedValues = new String[config.getDimensions().size()];
    for (int i = 0; i < config.getDimensions().size(); i++) {
      String dimensionName = config.getDimensions().get(i).getName();
      String dimensionValue = dimensionKey.getDimensionValues()[i];
      // Parameterized the previously raw Collection; the multimap values are Strings.
      Collection<String> queryValues = query.getDimensionValues().get(dimensionName);
      if (!query.getGroupByColumns().contains(dimensionName) && queryValues.size() > 1) {
        dimensionValue = OR_JOINER.join(queryValues);
      }
      flattenedValues[i] = dimensionValue;
    }
    return new DimensionKey(flattenedValues);
  }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.apple;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.matchesPattern;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TemporaryPaths;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.environment.Platform;
import org.junit.Rule;
import org.junit.Test;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.regex.Pattern;
/**
 * Integration tests for {@code prebuilt_apple_framework} rules: building, linking,
 * bundling, rule-key invalidation, and Xcode project generation. All tests require
 * macOS and are skipped elsewhere via {@code assumeTrue}.
 */
public class PrebuiltAppleFrameworkIntegrationTest {
  @Rule
  public TemporaryPaths tmp = new TemporaryPaths();

  /** Building a prebuilt framework target produces output in the gen directory. */
  @Test
  public void testPrebuiltAppleFrameworkBuildsSomething() throws IOException {
    assumeTrue(Platform.detect() == Platform.MACOS);
    ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
        this, "prebuilt_apple_framework_builds", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem = new ProjectFilesystem(workspace.getDestPath());
    BuildTarget target =
        BuildTargetFactory.newInstance("//prebuilt:BuckTest");
    ProjectWorkspace.ProcessResult result =
        workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    assertTrue(Files.exists(workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s"))));
  }

  /** An app linking the framework dynamically references it via @rpath, not a dylib. */
  @Test
  public void testPrebuiltAppleFrameworkLinks() throws IOException, InterruptedException {
    assumeTrue(Platform.detect() == Platform.MACOS);
    ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
        this, "prebuilt_apple_framework_links", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem = new ProjectFilesystem(workspace.getDestPath());
    BuildTarget target =
        BuildTargetFactory.newInstance("//app:TestApp");
    ProjectWorkspace.ProcessResult result =
        workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    Path testBinaryPath = workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s"));
    assertTrue(Files.exists(testBinaryPath));
    ProcessExecutor.Result otoolResult = workspace.runCommand(
        "otool", "-L", testBinaryPath.toString());
    assertEquals(0, otoolResult.getExitCode());
    assertThat(
        otoolResult.getStdout().orElse(""),
        containsString("@rpath/BuckTest.framework/BuckTest"));
    assertThat(
        otoolResult.getStdout().orElse(""),
        not(containsString("BuckTest.dylib")));
  }

  /** The include-frameworks flavor copies the framework into the app bundle. */
  @Test
  public void testPrebuiltAppleFrameworkCopiedToBundle() throws IOException, InterruptedException {
    assumeTrue(Platform.detect() == Platform.MACOS);
    ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
        this, "prebuilt_apple_framework_links", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem = new ProjectFilesystem(workspace.getDestPath());
    BuildTarget target =
        BuildTargetFactory.newInstance("//app:TestAppBundle#dwarf-and-dsym,include-frameworks");
    ProjectWorkspace.ProcessResult result =
        workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    Path includedFramework = workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s"))
        .resolve("TestAppBundle.app")
        .resolve("Frameworks")
        .resolve("BuckTest.framework");
    assertTrue(Files.isDirectory(includedFramework));
  }

  /**
   * Statically linking the framework pulls its symbols (and transitive Foundation
   * dependency) into the binary instead of leaving an @rpath load command.
   */
  @Test
  public void testStaticWithDependencies() throws IOException, InterruptedException {
    assumeTrue(Platform.detect() == Platform.MACOS);
    ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
        this, "prebuilt_apple_framework_static", tmp);
    workspace.setUp();
    ProjectFilesystem filesystem = new ProjectFilesystem(workspace.getDestPath());
    BuildTarget target =
        BuildTargetFactory.newInstance("//app:TestApp#static,macosx-x86_64");
    ProjectWorkspace.ProcessResult result =
        workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    Path testBinaryPath = workspace.getPath(BuildTargets.getGenPath(filesystem, target, "%s"));
    // Assert the binary exists before shelling out to otool/nm, matching
    // testPrebuiltAppleFrameworkLinks (gives a clearer failure than a tool error).
    assertTrue(Files.exists(testBinaryPath));
    ProcessExecutor.Result otoolResult = workspace.runCommand(
        "otool", "-L", testBinaryPath.toString());
    assertEquals(0, otoolResult.getExitCode());
    assertThat(
        otoolResult.getStdout().orElse(""),
        containsString("Foundation.framework"));
    assertThat(
        otoolResult.getStdout().orElse(""),
        not(containsString("@rpath/BuckTest.framework/BuckTest")));
    ProcessExecutor.Result nmResult = workspace.runCommand(
        "nm", testBinaryPath.toString());
    assertEquals(0, nmResult.getExitCode());
    assertThat(
        nmResult.getStdout().orElse(""),
        containsString("S _OBJC_CLASS_$_Hello"));
    assertThat(
        nmResult.getStdout().orElse(""),
        not(containsString("U _OBJC_CLASS_$_Hello")));
    assertThat(
        nmResult.getStdout().orElse(""),
        containsString("S _OBJC_CLASS_$_Strings"));
    assertThat(
        nmResult.getStdout().orElse(""),
        not(containsString("U _OBJC_CLASS_$_Strings")));
  }

  /** Untracked-header enforcement maps the framework's headers back to the app target. */
  @Test
  public void headerUsesShouldMapBackToTestApp() throws Exception {
    assumeTrue(Platform.detect() == Platform.MACOS);
    ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
        this, "prebuilt_apple_framework_links", tmp);
    workspace.setUp();
    workspace.runBuckBuild(
        "//app:TestApp#iphonesimulator-x86_64",
        "--config", "cxx.untracked_headers=error")
        .assertSuccess();
  }

  /** Editing a framework header must invalidate the dependent target's rule key. */
  @Test
  public void ruleKeyChangesWhenFrameworkIsModified() throws Exception {
    assumeTrue(Platform.detect() == Platform.MACOS);
    ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
        this, "prebuilt_apple_framework_links", tmp);
    workspace.setUp();
    String resultBefore;
    {
      ProjectWorkspace.ProcessResult result = workspace.runBuckCommand(
          "targets", "--show-rulekey", "//app:TestApp#iphonesimulator-x86_64");
      resultBefore = result.assertSuccess().getStdout();
    }
    workspace.writeContentsToPath("", "prebuilt/BuckTest.framework/Headers/Hello.h");
    String resultAfter;
    {
      ProjectWorkspace.ProcessResult result = workspace.runBuckCommand(
          "targets", "--show-rulekey", "//app:TestApp#iphonesimulator-x86_64");
      resultAfter = result.assertSuccess().getStdout();
    }
    assertNotEquals(
        "Rule Key before and after header change should be different",
        resultBefore,
        resultAfter);
  }

  /** The generated Xcode project builds and bundles/links the framework correctly. */
  @Test
  public void testProjectGeneratorGeneratesWorkingProject() throws Exception {
    assumeTrue(Platform.detect() == Platform.MACOS);
    ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
        this, "prebuilt_apple_framework_links", tmp);
    workspace.setUp();
    workspace.runBuckCommand("project", "//app:workspace").assertSuccess();
    {
      ProcessExecutor.Result result = workspace.runCommand(
          "xcodebuild",
          // "json" output.
          "-json",
          // Make sure the output stays in the temp folder.
          "-derivedDataPath", "xcode-out/",
          // Build the project that we just generated
          "-workspace", "app/TestAppBundle.xcworkspace",
          "-scheme", "TestAppBundle",
          // Build for iphonesimulator
          "-arch", "x86_64",
          "-sdk", "iphonesimulator");
      result.getStderr().ifPresent(System.err::print);
      assertEquals("xcodebuild should succeed", 0, result.getExitCode());
    }
    Path appBundlePath =
        tmp.getRoot().resolve("xcode-out/Build/Products/Debug-iphonesimulator/TestAppBundle.app");
    assertTrue(
        "Framework is copied into bundle.",
        Files.isRegularFile(appBundlePath.resolve("Frameworks/BuckTest.framework/BuckTest")));
    {
      ProcessExecutor.Result result =
          workspace.runCommand("otool", "-l", appBundlePath.resolve("TestAppBundle").toString());
      assertThat(
          "App binary adds Framework dir to rpath.",
          result.getStdout().get(),
          matchesPattern(
              Pattern.compile(
                  ".*\\s+cmd LC_RPATH.*\\s+path @executable_path/Frameworks\\b.*",
                  Pattern.DOTALL)));
      assertThat(
          "App binary has load instruction for framework",
          result.getStdout().get(),
          matchesPattern(
              Pattern.compile(
                  ".*\\s+cmd LC_LOAD_DYLIB.*\\s+name @rpath/BuckTest.framework/BuckTest\\b.*",
                  Pattern.DOTALL)));
    }
  }
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.wiki.management;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Iterator;
import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.AttributeNotFoundException;
import javax.management.DynamicMBean;
import javax.management.IntrospectionException;
import javax.management.InvalidAttributeValueException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanConstructorInfo;
import javax.management.MBeanException;
import javax.management.MBeanInfo;
import javax.management.MBeanNotificationInfo;
import javax.management.MBeanOperationInfo;
import javax.management.NotCompliantMBeanException;
import javax.management.ReflectionException;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
/**
 *  A simple MBean which does not require a separate interface class, unlike
 *  the {@link javax.management.StandardMBean} class.  The subclass declares
 *  which attributes and operations it wishes to expose through
 *  {@link #getAttributeNames()} and {@link #getMethodNames()}, and this class
 *  locates the corresponding methods using the Reflection API.
 *  <p>
 *  This class is similar to javax.management.StandardMBean, but it does
 *  <em>not</em> require the API interface to be declared, so it's simpler.
 *  It's not as powerful, but it does not require you to declare two classes
 *  (and keep them in sync).
 *
 *  @since 2.6
 */
// FIXME: This class should really use Annotations instead of a method call.
public abstract class SimpleMBean implements DynamicMBean
{
    private static final Logger LOG = Logger.getLogger( SimpleMBean.class );

    /** Metadata describing the exposed attributes and operations; built once in the constructor. */
    protected MBeanInfo m_beanInfo;

    /**
     *  Locates a getter or setter method by exact name.  If {@code parm} is null,
     *  a no-argument method (a getter) is searched for; otherwise a single-argument
     *  method taking {@code parm} (a setter) is searched for.  Note that
     *  getDeclaredMethod() is used, so only methods declared directly on the
     *  runtime class (not inherited ones) are found.
     *
     *  @param clazz The class to search.
     *  @param name  The exact method name, e.g. "getFoo".
     *  @param parm  The single parameter type, or null for a no-argument method.
     *  @return The matching method, or null if none is declared.
     */
    private static Method findGetterSetter( Class<?> clazz, String name, Class<?> parm )
    {
        try
        {
            Class<?>[] params = { parm };
            Class<?>[] emptyparms = {};

            return clazz.getDeclaredMethod( name, parm != null ? params : emptyparms );
        }
        catch( Exception e )
        {
            // A missing method is a normal outcome here - we just return null.
        }

        return null;
    }

    /**
     *  Create a new SimpleMBean.  Introspects the subclass for the attributes
     *  and operations named by {@link #getAttributeNames()} and
     *  {@link #getMethodNames()}, and builds the MBeanInfo accordingly.
     *
     *  @throws NotCompliantMBeanException If a declared operation has no matching
     *          public method, or a getter/setter pair is not a valid property.
     */
    protected SimpleMBean() throws NotCompliantMBeanException
    {
        //
        //  Create attributes
        //
        String[] attlist = getAttributeNames();
        MBeanAttributeInfo[] attributes = null;

        if( attlist != null )
        {
            attributes = new MBeanAttributeInfo[attlist.length];

            for( int i = 0; i < attlist.length; i++ )
            {
                String name = attlist[i];
                name = StringUtils.capitalize( name );

                //  Either getX() or (typically for booleans) isX() may act as the getter.
                Method getter = findGetterSetter( getClass(), "get"+name, null );
                if( getter == null ) getter = findGetterSetter( getClass(), "is"+name, null );

                //  A setter is only searched for when a getter exists, since the
                //  setter's parameter type is taken from the getter's return type.
                Method setter = null;
                if( getter != null )
                {
                    setter = findGetterSetter( getClass(), "set"+name, getter.getReturnType() );
                }

                //
                //  Check, if there's a description available via an optional
                //  getXDescription() method.
                //
                Method descriptor = findGetterSetter( getClass(), "get"+name+"Description", null );
                String description = "";

                if( descriptor != null )
                {
                    try
                    {
                        description = (String) descriptor.invoke( this, (Object[])null );
                    }
                    catch( Exception e )
                    {
                        description = "Exception: "+e.getMessage();
                    }
                }

                MBeanAttributeInfo info;
                try
                {
                    info = new MBeanAttributeInfo( attlist[i], description, getter, setter );
                }
                catch( IntrospectionException e )
                {
                    throw new NotCompliantMBeanException( e.getMessage() );
                }

                attributes[i] = info;
            }
        }

        //
        //  Create operations.
        //
        String[] oplist = getMethodNames();
        MBeanOperationInfo[] operations = new MBeanOperationInfo[oplist.length];
        Method[] methods = getClass().getMethods();

        for( int i = 0; i < oplist.length; i++ )
        {
            Method method = null;

            //  Overloads are not supported: the last public method with a
            //  matching name wins (see getMethodNames() javadoc).
            for( int m = 0; m < methods.length; m++ )
            {
                if( methods[m].getName().equals( oplist[i] ) )
                {
                    method = methods[m];
                }
            }

            if( method == null )
            {
                throw new NotCompliantMBeanException("Class declares method "+oplist[i]+", yet does not implement it!");
            }

            MBeanOperationInfo info = new MBeanOperationInfo( method.getName(), method );
            operations[i] = info;
        }

        //
        //  Create the actual BeanInfo instance.
        //
        MBeanConstructorInfo[] constructors = null;
        MBeanNotificationInfo[] notifications = null;

        m_beanInfo = new MBeanInfo( getClass().getName(),
                                    getDescription(),
                                    attributes,
                                    constructors,
                                    operations,
                                    notifications );
    }

    /**
     * Customization hook: Override this to get a description for your MBean. By default,
     * this is an empty string.
     *
     * @return A description for the MBean.
     */
    protected String getDescription()
    {
        return "";
    }

    /**
     *  Gets an attribute using reflection from the MBean.  Both getX() and isX()
     *  style getters are recognized, mirroring the introspection performed by
     *  the constructor.
     *
     *  @param name Name of the attribute to find.
     *  @return The value returned by the corresponding getXXX()/isXXX() call.
     *  @throws AttributeNotFoundException If there is no such attribute.
     *  @throws MBeanException Wraps any exception thrown by the getter itself.
     *  @throws ReflectionException If the getter could not be invoked reflectively.
     */
    public Object getAttribute( String name )
        throws AttributeNotFoundException, MBeanException, ReflectionException
    {
        String base = StringUtils.capitalize( name );

        //  Accept both bean getter styles, as the constructor does; previously
        //  only "get" was tried, making is-style boolean attributes unreadable.
        Method m = findGetterSetter( getClass(), "get"+base, null );
        if( m == null ) m = findGetterSetter( getClass(), "is"+base, null );
        if( m == null ) throw new AttributeNotFoundException( name );

        try
        {
            return m.invoke( this, (Object[])null );
        }
        catch( IllegalArgumentException e )
        {
            throw new ReflectionException( e, "Unable to invoke getter for "+name );
        }
        catch( IllegalAccessException e )
        {
            throw new ReflectionException( e, "Unable to access getter for "+name );
        }
        catch( InvocationTargetException e )
        {
            //  The getter itself threw.  Propagate it per the DynamicMBean
            //  contract instead of logging and silently returning null.
            Throwable target = e.getTargetException();
            if( target instanceof Error ) throw (Error) target;
            throw new MBeanException( (Exception) target, "Getter for attribute "+name+" failed" );
        }
    }

    /**
     * Gets multiple attributes at the same time.  Attributes which cannot be
     * read are logged and omitted from the result, per the DynamicMBean
     * best-effort contract.
     *
     * @param arg0 The attribute names to get
     * @return A list of the attributes that could be read
     */
    public AttributeList getAttributes( String[] arg0 )
    {
        AttributeList list = new AttributeList();

        for( int i = 0; i < arg0.length; i++ )
        {
            try
            {
                list.add( new Attribute( arg0[i], getAttribute( arg0[i] ) ) );
            }
            catch( AttributeNotFoundException e )
            {
                LOG.error( e.getMessage(), e );
            }
            catch( MBeanException e )
            {
                LOG.error( e.getMessage(), e );
            }
            catch( ReflectionException e )
            {
                LOG.error( e.getMessage(), e );
            }
            catch( RuntimeException e )
            {
                LOG.error( e.getMessage(), e );
            }
        }

        return list;
    }

    /**
     * Return the MBeanInfo structure built by the constructor.
     *
     * @return the MBeanInfo
     */
    public MBeanInfo getMBeanInfo()
    {
        return m_beanInfo;
    }

    /**
     * Invokes a particular method.  The first public method with a matching
     * name is used; overloads are not distinguished by signature.
     *
     * @param arg0 Method name
     * @param arg1 A list of arguments for the invocation
     * @param arg2 The signature of the method (currently not used for lookup)
     * @return Whatever the invoked method returns.
     * @throws MBeanException If the invocation fails.
     * @throws ReflectionException If the method cannot be found or accessed.
     */
    public Object invoke( String arg0, Object[] arg1, String[] arg2 )
        throws MBeanException, ReflectionException
    {
        Method[] methods = getClass().getMethods();

        for( int i = 0; i < methods.length; i++ )
        {
            if( methods[i].getName().equals( arg0 ) )
            {
                try
                {
                    return methods[i].invoke( this, arg1 );
                }
                catch( IllegalArgumentException e )
                {
                    throw new ReflectionException( e, "Wrong arguments" );
                }
                catch( IllegalAccessException e )
                {
                    throw new ReflectionException( e, "No access" );
                }
                catch( InvocationTargetException e )
                {
                    throw new ReflectionException( e, "Wrong target" );
                }
            }
        }

        throw new ReflectionException( null, "There is no such method "+arg0 );
    }

    /**
     * Sets an attribute value.  The setter is located by the runtime class of
     * the supplied value, so the value's class must exactly match the setter's
     * declared parameter type.
     *
     * @param attr The attribute (name and new value) to set; the value may not be null.
     * @throws AttributeNotFoundException If no matching setter exists.
     * @throws InvalidAttributeValueException If the value is null or of the wrong type.
     * @throws MBeanException {@inheritDoc}
     * @throws ReflectionException If the setter cannot be invoked.
     */
    public void setAttribute( Attribute attr )
        throws AttributeNotFoundException,
               InvalidAttributeValueException,
               MBeanException,
               ReflectionException
    {
        //  Guard against NPE below: the setter lookup needs the value's class.
        if( attr.getValue() == null )
        {
            throw new InvalidAttributeValueException( "Attribute value may not be null" );
        }

        String mname = "set"+StringUtils.capitalize( attr.getName() );
        Method m = findGetterSetter( getClass(), mname, attr.getValue().getClass() );

        if( m == null ) throw new AttributeNotFoundException( attr.getName() );

        Object[] args = { attr.getValue() };

        try
        {
            m.invoke( this, args );
        }
        catch( IllegalArgumentException e )
        {
            throw new InvalidAttributeValueException( "Faulty argument: "+e.getMessage() );
        }
        catch( IllegalAccessException e )
        {
            throw new ReflectionException( e, "Cannot access attribute "+e.getMessage() );
        }
        catch( InvocationTargetException e )
        {
            throw new ReflectionException( e, "Cannot invoke attribute "+e.getMessage() );
        }
    }

    /**
     * Sets multiple attributes at the same time.  Attributes which cannot be
     * set are logged and omitted from the returned list.
     *
     * @param arg0 The attributes to set.
     * @return The list of attributes that were successfully set.
     */
    public AttributeList setAttributes( AttributeList arg0 )
    {
        AttributeList result = new AttributeList();

        for( Object o : arg0 )
        {
            Attribute attr = (Attribute) o;

            //
            //  Attempt to set the attribute.  If it succeeds (no exception),
            //  then we just add it to the list of successful sets.
            //
            try
            {
                setAttribute( attr );
                result.add( attr );
            }
            catch( AttributeNotFoundException e )
            {
                LOG.error( e.getMessage(), e );
            }
            catch( InvalidAttributeValueException e )
            {
                LOG.error( e.getMessage(), e );
            }
            catch( MBeanException e )
            {
                LOG.error( e.getMessage(), e );
            }
            catch( ReflectionException e )
            {
                LOG.error( e.getMessage(), e );
            }
        }

        return result;
    }

    /**
     * This method must return a list of attributes which are
     * exposed by the SimpleMBean. If there's a getXXX() method
     * available, it'll be exposed as a getter, and if there's a
     * setXXX() method available, it'll be exposed as a setter.
     * For example:
     * <pre>
     *  public void setFoo( String foo ) ...
     *  public String getFoo() ...
     *
     *  public String[] getAttributeNames()
     *  {
     *      String[] attrs = { "foo" };
     *
     *      return attrs;
     *  }
     * </pre>
     * Also, methods starting with "is" are also recognized as getters
     * (e.g. <code>public boolean isFoo()</code>.)
     *
     * @return An array of attribute names that can be get and optionally set.
     */
    public abstract String[] getAttributeNames();

    /**
     * This method must return a list of operations which
     * are to be exposed by the SimpleMBean. Note that using overloaded
     * method names is not supported - only one will be exposed as a JMX method
     * at random.
     *
     * @return An array of method names that should be exposed as
     *         JMX operations.
     */
    public abstract String[] getMethodNames();
}
| |
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws;
import com.amazonaws.annotation.SdkInternalApi;
import com.amazonaws.annotation.SdkProtectedApi;
import com.amazonaws.auth.RegionAwareSigner;
import com.amazonaws.auth.Signer;
import com.amazonaws.auth.SignerFactory;
import com.amazonaws.client.AwsSyncClientParams;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.handlers.RequestHandler;
import com.amazonaws.handlers.RequestHandler2;
import com.amazonaws.http.AmazonHttpClient;
import com.amazonaws.http.ExecutionContext;
import com.amazonaws.internal.DefaultServiceEndpointBuilder;
import com.amazonaws.internal.auth.DefaultSignerProvider;
import com.amazonaws.internal.auth.SignerProvider;
import com.amazonaws.internal.auth.SignerProviderContext;
import com.amazonaws.log.CommonsLogFactory;
import com.amazonaws.metrics.AwsSdkMetrics;
import com.amazonaws.metrics.RequestMetricCollector;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.util.AWSRequestMetrics;
import com.amazonaws.util.AWSRequestMetrics.Field;
import com.amazonaws.util.AwsHostNameUtils;
import com.amazonaws.util.Classes;
import com.amazonaws.util.RuntimeHttpUtils;
import com.amazonaws.util.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.net.URI;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import static com.amazonaws.SDKGlobalConfiguration.PROFILING_SYSTEM_PROPERTY;
/**
 * Abstract base class for Amazon Web Service Java clients.
 * <p>
 * Responsible for basic client capabilities that are the same across all AWS
 * SDK Java clients (ex: setting the client endpoint).
 */
public abstract class AmazonWebServiceClient {
    /**
     * @deprecated No longer used.
     */
    @Deprecated
    public static final boolean LOGGING_AWS_REQUEST_METRIC = true;

    // Client-class-name prefixes; used further down when deriving the service
    // name from the http client class name by convention.
    private static final String AMAZON = "Amazon";
    private static final String AWS = "AWS";

    private static final Log log =
        LogFactory.getLog(AmazonWebServiceClient.class);

    static {
        // Configures the internal logging of the signers and core
        // classes to use Jakarta Commons Logging to stay consistent with the
        // rest of the library.
        boolean success = com.amazonaws.log.InternalLogFactory.configureFactory(
                            new CommonsLogFactory());
        if (log.isDebugEnabled())
            log.debug("Internal logging successfully configured to commons logger: "
                    + success);
    }

    /**
     * Flag indicating whether a client is mutable or not. Legacy clients built via the constructors
     * are mutable. Clients built with the fluent builders are immutable.
     */
    private volatile boolean isImmutable = false;

    /**
     * The service endpoint to which this client will send requests.
     * <p>
     * Subclass should only read but not assign to this field, at least not
     * without synchronization on the enclosing object for thread-safety
     * reason.
     */
    protected volatile URI endpoint;

    /**
     * Used to explicitly override the internal signer region computed by the
     * default implementation. This field is typically null.
     */
    private volatile String signerRegionOverride;

    /** The client configuration */
    protected ClientConfiguration clientConfiguration;

    /** Low level client for sending requests to AWS services. */
    protected AmazonHttpClient client;

    /** Optional request handlers for additional request processing. */
    protected final List<RequestHandler2> requestHandler2s;

    /** Optional offset (in seconds) to use when signing requests */
    protected int timeOffset;

    // Provides the Signer used for each request; replaced atomically (under
    // "synchronized (this)") whenever the endpoint or region changes.
    private volatile SignerProvider signerProvider;

    /**
     * The cached service abbreviation for this service, used for identifying
     * service endpoints by region, identifying the necessary signer, etc.
     * Thread safe so it's backward compatible.
     */
    private volatile String serviceName;

    /**
     * The service name in region metadata, i.e. the prefix of endpoint.
     */
    private volatile String endpointPrefix;
    /**
     * Constructs a new AmazonWebServiceClient object using the specified
     * configuration.
     *
     * @param clientConfiguration
     *            The client configuration for this client.
     */
    public AmazonWebServiceClient(ClientConfiguration clientConfiguration) {
        // Delegate to the two-argument overload; null means no request metric collector.
        this(clientConfiguration, null);
    }
    /**
     * Constructs a new AmazonWebServiceClient object using the specified
     * configuration and request metric collector.
     *
     * @param clientConfiguration
     *            The client configuration for this client.
     * @param requestMetricCollector
     *            optional request metric collector to be used at the http
     *            client level; can be null.
     */
    public AmazonWebServiceClient(ClientConfiguration clientConfiguration,
            RequestMetricCollector requestMetricCollector) {
        // Delegate with strict host name verification enabled (false = do not disable).
        this(clientConfiguration, requestMetricCollector, false);
    }
    /**
     * Constructs a new client with the given configuration and optional metric
     * collector, optionally disabling strict hostname verification on the
     * underlying http client.
     *
     * @param clientConfiguration the client configuration for this client
     * @param requestMetricCollector optional metric collector used at the http
     *            client level; can be null
     * @param disableStrictHostNameVerification whether strict hostname
     *            verification should be turned off for HTTPS connections
     */
    @SdkProtectedApi
    protected AmazonWebServiceClient(ClientConfiguration clientConfiguration,
                                     RequestMetricCollector requestMetricCollector,
                                     boolean disableStrictHostNameVerification) {
        this.clientConfiguration = clientConfiguration;
        // Copy-on-write: handlers are added rarely but iterated on every request.
        requestHandler2s = new CopyOnWriteArrayList<RequestHandler2>();
        client = new AmazonHttpClient(clientConfiguration,
                requestMetricCollector, disableStrictHostNameVerification,
                calculateCRC32FromCompressedData());
    }
    /**
     * Constructs a new client from a pre-assembled parameter object, as used
     * by the fluent client builders.
     *
     * @param clientParams bundle of configuration, request handlers and metric collector
     */
    protected AmazonWebServiceClient(AwsSyncClientParams clientParams) {
        this.clientConfiguration = clientParams.getClientConfiguration();
        requestHandler2s = clientParams.getRequestHandlers();
        client = new AmazonHttpClient(clientConfiguration, clientParams.getRequestMetricCollector(),
                !useStrictHostNameVerification(),
                calculateCRC32FromCompressedData());
    }
    /**
     * Returns the signer.
     * <p>
     * Note, however, the signer configured for S3 is incomplete at this stage
     * as the information on the S3 bucket and key is not yet known.
     *
     * @return the signer produced by the current {@link SignerProvider} with an empty context
     */
    @Deprecated
    protected Signer getSigner() {
        return signerProvider.getSigner(SignerProviderContext.builder().build());
    }
    /**
     * @return Current SignerProvider instance.
     */
    @SdkProtectedApi
    protected SignerProvider getSignerProvider() {
        return signerProvider;
    }
/**
* Overrides the default endpoint for this client. Callers can use this
* method to control which AWS region they want to work with.
* <p>
* <b>This method is not threadsafe. Endpoints should be configured when the
* client is created and before any service requests are made. Changing it
* afterwards creates inevitable race conditions for any service requests in
* transit.</b>
* <p>
* Callers can pass in just the endpoint (ex: "ec2.amazonaws.com") or a full
* URL, including the protocol (ex: "https://ec2.amazonaws.com"). If the
* protocol is not specified here, the default protocol from this client's
* {@link ClientConfiguration} will be used, which by default is HTTPS.
* <p>
* For more information on using AWS regions with the AWS SDK for Java, and
* a complete list of all available endpoints for all AWS services, see:
* <a href="http://developer.amazonwebservices.com/connect/entry.jspa?externalID=3912">
* http://developer.amazonwebservices.com/connect/entry.jspa?externalID=3912</a>
*
* @param endpoint
* The endpoint (ex: "ec2.amazonaws.com") or a full URL,
* including the protocol (ex: "https://ec2.amazonaws.com") of
* the region specific AWS endpoint this client will communicate
* with.
* @throws IllegalArgumentException
* If any problems are detected with the specified endpoint.
*
* @deprecated use {@link AwsClientBuilder#setEndpointConfiguration(AwsClientBuilder.EndpointConfiguration)} for example:
* {@code builder.setEndpointConfiguration(new EndpointConfiguration(endpoint, signingRegion));}
*/
@Deprecated
public void setEndpoint(String endpoint) throws IllegalArgumentException {
checkMutability();
URI uri = toURI(endpoint);
Signer signer = computeSignerByURI(uri, signerRegionOverride, false);
synchronized(this) {
this.endpoint = uri;
this.signerProvider = createSignerProvider(signer);
}
}
    /** Returns the endpoint as a URI, applying the configured default protocol if none is given. */
    private URI toURI(String endpoint) throws IllegalArgumentException {
        return RuntimeHttpUtils.toUri(endpoint, clientConfiguration);
    }
    /**
     * Returns the signer based on the given URI and the current AWS client
     * configuration. Currently only the SQS client can have different region on
     * a per request basis. For other AWS clients, the region remains the same
     * on a per AWS client level.
     * <p>
     * Note, however, the signer returned for S3 is incomplete at this stage as
     * the information on the S3 bucket and key is not yet known.
     *
     * @param uri the endpoint whose host determines the signing region
     */
    public Signer getSignerByURI(URI uri) {
        // true: the parsed region id may be applied to region-aware signers.
        return computeSignerByURI(uri, signerRegionOverride, true);
    }
/**
* Returns the signer for the given uri and the current client
* configuration.
* <p>
* Note, however, the signer returned for S3 is incomplete at this stage as
* the information on the S3 bucket and key is not yet known.
*
* @param signerRegionOverride
* the overriding signer region; or null if there is none.
* @param isRegionIdAsSignerParam
* true if the "regionId" is used to configure the signer if
* applicable; false if this method is called for the purpose of
* purely setting the communication end point of this AWS client,
* and therefore the "regionId" parameter will not be used
* directly for configuring the signer.
*/
private Signer computeSignerByURI(URI uri, String signerRegionOverride,
boolean isRegionIdAsSignerParam) {
if (uri == null) {
throw new IllegalArgumentException(
"Endpoint is not set. Use setEndpoint to set an endpoint before performing any request.");
}
String service = getServiceNameIntern();
String region = AwsHostNameUtils.parseRegionName(uri.getHost(), service);
return computeSignerByServiceRegion(
service, region, signerRegionOverride, isRegionIdAsSignerParam);
}
/**
* Returns the signer for the given service name, region id, and the current
* client configuration.
* <p>
* Note, however, the signer returned for S3 is incomplete at this stage as
* the information on the S3 bucket and key is not yet known.
*
* @param regionId
* the region for sending AWS requests
* @param signerRegionOverride
* the overriding signer region; or null if there is none.
* @param isRegionIdAsSignerParam
* true if the "regionId" is used to configure the signer if
* applicable; false if this method is called for the purpose of
* purely setting the communication end point of this AWS client,
* and therefore the "regionId" parameter will not be used
* directly for configuring the signer.
*/
private Signer computeSignerByServiceRegion(
String serviceName, String regionId,
String signerRegionOverride,
boolean isRegionIdAsSignerParam) {
String signerType = clientConfiguration.getSignerOverride();
Signer signer = signerType == null
? SignerFactory.getSigner(serviceName, regionId)
: SignerFactory.getSignerByTypeAndService(signerType, serviceName)
;
if (signer instanceof RegionAwareSigner) {
// Overrides the default region computed
RegionAwareSigner regionAwareSigner = (RegionAwareSigner)signer;
// (signerRegionOverride != null) means that it is likely to be AWS
// internal dev work, as "signerRegionOverride" is typically null
// when used in the external release
if (signerRegionOverride != null)
regionAwareSigner.setRegionName(signerRegionOverride);
else if (regionId != null && isRegionIdAsSignerParam)
regionAwareSigner.setRegionName(regionId);
}
return signer;
}
/**
* An alternative to {@link AmazonWebServiceClient#setEndpoint(String)}, sets the regional
* endpoint for this client's service calls. Callers can use this method to control which AWS
* region they want to work with.
* <p>
* <b>This method is not threadsafe. A region should be configured when the client is created
* and before any service requests are made. Changing it afterwards creates inevitable race
* conditions for any service requests in transit or retrying.</b>
* <p>
* By default, all service endpoints in all regions use the https protocol. To use http instead,
* specify it in the {@link ClientConfiguration} supplied at construction.
*
* @param region
* The region this client will communicate with. See
* {@link Region#getRegion(com.amazonaws.regions.Regions)} for accessing a given
* region.
* @throws java.lang.IllegalArgumentException
* If the given region is null, or if this service isn't available in the given
* region. See {@link Region#isServiceSupported(String)}
* @see Region#getRegion(com.amazonaws.regions.Regions)
* @see Region#createClient(Class, com.amazonaws.auth.AWSCredentialsProvider,
* ClientConfiguration)
* @deprecated use {@link AwsClientBuilder#setRegion(String)}
*/
@Deprecated
public void setRegion(Region region) throws IllegalArgumentException {
checkMutability();
if (region == null) {
throw new IllegalArgumentException("No region provided");
}
final String serviceNameForEndpoint = getEndpointPrefix();
final String serviceNameForSigner = getServiceNameIntern();
URI uri = new DefaultServiceEndpointBuilder(serviceNameForEndpoint, clientConfiguration.getProtocol()
.toString()).withRegion(region).getServiceEndpoint();
Signer signer = computeSignerByServiceRegion(serviceNameForSigner, region.getName(), signerRegionOverride, false);
synchronized (this) {
this.endpoint = uri;
this.signerProvider = createSignerProvider(signer);
}
}
/**
* Convenient method for setting region.
*
* @param region region to set to; must not be null.
*
* @see #setRegion(Region)
* @deprecated use {@link AwsClientBuilder#setRegion(String)}
*/
@Deprecated
public final void configureRegion(Regions region) {
checkMutability();
if (region == null)
throw new IllegalArgumentException("No region provided");
this.setRegion(Region.getRegion(region));
}
    /**
     * Shuts down this client object, releasing any resources that might be held
     * open. This is an optional method, and callers are not expected to call
     * it, but can if they want to explicitly release any open resources. Once a
     * client has been shutdown, it should not be used to make any more
     * requests.
     */
    public void shutdown() {
        // Delegates to the underlying http client, which owns the open resources.
        client.shutdown();
    }
    /**
     * @deprecated by {@link #addRequestHandler(RequestHandler2)}.
     *
     * Appends a request handler to the list of registered handlers that are run
     * as part of a request's lifecycle.
     *
     * @param requestHandler
     *            The new handler to add to the current list of request
     *            handlers.
     */
    @Deprecated
    public void addRequestHandler(RequestHandler requestHandler) {
        checkMutability();
        // Wrap the legacy handler in the RequestHandler2 adapter before storing.
        requestHandler2s.add(RequestHandler2.adapt(requestHandler));
    }
    /**
     * Appends a request handler to the list of registered handlers that are run
     * as part of a request's lifecycle.
     *
     * @param requestHandler2
     *            The new handler to add to the current list of request
     *            handlers.
     * @deprecated use {@link AwsClientBuilder#withRequestHandlers(RequestHandler2...)}
     */
    @Deprecated
    public void addRequestHandler(RequestHandler2 requestHandler2) {
        checkMutability();
        requestHandler2s.add(requestHandler2);
    }
    /**
     * Removes a request handler from the list of registered handlers that are run
     * as part of a request's lifecycle.
     *
     * @param requestHandler
     *            The handler to remove from the current list of request
     *            handlers.
     * @deprecated use {@link AwsClientBuilder#withRequestHandlers(RequestHandler2...)}
     */
    @Deprecated
    public void removeRequestHandler(RequestHandler requestHandler) {
        checkMutability();
        // Adapt to RequestHandler2 the same way addRequestHandler does, so the
        // stored adapter can be matched for removal.
        requestHandler2s.remove(RequestHandler2.adapt(requestHandler));
    }
    /**
     * Removes a request handler from the list of registered handlers.
     *
     * @param requestHandler2 the handler to remove
     * @deprecated use {@link AwsClientBuilder#withRequestHandlers(RequestHandler2...)}
     */
    @Deprecated
    public void removeRequestHandler(RequestHandler2 requestHandler2) {
        checkMutability();
        requestHandler2s.remove(requestHandler2);
    }
/**
* Runs the {@code beforeMarshalling} method of any
* {@code RequestHandler2}s associated with this client.
*
* @param request the request passed in from the user
* @return the (possibly different) request to marshal
*/
@SuppressWarnings("unchecked")
protected final <T extends AmazonWebServiceRequest> T beforeMarshalling(
T request) {
T local = request;
for (RequestHandler2 handler : requestHandler2s) {
local = (T) handler.beforeMarshalling(local);
}
return local;
}
    /** Creates an execution context for the request using this client's current signer provider. */
    protected ExecutionContext createExecutionContext(AmazonWebServiceRequest req) {
        return createExecutionContext(req, signerProvider);
    }
    /**
     * Creates an execution context for the request with an explicit signer provider.
     * Request metrics are enabled when either the request or the client/SDK has an
     * enabled collector, or when the profiling system property is set.
     */
    protected ExecutionContext createExecutionContext(AmazonWebServiceRequest req,
            SignerProvider signerProvider) {
        boolean isMetricsEnabled = isRequestMetricsEnabled(req) || isProfilingEnabled();
        return ExecutionContext.builder()
                .withRequestHandler2s(requestHandler2s)
                .withUseRequestMetrics(isMetricsEnabled)
                .withAwsClient(this)
                .withSignerProvider(signerProvider).build();
    }
    /** Convenience overload: creates an execution context from a marshalled request's original request. */
    protected final ExecutionContext createExecutionContext(Request<?> req) {
        return createExecutionContext(req.getOriginalRequest());
    }
    /** Wraps a signer in the default provider implementation; subclasses may override. */
    protected SignerProvider createSignerProvider(Signer signer) {
        return new DefaultSignerProvider(this, signer);
    }
    /* Check the profiling system property and return true if set */
    protected static boolean isProfilingEnabled() {
        return System.getProperty(PROFILING_SYSTEM_PROPERTY) != null;
    }
/**
* Returns true if request metric collection is applicable to the given
* request; false otherwise.
*/
protected final boolean isRequestMetricsEnabled(AmazonWebServiceRequest req) {
RequestMetricCollector c = req.getRequestMetricCollector(); // request level collector
if (c != null && c.isEnabled()) {
return true;
}
return isRMCEnabledAtClientOrSdkLevel();
}
/**
* Returns true if request metric collection is enabled at the service
* client or AWS SDK level request; false otherwise.
*/
private boolean isRMCEnabledAtClientOrSdkLevel() {
RequestMetricCollector c = requestMetricCollector();
return c != null && c.isEnabled();
}
    /**
     * Sets the optional value for time offset for this client.  This
     * value will be applied to all requests processed through this client.
     * Value is in seconds, positive values imply the current clock is "fast",
     * negative values imply clock is slow.
     *
     * @param timeOffset
     *            The optional value for time offset (in seconds) for this client.
     */
    public void setTimeOffset(int timeOffset) {
        // Disallowed on immutable (builder-created) clients.
        checkMutability();
        this.timeOffset = timeOffset;
    }
/**
* Sets the optional value for time offset for this client. This
* value will be applied to all requests processed through this client.
* Value is in seconds, positive values imply the current clock is "fast",
* negative values imply clock is slow.
*
* @param timeOffset
* The optional value for time offset (in seconds) for this client.
*
* @return the updated web service client
*/
public AmazonWebServiceClient withTimeOffset(int timeOffset) {
checkMutability();
setTimeOffset(timeOffset);
return this;
}
    /**
     * Returns the optional value for time offset for this client.  This
     * value will be applied to all requests processed through this client.
     * Value is in seconds, positive values imply the current clock is "fast",
     * negative values imply clock is slow.
     *
     * @return The optional value for time offset (in seconds) for this client.
     */
    public int getTimeOffset() {
        return timeOffset;
    }
    /**
     * Returns the client specific {@link RequestMetricCollector}; or null if
     * there is none.
     */
    public RequestMetricCollector getRequestMetricsCollector() {
        return client.getRequestMetricCollector();
    }
/**
* Returns the client specific request metric collector if there is one; or
* the one at the AWS SDK level otherwise.
*/
protected RequestMetricCollector requestMetricCollector() {
RequestMetricCollector mc = client.getRequestMetricCollector();
return mc == null ? AwsSdkMetrics.getRequestMetricCollector() : mc;
}
/**
* Returns the most specific request metric collector, starting from the request level, then
* client level, then finally the AWS SDK level.
*/
private final RequestMetricCollector findRequestMetricCollector(
RequestMetricCollector reqLevelMetricsCollector) {
if (reqLevelMetricsCollector != null) {
return reqLevelMetricsCollector;
} else if (getRequestMetricsCollector() != null) {
return getRequestMetricsCollector();
} else {
return AwsSdkMetrics.getRequestMetricCollector();
}
}
    /**
     * Convenient method to end the client execution without logging the
     * awsRequestMetrics.
     */
    protected final void endClientExecution(
            AWSRequestMetrics awsRequestMetrics, Request<?> request,
            Response<?> response) {
        // The fourth argument is deprecated and ignored by the overload below.
        this.endClientExecution(awsRequestMetrics, request, response,
                !LOGGING_AWS_REQUEST_METRIC);
    }
    /**
     * Common routine to end a client AWS request/response execution and collect
     * the request metrics.  Caller of this routine is responsible for starting
     * the event for {@link Field#ClientExecuteTime} and call this method
     * in a try-finally block.
     *
     * @param loggingAwsRequestMetrics deprecated and ignored
     */
    protected final void endClientExecution(
            AWSRequestMetrics awsRequestMetrics, Request<?> request,
            Response<?> response, @Deprecated boolean loggingAwsRequestMetrics) {
        if (request != null) {
            // Order matters: close the timing event before handing the metrics
            // to the most specific collector, then emit the metrics log.
            awsRequestMetrics.endEvent(Field.ClientExecuteTime);
            awsRequestMetrics.getTimingInfo().endTiming();
            RequestMetricCollector c = findRequestMetricCollector(
                    request.getOriginalRequest().getRequestMetricCollector());
            c.collectMetrics(request, response);
            awsRequestMetrics.log();
        }
    }
    /**
     * @deprecated by {@link #getServiceName()}.
     */
    @Deprecated
    protected String getServiceAbbreviation() {
        return getServiceNameIntern();
    }
    /**
     * Returns the service abbreviation for this service, used for identifying
     * service endpoints by region, identifying the necessary signer, etc.
     * Used to be call "getServiceAbbreviation".
     */
    public String getServiceName() {
        return getServiceNameIntern();
    }
    /**
     * @return the service name that should be used when computing the region
     *         endpoints. This method returns the value of the
     *         regionMetadataServiceName configuration in the internal config
     *         file if such configuration is specified for the current client,
     *         otherwise it returns the same service name that is used for
     *         request signing.
     */
    public String getEndpointPrefix() {
        // Fast path: endpointPrefix is volatile, so once set it can be
        // returned without taking the lock.
        if (endpointPrefix != null) {
            return endpointPrefix;
        }

        String httpClientName = getHttpClientName();
        String serviceNameInRegionMetadata = ServiceNameFactory.
                getServiceNameInRegionMetadata(httpClientName);

        synchronized (this) {
            // Double-check: another thread may have initialized the field
            // between the unlocked check above and acquiring the lock.
            if (endpointPrefix != null) {
                return endpointPrefix;
            }
            if (serviceNameInRegionMetadata != null) {
                return endpointPrefix = serviceNameInRegionMetadata;
            } else {
                return endpointPrefix = getServiceNameIntern();
            }
        }
    }
    /**
     * An internal method used to explicitly override the service name for region metadata.
     * This service name is used to compute the region endpoints.
     *
     * @param endpointPrefix the endpoint prefix to use; must not be null
     * @throws IllegalArgumentException if {@code endpointPrefix} is null
     */
    protected void setEndpointPrefix(String endpointPrefix) {
        if (endpointPrefix == null) {
            throw new IllegalArgumentException(
                "The parameter endpointPrefix must be specified!");
        }
        this.endpointPrefix = endpointPrefix;
    }
    /**
     * Internal method for implementing {@link #getServiceName()}. Kept
     * protected on purpose so a peculiar subclass that doesn't follow the
     * class naming convention can choose to return whatever service name is
     * needed.
     */
    protected String getServiceNameIntern() {
        // Lazily compute the name once; subsequent calls hit the cached field.
        // NOTE(review): classic double-checked locking -- only safe if the
        // serviceName field is volatile; confirm its declaration (outside
        // this view).
        if (serviceName == null) {
            synchronized (this) {
                if (serviceName == null) {
                    return serviceName = computeServiceName();
                }
            }
        }
        return serviceName;
    }
/**
* An internal method used to explicitly override the service name
* computed by the default implementation. This method is not expected to be
* normally called except for AWS internal development purposes.
*/
public final void setServiceNameIntern(String serviceName) {
if (serviceName == null)
throw new IllegalArgumentException(
"The parameter serviceName must be specified!");
this.serviceName = serviceName;
}
/**
* Returns the service name of this AWS http client by first looking it up from the SDK internal
* configuration, and if not found, derive it from the class name of the immediate subclass of
* {@link AmazonWebServiceClient}. No configuration is necessary if the simple class name of the
* http client follows the convention of <code>(Amazon|AWS).*(JavaClient|Client)</code>.
*/
private String computeServiceName() {
final String httpClientName = getHttpClientName();
String service = ServiceNameFactory.getServiceName(httpClientName);
if (service != null) {
return service; // only if it is so explicitly configured
}
// Otherwise, make use of convention over configuration
int j = httpClientName.indexOf("JavaClient");
if (j == -1) {
j = httpClientName.indexOf("Client");
if (j == -1) {
throw new IllegalStateException(
"Unrecognized suffix for the AWS http client class name " + httpClientName);
}
}
int i = httpClientName.indexOf(AMAZON);
int len;
if (i == -1) {
i = httpClientName.indexOf(AWS);
if (i == -1) {
throw new IllegalStateException(
"Unrecognized prefix for the AWS http client class name " + httpClientName);
}
len = AWS.length();
} else {
len = AMAZON.length();
}
if (i >= j) {
throw new IllegalStateException(
"Unrecognized AWS http client class name " + httpClientName);
}
String serviceName = httpClientName.substring(i + len, j);
return StringUtils.lowerCase(serviceName);
}
    /**
     * Returns the simple class name of the immediate subclass of
     * {@link AmazonWebServiceClient}; used as the lookup key for service-name
     * and region-metadata configuration.
     */
    private String getHttpClientName() {
        Class<?> httpClientClass = Classes.childClassOf(AmazonWebServiceClient.class, this);
        return httpClientClass.getSimpleName();
    }
    /**
     * Returns the signer region override.
     *
     * @return the overriding signer region, or null if none was set
     * @see #setSignerRegionOverride(String).
     */
    public final String getSignerRegionOverride() {
        return signerRegionOverride;
    }
    /**
     * An internal method used to explicitly override the internal signer region
     * computed by the default implementation. This method is not expected to be
     * normally called except for AWS internal development purposes.
     *
     * @param signerRegionOverride the region used for signing, overriding the
     *        region derived from the current endpoint
     */
    public final void setSignerRegionOverride(String signerRegionOverride) {
        checkMutability();
        // Recompute the signer for the current endpoint with the override
        // applied, before taking the lock.
        Signer signer = computeSignerByURI(endpoint, signerRegionOverride, true);
        synchronized(this) {
            // Publish the override and its matching signer provider together.
            this.signerRegionOverride = signerRegionOverride;
            this.signerProvider = createSignerProvider(signer);
        }
    }
    /**
     * Fluent method for {@link #setRegion(Region)}.
     *<pre>
     * Example:
     *
     *   AmazonDynamoDBClient client = new AmazonDynamoDBClient(...).&lt;AmazonDynamoDBClient&gt;withRegion(...);
     *</pre>
     * @see #setRegion(Region)
     * @deprecated use {@link AwsClientBuilder#withRegion(Region)} for example:
     * {@code AmazonSNSClientBuilder.standard().withRegion(region).build();}
     */
    @Deprecated
    public <T extends AmazonWebServiceClient> T withRegion(Region region) {
        setRegion(region);
        // Unchecked cast is safe by convention: callers bind T to the concrete
        // client type they invoked this on (see the javadoc example).
        @SuppressWarnings("unchecked") T t= (T)this;
        return t;
    }
    /**
     * Convenient fluent method for setting region.
     *
     * @param region region to set to; must not be null.
     *
     * @see #withRegion(Region)
     * @deprecated use {@link AwsClientBuilder#withRegion(Regions)} for example:
     * {@code AmazonSNSClientBuilder.standard().withRegion(region).build();}
     */
    @Deprecated
    public <T extends AmazonWebServiceClient> T withRegion(Regions region) {
        configureRegion(region);
        // Unchecked cast is safe by convention: callers bind T to the concrete
        // client type they invoked this on.
        @SuppressWarnings("unchecked") T t= (T)this;
        return t;
    }
    /**
     * Fluent method for {@link #setEndpoint(String)}.
     *<pre>
     * Example:
     *
     *   AmazonDynamoDBClient client = new AmazonDynamoDBClient(...).&lt;AmazonDynamoDBClient&gt;withEndPoint(...);
     *</pre>
     * @see #setEndpoint(String)
     * @deprecated use {@link AwsClientBuilder#withEndpointConfiguration(AwsClientBuilder.EndpointConfiguration)} for example:
     * {@code AmazonSNSClientBuilder.standard().withEndpointConfiguration(new EndpointConfiguration(endpoint, signingRegion)).build();}
     */
    @Deprecated
    public <T extends AmazonWebServiceClient> T withEndpoint(String endpoint) {
        setEndpoint(endpoint);
        // Unchecked cast is safe by convention: callers bind T to the concrete
        // client type they invoked this on.
        @SuppressWarnings("unchecked") T t= (T)this;
        return t;
    }
    /**
     * Internal only API to lock a client's mutable methods. Only intended for use by the fluent
     * builders.
     */
    @Deprecated
    @SdkInternalApi
    public final void makeImmutable() {
        // One-way switch: once set, checkMutability() rejects further mutation.
        this.isImmutable = true;
    }
    /**
     * If the client has been marked as immutable then throw an {@link
     * UnsupportedOperationException}, otherwise do nothing. Should be called by each mutating
     * method.
     *
     * @throws UnsupportedOperationException if {@link #makeImmutable()} has been called
     */
    @SdkProtectedApi
    protected final void checkMutability() {
        if (isImmutable) {
            throw new UnsupportedOperationException(
                    "Client is immutable when created with the builder.");
        }
    }
    /**
     * Hook to allow S3 client to disable strict hostname verification since it uses wildcard
     * certificates. The default here keeps strict verification on.
     *
     * @return True if strict hostname verification should be used, false otherwise.
     */
    protected boolean useStrictHostNameVerification() {
        return true;
    }
    /**
     * Hook to allow clients to override CRC32 calculation behavior. Currently, only exercised by DynamoDB.
     * The default assumes checksums are computed over the uncompressed payload.
     *
     * @return True if the service returns CRC32 checksum from the compressed data, false otherwise.
     */
    protected boolean calculateCRC32FromCompressedData() {
        return false;
    }
}
| |
package org.aries.launcher;
import java.util.List;
import java.util.Map;
import javax.management.ObjectName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.aries.util.io.StreamListener;
import org.aries.util.properties.PropertyManager;
import common.jmx.MBeanUtil;
/**
 * Base class for launchers that run an external program as a child process,
 * drive its lifecycle (start/join/stop) through a {@link ProcessManager}, and
 * register themselves as a JMX MBean for remote control.
 *
 * NOTE(review): no synchronization is done here; the lifecycle appears to be
 * driven from a single thread -- confirm before concurrent use.
 */
public abstract class AbstractApplicationLauncher { //implements Launcher {

    /** Returns the base JMX MBean name for this launcher type. */
    public abstract String getMBeanName();

    protected Log log = LogFactory.getLog(getClass());

    /** JMX name this launcher is registered under; set in {@link #initialize()}. */
    protected ObjectName objectName;

    /** Descriptor of the program to launch (name, output/monitoring flags). */
    protected Program program;

    /** Owns the child process; created in {@link #initializeManager(List)}. */
    protected ProcessManager processManager;

    // NOTE(review): exposed via getStartTime() but never assigned in this
    // class -- confirm whether a subclass is expected to set it.
    protected long startTime;

    // OS flags detected from the "os.name" system property in initialize().
    protected boolean isWin32;
    protected boolean isWinXP;

    public AbstractApplicationLauncher() {
        //does nothing
    }

    public AbstractApplicationLauncher(Program program) {
        setProgram(program);
    }

    public void setProgram(Program program) {
        this.program = program;
    }

    public boolean isWin32() {
        return isWin32;
    }

    public boolean isWinXP() {
        return isWinXP;
    }

    public long getStartTime() {
        return startTime;
    }

    /** Hook for subclasses to observe process output; default is no listener. */
    protected StreamListener getStreamListener() {
        return null;
    }

    /*
     * Remote Interface Methods
     * ------------------------
     */

    /** @return true while the managed child process is running */
    public boolean isActive() {
        return processManager.isActive();
    }

    /** Starts the managed program, logging and rethrowing any failure. */
    public void start() throws Exception {
        try {
            log.info("Starting: "+program.getName());
            processManager.start();
            log.info("Started: "+program.getName());
        } catch (Exception e) {
            log.error("Problem starting: "+program.getName(), e);
            throw e;
        }
    }

    /** Blocks until the managed program exits. */
    public void join() throws Exception {
        try {
            log.info("Joining: "+program.getName());
            processManager.join();
            log.info("Returned: "+program.getName());
        } catch (Exception e) {
            log.error("Problem joining: "+program.getName(), e);
            throw e;
        }
    }

    /**
     * Stops the managed program (if still active) and unregisters the MBean.
     * Any failure -- including Errors -- is wrapped in a RuntimeException.
     */
    //@Override
    public void stop() throws Exception {
        try {
            log.info("Stopping: "+program.getName());
            if (processManager.isActive())
                processManager.stop();
            MBeanUtil.unregisterMBean(objectName);
            log.info("Stopped: "+program.getName());
        } catch (Throwable e) {
            log.error("Problem closing: "+program.getName(), e);
            throw new RuntimeException(e);
        }
    }

    /*
     * Internal Methods
     * ----------------
     */

    public ObjectName getObjectName() {
        return objectName;
    }

    /**
     * Builds the JMX object name from {@link #getMBeanName()} plus an
     * "application" key carrying the program name.
     */
    protected ObjectName makeObjectName() {
        // StringBuilder: this buffer is method-local, no synchronization needed.
        StringBuilder buf = new StringBuilder(getMBeanName());
        buf.append(",application=").append(program.getName());
        //buf.append(",isWin32="+_isWin32);
        return MBeanUtil.makeObjectName(buf.toString());
    }

    /**
     * Loads properties, detects the host OS and registers this launcher as a
     * JMX MBean.
     */
    public void initialize() throws Exception {
        PropertyManager.getInstance().initialize();
        // Read "os.name" once and derive both flags from it.
        String osName = System.getProperty("os.name").toLowerCase();
        isWin32 = osName.indexOf("windows") != -1;
        isWinXP = osName.indexOf("windows xp") != -1;
        objectName = makeObjectName();
        MBeanUtil.registerMBean(this, objectName);
        log.info("registered MBean: "+objectName);
    }

    /**
     * Creates and initializes the process manager for the given command line,
     * then seeds its environment.
     */
    public void initializeManager(List<String> command) throws Exception {
        processManager = createProcessManager(command);
        processManager.initialize();
        initializeEnvironment();
    }

    /**
     * Copies the variables the child process needs (PATH, LD_LIBRARY_PATH,
     * SystemRoot on Windows) from the parent environment and system
     * properties into the process manager's environment.
     */
    protected void initializeEnvironment() {
        //String ps = System.getProperty("path.separator");
        Map<String, String> environmentVariables = System.getenv();
        //System.out.println(environmentVariables.size());
        if (isWin32()) {
            //add SystemRoot
            if (isWinXP() || true)
                addEnvironmentVariable("SystemRoot", "C:\\WINDOWS");
            //else environmentVariables.put("SystemRoot", "C:\\WINNT");
        }
        //TODO make isSolaris field
        //if (!isWin32()) {
        // NOTE(review): values are appended with no path separator between
        // them; the commented-out "path.separator" lookup above suggests one
        // was intended -- confirm before relying on multi-entry paths.
        appendEnvironmentVariable("PATH", environmentVariables.get("PATH"));
        appendEnvironmentVariable("PATH", System.getProperty("PATH"));
        appendEnvironmentVariable("LD_LIBRARY_PATH", environmentVariables.get("LD_LIBRARY_PATH"));
        appendEnvironmentVariable("LD_LIBRARY_PATH", System.getProperty("LD_LIBRARY_PATH"));
        appendEnvironmentVariable("LD_LIBRARY_PATH", System.getProperty("java.library.path"));
        //}
        //log the current state
        log.info("isWin32="+isWin32());
        //log.info("isSolaris="+isSolaris());
        //log.info("isCentos="+isCentos());
        //System.getProperties().list(System.out);
        log.info("PATH="+getEnvironmentVariable("PATH"));
        log.info("LD_LIBRARY_PATH="+getEnvironmentVariable("LD_LIBRARY_PATH"));
    }

    protected String getEnvironmentVariable(String name) {
        return processManager.getEnvironment().get(name);
    }

    /** Sets (or replaces) a variable in the child process environment. */
    protected void addEnvironmentVariable(String name, String value) {
        processManager.getEnvironment().put(name, value);
    }

    /**
     * Appends {@code value} to an existing variable in the child environment,
     * or sets it if absent; null values are ignored.
     */
    protected void appendEnvironmentVariable(String name, String value) {
        if (value != null) {
            String currentValue = processManager.getEnvironment().get(name);
            if (currentValue != null)
                value = currentValue+value;
            processManager.getEnvironment().put(name, value);
        }
    }

    /** Creates a process manager configured from the program descriptor. */
    protected ProcessManager createProcessManager(List<String> command) {
        ProcessManager processManager = new ProcessManager(program.getName(), command);
        processManager.setOutputEnabled(program.isOutputEnabled());
        processManager.setStreamListener(getStreamListener());
        processManager.setMonitored(program.isMonitored());
        return processManager;
    }

    /**
     * Returns the directory under the application home that a jar would be
     * expanded into, i.e. the jar path with its ".jar" extension removed.
     */
    protected String getArchivePath(String jarFile) {
        String jarDirectory = stripJarExtension(jarFile);
        String archivePath = getApplicationHome()+"/"+jarDirectory;
        return archivePath;
    }

    /** Resolves the application home below the current working directory. */
    protected String getApplicationHome() {
        String fs = System.getProperty("file.separator");
        String userDir = System.getProperty("user.dir");
        String appHome = userDir+fs+PropertyManager.getInstance().get("applicationHome");
        return appHome;
    }

    /** Returns the archive file name with its ".jar" extension removed. */
    protected String getApplicationDirectory(String archiveFile) {
        return stripJarExtension(archiveFile);
    }

    /**
     * Strips a trailing ".jar" (at its last occurrence) from the name.
     * Previously a name without ".jar" caused a StringIndexOutOfBoundsException
     * (substring with a negative index); now such names are returned unchanged.
     */
    private String stripJarExtension(String fileName) {
        int position = fileName.lastIndexOf(".jar");
        return position == -1 ? fileName : fileName.substring(0, position);
    }
}
| |
/****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.smtpserver;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import javax.mail.BodyPart;
import javax.mail.MessagingException;
import javax.mail.Multipart;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import junit.framework.TestCase;
import org.apache.james.dnsservice.api.DNSService;
import org.apache.james.dnsservice.api.mock.MockDNSService;
import org.apache.james.protocols.smtp.SMTPSession;
import org.apache.james.protocols.smtp.hook.HookResult;
import org.apache.james.protocols.smtp.hook.HookReturnCode;
import org.apache.james.protocols.smtp.utils.BaseFakeSMTPSession;
import org.apache.james.smtpserver.fastfail.URIRBLHandler;
import org.apache.james.smtpserver.mock.MockMimeMessage;
import org.apache.james.smtpserver.mock.mailet.MockMail;
import org.apache.mailet.Mail;
/**
 * Tests for {@link URIRBLHandler}: messages containing a URI whose domain is
 * listed on the (mocked) URIBL must be denied; unlisted domains pass.
 */
public class URIRBLHandlerTest extends TestCase {

    private static final String BAD_DOMAIN1 = "bad.domain.de";
    private static final String BAD_DOMAIN2 = "bad2.domain.de";
    private static final String GOOD_DOMAIN = "good.apache.org";
    private static final String URISERVER = "multi.surbl.org.";
    private SMTPSession mockedSMTPSession;
    private Mail mockedMail;

    /**
     * Creates a fake SMTP session around the given mail, backed by simple
     * per-session and per-connection attribute maps.
     */
    private SMTPSession setupMockedSMTPSession(final Mail mail) {
        mockedMail = mail;
        mockedSMTPSession = new BaseFakeSMTPSession() {

            private String ipAddress = "192.168.0.1";
            private String host = "localhost";
            private boolean relayingAllowed;

            public String getRemoteHost() {
                return host;
            }

            public String getRemoteIPAddress() {
                return ipAddress;
            }

            private HashMap<String, Object> sstate = new HashMap<String, Object>();
            private HashMap<String, Object> connectionState = new HashMap<String, Object>();

            @Override
            public Object setAttachment(String key, Object value, State state) {
                // A null value removes the attribute, mirroring the real session.
                if (state == State.Connection) {
                    if (value == null) {
                        return connectionState.remove(key);
                    }
                    return connectionState.put(key, value);
                } else {
                    if (value == null) {
                        return sstate.remove(key);
                    }
                    return sstate.put(key, value);
                }
            }

            @Override
            public Object getAttachment(String key, State state) {
                // Ensure a sender is always present for the handler under test.
                sstate.put(SMTPSession.SENDER, "sender@james.apache.org");
                if (state == State.Connection) {
                    return connectionState.get(key);
                } else {
                    return sstate.get(key);
                }
            }

            public boolean isRelayingAllowed() {
                return relayingAllowed;
            }

            public void setRelayingAllowed(boolean relayingAllowed) {
                this.relayingAllowed = relayingAllowed;
            }
        };
        return mockedSMTPSession;
    }

    private Mail setupMockedMail(MimeMessage message) {
        MockMail mail = new MockMail();
        mail.setMessage(message);
        return mail;
    }

    /** Builds a simple single-part text message. */
    public MimeMessage setupMockedMimeMessage(String text) throws MessagingException {
        MimeMessage message = new MimeMessage(new MockMimeMessage());
        message.setText(text);
        message.saveChanges();
        return message;
    }

    /** Builds a multipart message whose first body part carries the text. */
    public MimeMessage setupMockedMimeMessageMP(String text) throws MessagingException {
        MimeMessage message = new MimeMessage(new MockMimeMessage());
        // Create the message part
        BodyPart messageBodyPart = new MimeBodyPart();
        // Fill the message
        messageBodyPart.setText(text);
        Multipart multipart = new MimeMultipart();
        multipart.addBodyPart(messageBodyPart);
        message.setContent(multipart);
        message.saveChanges();
        return message;
    }

    /**
     * Setup the mocked dnsserver: BAD_DOMAIN1/BAD_DOMAIN2 resolve against the
     * URIBL zone (i.e. are listed), GOOD_DOMAIN does not.
     */
    private DNSService setupMockedDnsServer() {
        DNSService mockedDnsServer = new MockDNSService() {

            public Collection findTXTRecords(String hostname) {
                // Generic local instead of the previous raw List/ArrayList.
                List<String> res = new ArrayList<String>();
                if (hostname == null) {
                    return res;
                }
                if ((BAD_DOMAIN1.substring(4)).equals(hostname)) {
                    res.add("Blocked - see http://www.surbl.org");
                }
                return res;
            }

            public InetAddress getByName(String host) throws UnknownHostException {
                if ((BAD_DOMAIN1.substring(4) + "." + URISERVER).equals(host)) {
                    return InetAddress.getByName("127.0.0.1");
                } else if ((BAD_DOMAIN2.substring(4) + "." + URISERVER).equals(host)) {
                    return InetAddress.getByName("127.0.0.1");
                } else if ((GOOD_DOMAIN.substring(5) + "." + URISERVER).equals(host)) {
                    // Not listed on the URIBL.
                    throw new UnknownHostException();
                }
                throw new UnsupportedOperationException("getByName(" + host + ") not implemented by this mock");
            }
        };
        return mockedDnsServer;
    }

    public void testNotBlocked() throws IOException, MessagingException {
        ArrayList<String> servers = new ArrayList<String>();
        servers.add(URISERVER);
        SMTPSession session = setupMockedSMTPSession(setupMockedMail(setupMockedMimeMessage("http://" + GOOD_DOMAIN + "/")));
        URIRBLHandler handler = new URIRBLHandler();
        handler.setDNSService(setupMockedDnsServer());
        handler.setUriRblServer(servers);
        HookResult response = handler.onMessage(session, mockedMail);
        // assertEquals takes (message, expected, actual); the original had the
        // last two arguments swapped, which inverts JUnit's failure message.
        assertEquals("Email was not rejected", HookReturnCode.DECLINED, response.getResult());
    }

    public void testBlocked() throws IOException, MessagingException {
        ArrayList<String> servers = new ArrayList<String>();
        servers.add(URISERVER);
        SMTPSession session = setupMockedSMTPSession(setupMockedMail(setupMockedMimeMessage("http://" + BAD_DOMAIN1 + "/")));
        URIRBLHandler handler = new URIRBLHandler();
        handler.setDNSService(setupMockedDnsServer());
        handler.setUriRblServer(servers);
        HookResult response = handler.onMessage(session, mockedMail);
        assertEquals("Email was rejected", HookReturnCode.DENY, response.getResult());
    }

    public void testBlockedMultiPart() throws IOException, MessagingException {
        ArrayList<String> servers = new ArrayList<String>();
        servers.add(URISERVER);
        // One listed and one unlisted URI: the listed one must still trigger DENY.
        SMTPSession session = setupMockedSMTPSession(setupMockedMail(setupMockedMimeMessageMP("http://" + BAD_DOMAIN1 + "/" + " " + "http://" + GOOD_DOMAIN + "/")));
        URIRBLHandler handler = new URIRBLHandler();
        handler.setDNSService(setupMockedDnsServer());
        handler.setUriRblServer(servers);
        HookResult response = handler.onMessage(session, mockedMail);
        assertEquals("Email was rejected", HookReturnCode.DENY, response.getResult());
    }

    /*
     * public void testAddJunkScore() throws IOException, MessagingException {
     *
     * ArrayList servers = new ArrayList(); servers.add(URISERVER);
     *
     * SMTPSession session =
     * setupMockedSMTPSession(setupMockedMail(setupMockedMimeMessage("http://" +
     * BAD_DOMAIN1 + "/"))); session.getState().put(JunkScore.JUNK_SCORE, new
     * JunkScoreImpl());
     *
     * URIRBLHandler handler = new URIRBLHandler();
     *
     * ContainerUtil.enableLogging(handler, new MockLogger());
     * handler.setDnsServer(setupMockedDnsServer());
     * handler.setUriRblServer(servers); handler.setAction("junkScore");
     * handler.setScore(20); HookResult response = handler.onMessage(session,
     * mockedMail);
     *
     * assertNull("Email was not rejected", response);
     * assertEquals("JunkScore added", ((JunkScore)
     * session.getState().get(JunkScore
     * .JUNK_SCORE)).getStoredScore("UriRBLCheck"), 20.0, 0d); }
     */
}
| |
package org.apache.maven.project;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.maven.RepositoryUtils;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.InvalidRepositoryException;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.LegacyLocalRepositoryManager;
import org.apache.maven.bridge.MavenRepositorySystem;
import org.apache.maven.model.Build;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.DependencyManagement;
import org.apache.maven.model.DeploymentRepository;
import org.apache.maven.model.Extension;
import org.apache.maven.model.Model;
import org.apache.maven.model.Plugin;
import org.apache.maven.model.Profile;
import org.apache.maven.model.ReportPlugin;
import org.apache.maven.model.building.DefaultModelBuildingRequest;
import org.apache.maven.model.building.DefaultModelProblem;
import org.apache.maven.model.building.FileModelSource;
import org.apache.maven.model.building.ModelBuilder;
import org.apache.maven.model.building.ModelBuildingException;
import org.apache.maven.model.building.ModelBuildingRequest;
import org.apache.maven.model.building.ModelBuildingResult;
import org.apache.maven.model.building.ModelProblem;
import org.apache.maven.model.building.ModelProcessor;
import org.apache.maven.model.building.ModelSource;
import org.apache.maven.model.building.StringModelSource;
import org.apache.maven.model.resolution.ModelResolver;
import org.apache.maven.repository.internal.ArtifactDescriptorUtils;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.component.annotations.Requirement;
import org.codehaus.plexus.logging.Logger;
import org.codehaus.plexus.util.Os;
import org.codehaus.plexus.util.StringUtils;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.RequestTrace;
import org.eclipse.aether.impl.RemoteRepositoryManager;
import org.eclipse.aether.repository.LocalRepositoryManager;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.repository.WorkspaceRepository;
import org.eclipse.aether.resolution.ArtifactRequest;
import org.eclipse.aether.resolution.ArtifactResult;
/**
*/
@Component( role = ProjectBuilder.class )
public class DefaultProjectBuilder
implements ProjectBuilder
{
@Requirement
private Logger logger;
@Requirement
private ModelBuilder modelBuilder;
@Requirement
private ModelProcessor modelProcessor;
@Requirement
private ProjectBuildingHelper projectBuildingHelper;
@Requirement
private MavenRepositorySystem repositorySystem;
@Requirement
private org.eclipse.aether.RepositorySystem repoSystem;
@Requirement
private RemoteRepositoryManager repositoryManager;
@Requirement
private ProjectDependenciesResolver dependencyResolver;
// ----------------------------------------------------------------------
// MavenProjectBuilder Implementation
// ----------------------------------------------------------------------
    // Single-project entry point: builds from a POM file on disk; no reactor
    // model pool/cache is used for this path.
    @Override
    public ProjectBuildingResult build( File pomFile, ProjectBuildingRequest request )
        throws ProjectBuildingException
    {
        return build( pomFile, new FileModelSource( pomFile ), new InternalConfig( request, null, null ) );
    }
    // Single-project entry point for an in-memory/remote model source; the
    // resulting project has no backing file.
    @Override
    public ProjectBuildingResult build( ModelSource modelSource, ProjectBuildingRequest request )
        throws ProjectBuildingException
    {
        return build( null, modelSource, new InternalConfig( request, null, null ) );
    }
    /**
     * Core single-project build: runs the model builder (unless the request
     * already carries a pre-built project), optionally resolves dependencies,
     * and converts delayed model-building errors into a
     * ProjectBuildingException that still carries the partial result.
     */
    private ProjectBuildingResult build( File pomFile, ModelSource modelSource, InternalConfig config )
        throws ProjectBuildingException
    {
        // The model-building listener may swap the context class loader
        // (project realm); always restore it on the way out.
        ClassLoader oldContextClassLoader = Thread.currentThread().getContextClassLoader();
        try
        {
            ProjectBuildingRequest projectBuildingRequest = config.request;
            MavenProject project = projectBuildingRequest.getProject();
            List<ModelProblem> modelProblems = null;
            Throwable error = null;
            if ( project == null )
            {
                ModelBuildingRequest request = getModelBuildingRequest( config );
                project = new MavenProject();
                project.setFile( pomFile );
                DefaultModelBuildingListener listener =
                    new DefaultModelBuildingListener( project, projectBuildingHelper, projectBuildingRequest );
                request.setModelBuildingListener( listener );
                request.setPomFile( pomFile );
                request.setModelSource( modelSource );
                request.setLocationTracking( true );
                ModelBuildingResult result;
                try
                {
                    result = modelBuilder.build( request );
                }
                catch ( ModelBuildingException e )
                {
                    // With no effective model there is nothing to continue
                    // with; fail immediately. Otherwise remember the error.
                    result = e.getResult();
                    if ( result == null || result.getEffectiveModel() == null )
                    {
                        throw new ProjectBuildingException( e.getModelId(), e.getMessage(), pomFile, e );
                    }
                    // validation error, continue project building and delay failing to help IDEs
                    error = e;
                }
                modelProblems = result.getProblems();
                initProject( project, Collections.<String, MavenProject>emptyMap(), result,
                             new HashMap<File, Boolean>(), projectBuildingRequest );
            }
            else if ( projectBuildingRequest.isResolveDependencies() )
            {
                // Pre-built project: just make its class realm current.
                projectBuildingHelper.selectProjectRealm( project );
            }
            DependencyResolutionResult resolutionResult = null;
            if ( projectBuildingRequest.isResolveDependencies() )
            {
                resolutionResult = resolveDependencies( project, config.session );
            }
            ProjectBuildingResult result = new DefaultProjectBuildingResult( project, modelProblems, resolutionResult );
            if ( error != null )
            {
                // Surface the delayed model error, attached to the full result.
                ProjectBuildingException e = new ProjectBuildingException( Arrays.asList( result ) );
                e.initCause( error );
                throw e;
            }
            return result;
        }
        finally
        {
            Thread.currentThread().setContextClassLoader( oldContextClassLoader );
        }
    }
    /**
     * Resolves the project's dependencies and records both the resolved and
     * the full artifact sets on the project. Resolution failures are not
     * rethrown; the (partial) result carries the problems.
     */
    private DependencyResolutionResult resolveDependencies( MavenProject project, RepositorySystemSession session )
    {
        DependencyResolutionResult resolutionResult;
        try
        {
            DefaultDependencyResolutionRequest resolution = new DefaultDependencyResolutionRequest( project, session );
            resolutionResult = dependencyResolver.resolve( resolution );
        }
        catch ( DependencyResolutionException e )
        {
            // Keep whatever was resolved; errors live on the result itself.
            resolutionResult = e.getResult();
        }
        Set<Artifact> artifacts = new LinkedHashSet<>();
        if ( resolutionResult.getDependencyGraph() != null )
        {
            // Exclude the project's own artifact from the collected set.
            RepositoryUtils.toArtifacts( artifacts, resolutionResult.getDependencyGraph().getChildren(),
                                         Collections.singletonList( project.getArtifact().getId() ), null );
            // Maven 2.x quirk: an artifact always points at the local repo, regardless whether resolved or not
            LocalRepositoryManager lrm = session.getLocalRepositoryManager();
            for ( Artifact artifact : artifacts )
            {
                if ( !artifact.isResolved() )
                {
                    String path = lrm.getPathForLocalArtifact( RepositoryUtils.toArtifact( artifact ) );
                    artifact.setFile( new File( lrm.getRepository().getBasedir(), path ) );
                }
            }
        }
        project.setResolvedArtifacts( artifacts );
        project.setArtifacts( artifacts );
        return resolutionResult;
    }
private List<String> getProfileIds( List<Profile> profiles )
{
List<String> ids = new ArrayList<>( profiles.size() );
for ( Profile profile : profiles )
{
ids.add( profile.getId() );
}
return ids;
}
    /**
     * Creates a model-building request mirroring the project-building
     * configuration, wiring in a model resolver bound to this config's
     * session, repositories and (optional) reactor model pool/cache.
     */
    private ModelBuildingRequest getModelBuildingRequest( InternalConfig config )
    {
        ProjectBuildingRequest configuration = config.request;
        ModelBuildingRequest request = new DefaultModelBuildingRequest();
        RequestTrace trace = RequestTrace.newChild( null, configuration ).newChild( request );
        ModelResolver resolver =
            new ProjectModelResolver( config.session, trace, repoSystem, repositoryManager, config.repositories,
                                      configuration.getRepositoryMerging(), config.modelPool );
        // Copy all relevant settings 1:1 from the project-building request.
        request.setValidationLevel( configuration.getValidationLevel() );
        request.setProcessPlugins( configuration.isProcessPlugins() );
        request.setProfiles( configuration.getProfiles() );
        request.setActiveProfileIds( configuration.getActiveProfileIds() );
        request.setInactiveProfileIds( configuration.getInactiveProfileIds() );
        request.setSystemProperties( configuration.getSystemProperties() );
        request.setUserProperties( configuration.getUserProperties() );
        request.setBuildStartTime( configuration.getBuildStartTime() );
        request.setModelResolver( resolver );
        request.setModelCache( config.modelCache );
        return request;
    }
    // Artifact entry point without stubbing: a missing POM is always an error.
    @Override
    public ProjectBuildingResult build( Artifact artifact, ProjectBuildingRequest request )
        throws ProjectBuildingException
    {
        return build( artifact, false, request );
    }
    /**
     * Builds a project from an artifact by resolving its POM first; when the
     * POM is missing and {@code allowStubModel} is set, a minimal in-memory
     * stub model carrying just the artifact coordinates is used instead.
     */
    @Override
    public ProjectBuildingResult build( Artifact artifact, boolean allowStubModel, ProjectBuildingRequest request )
        throws ProjectBuildingException
    {
        org.eclipse.aether.artifact.Artifact pomArtifact = RepositoryUtils.toArtifact( artifact );
        pomArtifact = ArtifactDescriptorUtils.toPomArtifact( pomArtifact );
        InternalConfig config = new InternalConfig( request, null, null );
        boolean localProject;
        try
        {
            ArtifactRequest pomRequest = new ArtifactRequest();
            pomRequest.setArtifact( pomArtifact );
            pomRequest.setRepositories( config.repositories );
            ArtifactResult pomResult = repoSystem.resolveArtifact( config.session, pomRequest );
            pomArtifact = pomResult.getArtifact();
            // A workspace hit means the POM belongs to a local (reactor) project.
            localProject = pomResult.getRepository() instanceof WorkspaceRepository;
        }
        catch ( org.eclipse.aether.resolution.ArtifactResolutionException e )
        {
            // Only a *missing* POM may be replaced by a stub, and only on request.
            if ( e.getResults().get( 0 ).isMissing() && allowStubModel )
            {
                return build( null, createStubModelSource( artifact ), config );
            }
            throw new ProjectBuildingException( artifact.getId(),
                                                "Error resolving project artifact: " + e.getMessage(), e );
        }
        File pomFile = pomArtifact.getFile();
        if ( "pom".equals( artifact.getType() ) )
        {
            // For POM-typed artifacts the resolved POM *is* the artifact.
            artifact.selectVersion( pomArtifact.getVersion() );
            artifact.setFile( pomFile );
            artifact.setResolved( true );
        }
        return build( localProject ? pomFile : null, new FileModelSource( pomFile ), config );
    }
private ModelSource createStubModelSource( Artifact artifact )
{
StringBuilder buffer = new StringBuilder( 1024 );
buffer.append( "<?xml version='1.0'?>" );
buffer.append( "<project>" );
buffer.append( "<modelVersion>4.0.0</modelVersion>" );
buffer.append( "<groupId>" ).append( artifact.getGroupId() ).append( "</groupId>" );
buffer.append( "<artifactId>" ).append( artifact.getArtifactId() ).append( "</artifactId>" );
buffer.append( "<version>" ).append( artifact.getBaseVersion() ).append( "</version>" );
buffer.append( "<packaging>" ).append( artifact.getType() ).append( "</packaging>" );
buffer.append( "</project>" );
return new StringModelSource( buffer, artifact.getId() );
}
    /**
     * Multi-project (reactor) entry point: phase one builds raw models for
     * all POMs (collecting interim results and populating the reactor model
     * pool), phase two finishes the projects; all errors are aggregated and
     * thrown at the end.
     */
    @Override
    public List<ProjectBuildingResult> build( List<File> pomFiles, boolean recursive, ProjectBuildingRequest request )
        throws ProjectBuildingException
    {
        List<ProjectBuildingResult> results = new ArrayList<>();
        List<InterimResult> interimResults = new ArrayList<>();
        ReactorModelPool modelPool = new ReactorModelPool();
        ReactorModelCache modelCache = new ReactorModelCache();
        InternalConfig config = new InternalConfig( request, modelPool, modelCache );
        Map<String, MavenProject> projectIndex = new HashMap<>( 256 );
        // Phase 1: raw model building, recursing into modules.
        boolean noErrors =
            build( results, interimResults, projectIndex, pomFiles, new LinkedHashSet<File>(), true, recursive,
                   config );
        populateReactorModelPool( modelPool, interimResults );
        // Phase 2 may swap the context class loader; restore it afterwards.
        ClassLoader oldContextClassLoader = Thread.currentThread().getContextClassLoader();
        try
        {
            noErrors =
                build( results, new ArrayList<MavenProject>(), projectIndex, interimResults, request,
                       new HashMap<File, Boolean>() ) && noErrors;
        }
        finally
        {
            Thread.currentThread().setContextClassLoader( oldContextClassLoader );
        }
        if ( !noErrors )
        {
            throw new ProjectBuildingException( results );
        }
        return results;
    }
private boolean build( List<ProjectBuildingResult> results, List<InterimResult> interimResults,
Map<String, MavenProject> projectIndex, List<File> pomFiles, Set<File> aggregatorFiles,
boolean isRoot, boolean recursive, InternalConfig config )
{
boolean noErrors = true;
for ( File pomFile : pomFiles )
{
aggregatorFiles.add( pomFile );
if ( !build( results, interimResults, projectIndex, pomFile, aggregatorFiles, isRoot, recursive, config ) )
{
noErrors = false;
}
aggregatorFiles.remove( pomFile );
}
return noErrors;
}
    /**
     * Phase one for a single POM: runs the first (raw) model building phase, records an
     * {@link InterimResult} for phase two, and — when recursive — resolves and validates the
     * POM's modules, recursing into them with the aggregator chain used for cycle detection.
     *
     * @return {@code true} when this POM and all of its modules built without errors
     */
    private boolean build( List<ProjectBuildingResult> results, List<InterimResult> interimResults,
                           Map<String, MavenProject> projectIndex, File pomFile, Set<File> aggregatorFiles,
                           boolean isRoot, boolean recursive, InternalConfig config )
    {
        boolean noErrors = true;
        ModelBuildingRequest request = getModelBuildingRequest( config );
        MavenProject project = new MavenProject();
        request.setPomFile( pomFile );
        // Two-phase building lets phase two finish interpolation once the whole reactor is known.
        request.setTwoPhaseBuilding( true );
        request.setLocationTracking( true );
        DefaultModelBuildingListener listener =
            new DefaultModelBuildingListener( project, projectBuildingHelper, config.request );
        request.setModelBuildingListener( listener );
        try
        {
            ModelBuildingResult result = modelBuilder.build( request );
            Model model = result.getEffectiveModel();
            // Index by the model id so phase two can look up reactor parents.
            projectIndex.put( result.getModelIds().get( 0 ), project );
            InterimResult interimResult = new InterimResult( pomFile, request, result, listener, isRoot );
            interimResults.add( interimResult );
            if ( recursive && !model.getModules().isEmpty() )
            {
                File basedir = pomFile.getParentFile();
                List<File> moduleFiles = new ArrayList<>();
                for ( String module : model.getModules() )
                {
                    if ( StringUtils.isEmpty( module ) )
                    {
                        continue;
                    }
                    // Normalize path separators so module entries work on any OS.
                    module = module.replace( '\\', File.separatorChar ).replace( '/', File.separatorChar );
                    File moduleFile = new File( basedir, module );
                    if ( moduleFile.isDirectory() )
                    {
                        // A directory module points at the POM inside it.
                        moduleFile = modelProcessor.locatePom( moduleFile );
                    }
                    if ( !moduleFile.isFile() )
                    {
                        // Missing module: record the problem and keep scanning the remaining modules.
                        ModelProblem problem =
                            new DefaultModelProblem( "Child module " + moduleFile + " of " + pomFile
                                + " does not exist", ModelProblem.Severity.ERROR, ModelProblem.Version.BASE, model, -1,
                                -1, null );
                        result.getProblems().add( problem );
                        noErrors = false;
                        continue;
                    }
                    if ( Os.isFamily( Os.FAMILY_WINDOWS ) )
                    {
                        // we don't canonicalize on unix to avoid interfering with symlinks
                        try
                        {
                            moduleFile = moduleFile.getCanonicalFile();
                        }
                        catch ( IOException e )
                        {
                            // Canonicalization is best-effort; fall back to the absolute path.
                            moduleFile = moduleFile.getAbsoluteFile();
                        }
                    }
                    else
                    {
                        moduleFile = new File( moduleFile.toURI().normalize() );
                    }
                    if ( aggregatorFiles.contains( moduleFile ) )
                    {
                        // The module points back at one of its own aggregators: report the full cycle path.
                        StringBuilder buffer = new StringBuilder( 256 );
                        for ( File aggregatorFile : aggregatorFiles )
                        {
                            buffer.append( aggregatorFile ).append( " -> " );
                        }
                        buffer.append( moduleFile );
                        ModelProblem problem =
                            new DefaultModelProblem( "Child module " + moduleFile + " of " + pomFile
                                + " forms aggregation cycle " + buffer, ModelProblem.Severity.ERROR,
                                ModelProblem.Version.BASE, model, -1, -1, null );
                        result.getProblems().add( problem );
                        noErrors = false;
                        continue;
                    }
                    moduleFiles.add( moduleFile );
                }
                // Recurse into the validated modules; their interim results nest under this one.
                interimResult.modules = new ArrayList<>();
                if ( !build( results, interimResult.modules, projectIndex, moduleFiles, aggregatorFiles, false,
                             recursive, config ) )
                {
                    noErrors = false;
                }
            }
        }
        catch ( ModelBuildingException e )
        {
            // Phase-one failure for this POM: record the result and let the reactor continue.
            results.add( new DefaultProjectBuildingResult( e.getModelId(), pomFile, e.getProblems() ) );
            noErrors = false;
        }
        return noErrors;
    }
    /**
     * Intermediate state captured during the first (raw model) build phase of a reactor
     * build, so that the second phase can complete each project later.
     */
    static class InterimResult
    {
        // POM file this result was built from.
        File pomFile;
        // Request used for phase one; reused to complete the build in phase two.
        ModelBuildingRequest request;
        // Phase-one model building result awaiting completion.
        ModelBuildingResult result;
        // Listener that holds the MavenProject instance being populated.
        DefaultModelBuildingListener listener;
        // Whether this POM is the execution root of the reactor.
        boolean root;
        // Interim results of this project's modules; empty when not an aggregator.
        List<InterimResult> modules = Collections.emptyList();
        InterimResult( File pomFile, ModelBuildingRequest request, ModelBuildingResult result,
                       DefaultModelBuildingListener listener, boolean root )
        {
            this.pomFile = pomFile;
            this.request = request;
            this.result = result;
            this.listener = listener;
            this.root = root;
        }
    }
private void populateReactorModelPool( ReactorModelPool reactorModelPool, List<InterimResult> interimResults )
{
for ( InterimResult interimResult : interimResults )
{
Model model = interimResult.result.getEffectiveModel();
reactorModelPool.put( model.getGroupId(), model.getArtifactId(), model.getVersion(), model.getPomFile() );
populateReactorModelPool( reactorModelPool, interimResult.modules );
}
}
private boolean build( List<ProjectBuildingResult> results, List<MavenProject> projects,
Map<String, MavenProject> projectIndex, List<InterimResult> interimResults,
ProjectBuildingRequest request, Map<File, Boolean> profilesXmls )
{
boolean noErrors = true;
for ( InterimResult interimResult : interimResults )
{
try
{
ModelBuildingResult result = modelBuilder.build( interimResult.request, interimResult.result );
MavenProject project = interimResult.listener.getProject();
initProject( project, projectIndex, result, profilesXmls, request );
List<MavenProject> modules = new ArrayList<>();
noErrors =
build( results, modules, projectIndex, interimResult.modules, request, profilesXmls ) && noErrors;
projects.addAll( modules );
projects.add( project );
project.setExecutionRoot( interimResult.root );
project.setCollectedProjects( modules );
results.add( new DefaultProjectBuildingResult( project, result.getProblems(), null ) );
}
catch ( ModelBuildingException e )
{
results.add( new DefaultProjectBuildingResult( e.getModelId(), interimResult.pomFile,
e.getProblems() ) );
noErrors = false;
}
}
return noErrors;
}
private void initProject( MavenProject project, Map<String, MavenProject> projects, ModelBuildingResult result,
Map<File, Boolean> profilesXmls, ProjectBuildingRequest projectBuildingRequest )
{
Model model = result.getEffectiveModel();
project.setModel( model );
project.setOriginalModel( result.getRawModel() );
project.setFile( model.getPomFile() );
Model parentModel = result.getModelIds().size() > 1 && !result.getModelIds().get( 1 ).isEmpty()
? result.getRawModel( result.getModelIds().get( 1 ) )
: null;
if ( parentModel != null )
{
final String parentGroupId = inheritedGroupId( result, 1 );
final String parentVersion = inheritedVersion( result, 1 );
project.setParentArtifact( repositorySystem.createProjectArtifact( parentGroupId,
parentModel.getArtifactId(),
parentVersion ) );
// org.apache.maven.its.mng4834:parent:0.1
String parentModelId = result.getModelIds().get( 1 );
File parentPomFile = result.getRawModel( parentModelId ).getPomFile();
MavenProject parent = projects.get( parentModelId );
if ( parent == null )
{
//
// At this point the DefaultModelBuildingListener has fired and it populates the
// remote repositories with those found in the pom.xml, along with the existing externally
// defined repositories.
//
projectBuildingRequest.setRemoteRepositories( project.getRemoteArtifactRepositories() );
if ( parentPomFile != null )
{
project.setParentFile( parentPomFile );
try
{
parent = build( parentPomFile, projectBuildingRequest ).getProject();
}
catch ( ProjectBuildingException e )
{
// MNG-4488 where let invalid parents slide on by
if ( logger.isDebugEnabled() )
{
// Message below is checked for in the MNG-2199 core IT.
logger.warn( "Failed to build parent project for " + project.getId(), e );
}
else
{
// Message below is checked for in the MNG-2199 core IT.
logger.warn( "Failed to build parent project for " + project.getId() );
}
}
}
else
{
Artifact parentArtifact = project.getParentArtifact();
try
{
parent = build( parentArtifact, projectBuildingRequest ).getProject();
}
catch ( ProjectBuildingException e )
{
// MNG-4488 where let invalid parents slide on by
if ( logger.isDebugEnabled() )
{
// Message below is checked for in the MNG-2199 core IT.
logger.warn( "Failed to build parent project for " + project.getId(), e );
}
else
{
// Message below is checked for in the MNG-2199 core IT.
logger.warn( "Failed to build parent project for " + project.getId() );
}
}
}
}
project.setParent( parent );
}
Artifact projectArtifact =
repositorySystem.createArtifact( project.getGroupId(), project.getArtifactId(), project.getVersion(), null,
project.getPackaging() );
project.setArtifact( projectArtifact );
if ( project.getFile() != null )
{
Build build = project.getBuild();
project.addScriptSourceRoot( build.getScriptSourceDirectory() );
project.addCompileSourceRoot( build.getSourceDirectory() );
project.addTestCompileSourceRoot( build.getTestSourceDirectory() );
}
List<Profile> activeProfiles = new ArrayList<>();
activeProfiles.addAll( result.getActivePomProfiles( result.getModelIds().get( 0 ) ) );
activeProfiles.addAll( result.getActiveExternalProfiles() );
project.setActiveProfiles( activeProfiles );
project.setInjectedProfileIds( "external", getProfileIds( result.getActiveExternalProfiles() ) );
for ( String modelId : result.getModelIds() )
{
project.setInjectedProfileIds( modelId, getProfileIds( result.getActivePomProfiles( modelId ) ) );
}
String modelId = findProfilesXml( result, profilesXmls );
if ( modelId != null )
{
ModelProblem problem =
new DefaultModelProblem( "Detected profiles.xml alongside " + modelId
+ ", this file is no longer supported and was ignored" + ", please use the settings.xml instead",
ModelProblem.Severity.WARNING, ModelProblem.Version.V30, model, -1, -1, null );
result.getProblems().add( problem );
}
//
// All the parts that were taken out of MavenProject for Maven 4.0.0
//
project.setProjectBuildingRequest( projectBuildingRequest );
// pluginArtifacts
Set<Artifact> pluginArtifacts = new HashSet<>();
for ( Plugin plugin : project.getBuildPlugins() )
{
Artifact artifact = repositorySystem.createPluginArtifact( plugin );
if ( artifact != null )
{
pluginArtifacts.add( artifact );
}
}
project.setPluginArtifacts( pluginArtifacts );
// reportArtifacts
Set<Artifact> reportArtifacts = new HashSet<>();
for ( ReportPlugin report : project.getReportPlugins() )
{
Plugin pp = new Plugin();
pp.setGroupId( report.getGroupId() );
pp.setArtifactId( report.getArtifactId() );
pp.setVersion( report.getVersion() );
Artifact artifact = repositorySystem.createPluginArtifact( pp );
if ( artifact != null )
{
reportArtifacts.add( artifact );
}
}
project.setReportArtifacts( reportArtifacts );
// extensionArtifacts
Set<Artifact> extensionArtifacts = new HashSet<>();
List<Extension> extensions = project.getBuildExtensions();
if ( extensions != null )
{
for ( Extension ext : extensions )
{
String version;
if ( StringUtils.isEmpty( ext.getVersion() ) )
{
version = "RELEASE";
}
else
{
version = ext.getVersion();
}
Artifact artifact =
repositorySystem.createArtifact( ext.getGroupId(), ext.getArtifactId(), version, null, "jar" );
if ( artifact != null )
{
extensionArtifacts.add( artifact );
}
}
}
project.setExtensionArtifacts( extensionArtifacts );
// managedVersionMap
Map<String, Artifact> map = null;
if ( repositorySystem != null )
{
List<Dependency> deps;
DependencyManagement dependencyManagement = project.getDependencyManagement();
if ( ( dependencyManagement != null ) && ( ( deps = dependencyManagement.getDependencies() ) != null )
&& ( deps.size() > 0 ) )
{
map = new HashMap<>();
for ( Dependency d : dependencyManagement.getDependencies() )
{
Artifact artifact = repositorySystem.createDependencyArtifact( d );
if ( artifact != null )
{
map.put( d.getManagementKey(), artifact );
}
}
}
else
{
map = Collections.emptyMap();
}
}
project.setManagedVersionMap( map );
// release artifact repository
if ( project.getDistributionManagement() != null
&& project.getDistributionManagement().getRepository() != null )
{
try
{
DeploymentRepository r = project.getDistributionManagement().getRepository();
if ( !StringUtils.isEmpty( r.getId() ) && !StringUtils.isEmpty( r.getUrl() ) )
{
ArtifactRepository repo = repositorySystem.buildArtifactRepository( r );
repositorySystem.injectProxy( projectBuildingRequest.getRepositorySession(),
Arrays.asList( repo ) );
repositorySystem.injectAuthentication( projectBuildingRequest.getRepositorySession(),
Arrays.asList( repo ) );
project.setReleaseArtifactRepository( repo );
}
}
catch ( InvalidRepositoryException e )
{
throw new IllegalStateException( "Failed to create release distribution repository for "
+ project.getId(), e );
}
}
// snapshot artifact repository
if ( project.getDistributionManagement() != null
&& project.getDistributionManagement().getSnapshotRepository() != null )
{
try
{
DeploymentRepository r = project.getDistributionManagement().getSnapshotRepository();
if ( !StringUtils.isEmpty( r.getId() ) && !StringUtils.isEmpty( r.getUrl() ) )
{
ArtifactRepository repo = repositorySystem.buildArtifactRepository( r );
repositorySystem.injectProxy( projectBuildingRequest.getRepositorySession(),
Arrays.asList( repo ) );
repositorySystem.injectAuthentication( projectBuildingRequest.getRepositorySession(),
Arrays.asList( repo ) );
project.setSnapshotArtifactRepository( repo );
}
}
catch ( InvalidRepositoryException e )
{
throw new IllegalStateException( "Failed to create snapshot distribution repository for "
+ project.getId(), e );
}
}
}
private static String inheritedGroupId( final ModelBuildingResult result, final int modelIndex )
{
String groupId = null;
final String modelId = result.getModelIds().get( modelIndex );
if ( !modelId.isEmpty() )
{
final Model model = result.getRawModel( modelId );
groupId = model.getGroupId() != null
? model.getGroupId()
: inheritedGroupId( result, modelIndex + 1 );
}
return groupId;
}
private static String inheritedVersion( final ModelBuildingResult result, final int modelIndex )
{
String version = null;
final String modelId = result.getModelIds().get( modelIndex );
if ( !modelId.isEmpty() )
{
final Model model = result.getRawModel( modelId );
version = model.getVersion() != null
? model.getVersion()
: inheritedVersion( result, modelIndex + 1 );
}
return version;
}
private String findProfilesXml( ModelBuildingResult result, Map<File, Boolean> profilesXmls )
{
for ( String modelId : result.getModelIds() )
{
Model model = result.getRawModel( modelId );
File basedir = model.getProjectDirectory();
if ( basedir == null )
{
break;
}
Boolean profilesXml = profilesXmls.get( basedir );
if ( profilesXml == null )
{
profilesXml = new File( basedir, "profiles.xml" ).exists();
profilesXmls.put( basedir, profilesXml );
}
if ( profilesXml )
{
return modelId;
}
}
return null;
}
class InternalConfig
{
public final ProjectBuildingRequest request;
public final RepositorySystemSession session;
public final List<RemoteRepository> repositories;
public final ReactorModelPool modelPool;
public final ReactorModelCache modelCache;
InternalConfig( ProjectBuildingRequest request, ReactorModelPool modelPool, ReactorModelCache modelCache )
{
this.request = request;
this.modelPool = modelPool;
this.modelCache = modelCache;
session =
LegacyLocalRepositoryManager.overlay( request.getLocalRepository(), request.getRepositorySession(),
repoSystem );
repositories = RepositoryUtils.toRepos( request.getRemoteRepositories() );
}
}
}
| |
package org.hl7.fhir.dstu3.model;
/*
Copyright (c) 2011+, HL7, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Generated on Mon, Apr 17, 2017 17:38-0400 for FHIR v3.0.1
import java.util.*;
import org.hl7.fhir.utilities.Utilities;
import ca.uhn.fhir.model.api.annotation.Child;
import ca.uhn.fhir.model.api.annotation.ChildOrder;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.model.api.annotation.DatatypeDef;
import ca.uhn.fhir.model.api.annotation.Block;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.exceptions.FHIRException;
/**
* Related artifacts such as additional documentation, justification, or bibliographic references.
*/
@DatatypeDef(name="RelatedArtifact")
public class RelatedArtifact extends Type implements ICompositeType {
public enum RelatedArtifactType {
/**
* Additional documentation for the knowledge resource. This would include additional instructions on usage as well as additional information on clinical context or appropriateness
*/
DOCUMENTATION,
/**
* A summary of the justification for the knowledge resource including supporting evidence, relevant guidelines, or other clinically important information. This information is intended to provide a way to make the justification for the knowledge resource available to the consumer of interventions or results produced by the knowledge resource
*/
JUSTIFICATION,
/**
* Bibliographic citation for papers, references, or other relevant material for the knowledge resource. This is intended to allow for citation of related material, but that was not necessarily specifically prepared in connection with this knowledge resource
*/
CITATION,
/**
* The previous version of the knowledge resource
*/
PREDECESSOR,
/**
* The next version of the knowledge resource
*/
SUCCESSOR,
/**
* The knowledge resource is derived from the related artifact. This is intended to capture the relationship in which a particular knowledge resource is based on the content of another artifact, but is modified to capture either a different set of overall requirements, or a more specific set of requirements such as those involved in a particular institution or clinical setting
*/
DERIVEDFROM,
/**
* The knowledge resource depends on the given related artifact
*/
DEPENDSON,
/**
* The knowledge resource is composed of the given related artifact
*/
COMPOSEDOF,
/**
* added to help the parsers with the generic types
*/
NULL;
public static RelatedArtifactType fromCode(String codeString) throws FHIRException {
if (codeString == null || "".equals(codeString))
return null;
if ("documentation".equals(codeString))
return DOCUMENTATION;
if ("justification".equals(codeString))
return JUSTIFICATION;
if ("citation".equals(codeString))
return CITATION;
if ("predecessor".equals(codeString))
return PREDECESSOR;
if ("successor".equals(codeString))
return SUCCESSOR;
if ("derived-from".equals(codeString))
return DERIVEDFROM;
if ("depends-on".equals(codeString))
return DEPENDSON;
if ("composed-of".equals(codeString))
return COMPOSEDOF;
if (Configuration.isAcceptInvalidEnums())
return null;
else
throw new FHIRException("Unknown RelatedArtifactType code '"+codeString+"'");
}
public String toCode() {
switch (this) {
case DOCUMENTATION: return "documentation";
case JUSTIFICATION: return "justification";
case CITATION: return "citation";
case PREDECESSOR: return "predecessor";
case SUCCESSOR: return "successor";
case DERIVEDFROM: return "derived-from";
case DEPENDSON: return "depends-on";
case COMPOSEDOF: return "composed-of";
default: return "?";
}
}
public String getSystem() {
switch (this) {
case DOCUMENTATION: return "http://hl7.org/fhir/related-artifact-type";
case JUSTIFICATION: return "http://hl7.org/fhir/related-artifact-type";
case CITATION: return "http://hl7.org/fhir/related-artifact-type";
case PREDECESSOR: return "http://hl7.org/fhir/related-artifact-type";
case SUCCESSOR: return "http://hl7.org/fhir/related-artifact-type";
case DERIVEDFROM: return "http://hl7.org/fhir/related-artifact-type";
case DEPENDSON: return "http://hl7.org/fhir/related-artifact-type";
case COMPOSEDOF: return "http://hl7.org/fhir/related-artifact-type";
default: return "?";
}
}
public String getDefinition() {
switch (this) {
case DOCUMENTATION: return "Additional documentation for the knowledge resource. This would include additional instructions on usage as well as additional information on clinical context or appropriateness";
case JUSTIFICATION: return "A summary of the justification for the knowledge resource including supporting evidence, relevant guidelines, or other clinically important information. This information is intended to provide a way to make the justification for the knowledge resource available to the consumer of interventions or results produced by the knowledge resource";
case CITATION: return "Bibliographic citation for papers, references, or other relevant material for the knowledge resource. This is intended to allow for citation of related material, but that was not necessarily specifically prepared in connection with this knowledge resource";
case PREDECESSOR: return "The previous version of the knowledge resource";
case SUCCESSOR: return "The next version of the knowledge resource";
case DERIVEDFROM: return "The knowledge resource is derived from the related artifact. This is intended to capture the relationship in which a particular knowledge resource is based on the content of another artifact, but is modified to capture either a different set of overall requirements, or a more specific set of requirements such as those involved in a particular institution or clinical setting";
case DEPENDSON: return "The knowledge resource depends on the given related artifact";
case COMPOSEDOF: return "The knowledge resource is composed of the given related artifact";
default: return "?";
}
}
public String getDisplay() {
switch (this) {
case DOCUMENTATION: return "Documentation";
case JUSTIFICATION: return "Justification";
case CITATION: return "Citation";
case PREDECESSOR: return "Predecessor";
case SUCCESSOR: return "Successor";
case DERIVEDFROM: return "Derived From";
case DEPENDSON: return "Depends On";
case COMPOSEDOF: return "Composed Of";
default: return "?";
}
}
}
public static class RelatedArtifactTypeEnumFactory implements EnumFactory<RelatedArtifactType> {
public RelatedArtifactType fromCode(String codeString) throws IllegalArgumentException {
if (codeString == null || "".equals(codeString))
if (codeString == null || "".equals(codeString))
return null;
if ("documentation".equals(codeString))
return RelatedArtifactType.DOCUMENTATION;
if ("justification".equals(codeString))
return RelatedArtifactType.JUSTIFICATION;
if ("citation".equals(codeString))
return RelatedArtifactType.CITATION;
if ("predecessor".equals(codeString))
return RelatedArtifactType.PREDECESSOR;
if ("successor".equals(codeString))
return RelatedArtifactType.SUCCESSOR;
if ("derived-from".equals(codeString))
return RelatedArtifactType.DERIVEDFROM;
if ("depends-on".equals(codeString))
return RelatedArtifactType.DEPENDSON;
if ("composed-of".equals(codeString))
return RelatedArtifactType.COMPOSEDOF;
throw new IllegalArgumentException("Unknown RelatedArtifactType code '"+codeString+"'");
}
public Enumeration<RelatedArtifactType> fromType(Base code) throws FHIRException {
if (code == null)
return null;
if (code.isEmpty())
return new Enumeration<RelatedArtifactType>(this);
String codeString = ((PrimitiveType) code).asStringValue();
if (codeString == null || "".equals(codeString))
return null;
if ("documentation".equals(codeString))
return new Enumeration<RelatedArtifactType>(this, RelatedArtifactType.DOCUMENTATION);
if ("justification".equals(codeString))
return new Enumeration<RelatedArtifactType>(this, RelatedArtifactType.JUSTIFICATION);
if ("citation".equals(codeString))
return new Enumeration<RelatedArtifactType>(this, RelatedArtifactType.CITATION);
if ("predecessor".equals(codeString))
return new Enumeration<RelatedArtifactType>(this, RelatedArtifactType.PREDECESSOR);
if ("successor".equals(codeString))
return new Enumeration<RelatedArtifactType>(this, RelatedArtifactType.SUCCESSOR);
if ("derived-from".equals(codeString))
return new Enumeration<RelatedArtifactType>(this, RelatedArtifactType.DERIVEDFROM);
if ("depends-on".equals(codeString))
return new Enumeration<RelatedArtifactType>(this, RelatedArtifactType.DEPENDSON);
if ("composed-of".equals(codeString))
return new Enumeration<RelatedArtifactType>(this, RelatedArtifactType.COMPOSEDOF);
throw new FHIRException("Unknown RelatedArtifactType code '"+codeString+"'");
}
public String toCode(RelatedArtifactType code) {
if (code == RelatedArtifactType.DOCUMENTATION)
return "documentation";
if (code == RelatedArtifactType.JUSTIFICATION)
return "justification";
if (code == RelatedArtifactType.CITATION)
return "citation";
if (code == RelatedArtifactType.PREDECESSOR)
return "predecessor";
if (code == RelatedArtifactType.SUCCESSOR)
return "successor";
if (code == RelatedArtifactType.DERIVEDFROM)
return "derived-from";
if (code == RelatedArtifactType.DEPENDSON)
return "depends-on";
if (code == RelatedArtifactType.COMPOSEDOF)
return "composed-of";
return "?";
}
public String toSystem(RelatedArtifactType code) {
return code.getSystem();
}
}
    /**
     * The type of relationship to the related artifact.
     */
    // Required element (min=1): the only mandatory part of a RelatedArtifact.
    @Child(name = "type", type = {CodeType.class}, order=0, min=1, max=1, modifier=false, summary=true)
    @Description(shortDefinition="documentation | justification | citation | predecessor | successor | derived-from | depends-on | composed-of", formalDefinition="The type of relationship to the related artifact." )
    @ca.uhn.fhir.model.api.annotation.Binding(valueSet="http://hl7.org/fhir/ValueSet/related-artifact-type")
    protected Enumeration<RelatedArtifactType> type;

    /**
     * A brief description of the document or knowledge resource being referenced, suitable for display to a consumer.
     */
    @Child(name = "display", type = {StringType.class}, order=1, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="Brief description of the related artifact", formalDefinition="A brief description of the document or knowledge resource being referenced, suitable for display to a consumer." )
    protected StringType display;

    /**
     * A bibliographic citation for the related artifact. This text SHOULD be formatted according to an accepted citation format.
     */
    @Child(name = "citation", type = {StringType.class}, order=2, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="Bibliographic citation for the artifact", formalDefinition="A bibliographic citation for the related artifact. This text SHOULD be formatted according to an accepted citation format." )
    protected StringType citation;

    /**
     * A url for the artifact that can be followed to access the actual content.
     */
    @Child(name = "url", type = {UriType.class}, order=3, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="Where the artifact can be accessed", formalDefinition="A url for the artifact that can be followed to access the actual content." )
    protected UriType url;

    /**
     * The document being referenced, represented as an attachment. This is exclusive with the resource element.
     */
    @Child(name = "document", type = {Attachment.class}, order=4, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="What document is being referenced", formalDefinition="The document being referenced, represented as an attachment. This is exclusive with the resource element." )
    protected Attachment document;

    /**
     * The related resource, such as a library, value set, profile, or other knowledge resource.
     */
    @Child(name = "resource", type = {Reference.class}, order=5, min=0, max=1, modifier=false, summary=true)
    @Description(shortDefinition="What resource is being referenced", formalDefinition="The related resource, such as a library, value set, profile, or other knowledge resource." )
    protected Reference resource;

    /**
     * The actual object that is the target of the reference (The related resource, such as a library, value set, profile, or other knowledge resource.)
     */
    // Populated by reference-resolution machinery; not serialized directly.
    protected Resource resourceTarget;

    // Serialization version marker for this generated datatype.
    private static final long serialVersionUID = -660871462L;
    /**
     * Constructor; creates an empty RelatedArtifact. Note that {@code type} is a required
     * element and must be populated before the instance is valid.
     */
    public RelatedArtifact() {
      super();
    }
    /**
     * Constructor taking the required {@code type} element.
     *
     * @param type the type of relationship to the related artifact
     */
    public RelatedArtifact(Enumeration<RelatedArtifactType> type) {
      super();
      this.type = type;
    }
/**
* @return {@link #type} (The type of relationship to the related artifact.). This is the underlying object with id, value and extensions. The accessor "getType" gives direct access to the value
*/
public Enumeration<RelatedArtifactType> getTypeElement() {
if (this.type == null)
if (Configuration.errorOnAutoCreate())
throw new Error("Attempt to auto-create RelatedArtifact.type");
else if (Configuration.doAutoCreate())
this.type = new Enumeration<RelatedArtifactType>(new RelatedArtifactTypeEnumFactory()); // bb
return this.type;
}
public boolean hasTypeElement() {
return this.type != null && !this.type.isEmpty();
}
public boolean hasType() {
return this.type != null && !this.type.isEmpty();
}
/**
* @param value {@link #type} (The type of relationship to the related artifact.). This is the underlying object with id, value and extensions. The accessor "getType" gives direct access to the value
*/
public RelatedArtifact setTypeElement(Enumeration<RelatedArtifactType> value) {
this.type = value;
return this;
}
/**
* @return The type of relationship to the related artifact.
*/
public RelatedArtifactType getType() {
return this.type == null ? null : this.type.getValue();
}
/**
* @param value The type of relationship to the related artifact.
*/
public RelatedArtifact setType(RelatedArtifactType value) {
if (this.type == null)
this.type = new Enumeration<RelatedArtifactType>(new RelatedArtifactTypeEnumFactory());
this.type.setValue(value);
return this;
}
/**
* @return {@link #display} (A brief description of the document or knowledge resource being referenced, suitable for display to a consumer.). This is the underlying object with id, value and extensions. The accessor "getDisplay" gives direct access to the value
*/
public StringType getDisplayElement() {
if (this.display == null)
if (Configuration.errorOnAutoCreate())
throw new Error("Attempt to auto-create RelatedArtifact.display");
else if (Configuration.doAutoCreate())
this.display = new StringType(); // bb
return this.display;
}
public boolean hasDisplayElement() {
return this.display != null && !this.display.isEmpty();
}
public boolean hasDisplay() {
return this.display != null && !this.display.isEmpty();
}
/**
* @param value {@link #display} (A brief description of the document or knowledge resource being referenced, suitable for display to a consumer.). This is the underlying object with id, value and extensions. The accessor "getDisplay" gives direct access to the value
*/
public RelatedArtifact setDisplayElement(StringType value) {
this.display = value;
return this;
}
/**
* @return A brief description of the document or knowledge resource being referenced, suitable for display to a consumer.
*/
public String getDisplay() {
return this.display == null ? null : this.display.getValue();
}
/**
* @param value A brief description of the document or knowledge resource being referenced, suitable for display to a consumer.
*/
public RelatedArtifact setDisplay(String value) {
if (Utilities.noString(value))
this.display = null;
else {
if (this.display == null)
this.display = new StringType();
this.display.setValue(value);
}
return this;
}
/**
 * @return {@link #citation} (A bibliographic citation for the related artifact. This text SHOULD be formatted according to an accepted citation format.). This is the underlying object with id, value and extensions. The accessor "getCitation" gives direct access to the value
 */
public StringType getCitationElement() {
  if (this.citation == null) {
    if (Configuration.errorOnAutoCreate()) {
      throw new Error("Attempt to auto-create RelatedArtifact.citation");
    }
    if (Configuration.doAutoCreate()) {
      this.citation = new StringType(); // bb
    }
  }
  return this.citation;
}

public boolean hasCitationElement() {
  return !(this.citation == null || this.citation.isEmpty());
}

public boolean hasCitation() {
  return !(this.citation == null || this.citation.isEmpty());
}

/**
 * @param value {@link #citation} (A bibliographic citation for the related artifact. This text SHOULD be formatted according to an accepted citation format.). This is the underlying object with id, value and extensions. The accessor "getCitation" gives direct access to the value
 */
public RelatedArtifact setCitationElement(StringType value) {
  this.citation = value;
  return this;
}

/**
 * @return A bibliographic citation for the related artifact. This text SHOULD be formatted according to an accepted citation format.
 */
public String getCitation() {
  return this.citation != null ? this.citation.getValue() : null;
}

/**
 * @param value A bibliographic citation for the related artifact. This text SHOULD be formatted according to an accepted citation format.
 */
public RelatedArtifact setCitation(String value) {
  if (Utilities.noString(value)) {
    // An empty/blank value clears the element entirely.
    this.citation = null;
    return this;
  }
  if (this.citation == null) {
    this.citation = new StringType();
  }
  this.citation.setValue(value);
  return this;
}
/**
 * @return {@link #url} (A url for the artifact that can be followed to access the actual content.). This is the underlying object with id, value and extensions. The accessor "getUrl" gives direct access to the value
 */
public UriType getUrlElement() {
  if (this.url == null) {
    if (Configuration.errorOnAutoCreate()) {
      throw new Error("Attempt to auto-create RelatedArtifact.url");
    }
    if (Configuration.doAutoCreate()) {
      this.url = new UriType(); // bb
    }
  }
  return this.url;
}

public boolean hasUrlElement() {
  return !(this.url == null || this.url.isEmpty());
}

public boolean hasUrl() {
  return !(this.url == null || this.url.isEmpty());
}

/**
 * @param value {@link #url} (A url for the artifact that can be followed to access the actual content.). This is the underlying object with id, value and extensions. The accessor "getUrl" gives direct access to the value
 */
public RelatedArtifact setUrlElement(UriType value) {
  this.url = value;
  return this;
}

/**
 * @return A url for the artifact that can be followed to access the actual content.
 */
public String getUrl() {
  return this.url != null ? this.url.getValue() : null;
}

/**
 * @param value A url for the artifact that can be followed to access the actual content.
 */
public RelatedArtifact setUrl(String value) {
  if (Utilities.noString(value)) {
    // An empty/blank value clears the element entirely.
    this.url = null;
    return this;
  }
  if (this.url == null) {
    this.url = new UriType();
  }
  this.url.setValue(value);
  return this;
}
/**
 * @return {@link #document} (The document being referenced, represented as an attachment. This is exclusive with the resource element.)
 */
public Attachment getDocument() {
  if (this.document == null) {
    if (Configuration.errorOnAutoCreate()) {
      throw new Error("Attempt to auto-create RelatedArtifact.document");
    }
    if (Configuration.doAutoCreate()) {
      this.document = new Attachment(); // cc
    }
  }
  return this.document;
}

public boolean hasDocument() {
  return !(this.document == null || this.document.isEmpty());
}

/**
 * @param value {@link #document} (The document being referenced, represented as an attachment. This is exclusive with the resource element.)
 */
public RelatedArtifact setDocument(Attachment value) {
  this.document = value;
  return this;
}
/**
 * @return {@link #resource} (The related resource, such as a library, value set, profile, or other knowledge resource.)
 */
public Reference getResource() {
  if (this.resource == null) {
    if (Configuration.errorOnAutoCreate()) {
      throw new Error("Attempt to auto-create RelatedArtifact.resource");
    }
    if (Configuration.doAutoCreate()) {
      this.resource = new Reference(); // cc
    }
  }
  return this.resource;
}

public boolean hasResource() {
  return !(this.resource == null || this.resource.isEmpty());
}

/**
 * @param value {@link #resource} (The related resource, such as a library, value set, profile, or other knowledge resource.)
 */
public RelatedArtifact setResource(Reference value) {
  this.resource = value;
  return this;
}

/**
 * @return {@link #resource} The actual object that is the target of the reference. The reference library doesn't populate this, but you can use it to hold the resource if you resolve it. (The related resource, such as a library, value set, profile, or other knowledge resource.)
 */
public Resource getResourceTarget() {
  return this.resourceTarget;
}

/**
 * @param value {@link #resource} The actual object that is the target of the reference. The reference library doesn't use these, but you can use it to hold the resource if you resolve it. (The related resource, such as a library, value set, profile, or other knowledge resource.)
 */
public RelatedArtifact setResourceTarget(Resource value) {
  this.resourceTarget = value;
  return this;
}
/**
 * Adds a {@link Property} descriptor for each child element of this datatype to the given
 * list (used by the reflective metadata machinery); all children here are declared 0..* at
 * this level.
 */
protected void listChildren(List<Property> childrenList) {
  super.listChildren(childrenList);
  childrenList.add(new Property("type", "code", "The type of relationship to the related artifact.", 0, java.lang.Integer.MAX_VALUE, type));
  childrenList.add(new Property("display", "string", "A brief description of the document or knowledge resource being referenced, suitable for display to a consumer.", 0, java.lang.Integer.MAX_VALUE, display));
  childrenList.add(new Property("citation", "string", "A bibliographic citation for the related artifact. This text SHOULD be formatted according to an accepted citation format.", 0, java.lang.Integer.MAX_VALUE, citation));
  childrenList.add(new Property("url", "uri", "A url for the artifact that can be followed to access the actual content.", 0, java.lang.Integer.MAX_VALUE, url));
  childrenList.add(new Property("document", "Attachment", "The document being referenced, represented as an attachment. This is exclusive with the resource element.", 0, java.lang.Integer.MAX_VALUE, document));
  childrenList.add(new Property("resource", "Reference(Any)", "The related resource, such as a library, value set, profile, or other knowledge resource.", 0, java.lang.Integer.MAX_VALUE, resource));
}
/**
 * Reflective read of a single property identified by its name hash: returns the current
 * value wrapped in an array, or an empty array when the property is unset; unknown hashes
 * are delegated to super.
 */
@Override
public Base[] getProperty(int hash, String name, boolean checkValid) throws FHIRException {
  switch (hash) {
  case 3575610: /*type*/ return this.type == null ? new Base[0] : new Base[] {this.type}; // Enumeration<RelatedArtifactType>
  case 1671764162: /*display*/ return this.display == null ? new Base[0] : new Base[] {this.display}; // StringType
  case -1442706713: /*citation*/ return this.citation == null ? new Base[0] : new Base[] {this.citation}; // StringType
  case 116079: /*url*/ return this.url == null ? new Base[0] : new Base[] {this.url}; // UriType
  case 861720859: /*document*/ return this.document == null ? new Base[0] : new Base[] {this.document}; // Attachment
  case -341064690: /*resource*/ return this.resource == null ? new Base[0] : new Base[] {this.resource}; // Reference
  default: return super.getProperty(hash, name, checkValid);
  }
}
/**
 * Reflective write of a single property identified by its name hash; the supplied value is
 * cast/converted to the element's declared type before being stored. Returns the stored
 * (possibly converted) value; unknown hashes are delegated to super.
 */
@Override
public Base setProperty(int hash, String name, Base value) throws FHIRException {
  switch (hash) {
  case 3575610: // type
    value = new RelatedArtifactTypeEnumFactory().fromType(castToCode(value));
    this.type = (Enumeration) value; // Enumeration<RelatedArtifactType>
    return value;
  case 1671764162: // display
    this.display = castToString(value); // StringType
    return value;
  case -1442706713: // citation
    this.citation = castToString(value); // StringType
    return value;
  case 116079: // url
    this.url = castToUri(value); // UriType
    return value;
  case 861720859: // document
    this.document = castToAttachment(value); // Attachment
    return value;
  case -341064690: // resource
    this.resource = castToReference(value); // Reference
    return value;
  default: return super.setProperty(hash, name, value);
  }
}
/**
 * Reflective write of a single property identified by element name (string counterpart of
 * the hash-keyed overload); values are cast to the element's declared type. Unknown names
 * are delegated to super.
 */
@Override
public Base setProperty(String name, Base value) throws FHIRException {
  if (name.equals("type")) {
    value = new RelatedArtifactTypeEnumFactory().fromType(castToCode(value));
    this.type = (Enumeration) value; // Enumeration<RelatedArtifactType>
  } else if (name.equals("display")) {
    this.display = castToString(value); // StringType
  } else if (name.equals("citation")) {
    this.citation = castToString(value); // StringType
  } else if (name.equals("url")) {
    this.url = castToUri(value); // UriType
  } else if (name.equals("document")) {
    this.document = castToAttachment(value); // Attachment
  } else if (name.equals("resource")) {
    this.resource = castToReference(value); // Reference
  } else
    return super.setProperty(name, value);
  return value;
}
/**
 * Returns the child element for the given property hash, delegating to the corresponding
 * accessor (which may auto-create the element, subject to Configuration settings).
 */
@Override
public Base makeProperty(int hash, String name) throws FHIRException {
  switch (hash) {
  case 3575610: return getTypeElement();
  case 1671764162: return getDisplayElement();
  case -1442706713: return getCitationElement();
  case 116079: return getUrlElement();
  case 861720859: return getDocument();
  case -341064690: return getResource();
  default: return super.makeProperty(hash, name);
  }
}
/**
 * Returns the declared FHIR type name(s) of the property identified by its name hash;
 * unknown hashes are delegated to super.
 */
@Override
public String[] getTypesForProperty(int hash, String name) throws FHIRException {
  switch (hash) {
  case 3575610: /*type*/ return new String[] {"code"};
  case 1671764162: /*display*/ return new String[] {"string"};
  case -1442706713: /*citation*/ return new String[] {"string"};
  case 116079: /*url*/ return new String[] {"uri"};
  case 861720859: /*document*/ return new String[] {"Attachment"};
  case -341064690: /*resource*/ return new String[] {"Reference"};
  default: return super.getTypesForProperty(hash, name);
  }
}
/**
 * Creates and attaches a new child element by name. Primitive-typed children cannot be
 * added this way (they throw); complex children (document, resource) are instantiated,
 * stored, and returned. Unknown names are delegated to super.
 */
@Override
public Base addChild(String name) throws FHIRException {
  switch (name) {
  case "type":
    throw new FHIRException("Cannot call addChild on a primitive type RelatedArtifact.type");
  case "display":
    throw new FHIRException("Cannot call addChild on a primitive type RelatedArtifact.display");
  case "citation":
    throw new FHIRException("Cannot call addChild on a primitive type RelatedArtifact.citation");
  case "url":
    throw new FHIRException("Cannot call addChild on a primitive type RelatedArtifact.url");
  case "document":
    this.document = new Attachment();
    return this.document;
  case "resource":
    this.resource = new Reference();
    return this.resource;
  default:
    return super.addChild(name);
  }
}
/** @return the FHIR type name of this datatype: "RelatedArtifact". */
public String fhirType() {
  return "RelatedArtifact";
}
/**
 * Creates a deep copy of this RelatedArtifact: base values are copied via copyValues, and
 * each child element is itself copied (null children remain null).
 */
public RelatedArtifact copy() {
  RelatedArtifact dst = new RelatedArtifact();
  copyValues(dst);
  dst.type = type == null ? null : type.copy();
  dst.display = display == null ? null : display.copy();
  dst.citation = citation == null ? null : citation.copy();
  dst.url = url == null ? null : url.copy();
  dst.document = document == null ? null : document.copy();
  dst.resource = resource == null ? null : resource.copy();
  return dst;
}

/** Type-specific copy hook; simply delegates to {@link #copy()}. */
protected RelatedArtifact typedCopy() {
  return copy();
}
/**
 * Deep equality: true when the base compares equal and every child element (type, display,
 * citation, url, document, resource) compares equal via compareDeep.
 */
@Override
public boolean equalsDeep(Base other) {
  if (!super.equalsDeep(other))
    return false;
  if (!(other instanceof RelatedArtifact))
    return false;
  RelatedArtifact o = (RelatedArtifact) other;
  return compareDeep(type, o.type, true) && compareDeep(display, o.display, true) && compareDeep(citation, o.citation, true)
     && compareDeep(url, o.url, true) && compareDeep(document, o.document, true) && compareDeep(resource, o.resource, true)
    ;
}
/**
 * Shallow equality: compares only the primitive-valued children (type, display, citation,
 * url) via compareValues; the complex children document and resource are not compared at
 * this level.
 */
@Override
public boolean equalsShallow(Base other) {
  if (!super.equalsShallow(other))
    return false;
  if (!(other instanceof RelatedArtifact))
    return false;
  RelatedArtifact o = (RelatedArtifact) other;
  return compareValues(type, o.type, true) && compareValues(display, o.display, true) && compareValues(citation, o.citation, true)
     && compareValues(url, o.url, true);
}
/**
 * @return true when the base element is empty and none of the children (type, display,
 * citation, url, document, resource) carry content (per ElementUtil.isEmpty).
 */
public boolean isEmpty() {
  return super.isEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty(type, display, citation
    , url, document, resource);
}
}
| |
package org.knowm.xchange.btcmarkets.dto;
import static org.fest.assertions.api.Assertions.assertThat;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.util.Date;
import java.util.List;
import org.junit.Test;
import org.knowm.xchange.btcmarkets.BtcMarketsAssert;
import org.knowm.xchange.btcmarkets.dto.account.BTCMarketsBalance;
import org.knowm.xchange.btcmarkets.dto.marketdata.BTCMarketsOrderBook;
import org.knowm.xchange.btcmarkets.dto.marketdata.BTCMarketsTicker;
import org.knowm.xchange.btcmarkets.dto.trade.BTCMarketsCancelOrderRequest;
import org.knowm.xchange.btcmarkets.dto.trade.BTCMarketsCancelOrderResponse;
import org.knowm.xchange.btcmarkets.dto.trade.BTCMarketsMyTradingRequest;
import org.knowm.xchange.btcmarkets.dto.trade.BTCMarketsOrder;
import org.knowm.xchange.btcmarkets.dto.trade.BTCMarketsOrders;
import org.knowm.xchange.btcmarkets.dto.trade.BTCMarketsPlaceOrderResponse;
import org.knowm.xchange.btcmarkets.dto.trade.BTCMarketsTradeHistory;
import org.knowm.xchange.btcmarkets.dto.trade.BTCMarketsUserTrade;
import org.knowm.xchange.btcmarkets.service.BTCMarketsTestSupport;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;
import org.knowm.xchange.dto.trade.LimitOrder;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * DTO (de)serialization tests for the BTC Markets exchange module: balances, order book,
 * ticker, orders, user trades, and the cancel/place-order request and response shapes.
 *
 * <p>Fixture JSON is loaded via the {@code parse}/{@code getStream} helpers inherited from
 * {@link BTCMarketsTestSupport}.
 */
@SuppressWarnings("ThrowableResultOfMethodCallIgnored")
public class BTCMarketsDtoTest extends BTCMarketsTestSupport {

  @Test
  public void shouldParseBalances() throws IOException {
    // when
    final BTCMarketsBalance[] response = parse(BTCMarketsBalance[].class);

    // then
    assertThat(response).hasSize(3);
    assertThat(response[2].getCurrency()).isEqualTo("LTC");
    assertThat(response[2].getBalance()).isEqualTo(new BigDecimal("10.00000000"));
    assertThat(response[2].getPendingFunds()).isEqualTo(new BigDecimal("0E-8"));
    assertThat(response[2].toString()).isEqualTo("BTCMarketsBalance{pendingFunds=0E-8, balance=10.00000000, currency='LTC'}");
  }

  @Test
  public void shouldParseNullAvailabilityBalances() throws IOException {
    // given
    final BTCMarketsBalance[] expectedBtcMarketsBalances = expectedBtcMarketsBalances();

    // when
    final BTCMarketsBalance[] response = parse("NullAvailabilityBalances", BTCMarketsBalance[].class);

    // then
    assertThat(response).hasSize(3);
    for (int i = 0; i < response.length; i++) {
      BtcMarketsAssert.assertEquals(response[i], expectedBtcMarketsBalances[i]);
    }
  }

  @Test
  public void shouldSerializeCancelOrderRequest() throws UnsupportedEncodingException, JsonProcessingException {
    assertThatSerializesCorrectly(new BTCMarketsCancelOrderRequest(6840125478L));
  }

  @Test
  public void shouldFailWhenParsingFailedCancelOrderResponseAsResponse() throws IOException {
    try {
      parse(BTCMarketsCancelOrderResponse.class);
      assertThat(true).as("Should throw exception").isFalse();
    } catch (JsonMappingException ignored) {
      // expected: the error payload does not map onto the success-response type
    }
  }

  @Test
  public void shouldParseEmptyCancelOrderResponse() throws IOException {
    // when
    final BTCMarketsCancelOrderResponse response = parse("EmptyCancelOrderResponse", BTCMarketsCancelOrderResponse.class);

    // then
    assertSuccessfulCancelOrderResponse(response);
  }

  @Test
  public void shouldParseNullCancelOrderResponse() throws IOException {
    // when
    final BTCMarketsCancelOrderResponse response = parse("NullCancelOrderResponse", BTCMarketsCancelOrderResponse.class);

    // then
    assertSuccessfulCancelOrderResponse(response);
  }

  @Test
  public void shouldParseCancelOrderResponseAsException() throws IOException {
    // when
    final BTCMarketsException ex = parse("CancelOrderResponse", BTCMarketsException.class);

    // then
    assertThat(ex.getSuccess()).isTrue();
    assertThat(ex.getErrorCode()).isNull();

    List<BTCMarketsException> responses = ex.getResponses();
    assertThat(responses).hasSize(2);

    BTCMarketsException response1 = responses.get(0);
    assertThat(response1.getSuccess()).isTrue();
    assertThat(response1.getErrorCode()).isNull();
    assertThat(response1.getMessage()).contains("(HTTP status code: 0)");
    assertThat(response1.getId()).isEqualTo(6840125484L);

    BTCMarketsException response2 = responses.get(1);
    assertThat(response2.getSuccess()).isFalse();
    assertThat(response2.getErrorCode()).isEqualTo(3);
    assertThat(response2.getMessage()).contains("order does not exist.");
    assertThat(response2.getId()).isEqualTo(6840125478L);
  }

  @Test
  public void shouldFailWhenParsingFailedPlaceOrderResponseAsResponse() throws IOException {
    try {
      parse("Error-PlaceOrderResponse", BTCMarketsPlaceOrderResponse.class);
      assertThat(true).as("Should throw exception").isFalse();
    } catch (JsonMappingException ignored) {
      // expected: the error payload does not map onto the success-response type
    }
  }

  @Test
  public void shouldParseFailedPlaceOrderResponseAsException() throws IOException {
    // when
    final BTCMarketsException ex = parse("Error-PlaceOrderResponse", BTCMarketsException.class);

    // then
    assertThat(ex.getSuccess()).isFalse();
    assertThat(ex.getErrorCode()).isEqualTo(3);
    assertThat(ex.getMessage()).contains("Invalid argument.");
    assertThat(ex.getResponses()).isNull();
    assertThat(ex.getId()).isEqualTo(0);
    assertThat(ex.getClientRequestId()).isEqualTo("abc-cdf-1000");
  }

  @Test
  public void shouldSerializeMyTradingRequest() throws UnsupportedEncodingException, JsonProcessingException {
    final BTCMarketsMyTradingRequest request = new BTCMarketsMyTradingRequest("AUD", "BTC", 10, new Date(33434568724000L));

    assertThatSerializesCorrectly(request);
  }

  @Test
  public void shouldSerializePlaceOrderRequest() throws UnsupportedEncodingException, JsonProcessingException {
    assertThatSerializesCorrectly(new BTCMarketsOrder(new BigDecimal("0.10000000"), new BigDecimal("130.00000000"), "AUD", "BTC",
        BTCMarketsOrder.Side.Bid, BTCMarketsOrder.Type.Limit, "abc-cdf-1000"));
  }

  // Renamed from 'shoudParseOrderBook' (typo); JUnit discovers tests via @Test, so the
  // rename is safe.
  @Test
  public void shouldParseOrderBook() throws IOException {
    // given
    final LimitOrder[] expectedAsks = expectedAsks();
    final LimitOrder[] expectedBids = expectedBids();

    // when
    final BTCMarketsOrderBook response = parse("ShortOrderBook", BTCMarketsOrderBook.class);

    // then
    assertThat(response.getCurrency()).isEqualTo("AUD");
    assertThat(response.getInstrument()).isEqualTo("BTC");
    assertThat(response.getTimestamp().getTime()).isEqualTo(1442997827000L);

    List<BigDecimal[]> asks = response.getAsks();
    assertThat(asks).hasSize(3);
    for (int i = 0; i < asks.size(); i++) {
      BtcMarketsAssert.assertEquals(expectedAsks[i], Order.OrderType.ASK, CurrencyPair.BTC_AUD, asks.get(i));
    }

    List<BigDecimal[]> bids = response.getBids();
    assertThat(bids).hasSize(2);
    for (int i = 0; i < bids.size(); i++) {
      BtcMarketsAssert.assertEquals(expectedBids[i], Order.OrderType.BID, CurrencyPair.BTC_AUD, bids.get(i));
    }

    assertThat(response.toString())
        .isEqualTo(String.format("BTCMarketsOrderBook{currency='AUD', instrument='BTC', timestamp=%s, bids=2, asks=3}", new Date(1442997827000L)));
  }

  @Test
  public void shouldParseOrders() throws IOException {
    // given
    final BTCMarketsOrder[] expectedParsedBtcMarketsOrders = expectedParsedBtcMarketsOrders();

    // when
    final BTCMarketsOrders response = parse(BTCMarketsOrders.class);

    // then
    assertThat(response.getSuccess()).isTrue();
    assertThat(response.getErrorCode()).isNull();
    assertThat(response.getErrorMessage()).isNull();

    List<BTCMarketsOrder> ordersList = response.getOrders();
    assertThat(ordersList).hasSize(2);
    for (int i = 0; i < ordersList.size(); i++) {
      BtcMarketsAssert.assertEquals(ordersList.get(i), expectedParsedBtcMarketsOrders[i]);
    }
  }

  @Test
  public void shouldParsePlaceOrderResponse() throws IOException {
    // when
    final BTCMarketsPlaceOrderResponse response = parse(BTCMarketsPlaceOrderResponse.class);

    // then
    assertThat(response.getSuccess()).isTrue();
    assertThat(response.getErrorCode()).isNull();
    assertThat(response.getErrorMessage()).isNull();
    assertThat(response.getId()).isEqualTo(100);
    assertThat(response.getClientRequestId()).isEqualTo("abc-cdf-1000");
    assertThat(response.toString()).isEqualTo("BTCMarketsPlaceOrderResponse{success=true, errorMessage='null', errorCode=null}");
  }

  @Test
  public void shouldParseTicker() throws IOException {
    // when
    final BTCMarketsTicker response = parse(BTCMarketsTicker.class);

    // then
    BtcMarketsAssert.assertEquals(response, EXPECTED_BTC_MARKETS_TICKER);
  }

  @Test
  public void shouldParseTradeHistory() throws IOException {
    // given
    final List<BTCMarketsUserTrade> expectedParsedBtcMarketsUserTrades = expectedParsedBtcMarketsUserTrades();

    // when
    final BTCMarketsTradeHistory response = parse(BTCMarketsTradeHistory.class);

    // then
    assertThat(response.getSuccess()).isTrue();
    assertThat(response.getErrorCode()).isNull();
    assertThat(response.getErrorMessage()).isNull();

    List<BTCMarketsUserTrade> userTrades = response.getTrades();
    assertThat(userTrades).hasSize(3);
    for (int i = 0; i < userTrades.size(); i++) {
      BtcMarketsAssert.assertEquals(userTrades.get(i), expectedParsedBtcMarketsUserTrades.get(i));
    }
  }

  /** Asserts a cancel-order response that reports success with no error code or message. */
  private static void assertSuccessfulCancelOrderResponse(BTCMarketsCancelOrderResponse response) {
    assertThat(response.getSuccess()).isTrue();
    assertThat(response.getErrorCode()).isNull();
    assertThat(response.getErrorMessage()).isNull();
  }

  /**
   * Serializes {@code request} to pretty-printed JSON and compares it byte-for-byte with the
   * expected fixture stream for the request's class.
   */
  private <T> void assertThatSerializesCorrectly(T request) throws JsonProcessingException, UnsupportedEncodingException {
    final String json = new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(request);
    final InputStream expected = getStream(getBaseFileName(request.getClass()));
    assertThat(new ByteArrayInputStream(json.getBytes("UTF-8"))).hasContentEqualTo(expected);
  }
}
| |
/**
*/
package CIM.IEC61970.Informative.InfAssets.impl;
import CIM.IEC61968.Assets.Asset;
import CIM.IEC61968.Assets.AssetsPackage;
import CIM.IEC61970.Core.impl.CurveImpl;
import CIM.IEC61970.Informative.InfAssets.AssetPropertyCurve;
import CIM.IEC61970.Informative.InfAssets.InfAssetsPackage;
import CIM.IEC61970.Informative.InfAssets.Specification;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EObjectWithInverseResolvingEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Asset Property Curve</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link CIM.IEC61970.Informative.InfAssets.impl.AssetPropertyCurveImpl#getSpecification <em>Specification</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfAssets.impl.AssetPropertyCurveImpl#getAssets <em>Assets</em>}</li>
* </ul>
*
* @generated
*/
public class AssetPropertyCurveImpl extends CurveImpl implements AssetPropertyCurve {
  /**
   * The cached value of the '{@link #getSpecification() <em>Specification</em>}' reference.
   * <!-- begin-user-doc -->
   * Opposite end: Specification#getAssetPropertyCurves (bidirectional reference).
   * <!-- end-user-doc -->
   * @see #getSpecification()
   * @generated
   * @ordered
   */
  protected Specification specification;

  /**
   * The cached value of the '{@link #getAssets() <em>Assets</em>}' reference list.
   * <!-- begin-user-doc -->
   * Lazily created in getAssets(); maintains the inverse Asset#getAssetPropertyCurves end.
   * <!-- end-user-doc -->
   * @see #getAssets()
   * @generated
   * @ordered
   */
  protected EList<Asset> assets;

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected AssetPropertyCurveImpl() {
    super();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass() {
    return InfAssetsPackage.eINSTANCE.getAssetPropertyCurve();
  }

  /**
   * <!-- begin-user-doc -->
   * Resolves the reference if it is a proxy, firing a RESOLVE notification when the
   * resolved instance differs from the cached one.
   * <!-- end-user-doc -->
   * @generated
   */
  public Specification getSpecification() {
    if (specification != null && specification.eIsProxy()) {
      InternalEObject oldSpecification = (InternalEObject)specification;
      specification = (Specification)eResolveProxy(oldSpecification);
      if (specification != oldSpecification) {
        if (eNotificationRequired())
          eNotify(new ENotificationImpl(this, Notification.RESOLVE, InfAssetsPackage.ASSET_PROPERTY_CURVE__SPECIFICATION, oldSpecification, specification));
      }
    }
    return specification;
  }

  /**
   * <!-- begin-user-doc -->
   * Returns the cached value without proxy resolution.
   * <!-- end-user-doc -->
   * @generated
   */
  public Specification basicGetSpecification() {
    return specification;
  }

  /**
   * <!-- begin-user-doc -->
   * Sets the field and queues a SET notification on the chain; does NOT touch the inverse
   * end (that is setSpecification's job).
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetSpecification(Specification newSpecification, NotificationChain msgs) {
    Specification oldSpecification = specification;
    specification = newSpecification;
    if (eNotificationRequired()) {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, InfAssetsPackage.ASSET_PROPERTY_CURVE__SPECIFICATION, oldSpecification, newSpecification);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * <!-- begin-user-doc -->
   * Full setter: detaches this object from the old specification's inverse list, attaches
   * it to the new one, then dispatches all accumulated notifications at once.
   * <!-- end-user-doc -->
   * @generated
   */
  public void setSpecification(Specification newSpecification) {
    if (newSpecification != specification) {
      NotificationChain msgs = null;
      if (specification != null)
        msgs = ((InternalEObject)specification).eInverseRemove(this, InfAssetsPackage.SPECIFICATION__ASSET_PROPERTY_CURVES, Specification.class, msgs);
      if (newSpecification != null)
        msgs = ((InternalEObject)newSpecification).eInverseAdd(this, InfAssetsPackage.SPECIFICATION__ASSET_PROPERTY_CURVES, Specification.class, msgs);
      msgs = basicSetSpecification(newSpecification, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, InfAssetsPackage.ASSET_PROPERTY_CURVE__SPECIFICATION, newSpecification, newSpecification));
  }

  /**
   * <!-- begin-user-doc -->
   * Lazily creates the inverse-resolving list on first access.
   * <!-- end-user-doc -->
   * @generated
   */
  public EList<Asset> getAssets() {
    if (assets == null) {
      assets = new EObjectWithInverseResolvingEList.ManyInverse<Asset>(Asset.class, this, InfAssetsPackage.ASSET_PROPERTY_CURVE__ASSETS, AssetsPackage.ASSET__ASSET_PROPERTY_CURVES);
    }
    return assets;
  }

  /**
   * <!-- begin-user-doc -->
   * Inverse-add dispatch: an existing specification is first detached so the 1..1 end stays
   * consistent.
   * <!-- end-user-doc -->
   * @generated
   */
  @SuppressWarnings("unchecked")
  @Override
  public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
    switch (featureID) {
      case InfAssetsPackage.ASSET_PROPERTY_CURVE__SPECIFICATION:
        if (specification != null)
          msgs = ((InternalEObject)specification).eInverseRemove(this, InfAssetsPackage.SPECIFICATION__ASSET_PROPERTY_CURVES, Specification.class, msgs);
        return basicSetSpecification((Specification)otherEnd, msgs);
      case InfAssetsPackage.ASSET_PROPERTY_CURVE__ASSETS:
        return ((InternalEList<InternalEObject>)(InternalEList<?>)getAssets()).basicAdd(otherEnd, msgs);
    }
    return super.eInverseAdd(otherEnd, featureID, msgs);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
    switch (featureID) {
      case InfAssetsPackage.ASSET_PROPERTY_CURVE__SPECIFICATION:
        return basicSetSpecification(null, msgs);
      case InfAssetsPackage.ASSET_PROPERTY_CURVE__ASSETS:
        return ((InternalEList<?>)getAssets()).basicRemove(otherEnd, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }

  /**
   * <!-- begin-user-doc -->
   * Reflective read; 'resolve' selects between proxy-resolving and raw access.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType) {
    switch (featureID) {
      case InfAssetsPackage.ASSET_PROPERTY_CURVE__SPECIFICATION:
        if (resolve) return getSpecification();
        return basicGetSpecification();
      case InfAssetsPackage.ASSET_PROPERTY_CURVE__ASSETS:
        return getAssets();
    }
    return super.eGet(featureID, resolve, coreType);
  }

  /**
   * <!-- begin-user-doc -->
   * Reflective write; the assets list is replaced wholesale (clear + addAll).
   * <!-- end-user-doc -->
   * @generated
   */
  @SuppressWarnings("unchecked")
  @Override
  public void eSet(int featureID, Object newValue) {
    switch (featureID) {
      case InfAssetsPackage.ASSET_PROPERTY_CURVE__SPECIFICATION:
        setSpecification((Specification)newValue);
        return;
      case InfAssetsPackage.ASSET_PROPERTY_CURVE__ASSETS:
        getAssets().clear();
        getAssets().addAll((Collection<? extends Asset>)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID) {
    switch (featureID) {
      case InfAssetsPackage.ASSET_PROPERTY_CURVE__SPECIFICATION:
        setSpecification((Specification)null);
        return;
      case InfAssetsPackage.ASSET_PROPERTY_CURVE__ASSETS:
        getAssets().clear();
        return;
    }
    super.eUnset(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * True when the feature differs from its default (non-null reference / non-empty list).
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID) {
    switch (featureID) {
      case InfAssetsPackage.ASSET_PROPERTY_CURVE__SPECIFICATION:
        return specification != null;
      case InfAssetsPackage.ASSET_PROPERTY_CURVE__ASSETS:
        return assets != null && !assets.isEmpty();
    }
    return super.eIsSet(featureID);
  }
} //AssetPropertyCurveImpl
| |
/*******************************************************************************
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package com.google.cloud.dataflow.sdk.runners.worker;
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
import com.google.cloud.dataflow.sdk.options.StreamingOptions;
import com.google.cloud.dataflow.sdk.transforms.DoFn;
import com.google.cloud.dataflow.sdk.transforms.windowing.BoundedWindow;
import com.google.cloud.dataflow.sdk.util.DoFnInfo;
import com.google.cloud.dataflow.sdk.util.DoFnRunner;
import com.google.cloud.dataflow.sdk.util.DoFnRunner.OutputManager;
import com.google.cloud.dataflow.sdk.util.ExecutionContext;
import com.google.cloud.dataflow.sdk.util.ExecutionContext.StepContext;
import com.google.cloud.dataflow.sdk.util.SideInputReader;
import com.google.cloud.dataflow.sdk.util.StreamingSideInputDoFnRunner;
import com.google.cloud.dataflow.sdk.util.WindowedValue;
import com.google.cloud.dataflow.sdk.util.common.CounterSet;
import com.google.cloud.dataflow.sdk.util.common.worker.ElementCounter;
import com.google.cloud.dataflow.sdk.util.common.worker.OutputReceiver;
import com.google.cloud.dataflow.sdk.util.common.worker.ParDoFn;
import com.google.cloud.dataflow.sdk.util.common.worker.Receiver;
import com.google.cloud.dataflow.sdk.util.common.worker.StateSampler;
import com.google.cloud.dataflow.sdk.values.TupleTag;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
/**
* A base class providing simple set up, processing, and tear down for a wrapped
* {@link DoFn}.
*
* <p>Subclasses override just a method to provide a {@link DoFnInfo} for the
* wrapped {@link DoFn}.
*/
public abstract class ParDoFnBase implements ParDoFn {
// Pipeline options; consulted at bundle start to detect streaming mode.
private final PipelineOptions options;
// Side-input reader; its emptiness decides whether the streaming side-input runner is used.
private final SideInputReader sideInputReader;
// Tag of the first (main) output; routed to receivers[0].
private final TupleTag<Object> mainOutputTag;
// Tags of the remaining (side) outputs, in receiver order (receivers[i + 1]).
private final List<TupleTag<?>> sideOutputTags;
// Step/transform names used to look up the per-step context.
private final String stepName;
private final String transformName;
// Execution context for per-step state; may be null.
private final ExecutionContext executionContext;
// Used to create output counters for undeclared outputs.
private final CounterSet.AddCounterMutator addCounterMutator;
private final StateSampler stateSampler;

/** The DoFnRunner executing a batch. Null between batches. */
private DoFnRunner<Object, Object> fnRunner;
/** Returns the {@link ExecutionContext} supplied at construction; may be {@code null}. */
public ExecutionContext getExecutionContext() {
  return executionContext;
}
/**
 * Creates a {@link ParDoFnBase} using basic information about the step being executed.
 *
 * @param outputTags ids of the step's outputs; the first is the main output, the rest
 *        are side outputs (at least one is required)
 */
protected ParDoFnBase(
    PipelineOptions options,
    SideInputReader sideInputReader,
    List<String> outputTags,
    String stepName,
    String transformName,
    ExecutionContext executionContext,
    CounterSet.AddCounterMutator addCounterMutator,
    StateSampler stateSampler) {
  Preconditions.checkArgument(outputTags.size() > 0, "expected at least one output");
  this.options = options;
  // We vend a freshly deserialized version for each run
  this.sideInputReader = sideInputReader;
  // First tag is the main output; any remaining tags become side outputs.
  this.mainOutputTag = new TupleTag<>(outputTags.get(0));
  this.sideOutputTags = new ArrayList<>();
  for (int i = 1; i < outputTags.size(); i++) {
    this.sideOutputTags.add(new TupleTag<Object>(outputTags.get(i)));
  }
  this.stepName = stepName;
  this.transformName = transformName;
  this.executionContext = executionContext;
  this.addCounterMutator = addCounterMutator;
  this.stateSampler = stateSampler;
}
/**
 * Creates a fresh {@link DoFnInfo}. This will be called for each bundle, so
 * implementations should vend a newly deserialized instance per call rather than
 * caching one (see the constructor note about vending a fresh version for each run).
 */
protected abstract DoFnInfo<?, ?> getDoFnInfo();
@Override
public void startBundle(final Receiver... receivers) throws Exception {
if (receivers.length != sideOutputTags.size() + 1) {
throw new AssertionError(
"unexpected number of receivers for DoFn");
}
StepContext stepContext = null;
if (executionContext != null) {
stepContext = executionContext.getOrCreateStepContext(stepName, transformName, stateSampler);
}
@SuppressWarnings("unchecked")
DoFnInfo<Object, Object> doFnInfo = (DoFnInfo<Object, Object>) getDoFnInfo();
OutputManager outputManager = new OutputManager() {
final Map<TupleTag<?>, OutputReceiver> undeclaredOutputs = new HashMap<>();
@Nullable
private Receiver getReceiverOrNull(TupleTag<?> tag) {
if (tag.equals(mainOutputTag)) {
return receivers[0];
} else if (sideOutputTags.contains(tag)) {
return receivers[sideOutputTags.indexOf(tag) + 1];
} else {
return undeclaredOutputs.get(tag);
}
}
@Override
public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
Receiver receiver = getReceiverOrNull(tag);
if (receiver == null) {
// A new undeclared output.
// TODO: plumb through the operationName, so that we can
// name implicit outputs after it.
String outputName = "implicit-" + tag.getId();
// TODO: plumb through the counter prefix, so we can
// make it available to the OutputReceiver class in case
// it wants to use it in naming output counters. (It
// doesn't today.)
OutputReceiver undeclaredReceiver = new OutputReceiver();
ElementCounter outputCounter = new DataflowOutputCounter(outputName, addCounterMutator);
undeclaredReceiver.addOutputCounter(outputCounter);
undeclaredOutputs.put(tag, undeclaredReceiver);
receiver = undeclaredReceiver;
}
try {
receiver.process(output);
} catch (Throwable t) {
throw Throwables.propagate(t);
}
}
};
if (options.as(StreamingOptions.class).isStreaming() && !sideInputReader.isEmpty()) {
fnRunner = new StreamingSideInputDoFnRunner<Object, Object, BoundedWindow>(
options,
doFnInfo,
sideInputReader,
outputManager,
mainOutputTag,
sideOutputTags,
stepContext,
addCounterMutator);
} else {
fnRunner = DoFnRunner.create(
options,
doFnInfo.getDoFn(),
sideInputReader,
outputManager,
mainOutputTag,
sideOutputTags,
stepContext,
addCounterMutator,
doFnInfo.getWindowingStrategy());
}
fnRunner.startBundle();
}
@Override
@SuppressWarnings("unchecked")
public void processElement(Object elem) throws Exception {
fnRunner.processElement((WindowedValue<Object>) elem);
}
@Override
public void finishBundle() throws Exception {
fnRunner.finishBundle();
fnRunner = null;
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.sql.plugin;
import org.elasticsearch.common.Strings;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.common.xcontent.MediaType;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.xpack.ql.util.StringUtils;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.action.BasicFormatter;
import org.elasticsearch.xpack.sql.action.SqlQueryResponse;
import org.elasticsearch.xpack.sql.proto.ColumnInfo;
import org.elasticsearch.xpack.sql.session.Cursor;
import org.elasticsearch.xpack.sql.session.Cursors;
import org.elasticsearch.xpack.sql.util.DateUtils;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import static org.elasticsearch.xpack.sql.action.BasicFormatter.FormatOption.TEXT;
import static org.elasticsearch.xpack.sql.proto.Protocol.URL_PARAM_DELIMITER;
/**
* Templating class for displaying SQL responses in text formats.
*/
enum TextFormat implements MediaType {

    /**
     * Default text writer.
     *
     * The implementation is a bit weird since state needs to be passed around, namely the formatter
     * since it is initialized based on the first page of data.
     * To avoid leaking the formatter, it gets discovered again in the wrapping method to attach it
     * to the next cursor and so on.
     */
    PLAIN_TEXT() {
        @Override
        String format(RestRequest request, SqlQueryResponse response) {
            BasicFormatter formatter = null;
            Cursor cursor = null;
            ZoneId zoneId = null;

            // check if the cursor is already wrapped first
            if (response.hasCursor()) {
                Tuple<Cursor, ZoneId> tuple = Cursors.decodeFromStringWithZone(response.cursor());
                cursor = tuple.v1();
                zoneId = tuple.v2();
                if (cursor instanceof TextFormatterCursor) {
                    formatter = ((TextFormatterCursor) cursor).getFormatter();
                }
            }

            // if there are headers available, it means it's the first request
            // so initialize the underlying formatter and wrap it in the cursor
            if (response.columns() != null) {
                formatter = new BasicFormatter(response.columns(), response.rows(), TEXT);
                // if there's a cursor, wrap the formatter in it
                if (cursor != null) {
                    response.cursor(Cursors.encodeToString(new TextFormatterCursor(cursor, formatter), zoneId));
                }
                // format with header
                return formatter.formatWithHeader(response.columns(), response.rows());
            } else if (formatter != null) { // should be initialized (wrapped by the cursor)
                // format without header
                return formatter.formatWithoutHeader(response.rows());
            } else if (response.hasId()) {
                // an async request has no results yet
                return StringUtils.EMPTY;
            }
            // if this code is reached, it means it's a next page without cursor wrapping
            throw new SqlIllegalArgumentException("Cannot find text formatter - this is likely a bug");
        }

        @Override
        public String queryParameter() {
            return FORMAT_TEXT;
        }

        @Override
        String contentType() {
            return CONTENT_TYPE_TXT;
        }

        // Plain text uses the column-aligned BasicFormatter above, never the
        // delimiter-driven base-class format() path, so these are unreachable.
        @Override
        protected Character delimiter() {
            throw new UnsupportedOperationException();
        }

        @Override
        protected String eol() {
            throw new UnsupportedOperationException();
        }

        @Override
        public Set<HeaderValue> headerValues() {
            return Set.of(
                new HeaderValue(CONTENT_TYPE_TXT,
                    Map.of("header", "present|absent")),
                new HeaderValue(VENDOR_CONTENT_TYPE_TXT,
                    Map.of("header", "present|absent", COMPATIBLE_WITH_PARAMETER_NAME, VERSION_PATTERN)));
        }
    },

    /**
     * Comma Separated Values implementation.
     *
     * Based on:
     * https://tools.ietf.org/html/rfc4180
     * https://www.iana.org/assignments/media-types/text/csv
     * https://www.w3.org/TR/sparql11-results-csv-tsv/
     *
     */
    CSV() {
        @Override
        protected Character delimiter() {
            return ',';
        }

        @Override
        protected String eol() {
            //CRLF
            return "\r\n";
        }

        @Override
        public String queryParameter() {
            return FORMAT_CSV;
        }

        @Override
        String contentType() {
            return CONTENT_TYPE_CSV;
        }

        @Override
        String contentType(RestRequest request) {
            // Advertise whether the header row is included, as required by RFC 4180's
            // "header" media-type parameter.
            return contentType() + "; charset=utf-8; " +
                URL_PARAM_HEADER + "=" + (hasHeader(request) ? PARAM_HEADER_PRESENT : PARAM_HEADER_ABSENT);
        }

        /**
         * Resolves the delimiter from the request's "delimiter" URL parameter,
         * rejecting multi-character values and characters reserved by the CSV
         * quoting rules. Falls back to the default comma when absent.
         */
        @Override
        protected Character delimiter(RestRequest request) {
            String delimiterParam = request.param(URL_PARAM_DELIMITER);
            if (delimiterParam == null) {
                return delimiter();
            }
            delimiterParam = URLDecoder.decode(delimiterParam, StandardCharsets.UTF_8);
            if (delimiterParam.length() != 1) {
                throw new IllegalArgumentException("invalid " +
                    (delimiterParam.length() > 0 ? "multi-character" : "empty") + " delimiter [" + delimiterParam + "]");
            }
            Character delimiter = delimiterParam.charAt(0);
            switch (delimiter) {
                case '"':
                case '\n':
                case '\r':
                    throw new IllegalArgumentException("illegal reserved character specified as delimiter [" + delimiter + "]");
                case '\t':
                    throw new IllegalArgumentException("illegal delimiter [TAB] specified as delimiter for the [csv] format; " +
                        "choose the [tsv] format instead");
            }
            return delimiter;
        }

        /**
         * RFC 4180 escaping: wrap the value in double quotes when it contains a
         * quote, CR, LF, or the delimiter, doubling any embedded quotes.
         */
        @Override
        String maybeEscape(String value, Character delimiter) {
            boolean needsEscaping = false;

            for (int i = 0; i < value.length(); i++) {
                char c = value.charAt(i);
                if (c == '"' || c == '\n' || c == '\r' || c == delimiter) {
                    needsEscaping = true;
                    break;
                }
            }

            if (needsEscaping) {
                StringBuilder sb = new StringBuilder();

                sb.append('"');
                for (int i = 0; i < value.length(); i++) {
                    char c = value.charAt(i);
                    if (value.charAt(i) == '"') {
                        sb.append('"');
                    }
                    sb.append(c);
                }
                sb.append('"');
                value = sb.toString();
            }

            return value;
        }

        /**
         * Header is shown unless disabled via the "header" URL parameter or a
         * "header=absent" parameter inside any Accept header value.
         */
        @Override
        boolean hasHeader(RestRequest request) {
            String header = request.param(URL_PARAM_HEADER);
            if (header == null) {
                List<String> values = request.getAllHeaderValues("Accept");
                if (values != null) {
                    // header values are separated by `;` so try breaking it down
                    for (String value : values) {
                        String[] params = Strings.tokenizeToStringArray(value, ";");
                        for (String param : params) {
                            if (param.toLowerCase(Locale.ROOT).equals(URL_PARAM_HEADER + "=" + PARAM_HEADER_ABSENT)) {
                                return false;
                            }
                        }
                    }
                }
                return true;
            } else {
                return header.toLowerCase(Locale.ROOT).equals(PARAM_HEADER_ABSENT) == false;
            }
        }

        @Override
        public Set<HeaderValue> headerValues() {
            return Set.of(
                new HeaderValue(CONTENT_TYPE_CSV,
                    Map.of("header", "present|absent","delimiter", ".+")),// more detailed parsing is in TextFormat.CSV#delimiter
                new HeaderValue(VENDOR_CONTENT_TYPE_CSV,
                    Map.of("header", "present|absent","delimiter", ".+", COMPATIBLE_WITH_PARAMETER_NAME, VERSION_PATTERN)));
        }
    },

    TSV() {
        @Override
        protected Character delimiter() {
            return '\t';
        }

        @Override
        protected String eol() {
            // only LF
            return "\n";
        }

        @Override
        public String queryParameter() {
            return FORMAT_TSV;
        }

        @Override
        String contentType() {
            return CONTENT_TYPE_TSV;
        }

        @Override
        String contentType(RestRequest request) {
            return contentType() + "; charset=utf-8";
        }

        /**
         * TSV has no quoting; embedded newlines and tabs are backslash-escaped
         * instead. The delimiter argument is ignored (always TAB).
         */
        @Override
        String maybeEscape(String value, Character __) {
            StringBuilder sb = new StringBuilder();

            for (int i = 0; i < value.length(); i++) {
                char c = value.charAt(i);
                switch (c) {
                    case '\n' :
                        sb.append("\\n");
                        break;
                    case '\t' :
                        sb.append("\\t");
                        break;
                    default:
                        sb.append(c);
                }
            }

            return sb.toString();
        }

        @Override
        public Set<HeaderValue> headerValues() {
            return Set.of(
                new HeaderValue(CONTENT_TYPE_TSV, Map.of("header", "present|absent")),
                new HeaderValue(VENDOR_CONTENT_TYPE_TSV,
                    Map.of("header", "present|absent", COMPATIBLE_WITH_PARAMETER_NAME, VERSION_PATTERN)));
        }
    };

    // "format" query-parameter values.
    private static final String FORMAT_TEXT = "txt";
    private static final String FORMAT_CSV = "csv";
    private static final String FORMAT_TSV = "tsv";
    // Plain and vendor-prefixed (compatible-API) content types per format.
    private static final String CONTENT_TYPE_TXT = "text/plain";
    private static final String VENDOR_CONTENT_TYPE_TXT = "text/vnd.elasticsearch+plain";
    private static final String CONTENT_TYPE_CSV = "text/csv";
    private static final String VENDOR_CONTENT_TYPE_CSV = "text/vnd.elasticsearch+csv";
    private static final String CONTENT_TYPE_TSV = "text/tab-separated-values";
    private static final String VENDOR_CONTENT_TYPE_TSV = "text/vnd.elasticsearch+tab-separated-values";
    // "header" URL/media-type parameter and its two accepted values.
    private static final String URL_PARAM_HEADER = "header";
    private static final String PARAM_HEADER_ABSENT = "absent";
    private static final String PARAM_HEADER_PRESENT = "present";

    /**
     * Renders the response as delimited rows: an optional header row (first page
     * only) followed by one line per data row. PLAIN_TEXT overrides this entirely.
     */
    String format(RestRequest request, SqlQueryResponse response) {
        StringBuilder sb = new StringBuilder();

        // if the header is requested (and the column info is present - namely it's the first page) return the info
        if (hasHeader(request) && response.columns() != null) {
            row(sb, response.columns(), ColumnInfo::name, delimiter(request));
        }

        for (List<Object> row : response.rows()) {
            row(sb, row, f -> f instanceof ZonedDateTime ? DateUtils.toString((ZonedDateTime) f) : Objects.toString(f, StringUtils.EMPTY),
                delimiter(request));
        }

        return sb.toString();
    }

    /** Whether the header row should be emitted; CSV overrides to honor request params. */
    boolean hasHeader(RestRequest request) {
        return true;
    }

    /**
     * Formal IANA mime type.
     */
    abstract String contentType();

    /**
     * Content type depending on the request.
     * Might be used by some formatters (like CSV) to specify certain metadata like
     * whether the header is returned or not.
     */
    String contentType(RestRequest request) {
        return contentType();
    }

    // utility method for consuming a row.
    <F> void row(StringBuilder sb, List<F> row, Function<F, String> toString, Character delimiter) {
        for (int i = 0; i < row.size(); i++) {
            sb.append(maybeEscape(toString.apply(row.get(i)), delimiter));
            if (i < row.size() - 1) {
                sb.append(delimiter);
            }
        }
        sb.append(eol());
    }

    /**
     * Delimiter between fields
     */
    protected abstract Character delimiter();

    /** Request-aware delimiter; defaults to the static one. CSV overrides. */
    protected Character delimiter(RestRequest request) {
        return delimiter();
    }

    /**
     * String indicating end-of-line or row.
     */
    protected abstract String eol();

    /**
     * Method used for escaping (if needed) a given value.
     */
    String maybeEscape(String value, Character delimiter) {
        return value;
    }
}
| |
/**
* Copyright (c) 2013-2020 Contributors to the Eclipse Foundation
*
* <p> See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.analytic.spark;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.broadcast.Broadcast;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.index.ByteArray;
import org.locationtech.geowave.core.index.InsertionIds;
import org.locationtech.geowave.core.index.NumericIndexStrategy;
import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData;
import org.locationtech.geowave.mapreduce.input.GeoWaveInputKey;
import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.Geometry;
import org.opengis.feature.simple.SimpleFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
import scala.Tuple2;
public class GeoWaveIndexedRDD implements Serializable {

  /** Required for Spark closure serialization. */
  private static final long serialVersionUID = 1L;
  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveIndexedRDD.class);

  /** Source RDD of (key, feature) tuples that gets indexed. */
  private final GeoWaveRDD geowaveRDD;
  // Lazily-computed, cached index views of the raw RDD.
  private JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>> rawFeatureRDD = null;
  private JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> rawGeometryRDD = null;
  // Because it can be expensive to serialize IndexStrategy for every record.
  // Index strategy must be able to be broadcast.
  private Broadcast<NumericIndexStrategy> indexStrategy = null;

  /**
   * Creates an indexed view over the given RDD using the broadcast index strategy.
   *
   * @param geowaveRDD the loaded source RDD to index
   * @param indexStrategy broadcast strategy used to compute insertion ids
   */
  public GeoWaveIndexedRDD(
      final GeoWaveRDD geowaveRDD,
      final Broadcast<NumericIndexStrategy> indexStrategy) {
    this.geowaveRDD = geowaveRDD;
    this.indexStrategy = indexStrategy;
  }

  /** Drops both cached index views so they are recomputed on next access. */
  public void reset() {
    rawFeatureRDD = null;
    rawGeometryRDD = null;
  }

  /**
   * Replaces the broadcast index strategy and invalidates any cached RDDs.
   *
   * @param newIndexStrategy the strategy to use for subsequent indexing
   */
  @SuppressWarnings("unchecked") // safe upcast of the broadcast's type argument
  public void reindex(final Broadcast<? extends NumericIndexStrategy> newIndexStrategy) {
    // Remove original indexing strategy
    if (indexStrategy != null) {
      indexStrategy.unpersist();
    }
    indexStrategy = (Broadcast<NumericIndexStrategy>) newIndexStrategy;
    reset();
  }

  /** Convenience overload of {@link #getIndexedFeatureRDD(double)} with no buffering. */
  public JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>> getIndexedFeatureRDD() {
    return this.getIndexedFeatureRDD(0.0);
  }

  /**
   * Returns (and caches) the RDD keyed by insertion id, pairing each id with the
   * original (key, feature) tuple. Features with null/empty geometry are dropped.
   *
   * @param bufferAmount amount to expand each feature's envelope before indexing
   * @return the indexed RDD, or null if prerequisites are missing (already logged)
   */
  public JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>> getIndexedFeatureRDD(
      final double bufferAmount) {
    // BUG FIX: the result of verifyParameters() was previously ignored, so a
    // missing RDD or index strategy led to an NPE instead of a logged error.
    if (!verifyParameters()) {
      return null;
    }
    if (!geowaveRDD.isLoaded()) {
      LOGGER.error("Must provide a loaded RDD.");
      return null;
    }
    if (rawFeatureRDD == null) {
      final JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>> indexedData =
          geowaveRDD.getRawRDD().flatMapToPair(
              new PairFlatMapFunction<Tuple2<GeoWaveInputKey, SimpleFeature>, ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>>() {
                /** Required for Spark closure serialization. */
                private static final long serialVersionUID = 1L;

                @Override
                public Iterator<Tuple2<ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>>> call(
                    final Tuple2<GeoWaveInputKey, SimpleFeature> t) throws Exception {
                  // Pull feature to index from tuple
                  final SimpleFeature inputFeature = t._2;
                  // If we are dealing with null or empty
                  // geometry we can't properly compare this
                  // feature.
                  final Geometry geom = (Geometry) inputFeature.getDefaultGeometry();
                  if (geom == null) {
                    return Collections.emptyIterator();
                  }
                  final Envelope internalEnvelope = geom.getEnvelopeInternal();
                  if (internalEnvelope.isNull()) {
                    return Collections.emptyIterator();
                  }
                  // If we have to buffer geometry for
                  // predicate expand bounds
                  internalEnvelope.expandBy(bufferAmount);
                  // Get data range from expanded envelope
                  final MultiDimensionalNumericData boundsRange =
                      GeometryUtils.getBoundsFromEnvelope(internalEnvelope);
                  final NumericIndexStrategy index = indexStrategy.value();
                  InsertionIds insertIds = index.getInsertionIds(boundsRange, 80);
                  // If we didnt expand the envelope for
                  // buffering we can trim the indexIds by the
                  // geometry
                  if (bufferAmount == 0.0) {
                    insertIds = RDDUtils.trimIndexIds(insertIds, geom, index);
                  }
                  // Flattened output array, presized (consistent with
                  // getIndexedGeometryRDD).
                  final List<Tuple2<ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>>> result =
                      Lists.newArrayListWithCapacity(insertIds.getSize());
                  for (final Iterator<byte[]> iter =
                      insertIds.getCompositeInsertionIds().iterator(); iter.hasNext();) {
                    final byte[] id = iter.next();
                    final Tuple2<GeoWaveInputKey, SimpleFeature> valuePair =
                        new Tuple2<>(t._1, inputFeature);
                    final Tuple2<ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>> indexPair =
                        new Tuple2<>(new ByteArray(id), valuePair);
                    result.add(indexPair);
                  }
                  return result.iterator();
                }
              });
      rawFeatureRDD = indexedData;
    }
    return rawFeatureRDD;
  }

  /** Convenience overload of {@link #getIndexedGeometryRDD(double, boolean)}. */
  public JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> getIndexedGeometryRDD() {
    return this.getIndexedGeometryRDD(0.0, false);
  }

  /**
   * Returns (and caches) the RDD keyed by insertion id, pairing each id with the
   * original key and the feature's geometry only.
   *
   * @param bufferAmount amount to expand each geometry's envelope before indexing
   * @param recalculate when true, recompute even if a cached value exists
   * @return the indexed RDD, or null if prerequisites are missing (already logged)
   */
  public JavaPairRDD<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> getIndexedGeometryRDD(
      final double bufferAmount,
      final boolean recalculate) {
    // BUG FIX: check the verification result (see getIndexedFeatureRDD).
    if (!verifyParameters()) {
      return null;
    }
    if (!geowaveRDD.isLoaded()) {
      LOGGER.error("Must provide a loaded RDD.");
      return null;
    }
    if ((rawGeometryRDD == null) || recalculate) {
      rawGeometryRDD =
          geowaveRDD.getRawRDD().filter(
              t -> ((t._2.getDefaultGeometry() != null)
                  && !((Geometry) t._2.getDefaultGeometry()).getEnvelopeInternal().isNull())).flatMapToPair(
                      new PairFlatMapFunction<Tuple2<GeoWaveInputKey, SimpleFeature>, ByteArray, Tuple2<GeoWaveInputKey, Geometry>>() {
                        /** Required for Spark closure serialization. */
                        private static final long serialVersionUID = 1L;

                        @Override
                        public Iterator<Tuple2<ByteArray, Tuple2<GeoWaveInputKey, Geometry>>> call(
                            final Tuple2<GeoWaveInputKey, SimpleFeature> t) throws Exception {
                          // Pull feature to index from tuple
                          final SimpleFeature inputFeature = t._2;
                          // Null/empty geometries were already removed by the
                          // filter above.
                          final Geometry geom = (Geometry) inputFeature.getDefaultGeometry();
                          final Envelope internalEnvelope = geom.getEnvelopeInternal();
                          // If we have to buffer geometry for
                          // predicate expand bounds
                          internalEnvelope.expandBy(bufferAmount);
                          // Get data range from expanded envelope
                          final MultiDimensionalNumericData boundsRange =
                              GeometryUtils.getBoundsFromEnvelope(internalEnvelope);
                          final NumericIndexStrategy index = indexStrategy.value();
                          InsertionIds insertIds = index.getInsertionIds(boundsRange, 80);
                          // If we didnt expand the envelope for
                          // buffering we can trim the indexIds by the
                          // geometry
                          if (bufferAmount == 0.0) {
                            insertIds = RDDUtils.trimIndexIds(insertIds, geom, index);
                          }
                          // Flattened output array.
                          final List<Tuple2<ByteArray, Tuple2<GeoWaveInputKey, Geometry>>> result =
                              Lists.newArrayListWithCapacity(insertIds.getSize());
                          for (final Iterator<byte[]> iter =
                              insertIds.getCompositeInsertionIds().iterator(); iter.hasNext();) {
                            final byte[] id = iter.next();
                            final Tuple2<GeoWaveInputKey, Geometry> valuePair =
                                new Tuple2<>(t._1, geom);
                            final Tuple2<ByteArray, Tuple2<GeoWaveInputKey, Geometry>> indexPair =
                                new Tuple2<>(new ByteArray(id), valuePair);
                            result.add(indexPair);
                          }
                          return result.iterator();
                        }
                      });
    }
    return rawGeometryRDD;
  }

  /** Returns the broadcast index strategy currently in use. */
  public Broadcast<NumericIndexStrategy> getIndexStrategy() {
    return indexStrategy;
  }

  /** Returns the wrapped source RDD. */
  public GeoWaveRDD getGeoWaveRDD() {
    return geowaveRDD;
  }

  /**
   * Validates that both the source RDD and the broadcast strategy are set,
   * logging the specific problem when not.
   *
   * @return true when indexing can proceed
   */
  private boolean verifyParameters() {
    if (geowaveRDD == null) {
      LOGGER.error("Must supply a input rdd to index. Please set one and try again.");
      return false;
    }
    if (indexStrategy == null) {
      LOGGER.error("Broadcasted strategy must be set before features can be indexed.");
      return false;
    }
    return true;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you maynot use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end;
import static org.apache.phoenix.util.TestUtil.ROW1;
import static org.apache.phoenix.util.TestUtil.ROW7;
import static org.apache.phoenix.util.TestUtil.ROW9;
import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.Collection;
import java.util.Properties;
import org.apache.phoenix.util.PropertiesUtil;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
public class CastAndCoerceIT extends BaseQueryIT {

    public CastAndCoerceIT(String indexDDL, boolean columnEncoded, boolean keepDeletedCells) {
        super(indexDDL, columnEncoded, keepDeletedCells);
    }

    @Parameters(name="CastAndCoerceIT_{index}") // name is used by failsafe as file name in reports
    public static synchronized Collection<Object> data() {
        return BaseQueryIT.allIndexes();
    }

    /** CAST in the SELECT list: integer cast to DECIMAL keeps the fraction. */
    @Test
    public void testCastOperatorInSelect() throws Exception {
        String query = "SELECT CAST(a_integer AS decimal)/2 FROM " + tableName + " WHERE ?=organization_id and 5=a_integer";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            ResultSet rs = statement.executeQuery();
            assertTrue (rs.next());
            assertEquals(BigDecimal.valueOf(2.5), rs.getBigDecimal(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /** CAST in the WHERE clause: DECIMAL comparison selects the expected row. */
    @Test
    public void testCastOperatorInWhere() throws Exception {
        String query = "SELECT a_integer FROM " + tableName + " WHERE ?=organization_id and 2.5 = CAST(a_integer AS DECIMAL)/2 ";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            ResultSet rs = statement.executeQuery();
            assertTrue (rs.next());
            assertEquals(5, rs.getInt(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /** Implicit coercion of INTEGER to LONG in a comparison. */
    @Test
    public void testCoerceIntegerToLong() throws Exception {
        String query = "SELECT entity_id FROM " + tableName + " WHERE organization_id=? AND x_long >= x_integer";
        String url = getUrl();
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW7, rs.getString(1));
            assertTrue(rs.next());
            assertEquals(ROW9, rs.getString(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /** Implicit coercion with DECIMAL on the left-hand side. */
    @Test
    public void testCoerceLongToDecimal1() throws Exception {
        String query = "SELECT entity_id FROM " + tableName + " WHERE organization_id=? AND x_decimal > x_integer";
        String url = getUrl();
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW9, rs.getString(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /** Implicit coercion with DECIMAL on the right-hand side. */
    @Test
    public void testCoerceLongToDecimal2() throws Exception {
        String query = "SELECT entity_id FROM " + tableName + " WHERE organization_id=? AND x_integer <= x_decimal";
        String url = getUrl();
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW9, rs.getString(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /** Implicit coercion of TINYINT to SMALLINT in a comparison. */
    @Test
    public void testCoerceTinyIntToSmallInt() throws Exception {
        String query = "SELECT entity_id FROM " + tableName + " WHERE organization_id=? AND a_byte >= a_short";
        String url = getUrl();
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW9, rs.getString(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /**
     * Round-trips DATE/TIME/TIMESTAMP values through BIGINT/DECIMAL casts:
     * reads the cast value back and uses it to re-select the same row.
     */
    @Test
    public void testCoerceDateToBigInt() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        PreparedStatement statement;
        ResultSet rs;
        String query;
        long dateAsLong;
        BigDecimal dateAsDecimal;
        String url;
        Connection conn;

        url = getUrl();
        conn = DriverManager.getConnection(url, props);
        try {
            conn.setAutoCommit(true);
            conn.createStatement().execute("UPSERT INTO " + tableName + " (organization_id,entity_id,a_time,a_timestamp) SELECT organization_id,entity_id,a_date,a_date FROM " + tableName);
        } finally {
            // BUG FIX: this seeding connection was previously leaked — it was
            // reassigned below without ever being closed.
            conn.close();
        }

        conn = DriverManager.getConnection(url, props);
        try {
            query = "SELECT entity_id, CAST(a_date AS BIGINT) FROM " + tableName + " WHERE organization_id=? AND a_date IS NOT NULL LIMIT 1";
            statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW1, rs.getString(1));
            dateAsLong = rs.getLong(2);
            assertFalse(rs.next());

            query = "SELECT entity_id FROM " + tableName + " WHERE organization_id=? AND a_date = CAST(? AS DATE) LIMIT 1";
            statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            statement.setLong(2, dateAsLong);
            rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW1, rs.getString(1));
            assertFalse(rs.next());

            query = "SELECT entity_id, CAST(a_time AS BIGINT) FROM " + tableName + " WHERE organization_id=? AND a_time IS NOT NULL LIMIT 1";
            statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW1, rs.getString(1));
            dateAsLong = rs.getLong(2);
            assertFalse(rs.next());

            query = "SELECT entity_id FROM " + tableName + " WHERE organization_id=? AND a_time = CAST(? AS TIME) LIMIT 1";
            statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            statement.setLong(2, dateAsLong);
            rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW1, rs.getString(1));
            assertFalse(rs.next());

            query = "SELECT entity_id, CAST(a_timestamp AS DECIMAL) FROM " + tableName + " WHERE organization_id=? AND a_timestamp IS NOT NULL LIMIT 1";
            statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW1, rs.getString(1));
            dateAsDecimal = rs.getBigDecimal(2);
            assertFalse(rs.next());

            query = "SELECT entity_id FROM " + tableName + " WHERE organization_id=? AND a_timestamp = CAST(? AS TIMESTAMP) LIMIT 1";
            statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            statement.setBigDecimal(2, dateAsDecimal);
            rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW1, rs.getString(1));
            assertFalse(rs.next());

            query = "SELECT entity_id, CAST(a_timestamp AS BIGINT) FROM " + tableName + " WHERE organization_id=? AND a_timestamp IS NOT NULL LIMIT 1";
            statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW1, rs.getString(1));
            dateAsLong = rs.getLong(2);
            assertFalse(rs.next());

            query = "SELECT entity_id FROM " + tableName + " WHERE organization_id=? AND a_timestamp = CAST(? AS TIMESTAMP) LIMIT 1";
            statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            statement.setLong(2, dateAsLong);
            rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW1, rs.getString(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }
}
| |
/*L
* Copyright SAIC, SAIC-Frederick.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/caadapter/LICENSE.txt for details.
*/
package gov.nih.nci.caadapter.ui.common.jgraph;
import gov.nih.nci.caadapter.common.Log;
import gov.nih.nci.caadapter.common.MetaObject;
import gov.nih.nci.caadapter.common.function.meta.ParameterMeta;
import gov.nih.nci.caadapter.hl7.datatype.DatatypeBaseObject;
import gov.nih.nci.caadapter.ui.common.MappableNode;
import gov.nih.nci.caadapter.ui.common.tree.DefaultMappableTreeNode;
import gov.nih.nci.caadapter.ui.common.tree.DefaultTargetTreeNode;
import gov.nih.nci.caadapter.ui.common.tree.DefaultSourceTreeNode;
import gov.nih.nci.caadapter.common.metadata.AttributeMetadata;
import gov.nih.nci.caadapter.common.metadata.ObjectMetadata;
import gov.nih.nci.caadapter.common.metadata.AssociationMetadata;
import gov.nih.nci.caadapter.common.metadata.TableMetadata;
import gov.nih.nci.caadapter.common.metadata.ColumnMetadata;
import org.jgraph.graph.AttributeMap;
import org.jgraph.graph.DefaultGraphCell;
import org.jgraph.graph.DefaultPort;
import org.jgraph.graph.GraphConstants;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreeNode;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Point;
import java.awt.Font;
import java.awt.geom.Point2D;
import java.util.Map;
import java.util.Set;
import java.util.Enumeration;
/**
* This class defines a list of utilities to help carry out some general functionality used in UI.
* This class utilizes singleton pattern.
*
* @author OWNER: Scott Jiang
* @author LAST UPDATE $Author: wangeug $
* @version Since caAdapter v1.2
* revision $Revision: 1.11 $
* date $Date: 2008-10-09 18:17:18 $
*/
public final class UIHelper
{
// to define the vertex that represents the almost-invisible cell for a source or target tree
public static final int VERTEX_CELL_WIDTH = 3;
public static final int VERTEX_CELL_HEIGHT = 3;
public static final Color DEFAULT_VERTEX_COLOR = Color.BLACK;
public static final Color DEFAULT_VERTEX_BORDER_COLOR = Color.BLACK;
public static final Color DEFAULT_MAPPING_LINK_COLOR = Color.BLUE.darker().darker();
public static final Color MAPPING_LINK_OBJECT_COLOR = Color.green.darker();
public static final Color MAPPING_LINK_ATTRIBUTE_COLOR = Color.blue;
public static final Color MAPPING_LINK_ASSOCIATION_COLOR = Color.RED;
private static final Dimension invisibleVertexDimension = new Dimension(VERTEX_CELL_WIDTH, VERTEX_CELL_HEIGHT);
//location of port in cell
public static final int PORT_LEFT = 0;
public static final int PORT_RIGHT = 1;
public static final int PORT_NORTH = 2;
public static final int PORT_SOUTH = 3;
private static final String PORT_INPUT_STRING = "Input";
private static final String PORT_OUTPUT_STRING = "Output";
//used most time for Logging purposes.
private static final UIHelper internalInstance = new UIHelper();
public static final int getDefaultFunctionalBoxInputOrientation()
{
return PORT_LEFT;
}
public static final int getDefaultFunctionalBoxOutputOrientation()
{
return PORT_RIGHT;
}
public static final String getDefaultFunctionalBoxInputCaption()
{
return PORT_INPUT_STRING;
}
public static final String getDefaultFunctionalBoxOutputCaption()
{
return PORT_OUTPUT_STRING;
}
private static final boolean isPortOrientationMatch(DefaultGraphCell cell, boolean isInputData)
{
Object cellLabel = cell.getUserObject();
if(cellLabel!=null)
{
String cellLabelStr = (String) cellLabel;
String defaultCaption = isInputData ? getDefaultFunctionalBoxInputCaption() : getDefaultFunctionalBoxOutputCaption();
return (cellLabelStr.contains(defaultCaption));
}
else
{
return false;
}
}
/**
* Return first un-used port.
* @param cell
* @param isInputData
* @return a DefaultPort object
*/
public static final DefaultPort getFirstUnmappedPort(DefaultGraphCell cell, boolean isInputData)
{
int size = cell.getChildCount();
DefaultPort rtnPort = null;
for (int i = 0; i < size; i++)
{
DefaultGraphCell childCell = (DefaultGraphCell) cell.getChildAt(i);
if (childCell instanceof DefaultPort)
{
DefaultPort childPort = (DefaultPort) childCell;
boolean portOrientationMatch = isPortOrientationMatch(childCell, isInputData);
if (!childPort.edges().hasNext() && portOrientationMatch)
{//not used port, empty and the port caption matches, ie, input for source, output for target
rtnPort=childPort;
break;
}
}
else if(childCell instanceof DefaultGraphCell)
{//cell contains cell, then find the first of port of that then.
/**
* NOTE:
* Since DefaultPort is descendant of DefaultGraphCell, so has to structure this way.
*/
rtnPort = getFirstUnmappedPort(childCell, isInputData);
if(rtnPort!=null)
break;
}
}
return rtnPort;
}
/**
* Answer whether a given tree node is from source or target tree.
* @param treeNode
* @return whether a given tree node is from source or target tree.
*/
public static final boolean isDataFromSourceTree(TreeNode treeNode)
{
boolean isDataFromSourceTree = false;
if (treeNode instanceof DefaultSourceTreeNode)
{
isDataFromSourceTree = true;
}
else if (treeNode instanceof DefaultTargetTreeNode)
{
isDataFromSourceTree = false;
}
else
{
String msg = "The data is of type '" + treeNode.getClass().getName() + "', but I don't know where it is from!";
System.err.println(msg);
throw new UnsupportedOperationException(msg);
}
return isDataFromSourceTree;
}
public static final Dimension getDefaultSourceOrTargetVertexDimension()
{
return invisibleVertexDimension;
}
public static final Color getLinkColor(Object metaData)
{
Color linkColor=DEFAULT_MAPPING_LINK_COLOR;
if (metaData instanceof ObjectMetadata)
linkColor=MAPPING_LINK_OBJECT_COLOR;
else if(metaData instanceof TableMetadata)
linkColor=MAPPING_LINK_OBJECT_COLOR;
else if (metaData instanceof AttributeMetadata)
linkColor=MAPPING_LINK_ATTRIBUTE_COLOR;
else if(metaData instanceof ColumnMetadata)
{
linkColor=MAPPING_LINK_ATTRIBUTE_COLOR;
ColumnMetadata column=(ColumnMetadata)metaData;
String columnType=column.getType();
if (columnType!=null&&columnType.equals(ColumnMetadata.TYPE_ASSOCIATION))
linkColor=MAPPING_LINK_ASSOCIATION_COLOR;
}
else if (metaData instanceof AssociationMetadata)
linkColor=MAPPING_LINK_ASSOCIATION_COLOR;
return linkColor;
}
public static final AttributeMap getDefaultUnmovableEdgeStyle(Object metaData)
{
Color linkColor=getLinkColor(metaData);
return getDefaultUnmovableMappingEdgeStyle(linkColor);
}
private static final AttributeMap getDefaultUnmovableMappingEdgeStyle(Color lineColor)
{
AttributeMap lineStyle = new AttributeMap();
GraphConstants.setLineBegin(lineStyle, GraphConstants.ARROW_NONE);
GraphConstants.setLineColor(lineStyle,lineColor);
GraphConstants.setBeginSize(lineStyle, 10);
GraphConstants.setFont(lineStyle, GraphConstants.DEFAULTFONT.deriveFont(10));
GraphConstants.setBendable(lineStyle, false);
GraphConstants.setEditable(lineStyle, false);
GraphConstants.setMoveable(lineStyle, false);
GraphConstants.setResize(lineStyle, false);
GraphConstants.setSizeable(lineStyle, false);
return lineStyle;
}
public static final AttributeMap getDefaultInvisibleVertexAttribute(Point position, boolean representSourceVertex)
{
AttributeMap map = getDefaultInvisibleVertexBounds(position, representSourceVertex);
GraphConstants.setEditable(map, false);
GraphConstants.setBendable(map, false);
GraphConstants.setSelectable(map, false);
GraphConstants.setResize(map, false);
GraphConstants.setSizeable(map, false);
return map;
}
public static final AttributeMap getDefaultInvisibleVertexBounds(Point position, boolean representSourceVertex)
{
AttributeMap map = new AttributeMap();
Dimension cellDimension = getDefaultSourceOrTargetVertexDimension();
//hide source is better? or present the target cell is better?
double pointX = position.getX();
if (representSourceVertex)
{
pointX -= Math.ceil(cellDimension.getWidth() * 2 + 0.5);
}
else
{
pointX += Math.ceil(cellDimension.getWidth() + 2 );
}
map = (AttributeMap) UIHelper.createBounds(map, pointX, position.getY(), cellDimension, DEFAULT_VERTEX_COLOR, false);
//since it is invisible, explicitly set autosize to be false.
GraphConstants.setAutoSize(map, false);
return map;
}
/**
* copied similar function from JGraph implementation to make the vertex smaller
* Returns an attributeMap for the specified position and color.
*/
public static Map createBounds(AttributeMap map, Point2D point, Dimension dimension, Color c, boolean moveable)
{
return createBounds(map, point.getX(), point.getY(), dimension, c, moveable);
}
public static Map createBounds(AttributeMap map, double x, double y, Dimension dimension, Color c, boolean moveable)
{
// final int ALPHA = 255;
GraphConstants.setBounds(map, map.createRect(x, y, dimension.getWidth(), dimension.getHeight()));
// GraphConstants.setBorder(map, BorderFactory.createBevelBorder(BevelBorder.RAISED));
GraphConstants.setForeground(map, Color.BLACK.darker().darker());//new Color(ALPHA - c.getRed(), ALPHA - c.getGreen(), ALPHA - c.getBlue(), ALPHA));
GraphConstants.setBackground(map, c.darker());
// Add a nice looking gradient background
GraphConstants.setGradientColor(map, c.darker());
// Make sure the cell is resized on insert
// GraphConstants.setSize();
// GraphConstants.setResize(map, true);
// Add a Border Color Attribute to the Map
GraphConstants.setBorderColor(map, Color.BLACK.darker().darker());
GraphConstants.setFont(map, GraphConstants.DEFAULTFONT.deriveFont(Font.BOLD, 12));
GraphConstants.setMoveable(map, moveable);
GraphConstants.setOpaque(map, true);
GraphConstants.setResize(map, false);
GraphConstants.setAutoSize(map, true);
return map;
}
public static final Map getDefaultFunctionBoxPortAttributes(Map map, Dimension portDimension)
{
GraphConstants.setSize(map, portDimension);
GraphConstants.setSize(map, portDimension);
GraphConstants.setSelectable(map, true);
GraphConstants.setBorderColor(map, Color.RED.darker().darker());
GraphConstants.setMoveable(map, true);
return map;
}
/**
* Could return null if nothing is found.
* @param treeRoot
* @param metaObject
* @return MappableNode, null if nothing is found.
*/
public static final MappableNode constructMappableNode(Object treeRoot, MetaObject metaObject)
{
MappableNode result = null;
if(treeRoot instanceof DefaultMappableTreeNode)
{//either source or target
DefaultMappableTreeNode root = (DefaultMappableTreeNode) treeRoot;
result = (MappableNode) root.findFirstTreeNodeMatchUserObject(metaObject);
}
if(result==null)
{
Log.logError(internalInstance, "Could not find the metaObject '" + metaObject + "' in the given tree rooted by '" + treeRoot + "'.");
Log.logError(internalInstance, "treeRoot is of type '" + (treeRoot==null? "null" : treeRoot.getClass().getName()) + "'");
}
return result;
}
public static final MappableNode constructMappableNodeObjectXmlPath(Object treeRoot, String dtObjectXmlPath)
{
MappableNode result = null;
if(treeRoot instanceof DefaultMappableTreeNode)
{//either source or target
DefaultMappableTreeNode root = (DefaultMappableTreeNode) treeRoot;
result = (MappableNode)findTreeNodeWithXmlPath(root, (String)dtObjectXmlPath);
}
else if (treeRoot instanceof DefaultMutableTreeNode)
{
DefaultMutableTreeNode rootNode =(DefaultMutableTreeNode)treeRoot;
result = (MappableNode)findTreeNodeWithXmlPath(rootNode, (String)dtObjectXmlPath);
}
if(result == null)
{
Log.logError(internalInstance, (new StringBuilder()).append("Could not find the datatypeBaseObject '").append(dtObjectXmlPath).append("' in the given tree rooted by '").append(treeRoot).append("'.").toString());
Log.logError(internalInstance, (new StringBuilder()).append("treeRoot is of type '").append(treeRoot != null ? treeRoot.getClass().getName() : "null").append("'").toString());
}
return result;
}
public static DefaultMutableTreeNode findTreeNodeWithXmlPath(DefaultMutableTreeNode treeNode, String nodeXmlPath)
{
if (nodeXmlPath==null)
{
System.out.println("UIHelper.findTreeNodeWithXmlPath()..invalid node to search:"+nodeXmlPath);
return null;
}
Object userObj = treeNode.getUserObject();
if(userObj instanceof DatatypeBaseObject)
{
DatatypeBaseObject dtUserObj = (DatatypeBaseObject)userObj;
if(dtUserObj.getXmlPath().equals(nodeXmlPath))
return treeNode;
}
else if (userObj instanceof MetaObject )
{
String objXmlPath=((MetaObject)userObj).getXmlPath();
if (nodeXmlPath.equalsIgnoreCase(objXmlPath))
return treeNode;
}
for(Enumeration childEnum = treeNode.children(); childEnum.hasMoreElements();)
{
DefaultMutableTreeNode childNode = (DefaultMutableTreeNode)childEnum.nextElement();
DefaultMutableTreeNode childUserObj = findTreeNodeWithXmlPath(childNode, nodeXmlPath);
if(childUserObj != null)
return childUserObj;
}
return null;
}
/**
* Return true if port matches the type of input (isInputPort is true) or output (isInputPort is false);
* @param port
* @param isInputPort
* @return true if port matches the type of input (isInputPort is true) or output (isInputPort is false);
*/
public static final boolean isPortTypeMatch(DefaultPort port, boolean isInputPort)
{
boolean result = false;
Object obj = port.getUserObject();
if(obj instanceof ParameterMeta)
{
ParameterMeta paramMeta = (ParameterMeta) obj;
result = (isInputPort ==paramMeta.isInput());
}
return result;
}
/**
* Return true if port is already mapped;
* @param port
* @return true if port is already mapped.
*/
public static final boolean isPortMapped(DefaultPort port)
{
boolean result = false;
Set edges = port.getEdges();
if (edges != null && !edges.isEmpty())
{
result = true;
}
return result;
}
}
/**
* HISTORY : $Log: not supported by cvs2svn $
* HISTORY : Revision 1.10 2008/06/09 19:53:51 phadkes
* HISTORY : New license text replaced for all .java files.
* HISTORY :
* HISTORY : Revision 1.9 2008/01/08 18:44:28 wangeug
* HISTORY : check null
* HISTORY :
* HISTORY : Revision 1.8 2007/12/13 21:08:29 wangeug
* HISTORY : resolve code dependence in compiling
* HISTORY :
* HISTORY : Revision 1.7 2007/12/06 20:46:56 wangeug
* HISTORY : support both data model and object model
* HISTORY :
* HISTORY : Revision 1.6 2007/12/03 15:25:47 wangeug
* HISTORY : mappingParser: find target node from an Xmi tree node
* HISTORY :
* HISTORY : Revision 1.5 2007/07/03 18:37:48 wangeug
* HISTORY : construct map node with xmlpath
* HISTORY :
* HISTORY : Revision 1.4 2007/06/14 15:44:58 wangeug
* HISTORY : set link color of target table based on column type:
* HISTORY : TYPE_ATTRIBUTE or TYPE_ASSOCIATION
* HISTORY :
* HISTORY : Revision 1.3 2007/06/12 20:17:16 wangeug
* HISTORY : set colors with links
* HISTORY :
* HISTORY : Revision 1.2 2007/04/19 14:05:44 wangeug
* HISTORY : set link color based on linkType
* HISTORY :
* HISTORY : Revision 1.1 2007/04/03 16:17:14 wangeug
* HISTORY : initial loading
* HISTORY :
* HISTORY : Revision 1.26 2006/08/02 18:44:23 jiangsc
* HISTORY : License Update
* HISTORY :
* HISTORY : Revision 1.25 2006/01/03 19:16:52 jiangsc
* HISTORY : License Update
* HISTORY :
* HISTORY : Revision 1.24 2006/01/03 18:56:25 jiangsc
* HISTORY : License Update
* HISTORY :
* HISTORY : Revision 1.23 2005/12/29 23:06:17 jiangsc
* HISTORY : Changed to latest project name.
* HISTORY :
* HISTORY : Revision 1.22 2005/12/14 21:37:19 jiangsc
* HISTORY : Updated license information
* HISTORY :
* HISTORY : Revision 1.21 2005/11/29 16:23:54 jiangsc
* HISTORY : Updated License
* HISTORY :
* HISTORY : Revision 1.20 2005/11/11 19:23:59 jiangsc
* HISTORY : Support Pseudo Root in Mapping Panel.
* HISTORY :
* HISTORY : Revision 1.19 2005/11/02 20:23:56 jiangsc
* HISTORY : Enhanced to select only not-mapped port
* HISTORY :
* HISTORY : Revision 1.18 2005/10/25 22:00:42 jiangsc
* HISTORY : Re-arranged system output strings within UI packages.
* HISTORY :
* HISTORY : Revision 1.17 2005/10/21 15:11:55 jiangsc
* HISTORY : Resolve scrolling issue.
* HISTORY :
* HISTORY : Revision 1.16 2005/10/20 22:29:29 jiangsc
* HISTORY : Resolve scrolling issue.
* HISTORY :
* HISTORY : Revision 1.15 2005/10/18 17:01:03 jiangsc
* HISTORY : Changed one function signature in DragDrop component;
* HISTORY : Enhanced drag-drop status monitoring in HL7MappingPanel;
* HISTORY :
* HISTORY : Revision 1.14 2005/09/27 21:47:59 jiangsc
* HISTORY : Customized edge rendering and initially added a link highlighter class.
* HISTORY :
* HISTORY : Revision 1.13 2005/08/24 22:28:41 jiangsc
* HISTORY : Enhanced JGraph implementation;
* HISTORY : Save point of CSV and HSM navigation update;
* HISTORY :
* HISTORY : Revision 1.12 2005/08/24 21:09:29 jiangsc
* HISTORY : minor update
* HISTORY :
* HISTORY : Revision 1.11 2005/08/22 21:35:28 jiangsc
* HISTORY : Changed BaseComponentFactory and other UI classes to use File instead of string name;
* HISTORY : Added first implementation of Function Constant;
* HISTORY :
* HISTORY : Revision 1.10 2005/08/04 22:22:10 jiangsc
* HISTORY : Updated license and class header information.
* HISTORY :
*/
| |
/* Copyright (C) 2003-2016 Patrick G. Durand
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.plealog.genericapp.ui.common;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Insets;
import java.awt.RenderingHints;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.ButtonGroup;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JTextField;
import javax.swing.border.Border;
import javax.swing.text.Caret;
import com.plealog.genericapp.api.EZEnvironment;
/**
* This class provides a compact search field.
*
* @author Patrick G. Durand
*/
public class SearchField extends JPanel{
    private static final long serialVersionUID = 6977747292306810413L;
    // The text entry component; paints grayed helper text while empty.
    protected MyTextField _textField;
    // Left-hand magnifier icon; opens the options popup when options are set.
    private JLabel _headerIcon;
    // Right-hand erase button; only visible while the field holds text.
    private JButton _resetIcon;
    private Object[] _options;
    private Object _selectedOption;
    private String _helper;
    private JPanel _commandPnl;
    private JPanel _userCommandPnl;
    /** Bound-property name fired when the text content changes. */
    public final static String PROPERTY_TEXT = "text";
    /** Bound-property name fired when the selected option changes. */
    public final static String PROPERTY_OPTION = "option";

    /** Create an empty search field. */
    public SearchField(){
        this(null, null, null);
    }
    /** Create a search field with initial text. */
    public SearchField(String text){
        this(text, null, null);
    }
    /** Create a search field with initial text and selectable options. */
    public SearchField(String text, Object[] options){
        this(text, options, null);
    }
    /**
     * Create a search field.
     *
     * @param text           initial text (may be null)
     * @param options        options offered by the header-icon popup (may be null)
     * @param selectedOption initially selected option (may be null)
     */
    public SearchField(String text, Object[] options, Object selectedOption){
        super();
        setOpaque(false);
        initBorder();
        initComponents();
        if(text != null){
            setText(text);
        }
        if(options != null){
            setOptions(options);
        }
        if(selectedOption != null){
            setSelectedOption(selectedOption);
        }
    }
    /** Set the grayed helper text painted while the field is empty. */
    public void setHelperText(String helper){
        _helper = helper;
    }
    /**
     * Build the popup listing all options; choosing one calls
     * {@link #setSelectedOption(Object)}. Assumes _options is non-null.
     */
    protected JPopupMenu createOptionPopup() {
        final JPopupMenu pop = new JPopupMenu();
        ButtonGroup bg = new ButtonGroup();
        for (Object o : _options) {
            final JCheckBoxMenuItem c = new JCheckBoxMenuItem(o.toString());
            c.addActionListener(new ActionListener() {
                @Override
                public void actionPerformed(ActionEvent e) {
                    // The menu item's index in the popup matches the option index.
                    int i = pop.getComponentIndex(c);
                    setSelectedOption(_options[i]);
                }
            });
            if(o == _selectedOption){
                c.setSelected(true);
            }
            bg.add(c);
            pop.add(c);
        }
        return pop;
    }
    /** Create and lay out the child components (icon | text field | command buttons). */
    protected void initComponents() {
        _textField = new MyTextField();
        _textField.setBorder(null);
        _textField.addKeyListener(new KeyAdapter() {
            @Override
            public void keyReleased(KeyEvent e) {
                if(getText() == null || getText().length() <= 0){
                    // Field became empty: hide the erase button and notify listeners.
                    _resetIcon.setVisible(false);
                    firePropertyChange(PROPERTY_TEXT, "not empty", getText());
                    repaint();
                }else{
                    if(!_resetIcon.isVisible()){
                        _resetIcon.setVisible(true);
                        repaint();
                    }
                    firePropertyChange(PROPERTY_TEXT, "", getText());
                }
            }
        });
        _headerIcon = new JLabel(EZEnvironment.getImageIcon("search.png"));
        _resetIcon = new JButton(new AbortAction());
        _resetIcon.setIcon(EZEnvironment.getImageIcon("erase.png"));
        _resetIcon.setOpaque(true);
        _resetIcon.setMargin(new Insets(0,0,0,0));
        _resetIcon.setFocusable(false);
        _resetIcon.setBorder(null);
        _resetIcon.setBorderPainted(false);
        _resetIcon.setBackground(Color.white);
        _resetIcon.setVisible(false);
        _commandPnl = new JPanel(new BorderLayout());
        _commandPnl.add(_resetIcon, BorderLayout.EAST);
        _userCommandPnl = new JPanel();
        _userCommandPnl.setBorder(null);
        _userCommandPnl.setOpaque(true);
        _userCommandPnl.setBackground(Color.white);
        _commandPnl.add(_userCommandPnl, BorderLayout.WEST);
        setBackground(_textField.getBackground());
        setLayout(new BorderLayout());
        add(_headerIcon, BorderLayout.WEST);
        add(_textField, BorderLayout.CENTER);
        add(_commandPnl, BorderLayout.EAST);
    }
    /**
     * Add a user-supplied action button next to the erase button.
     *
     * @param icon icon displayed on the button
     * @param act  action invoked on click
     * @return the button that was created and added
     */
    public JButton addUserAction(ImageIcon icon, Action act){
        JButton btn = new JButton(act);
        btn.setIcon(icon);
        btn.setOpaque(true);
        btn.setBackground(Color.white);
        btn.setMargin(new Insets(0,0,0,0));
        btn.setFocusable(false);
        btn.setBorder(null);
        btn.setBorderPainted(false);
        btn.setVisible(true);
        _userCommandPnl.add(btn);
        return btn;
    }
    /** Install the rounded border that gives the field its "pill" look. */
    protected void initBorder() {
        setBorder(new RoundedBorder());
    }
    /**
     * Install (or clear, when null) the selectable search options shown by
     * the header-icon popup.
     */
    public void setOptions(Object[] options){
        this._options = options;
        // FIX: always detach the popup listener first so that repeated calls
        // never register it twice (removing an unregistered listener is a no-op).
        _headerIcon.removeMouseListener(optionListener);
        if(options != null){
            _headerIcon.setIcon(EZEnvironment.getImageIcon("search.png"));
            if(_selectedOption == null){
                // Default to the first option when nothing is selected yet.
                setSelectedOption(options[0]);
            }
            _headerIcon.addMouseListener(optionListener);
        }else{
            _headerIcon.setIcon(EZEnvironment.getImageIcon("search.png"));
            setSelectedOption(null);
        }
    }
    /** Action behind the erase button: clear text and restore default colors. */
    private class AbortAction extends AbstractAction{
        private static final long serialVersionUID = 4654484865986385770L;
        @Override
        public void actionPerformed(ActionEvent e){
            setText("");
            setTextForeground(Color.black);
            setTextBackground(Color.white);
        }
    }
    /** Select the whole text and give keyboard focus to the text field. */
    public void focusOnTextfield(){
        this._textField.setSelectionStart(0);
        this._textField.setSelectionEnd(this._textField.getText().length());
        this._textField.requestFocusInWindow();
    }
    /** Register a listener fired when Enter is pressed in the text field. */
    public void addActionListener(ActionListener l) {
        _textField.addActionListener(l);
    }
    /** Key events are delegated to the inner text field, not the panel itself. */
    @Override
    public void addKeyListener(KeyListener l) {
        _textField.addKeyListener(l);
    }
    public String getSelectedText() {
        return _textField.getSelectedText();
    }
    public int getSelectionEnd() {
        return _textField.getSelectionEnd();
    }
    public int getSelectionStart() {
        return _textField.getSelectionStart();
    }
    public void setCaret(Caret c) {
        _textField.setCaret(c);
    }
    public void setEditable(boolean b) {
        _textField.setEditable(b);
    }
    /** Enable/disable the field and all of its sub-components together. */
    @Override
    public void setEnabled(boolean enabled){
        super.setEnabled(enabled);
        _headerIcon.setEnabled(enabled);
        _textField.setEnabled(enabled);
        _resetIcon.setEnabled(enabled);
    }
    @Override
    public void setToolTipText(String text){
        _textField.setToolTipText(text);
    }
    /**
     * Set the field content; fires PROPERTY_TEXT and toggles the erase
     * button's visibility according to whether the new text is empty.
     */
    public void setText(String t) {
        firePropertyChange(PROPERTY_TEXT, _textField.getText(), t == null ? "" : t);
        _textField.setText(t);
        if(t == null || t.length() <= 0){
            _resetIcon.setVisible(false);
            revalidate();
        }else{
            if(!_resetIcon.isVisible()){
                _resetIcon.setVisible(true);
                revalidate();
            }
        }
    }
    public void setTextForeground(Color clr){
        _textField.setForeground(clr);
    }
    public void setTextBackground(Color clr){
        _textField.setBackground(clr);
    }
    public String getText(){
        return _textField.getText();
    }
    public Object getSelectedOption() {
        return _selectedOption;
    }
    /** Set the selected option; fires PROPERTY_OPTION with old and new values. */
    public void setSelectedOption(Object selectedOption) {
        firePropertyChange(PROPERTY_OPTION, this._selectedOption, selectedOption);
        this._selectedOption = selectedOption;
    }
    public Object[] getOptions() {
        return _options;
    }
    /**
     * Report the text field's font when available (this method can be called
     * by the superclass constructor before _textField exists, hence the guard).
     */
    @Override
    public Font getFont() {
        return _textField != null && _textField.getFont() != null ? _textField.getFont() : super.getFont();
    }
    /** Propagate font changes to the inner text field. */
    @Override
    public void setFont(Font f) {
        super.setFont(f);
        if(_textField != null){
            _textField.setFont(f);
        }
    }
    public void requestFocusForSearchfield() {
        _textField.requestFocusInWindow();
    }
    // Shows the options popup just below the header icon on mouse press.
    private MouseListener optionListener = new MouseAdapter(){
        @Override
        public void mousePressed(MouseEvent e) {
            if(_options != null){
                JPopupMenu pop = createOptionPopup();
                pop.show(_headerIcon, 0, _headerIcon.getHeight() + 1);
            }
        }
    };
    /** Rounded "pill" border painted in the component's background shades. */
    private class RoundedBorder implements Border{
        int thickness = 19;
        Insets insets = new Insets(2,5,2,5);
        @Override
        public Insets getBorderInsets(Component c) {
            return insets;
        }
        @Override
        public boolean isBorderOpaque() {
            return false;
        }
        @Override
        public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) {
            Color oldColor = g.getColor();
            // Antialias the rounded rectangle, then restore default hints/color.
            ((Graphics2D)g).setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
            g.setColor(c.getBackground());
            g.fillRoundRect(x, y, width-1, height-1, thickness, thickness);
            g.setColor(c.getBackground().darker().darker());
            g.drawRoundRect(x, y, width-1, height-1, thickness, thickness);
            ((Graphics2D)g).setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_DEFAULT);
            g.setColor(oldColor);
        }
    }
    /** Text field that paints the helper text in light gray while empty. */
    private class MyTextField extends JTextField{
        private static final long serialVersionUID = 2247364901010780336L;
        @Override
        public void paintComponent(Graphics g){
            super.paintComponent(g);
            String txt = this.getText();
            // Only draw the helper when one is set and the field is empty.
            if (_helper == null || txt == null || txt.length() != 0)
                return;
            FontMetrics fm = this.getFontMetrics(this.getFont());
            Color oldClr = g.getColor();
            Font oldFnt = g.getFont();
            g.setColor(Color.LIGHT_GRAY);
            g.setFont(this.getFont());
            // Vertically center the helper string within the field bounds.
            g.drawString(_helper, 2, (this.getBounds().height + fm.getAscent()) / 2 - 1);
            g.setColor(oldClr);
            g.setFont(oldFnt);
        }
    }
}
| |
package org.redisson;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import org.junit.Assert;
import org.junit.Test;
import org.redisson.api.LocalCachedMapOptions;
import org.redisson.api.LocalCachedMapOptions.EvictionPolicy;
import org.redisson.api.LocalCachedMapOptions.ReconnectionStrategy;
import org.redisson.api.LocalCachedMapOptions.SyncStrategy;
import org.redisson.api.MapOptions.WriteMode;
import org.redisson.api.RLocalCachedMap;
import org.redisson.api.RMap;
import org.redisson.client.codec.Codec;
import org.redisson.client.codec.DoubleCodec;
import org.redisson.client.codec.IntegerCodec;
import org.redisson.client.codec.StringCodec;
import org.redisson.codec.CompositeCodec;
public class RedissonLocalCachedMapTest extends BaseMapTest {
// Harness: two local-cached map instances over the same Redis key, configured
// with SyncStrategy.UPDATE so writes propagate new values (not invalidations)
// to the other instance's cache. execute() seeds two entries, waits for the
// sync messages, checks both caches hold them, then runs test().
public abstract class UpdateTest {
RLocalCachedMap<String, Integer> map1;
RLocalCachedMap<String, Integer> map2;
// Direct views onto each instance's local cache contents.
Map<String, Integer> cache1;
Map<String, Integer> cache2;
public void execute() throws InterruptedException {
LocalCachedMapOptions<String, Integer> options = LocalCachedMapOptions.<String, Integer>defaults().evictionPolicy(EvictionPolicy.LFU)
.syncStrategy(SyncStrategy.UPDATE)
.reconnectionStrategy(ReconnectionStrategy.CLEAR)
.cacheSize(5);
map1 = redisson.getLocalCachedMap("test2", options);
cache1 = map1.getCachedMap();
map2 = redisson.getLocalCachedMap("test2", options);
cache2 = map2.getCachedMap();
map1.put("1", 1);
map1.put("2", 2);
// Give the update messages time to reach the second instance.
Thread.sleep(50);
assertThat(cache1.size()).isEqualTo(2);
assertThat(cache2.size()).isEqualTo(2);
// Hand control to the concrete scenario.
test();
}
// Scenario body supplied by each concrete test case.
public abstract void test() throws InterruptedException;
}
// Harness: two local-cached map instances over the same Redis key using the
// default (invalidation) sync strategy. execute() seeds two entries via map1,
// warms map2's cache by reading them back, verifies both local caches hold
// both entries, then runs test().
public abstract class InvalidationTest {
RLocalCachedMap<String, Integer> map1;
RLocalCachedMap<String, Integer> map2;
// Direct views onto each instance's local cache contents.
Map<String, Integer> cache1;
Map<String, Integer> cache2;
public void execute() throws InterruptedException {
LocalCachedMapOptions<String, Integer> options = LocalCachedMapOptions.<String, Integer>defaults().evictionPolicy(EvictionPolicy.LFU).cacheSize(5);
map1 = redisson.getLocalCachedMap("test", options);
cache1 = map1.getCachedMap();
map2 = redisson.getLocalCachedMap("test", options);
cache2 = map2.getCachedMap();
map1.put("1", 1);
map1.put("2", 2);
// Reading through map2 pulls the values into its local cache.
assertThat(map2.get("1")).isEqualTo(1);
assertThat(map2.get("2")).isEqualTo(2);
assertThat(cache1.size()).isEqualTo(2);
assertThat(cache2.size()).isEqualTo(2);
// Hand control to the concrete scenario.
test();
}
// Scenario body supplied by each concrete test case.
public abstract void test() throws InterruptedException;
}
@Override
protected <K, V> RMap<K, V> getMap(String name) {
    // A local cached map with default options behaves like a plain RMap.
    LocalCachedMapOptions<K, V> opts = LocalCachedMapOptions.<K, V>defaults();
    return redisson.getLocalCachedMap(name, opts);
}
@Override
protected <K, V> RMap<K, V> getMap(String name, Codec codec) {
    // Same as getMap(String), but with an explicit codec.
    LocalCachedMapOptions<K, V> opts = LocalCachedMapOptions.<K, V>defaults();
    return redisson.getLocalCachedMap(name, codec, opts);
}
@Override
protected <K, V> RMap<K, V> getWriterTestMap(String name, Map<K, V> map) {
    // Attach a write-through MapWriter backed by the supplied plain map.
    return redisson.getLocalCachedMap(
            name,
            LocalCachedMapOptions.<K, V>defaults().writer(createMapWriter(map)));
}
@Override
protected <K, V> RMap<K, V> getWriteBehindTestMap(String name, Map<K, V> map) {
    // Map whose writer is invoked asynchronously (write-behind mode),
    // backed by the supplied plain map.
    LocalCachedMapOptions<K, V> options = LocalCachedMapOptions.<K, V>defaults()
            .writer(createMapWriter(map))
            .writeMode(WriteMode.WRITE_BEHIND);
    // FIX: previously the map name was hard-coded to "test", ignoring the
    // caller-supplied name (the sibling factory methods all honor it).
    return redisson.getLocalCachedMap(name, options);
}
@Override
protected <K, V> RMap<K, V> getLoaderTestMap(String name, Map<K, V> map) {
    // Attach a MapLoader backed by the supplied plain map.
    return redisson.getLocalCachedMap(
            name,
            LocalCachedMapOptions.<K, V>defaults().loader(createMapLoader(map)));
}
@Test
public void testBigPutAll() throws InterruptedException {
    // A bulk putAll of 10k entries must land completely in the map.
    RLocalCachedMap<Object, Object> m = redisson.getLocalCachedMap("testValuesWithNearCache2",
            LocalCachedMapOptions.defaults().evictionPolicy(EvictionPolicy.LFU).syncStrategy(SyncStrategy.INVALIDATE));
    int count = 10000;
    Map<Object, Object> batch = new HashMap<>();
    for (int i = 0; i < count; i++) {
        String entry = "" + i;
        batch.put(entry, entry);
    }
    m.putAll(batch);
    assertThat(m.size()).isEqualTo(count);
}
@Test
public void testReadValuesAndEntries() {
    RLocalCachedMap<Object, Object> m = redisson.getLocalCachedMap("testValuesWithNearCache2",
            LocalCachedMapOptions.defaults());
    m.clear();
    m.put("a", 1);
    m.put("b", 2);
    m.put("c", 3);
    // readAllValues() must surface every stored value.
    Set<Object> values = new HashSet<>(m.readAllValues());
    Assert.assertEquals(new HashSet<>(Arrays.asList(1, 2, 3)), values);
    // readAllEntrySet() must mirror the full key/value content.
    Map<String, Integer> expected = new HashMap<>();
    expected.put("a", 1);
    expected.put("b", 2);
    expected.put("c", 3);
    Assert.assertEquals(expected.entrySet(), m.readAllEntrySet());
}
@Test
public void testClearEmpty() {
    // Clearing a map that was never populated must not throw.
    RLocalCachedMap<Object, Object> emptyMap = redisson.getLocalCachedMap("udi-test",
            LocalCachedMapOptions.defaults());
    emptyMap.clear();
}
@Test
public void testDelete() {
    RLocalCachedMap<String, String> m = redisson.getLocalCachedMap("udi-test",
            LocalCachedMapOptions.defaults());
    // delete() reports false while the backing key does not exist yet...
    assertThat(m.delete()).isFalse();
    m.put("1", "2");
    // ...and true once there is actually something to remove.
    assertThat(m.delete()).isTrue();
}
@Test
public void testInvalidationOnClear() throws InterruptedException {
// clear() on one instance must empty both local caches and the backing map.
new InvalidationTest() {
@Override
public void test() throws InterruptedException {
map1.clear();
// Allow the invalidation message to propagate to the second instance.
Thread.sleep(50);
assertThat(cache1.size()).isZero();
assertThat(cache2.size()).isZero();
assertThat(map1.size()).isZero();
assertThat(map2.size()).isZero();
}
}.execute();
}
@Test
public void testInvalidationOnUpdateNonBinaryCodec() throws InterruptedException {
// Same invalidation-on-update scenario as the harness, but with a plain
// StringCodec to check invalidation also works for a non-binary codec.
LocalCachedMapOptions<String, String> options = LocalCachedMapOptions.<String, String>defaults().evictionPolicy(EvictionPolicy.LFU).cacheSize(5);
RLocalCachedMap<String, String> map1 = redisson.getLocalCachedMap("test", new StringCodec(), options);
Map<String, String> cache1 = map1.getCachedMap();
RLocalCachedMap<String, String> map2 = redisson.getLocalCachedMap("test", new StringCodec(), options);
Map<String, String> cache2 = map2.getCachedMap();
map1.put("1", "1");
map1.put("2", "2");
// Reading through map2 warms its local cache.
assertThat(map2.get("1")).isEqualTo("1");
assertThat(map2.get("2")).isEqualTo("2");
assertThat(cache1.size()).isEqualTo(2);
assertThat(cache2.size()).isEqualTo(2);
// Cross-updates: each instance should invalidate the entry it did not write.
map1.put("1", "3");
map2.put("2", "4");
// Allow the invalidation messages to propagate.
Thread.sleep(50);
assertThat(cache1.size()).isEqualTo(1);
assertThat(cache2.size()).isEqualTo(1);
}
@Test
public void testSyncOnUpdate() throws InterruptedException {
// With the INVALIDATE strategy, a cross-update evicts the foreign entry
// from each local cache (each cache drops to one entry)...
new InvalidationTest() {
@Override
public void test() throws InterruptedException {
map1.put("1", 3);
map2.put("2", 4);
// Allow the sync messages to propagate.
Thread.sleep(50);
assertThat(cache1.size()).isEqualTo(1);
assertThat(cache2.size()).isEqualTo(1);
}
}.execute();
// ...whereas with the UPDATE strategy the new value is pushed instead,
// so both local caches keep both entries.
new UpdateTest() {
@Override
public void test() throws InterruptedException {
map1.put("1", 3);
map2.put("2", 4);
// Allow the sync messages to propagate.
Thread.sleep(50);
assertThat(cache1.size()).isEqualTo(2);
assertThat(cache2.size()).isEqualTo(2);
}
}.execute();
}
@Test
public void testNoInvalidationOnUpdate() throws InterruptedException {
// With SyncStrategy.NONE, updates must NOT evict anything from the other
// instance's local cache.
LocalCachedMapOptions<String, Integer> options = LocalCachedMapOptions.<String, Integer>defaults()
.evictionPolicy(EvictionPolicy.LFU)
.cacheSize(5)
.syncStrategy(SyncStrategy.NONE);
RLocalCachedMap<String, Integer> map1 = redisson.getLocalCachedMap("test", options);
Map<String, Integer> cache1 = map1.getCachedMap();
RLocalCachedMap<String, Integer> map2 = redisson.getLocalCachedMap("test", options);
Map<String, Integer> cache2 = map2.getCachedMap();
map1.put("1", 1);
map1.put("2", 2);
// Reading through map2 warms its local cache.
assertThat(map2.get("1")).isEqualTo(1);
assertThat(map2.get("2")).isEqualTo(2);
assertThat(cache1.size()).isEqualTo(2);
assertThat(cache2.size()).isEqualTo(2);
// Cross-updates: with no sync strategy the caches stay untouched.
map1.put("1", 3);
map2.put("2", 4);
Thread.sleep(50);
assertThat(cache1.size()).isEqualTo(2);
assertThat(cache2.size()).isEqualTo(2);
}
@Test
public void testLocalCacheState() throws InterruptedException {
    LocalCachedMapOptions<String, String> opts = LocalCachedMapOptions.<String, String>defaults()
            .evictionPolicy(EvictionPolicy.LFU)
            .cacheSize(5)
            .syncStrategy(SyncStrategy.INVALIDATE);
    RLocalCachedMap<String, String> cachedMap = redisson.getLocalCachedMap("test", opts);
    cachedMap.put("1", "11");
    cachedMap.put("2", "22");
    // Both the dedicated accessors and the cached-map view must expose the
    // locally cached keys and values.
    assertThat(cachedMap.cachedKeySet()).containsExactlyInAnyOrder("1", "2");
    assertThat(cachedMap.cachedValues()).containsExactlyInAnyOrder("11", "22");
    assertThat(cachedMap.getCachedMap().keySet()).containsExactlyInAnyOrder("1", "2");
    assertThat(cachedMap.getCachedMap().values()).containsExactlyInAnyOrder("11", "22");
}
@Test
public void testLocalCacheClear() throws InterruptedException {
// clearLocalCache() must empty every instance's local cache while leaving
// the backing Redis data intact.
LocalCachedMapOptions<String, Integer> options = LocalCachedMapOptions.<String, Integer>defaults()
.evictionPolicy(EvictionPolicy.LFU)
.cacheSize(5)
.syncStrategy(SyncStrategy.INVALIDATE);
RLocalCachedMap<String, Integer> map1 = redisson.getLocalCachedMap("test", options);
Map<String, Integer> cache1 = map1.getCachedMap();
RLocalCachedMap<String, Integer> map2 = redisson.getLocalCachedMap("test", options);
Map<String, Integer> cache2 = map2.getCachedMap();
map1.put("1", 1);
map1.put("2", 2);
// Reading through map2 warms its local cache.
assertThat(map2.get("1")).isEqualTo(1);
assertThat(map2.get("2")).isEqualTo(2);
assertThat(cache1.size()).isEqualTo(2);
assertThat(cache2.size()).isEqualTo(2);
map1.clearLocalCache();
// The Redis-side key survives; only the local caches are emptied.
assertThat(redisson.getKeys().count()).isEqualTo(1);
assertThat(cache1.size()).isZero();
assertThat(cache2.size()).isZero();
}
@Test
public void testNoInvalidationOnRemove() throws InterruptedException {
// With SyncStrategy.NONE, a remove on one instance must only affect that
// instance's own local cache; the other cache keeps its (stale) entry.
LocalCachedMapOptions<String, Integer> options = LocalCachedMapOptions.<String, Integer>defaults()
.evictionPolicy(EvictionPolicy.LFU)
.cacheSize(5)
.syncStrategy(SyncStrategy.NONE);
RLocalCachedMap<String, Integer> map1 = redisson.getLocalCachedMap("test", options);
Map<String, Integer> cache1 = map1.getCachedMap();
RLocalCachedMap<String, Integer> map2 = redisson.getLocalCachedMap("test", options);
Map<String, Integer> cache2 = map2.getCachedMap();
map1.put("1", 1);
map1.put("2", 2);
assertThat(map2.get("1")).isEqualTo(1);
assertThat(map2.get("2")).isEqualTo(2);
assertThat(cache1.size()).isEqualTo(2);
assertThat(cache2.size()).isEqualTo(2);
map1.remove("1");
map2.remove("2");
// give any (unexpected) invalidation messages time to propagate
Thread.sleep(50);
assertThat(cache1.size()).isEqualTo(1);
assertThat(cache2.size()).isEqualTo(1);
}
@Test
public void testSyncOnRemove() throws InterruptedException {
// Removing entries must be propagated to other instances under both the
// INVALIDATE and UPDATE sync strategies (fixtures set up the strategy).
new InvalidationTest() {
@Override
public void test() throws InterruptedException {
map1.remove("1");
map2.remove("2");
// wait for the invalidation messages to reach both instances
Thread.sleep(50);
assertThat(cache1.size()).isEqualTo(0);
assertThat(cache2.size()).isEqualTo(0);
}
}.execute();
new UpdateTest() {
@Override
public void test() throws InterruptedException {
map1.remove("1");
map2.remove("2");
// wait for the update messages to reach both instances
Thread.sleep(50);
assertThat(cache1.size()).isEqualTo(0);
assertThat(cache2.size()).isEqualTo(0);
}
}.execute();
}
@Test
public void testLFU() {
// With cacheSize=5 and LFU eviction, inserting a 6th entry must evict one
// entry locally while all 6 remain present in Redis.
RLocalCachedMap<String, Integer> map = redisson.getLocalCachedMap("test", LocalCachedMapOptions.<String, Integer>defaults().evictionPolicy(EvictionPolicy.LFU).cacheSize(5));
Map<String, Integer> cache = map.getCachedMap();
map.put("12", 1);
map.put("14", 2);
map.put("15", 3);
map.put("16", 4);
map.put("17", 5);
map.put("18", 6);
assertThat(cache.size()).isEqualTo(5);
assertThat(map.size()).isEqualTo(6);
assertThat(map.keySet()).containsOnly("12", "14", "15", "16", "17", "18");
assertThat(map.values()).containsOnly(1, 2, 3, 4, 5, 6);
}
@Test
public void testLRU() {
// With cacheSize=5 and LRU eviction, inserting a 6th entry must evict one
// entry locally while all 6 remain present in Redis.
RLocalCachedMap<String, Integer> map = redisson.getLocalCachedMap("test", LocalCachedMapOptions.<String, Integer>defaults().evictionPolicy(EvictionPolicy.LRU).cacheSize(5));
Map<String, Integer> cache = map.getCachedMap();
map.put("12", 1);
map.put("14", 2);
map.put("15", 3);
map.put("16", 4);
map.put("17", 5);
map.put("18", 6);
assertThat(cache.size()).isEqualTo(5);
assertThat(map.size()).isEqualTo(6);
assertThat(map.keySet()).containsOnly("12", "14", "15", "16", "17", "18");
assertThat(map.values()).containsOnly(1, 2, 3, 4, 5, 6);
}
@Test
public void testSizeCache() {
// Writes made through this instance populate its local cache, so local
// cache size and Redis-side map size must agree.
RLocalCachedMap<String, Integer> map = redisson.getLocalCachedMap("test", LocalCachedMapOptions.defaults());
Map<String, Integer> cache = map.getCachedMap();
map.put("12", 1);
map.put("14", 2);
map.put("15", 3);
assertThat(cache.size()).isEqualTo(3);
assertThat(map.size()).isEqualTo(3);
}
@Test
public void testInvalidationOnPut() throws InterruptedException {
// put() on instance 1 must invalidate the pre-populated entries in
// instance 2's local cache (INVALIDATE strategy set up by the fixture).
new InvalidationTest() {
@Override
public void test() throws InterruptedException {
map1.put("1", 10);
map1.put("2", 20);
// wait for the invalidation messages to reach instance 2
Thread.sleep(50);
assertThat(cache1).hasSize(2);
assertThat(cache2).isEmpty();
}
}.execute();
}
@Test
public void testPutGetCache() {
// Values written through one instance must be readable both from its local
// cache and, via Redis, from a second freshly-created instance.
RLocalCachedMap<String, Integer> map = redisson.getLocalCachedMap("test", LocalCachedMapOptions.defaults());
Map<String, Integer> cache = map.getCachedMap();
map.put("12", 1);
map.put("14", 2);
map.put("15", 3);
assertThat(cache).containsValues(1, 2, 3);
assertThat(map.get("12")).isEqualTo(1);
assertThat(map.get("14")).isEqualTo(2);
assertThat(map.get("15")).isEqualTo(3);
RLocalCachedMap<String, Integer> map1 = redisson.getLocalCachedMap("test", LocalCachedMapOptions.defaults());
assertThat(map1.get("12")).isEqualTo(1);
assertThat(map1.get("14")).isEqualTo(2);
assertThat(map1.get("15")).isEqualTo(3);
}
@Test
public void testGetAllCache() {
// getAll() must return only the requested keys that exist (missing key "5"
// is silently skipped), both from a warm and from a cold instance.
RLocalCachedMap<String, Integer> map = redisson.getLocalCachedMap("getAll", LocalCachedMapOptions.defaults());
Map<String, Integer> cache = map.getCachedMap();
map.put("1", 100);
map.put("2", 200);
map.put("3", 300);
map.put("4", 400);
assertThat(cache.size()).isEqualTo(4);
Map<String, Integer> filtered = map.getAll(new HashSet<String>(Arrays.asList("2", "3", "5")));
Map<String, Integer> expectedMap = new HashMap<String, Integer>();
expectedMap.put("2", 200);
expectedMap.put("3", 300);
assertThat(filtered).isEqualTo(expectedMap);
RMap<String, Integer> map1 = redisson.getLocalCachedMap("getAll", LocalCachedMapOptions.defaults());
Map<String, Integer> filtered1 = map1.getAll(new HashSet<String>(Arrays.asList("2", "3", "5")));
assertThat(filtered1).isEqualTo(expectedMap);
}
@Test
public void testInvalidationOnPutAll() throws InterruptedException {
// putAll() on instance 1 must invalidate the pre-populated entries in
// instance 2's local cache (INVALIDATE strategy set up by the fixture).
new InvalidationTest() {
@Override
public void test() throws InterruptedException {
Map<String, Integer> entries = new HashMap<>();
entries.put("1", 10);
entries.put("2", 20);
map1.putAll(entries);
// wait for the invalidation messages to reach instance 2
Thread.sleep(50);
assertThat(cache1).hasSize(2);
assertThat(cache2).isEmpty();
}
}.execute();
}
@Test
public void testPutAllCache() throws InterruptedException {
// putAll() populates the writer's local cache; a subsequent putAll() from
// the second instance invalidates the overlapping entries on the first.
RLocalCachedMap<Integer, String> map = redisson.getLocalCachedMap("simple", LocalCachedMapOptions.defaults());
RLocalCachedMap<Integer, String> map1 = redisson.getLocalCachedMap("simple", LocalCachedMapOptions.defaults());
Map<Integer, String> cache = map.getCachedMap();
Map<Integer, String> cache1 = map1.getCachedMap();
map.put(1, "1");
map.put(2, "2");
map.put(3, "3");
Map<Integer, String> joinMap = new HashMap<Integer, String>();
joinMap.put(4, "4");
joinMap.put(5, "5");
joinMap.put(6, "6");
map.putAll(joinMap);
assertThat(cache.size()).isEqualTo(6);
assertThat(cache1.size()).isEqualTo(0);
assertThat(map.keySet()).containsOnly(1, 2, 3, 4, 5, 6);
map1.putAll(joinMap);
// waiting for cache cleanup listeners triggering
Thread.sleep(500);
assertThat(cache.size()).isEqualTo(3);
assertThat(cache1.size()).isEqualTo(3);
}
@Test
public void testAddAndGet() throws InterruptedException {
    // addAndGet() must atomically increment the stored value, keep the local
    // cache consistent, and work for Integer, Double and String-keyed maps.
    RLocalCachedMap<Integer, Integer> map = redisson.getLocalCachedMap("getAll", new CompositeCodec(redisson.getConfig().getCodec(), IntegerCodec.INSTANCE), LocalCachedMapOptions.defaults());
    Map<Integer, Integer> cache = map.getCachedMap();
    map.put(1, 100);
    Integer res = map.addAndGet(1, 12);
    assertThat(cache.size()).isEqualTo(1);
    assertThat(res).isEqualTo(112);
    res = map.get(1);
    assertThat(res).isEqualTo(112);
    // Fix: use autoboxing instead of the deprecated new Double(...) constructor
    // (deprecated since Java 9, marked for removal).
    RMap<Integer, Double> map2 = redisson.getLocalCachedMap("getAll2", new CompositeCodec(redisson.getConfig().getCodec(), DoubleCodec.INSTANCE), LocalCachedMapOptions.defaults());
    map2.put(1, 100.2);
    Double res2 = map2.addAndGet(1, 12.1);
    assertThat(res2).isEqualTo(112.3);
    res2 = map2.get(1);
    assertThat(res2).isEqualTo(112.3);
    RMap<String, Integer> mapStr = redisson.getLocalCachedMap("mapStr", new CompositeCodec(redisson.getConfig().getCodec(), IntegerCodec.INSTANCE), LocalCachedMapOptions.defaults());
    assertThat(mapStr.put("1", 100)).isNull();
    assertThat(mapStr.addAndGet("1", 12)).isEqualTo(112);
    assertThat(mapStr.get("1")).isEqualTo(112);
    // the first map's local cache must be untouched by the other maps' writes
    assertThat(cache.size()).isEqualTo(1);
}
@Test
public void testFastPutIfAbsent() throws Exception {
// fastPutIfAbsent() must return false (and leave the value unchanged) for
// an existing key, and true for a new key, updating the local cache.
RLocalCachedMap<SimpleKey, SimpleValue> map = redisson.getLocalCachedMap("simple", LocalCachedMapOptions.defaults());
Map<SimpleKey, SimpleValue> cache = map.getCachedMap();
SimpleKey key = new SimpleKey("1");
SimpleValue value = new SimpleValue("2");
map.put(key, value);
assertThat(map.fastPutIfAbsent(key, new SimpleValue("3"))).isFalse();
assertThat(cache.size()).isEqualTo(1);
assertThat(map.get(key)).isEqualTo(value);
SimpleKey key1 = new SimpleKey("2");
SimpleValue value1 = new SimpleValue("4");
assertThat(map.fastPutIfAbsent(key1, value1)).isTrue();
// allow the local cache to be updated asynchronously
Thread.sleep(50);
assertThat(cache.size()).isEqualTo(2);
assertThat(map.get(key1)).isEqualTo(value1);
}
@Test
public void testReadAllEntrySet() {
// readAllEntrySet() must return every entry, both from the caching
// instance and from a second instance with a cold local cache.
RLocalCachedMap<SimpleKey, SimpleValue> map = redisson.getLocalCachedMap("simple12", LocalCachedMapOptions.defaults());
Map<SimpleKey, SimpleValue> cache = map.getCachedMap();
map.put(new SimpleKey("1"), new SimpleValue("2"));
map.put(new SimpleKey("33"), new SimpleValue("44"));
map.put(new SimpleKey("5"), new SimpleValue("6"));
assertThat(map.readAllEntrySet().size()).isEqualTo(3);
assertThat(cache.size()).isEqualTo(3);
Map<SimpleKey, SimpleValue> testMap = new HashMap<>(map);
assertThat(map.readAllEntrySet()).containsOnlyElementsOf(testMap.entrySet());
RMap<SimpleKey, SimpleValue> map2 = redisson.getLocalCachedMap("simple12", LocalCachedMapOptions.defaults());
assertThat(map2.readAllEntrySet()).containsOnlyElementsOf(testMap.entrySet());
}
@Test
public void testPutIfAbsent() throws Exception {
// putIfAbsent() must return the existing value for a present key (no
// overwrite) and null for an absent key (value inserted).
RLocalCachedMap<SimpleKey, SimpleValue> map = redisson.getLocalCachedMap("simple12", LocalCachedMapOptions.defaults());
Map<SimpleKey, SimpleValue> cache = map.getCachedMap();
SimpleKey key = new SimpleKey("1");
SimpleValue value = new SimpleValue("2");
map.put(key, value);
Assert.assertEquals(value, map.putIfAbsent(key, new SimpleValue("3")));
Assert.assertEquals(value, map.get(key));
SimpleKey key1 = new SimpleKey("2");
SimpleValue value1 = new SimpleValue("4");
Assert.assertNull(map.putIfAbsent(key1, value1));
Assert.assertEquals(value1, map.get(key1));
assertThat(cache.size()).isEqualTo(2);
}
@Test
public void testInvalidationOnRemoveValue() throws InterruptedException {
// remove(key, value) must invalidate the removed entries in BOTH local
// caches (INVALIDATE strategy set up by the fixture).
new InvalidationTest() {
@Override
public void test() throws InterruptedException {
map1.remove("1", 1);
map1.remove("2", 2);
// wait for the invalidation messages to propagate
Thread.sleep(50);
assertThat(cache1).isEmpty();
assertThat(cache2).isEmpty();
}
}.execute();
}
@Test
public void testRemoveValue() {
// remove(key, value) with a matching value must delete the entry from both
// Redis and the local cache and return true.
RLocalCachedMap<SimpleKey, SimpleValue> map = redisson.getLocalCachedMap("simple12", LocalCachedMapOptions.defaults());
Map<SimpleKey, SimpleValue> cache = map.getCachedMap();
map.put(new SimpleKey("1"), new SimpleValue("2"));
boolean res = map.remove(new SimpleKey("1"), new SimpleValue("2"));
Assert.assertTrue(res);
SimpleValue val1 = map.get(new SimpleKey("1"));
Assert.assertNull(val1);
Assert.assertEquals(0, map.size());
assertThat(cache.size()).isEqualTo(0);
}
@Test
public void testRemoveValueFail() {
// remove(key, value) must be a no-op (returning false) when either the key
// or the value does not match the stored entry.
RLocalCachedMap<SimpleKey, SimpleValue> map = redisson.getLocalCachedMap("simple12", LocalCachedMapOptions.defaults());
Map<SimpleKey, SimpleValue> cache = map.getCachedMap();
map.put(new SimpleKey("1"), new SimpleValue("2"));
boolean res = map.remove(new SimpleKey("2"), new SimpleValue("1"));
Assert.assertFalse(res);
boolean res1 = map.remove(new SimpleKey("1"), new SimpleValue("3"));
Assert.assertFalse(res1);
SimpleValue val1 = map.get(new SimpleKey("1"));
Assert.assertEquals("2", val1.getValue());
assertThat(cache.size()).isEqualTo(1);
}
@Test
public void testInvalidationOnReplaceOldValue() throws InterruptedException {
// replace(key, oldValue, newValue) on instance 1 must invalidate the
// corresponding entries in instance 2's local cache.
new InvalidationTest() {
@Override
public void test() throws InterruptedException {
map1.replace("1", 1, 10);
map1.replace("2", 2, 20);
// wait for the invalidation messages to reach instance 2
Thread.sleep(50);
assertThat(cache1).hasSize(2);
assertThat(cache2).isEmpty();
}
}.execute();
}
@Test
public void testReplaceOldValueFail() {
// replace(key, oldValue, newValue) must be a no-op (returning false) when
// the expected old value does not match the stored one.
RLocalCachedMap<SimpleKey, SimpleValue> map = redisson.getLocalCachedMap("simple", LocalCachedMapOptions.defaults());
Map<SimpleKey, SimpleValue> cache = map.getCachedMap();
map.put(new SimpleKey("1"), new SimpleValue("2"));
boolean res = map.replace(new SimpleKey("1"), new SimpleValue("43"), new SimpleValue("31"));
Assert.assertFalse(res);
SimpleValue val1 = map.get(new SimpleKey("1"));
Assert.assertEquals("2", val1.getValue());
assertThat(cache.size()).isEqualTo(1);
}
@Test
public void testReplaceOldValueSuccess() {
// replace(key, oldValue, newValue) must succeed exactly once for a
// matching old value; a second identical replace must fail.
RLocalCachedMap<SimpleKey, SimpleValue> map = redisson.getLocalCachedMap("simple", LocalCachedMapOptions.defaults());
Map<SimpleKey, SimpleValue> cache = map.getCachedMap();
map.put(new SimpleKey("1"), new SimpleValue("2"));
boolean res = map.replace(new SimpleKey("1"), new SimpleValue("2"), new SimpleValue("3"));
Assert.assertTrue(res);
boolean res1 = map.replace(new SimpleKey("1"), new SimpleValue("2"), new SimpleValue("3"));
Assert.assertFalse(res1);
SimpleValue val1 = map.get(new SimpleKey("1"));
Assert.assertEquals("3", val1.getValue());
assertThat(cache.size()).isEqualTo(1);
}
@Test
public void testInvalidationOnReplaceValue() throws InterruptedException {
// replace(key, value) on instance 1 must invalidate the corresponding
// entries in instance 2's local cache.
new InvalidationTest() {
@Override
public void test() throws InterruptedException {
map1.replace("1", 10);
map1.replace("2", 20);
// wait for the invalidation messages to reach instance 2
Thread.sleep(50);
assertThat(cache1).hasSize(2);
assertThat(cache2).isEmpty();
}
}.execute();
}
@Test
public void testReplaceValue() {
// replace(key, value) must return the previous value, store the new one,
// and keep the local cache at a single (updated) entry.
RLocalCachedMap<SimpleKey, SimpleValue> map = redisson.getLocalCachedMap("simple", LocalCachedMapOptions.defaults());
Map<SimpleKey, SimpleValue> cache = map.getCachedMap();
map.put(new SimpleKey("1"), new SimpleValue("2"));
SimpleValue res = map.replace(new SimpleKey("1"), new SimpleValue("3"));
Assert.assertEquals("2", res.getValue());
assertThat(cache.size()).isEqualTo(1);
SimpleValue val1 = map.get(new SimpleKey("1"));
Assert.assertEquals("3", val1.getValue());
}
@Test
public void testReadAllValues() {
// readAllValues() must return every stored value, both from the caching
// instance and from a second instance with a cold local cache.
RLocalCachedMap<SimpleKey, SimpleValue> map = redisson.getLocalCachedMap("simple", LocalCachedMapOptions.defaults());
Map<SimpleKey, SimpleValue> cache = map.getCachedMap();
map.put(new SimpleKey("1"), new SimpleValue("2"));
map.put(new SimpleKey("33"), new SimpleValue("44"));
map.put(new SimpleKey("5"), new SimpleValue("6"));
assertThat(cache.size()).isEqualTo(3);
assertThat(map.readAllValues().size()).isEqualTo(3);
Map<SimpleKey, SimpleValue> testMap = new HashMap<>(map);
assertThat(map.readAllValues()).containsOnlyElementsOf(testMap.values());
RMap<SimpleKey, SimpleValue> map2 = redisson.getLocalCachedMap("simple", LocalCachedMapOptions.defaults());
assertThat(map2.readAllValues()).containsOnlyElementsOf(testMap.values());
}
@Test
public void testInvalidationOnFastRemove() throws InterruptedException {
// fastRemove() on instance 1 must invalidate the removed entries in BOTH
// local caches (INVALIDATE strategy set up by the fixture).
new InvalidationTest() {
@Override
public void test() throws InterruptedException {
map1.fastRemove("1", "2", "3");
// wait for the invalidation messages to propagate
Thread.sleep(50);
assertThat(cache1).isEmpty();
assertThat(cache2).isEmpty();
}
}.execute();
}
@Test
public void testRemove() {
// remove(key) must return the previous value, empty the local cache, and
// return null for an absent key.
RLocalCachedMap<String, Integer> map = redisson.getLocalCachedMap("test", LocalCachedMapOptions.defaults());
Map<String, Integer> cache = map.getCachedMap();
map.put("12", 1);
assertThat(cache.size()).isEqualTo(1);
assertThat(map.remove("12")).isEqualTo(1);
assertThat(cache.size()).isEqualTo(0);
assertThat(map.remove("14")).isNull();
}
@Test
public void testFastRemove() throws InterruptedException, ExecutionException {
// fastRemove() must return the number of keys actually deleted (already
// removed keys count as 0).
RLocalCachedMap<Integer, Integer> map = redisson.getLocalCachedMap("test", LocalCachedMapOptions.defaults());
map.put(1, 3);
map.put(2, 4);
map.put(7, 8);
assertThat(map.fastRemove(1, 2)).isEqualTo(2);
assertThat(map.fastRemove(2)).isEqualTo(0);
assertThat(map.size()).isEqualTo(1);
}
@Test
public void testFastRemoveEmpty() throws InterruptedException, ExecutionException {
    // fastRemove() on an empty map must report zero deleted keys.
    // Fix: use a parameterized LocalCachedMapOptions instead of the raw type,
    // so the options match the map's <String, Integer> type arguments.
    LocalCachedMapOptions<String, Integer> options = LocalCachedMapOptions.<String, Integer>defaults()
            .evictionPolicy(EvictionPolicy.NONE)
            .cacheSize(3)
            .syncStrategy(SyncStrategy.NONE);
    RLocalCachedMap<String, Integer> map = redisson.getLocalCachedMap("test", options);
    assertThat(map.fastRemove("test")).isZero();
}
@Test
public void testInvalidationOnFastPut() throws InterruptedException {
// fastPut() on instance 1 must invalidate the pre-populated entries in
// instance 2's local cache (INVALIDATE strategy set up by the fixture).
new InvalidationTest() {
@Override
public void test() throws InterruptedException {
map1.fastPut("1", 10);
map1.fastPut("2", 20);
// wait for the invalidation messages to reach instance 2
Thread.sleep(50);
assertThat(cache1).hasSize(2);
assertThat(cache2).isEmpty();
}
}.execute();
}
@Test
public void testFastPut() {
// fastPut() must return true when inserting a new key and false when
// overwriting an existing one.
RLocalCachedMap<String, Integer> map = redisson.getLocalCachedMap("test", LocalCachedMapOptions.defaults());
Assert.assertTrue(map.fastPut("1", 2));
assertThat(map.get("1")).isEqualTo(2);
Assert.assertFalse(map.fastPut("1", 3));
assertThat(map.get("1")).isEqualTo(3);
Assert.assertEquals(1, map.size());
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.metamodel;
import java.io.File;
import java.io.InputStream;
import java.net.URL;
import java.sql.Connection;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import javax.sql.DataSource;
import org.apache.metamodel.cassandra.CassandraDataContext;
import org.apache.metamodel.couchdb.CouchDbDataContext;
import org.apache.metamodel.csv.CsvConfiguration;
import org.apache.metamodel.csv.CsvDataContext;
import org.apache.metamodel.elasticsearch.nativeclient.ElasticSearchDataContext;
import org.apache.metamodel.elasticsearch.rest.ElasticSearchRestClient;
import org.apache.metamodel.elasticsearch.rest.ElasticSearchRestDataContext;
import org.apache.metamodel.excel.ExcelConfiguration;
import org.apache.metamodel.excel.ExcelDataContext;
import org.apache.metamodel.fixedwidth.FixedWidthConfiguration;
import org.apache.metamodel.fixedwidth.FixedWidthDataContext;
import org.apache.metamodel.hbase.HBaseConfiguration;
import org.apache.metamodel.hbase.HBaseDataContext;
import org.apache.metamodel.jdbc.JdbcDataContext;
import org.apache.metamodel.json.JsonDataContext;
import org.apache.metamodel.mongodb.mongo3.MongoDbDataContext;
import org.apache.metamodel.openoffice.OpenOfficeDataContext;
import org.apache.metamodel.pojo.PojoDataContext;
import org.apache.metamodel.pojo.TableDataProvider;
import org.apache.metamodel.salesforce.SalesforceDataContext;
import org.apache.metamodel.schema.TableType;
import org.apache.metamodel.sugarcrm.SugarCrmDataContext;
import org.apache.metamodel.util.FileHelper;
import org.apache.metamodel.util.Resource;
import org.apache.metamodel.util.SimpleTableDef;
import org.apache.metamodel.xml.XmlDomDataContext;
import org.ektorp.http.StdHttpClient.Builder;
import org.elasticsearch.client.Client;
import org.xml.sax.InputSource;
import com.datastax.driver.core.Cluster;
import com.google.common.base.Strings;
import com.mongodb.MongoClient;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;
import com.mongodb.client.MongoDatabase;
/**
* A factory for DataContext objects. This class substantially eases the task
* of creating and initializing DataContext objects and/or their strategies for
* reading datastores.
*
* @see DataContext
*/
public class DataContextFactory {
public static final char DEFAULT_CSV_SEPARATOR_CHAR = CsvConfiguration.DEFAULT_SEPARATOR_CHAR;
public static final char DEFAULT_CSV_QUOTE_CHAR = CsvConfiguration.DEFAULT_QUOTE_CHAR;
private DataContextFactory() {
// Utility class exposing only static factory methods - prevent instantiation.
}
/**
 * Creates a composite DataContext wrapping a number of delegate
 * DataContexts, which enables cross-DataContext querying and unified schema
 * exploration.
 *
 * @param delegates
 *            an array/var-args of delegate DataContexts
 * @return a DataContext that matches the request
 */
public static DataContext createCompositeDataContext(DataContext... delegates) {
    final CompositeDataContext composite = new CompositeDataContext(delegates);
    return composite;
}
/**
 * Creates a composite DataContext wrapping a collection of delegate
 * DataContexts, which enables cross-DataContext querying and unified schema
 * exploration.
 *
 * @param delegates
 *            a collection of delegate DataContexts
 * @return a DataContext that matches the request
 */
public static DataContext createCompositeDataContext(Collection<DataContext> delegates) {
    final CompositeDataContext composite = new CompositeDataContext(delegates);
    return composite;
}
/**
 * Creates a DataContext that connects to a Salesforce.com instance using
 * the given credentials.
 *
 * @param username
 *            the Salesforce username
 * @param password
 *            the Salesforce password
 * @param securityToken
 *            the Salesforce security token
 * @return a DataContext that matches the request
 */
public static DataContext createSalesforceDataContext(String username, String password, String securityToken) {
    final SalesforceDataContext dataContext = new SalesforceDataContext(username, password, securityToken);
    return dataContext;
}
/**
 * Creates a DataContext that connects to a SugarCRM system.
 *
 * @param baseUrl
 *            the base URL of the system, e.g. http://localhost/sugarcrm
 * @param username
 *            the SugarCRM username
 * @param password
 *            the SugarCRM password
 * @param applicationName
 *            the name of the application you are connecting with
 * @return a DataContext that matches the request
 */
public static DataContext createSugarCrmDataContext(String baseUrl, String username, String password,
        String applicationName) {
    final SugarCrmDataContext dataContext = new SugarCrmDataContext(baseUrl, username, password, applicationName);
    return dataContext;
}
/**
 * Creates a DataContext for a CSV file, applying the default separator and
 * quote characters.
 *
 * @param file
 *            a CSV file
 * @return an updateable DataContext that matches the request
 */
public static UpdateableDataContext createCsvDataContext(File file) {
    // delegate to the overload taking explicit separator/quote chars
    return createCsvDataContext(file, DEFAULT_CSV_SEPARATOR_CHAR, DEFAULT_CSV_QUOTE_CHAR);
}
/**
 * Creates a DataContext backed by a JSON file.
 *
 * @param file
 *            a JSON file
 * @return a DataContext that matches the request
 */
public static DataContext createJsonDataContext(File file) {
    final JsonDataContext dataContext = new JsonDataContext(file);
    return dataContext;
}
/**
 * Creates a DataContext for a CSV file using explicit separator and quote
 * characters and the default file encoding.
 *
 * @param file
 *            a CSV file
 * @param separatorChar
 *            the char to use for separating values
 * @param quoteChar
 *            the char used for quoting values (typically if they include
 *            the separator char)
 * @return an updateable DataContext that matches the request
 */
public static UpdateableDataContext createCsvDataContext(File file, char separatorChar, char quoteChar) {
    // delegate to the overload taking an explicit encoding
    return createCsvDataContext(file, separatorChar, quoteChar, FileHelper.DEFAULT_ENCODING);
}
/**
 * Creates a DataContext for a CSV file with explicit separator and quote
 * characters and a specific character encoding.
 *
 * @param file
 *            a CSV file
 * @param separatorChar
 *            the char to use for separating values
 * @param quoteChar
 *            the char used for quoting values (typically if they include
 *            the separator char)
 * @param encoding
 *            the character encoding of the file
 * @return an updateable DataContext that matches the request
 */
public static UpdateableDataContext createCsvDataContext(File file, char separatorChar, char quoteChar,
        String encoding) {
    // default column-name line and escape char; caller controls the rest
    return createCsvDataContext(file, new CsvConfiguration(CsvConfiguration.DEFAULT_COLUMN_NAME_LINE, encoding,
            separatorChar, quoteChar, CsvConfiguration.DEFAULT_ESCAPE_CHAR));
}
/**
 * Creates a DataContext for a CSV file driven by a supplied CSV
 * configuration.
 *
 * @param file
 *            a CSV file
 * @param configuration
 *            the CSV configuration to use
 * @return an updateable DataContext that matches the request
 */
public static UpdateableDataContext createCsvDataContext(File file, CsvConfiguration configuration) {
    return new CsvDataContext(file, configuration);
}
/**
 * Creates a DataContext reading CSV-content from an input stream, using the
 * default file encoding.
 *
 * @param inputStream
 *            the input stream to read from
 * @param separatorChar
 *            the char to use for separating values
 * @param quoteChar
 *            the char used for quoting values (typically if they include
 *            the separator char)
 * @return a DataContext that matches the request
 */
public static DataContext createCsvDataContext(InputStream inputStream, char separatorChar, char quoteChar) {
    // delegate to the overload taking an explicit encoding
    return createCsvDataContext(inputStream, separatorChar, quoteChar, FileHelper.DEFAULT_ENCODING);
}
/**
 * Creates a DataContext reading CSV-content from an input stream with a
 * specific character encoding.
 *
 * @param inputStream
 *            the input stream to read from
 * @param separatorChar
 *            the char to use for separating values
 * @param quoteChar
 *            the char used for quoting values (typically if they include
 *            the separator char)
 * @param encoding
 *            the character encoding of the stream content
 * @return a DataContext that matches the request
 */
public static DataContext createCsvDataContext(InputStream inputStream, char separatorChar, char quoteChar,
        String encoding) {
    // default column-name line and escape char; caller controls the rest
    return createCsvDataContext(inputStream, new CsvConfiguration(CsvConfiguration.DEFAULT_COLUMN_NAME_LINE,
            encoding, separatorChar, quoteChar, CsvConfiguration.DEFAULT_ESCAPE_CHAR));
}
/**
 * Creates a DataContext reading CSV-content from an input stream, driven by
 * a supplied CSV configuration.
 *
 * @param inputStream
 *            the input stream to read from
 * @param configuration
 *            the CSV configuration to use
 * @return a DataContext that matches the request
 */
public static DataContext createCsvDataContext(InputStream inputStream, CsvConfiguration configuration) {
    return new CsvDataContext(inputStream, configuration);
}
/**
 * Creates a DataContext based on a fixed width file, using the default
 * column-name line.
 *
 * @param file
 *            the file to read from.
 * @param fileEncoding
 *            the character encoding of the file.
 * @param fixedValueWidth
 *            the (fixed) width of values in the file.
 * @return a DataContext that matches the request
 */
public static DataContext createFixedWidthDataContext(File file, String fileEncoding, int fixedValueWidth) {
    final FixedWidthConfiguration configuration = new FixedWidthConfiguration(
            FixedWidthConfiguration.DEFAULT_COLUMN_NAME_LINE, fileEncoding, fixedValueWidth);
    return createFixedWidthDataContext(file, configuration);
}
/**
 * Creates a DataContext based on a fixed width file, driven by a supplied
 * fixed-width configuration.
 *
 * @param file
 *            the file to read from.
 * @param configuration
 *            the fixed width configuration to use
 * @return a DataContext that matches the request
 */
public static DataContext createFixedWidthDataContext(File file, FixedWidthConfiguration configuration) {
    return new FixedWidthDataContext(file, configuration);
}
/**
 * Creates a DataContext based on a fixed width resource, driven by a
 * supplied fixed-width configuration.
 *
 * @param resource
 *            the resource to read from.
 * @param configuration
 *            the fixed width configuration to use
 * @return a DataContext that matches the request
 */
public static DataContext createFixedWidthDataContext(Resource resource, FixedWidthConfiguration configuration) {
    return new FixedWidthDataContext(resource, configuration);
}
/**
 * Creates a DataContext based on a fixed width file with an explicit header
 * line number.
 *
 * @param file
 *            the file to read from.
 * @param fileEncoding
 *            the character encoding of the file.
 * @param fixedValueWidth
 *            the (fixed) width of values in the file.
 * @param headerLineNumber
 *            the line number of the column headers.
 * @return a DataContext that matches the request
 */
public static DataContext createFixedWidthDataContext(File file, String fileEncoding, int fixedValueWidth,
        int headerLineNumber) {
    // Bug fix: the headerLineNumber argument was previously ignored -
    // DEFAULT_COLUMN_NAME_LINE was always passed to the configuration.
    return createFixedWidthDataContext(file, new FixedWidthConfiguration(
            headerLineNumber, fileEncoding, fixedValueWidth));
}
/**
 * Creates a DataContext based on an Excel spreadsheet file, driven by a
 * supplied Excel configuration.
 *
 * @param file
 *            an excel spreadsheet file
 * @param configuration
 *            the configuration with metadata for reading the spreadsheet
 * @return an updateable DataContext that matches the request
 */
public static UpdateableDataContext createExcelDataContext(File file, ExcelConfiguration configuration) {
    final ExcelDataContext dataContext = new ExcelDataContext(file, configuration);
    return dataContext;
}
/**
 * Creates a DataContext based on an Excel spreadsheet file, using a default
 * Excel configuration.
 *
 * @param file
 *            an Excel spreadsheet file
 * @return an updateable DataContext that matches the request
 */
public static UpdateableDataContext createExcelDataContext(File file) {
    // delegate with a fresh default configuration
    return createExcelDataContext(file, new ExcelConfiguration());
}
/**
 * Creates a DataContext based on XML-content from an input source.
 *
 * Tables are created by examining the data in the XML file, NOT by reading
 * XML Schemas (xsd/dtd's). This enables compliance with ALL xml formats but
 * also raises a risk that two XML files with the same format won't
 * necessarily yield the same table model if some optional attributes or
 * tags are omitted in one of the files.
 *
 * @param inputSource
 *            an input source feeding XML content
 * @param schemaName
 *            the name to be used for the main schema
 * @param autoFlattenTables
 *            a boolean indicating if MetaModel should flatten very simple
 *            table structures (where tables only contain a single
 *            data-carrying column) for greater usability of the generated
 *            table-based model
 * @return a DataContext that matches the request
 */
public static DataContext createXmlDataContext(InputSource inputSource, String schemaName,
        boolean autoFlattenTables) {
    return new XmlDomDataContext(inputSource, schemaName, autoFlattenTables);
}
/**
 * Creates a DataContext based on XML-content from a File.
 *
 * Tables are created by examining the data in the XML file, NOT by reading
 * XML Schemas (xsd/dtd's). This enables compliance with ALL xml formats but
 * also raises a risk that two XML files with the same format won't
 * necessarily yield the same table model if some optional attributes or
 * tags are omitted in one of the files.
 *
 * @param file
 *            the File to use for feeding XML content
 * @param autoFlattenTables
 *            a boolean indicating if MetaModel should flatten very simple
 *            table structures (where tables only contain a single
 *            data-carrying column) for greater usability of the generated
 *            table-based model
 * @return a DataContext that matches the request
 */
public static DataContext createXmlDataContext(File file, boolean autoFlattenTables) {
    return new XmlDomDataContext(file, autoFlattenTables);
}
/**
 * Creates a DataContext based on XML-content from a URL.
 *
 * Tables are created by examining the data in the XML file, NOT by reading
 * XML Schemas (xsd/dtd's). This enables compliance with ALL xml formats but
 * also raises a risk that two XML files with the same format won't
 * necessarily yield the same table model if some optional attributes or
 * tags are omitted in one of the files.
 *
 * @param url
 *            the URL to use for feeding XML content
 * @param autoFlattenTables
 *            a boolean indicating if MetaModel should flatten very simple
 *            table structures (where tables only contain a single
 *            data-carrying column) for greater usability of the generated
 *            table-based model
 * @return a DataContext that matches the request
 */
public static DataContext createXmlDataContext(URL url, boolean autoFlattenTables) {
    return new XmlDomDataContext(url, autoFlattenTables);
}
/**
 * Creates a DataContext based on an OpenOffice.org database file.
 *
 * @param file
 *            an OpenOffice.org database file
 * @return a DataContext that matches the request
 */
public static DataContext createOpenOfficeDataContext(File file) {
    final OpenOfficeDataContext dataContext = new OpenOfficeDataContext(file);
    return dataContext;
}
/**
 * Creates a DataContext based on a JDBC connection.
 *
 * @param connection
 *            a JDBC connection
 * @return an updateable DataContext that matches the request
 */
public static UpdateableDataContext createJdbcDataContext(Connection connection) {
    final JdbcDataContext dataContext = new JdbcDataContext(connection);
    return dataContext;
}
/**
 * Creates a DataContext based on a JDBC datasource.
 *
 * @param ds
 *            a JDBC datasource
 * @return an updateable DataContext that matches the request
 */
public static UpdateableDataContext createJdbcDataContext(DataSource ds) {
    final JdbcDataContext dataContext = new JdbcDataContext(ds);
    return dataContext;
}
/**
* Creates a DataContext based on a JDBC connection
*
* @param connection
* a JDBC connection
* @param catalogName
* a catalog name to use
* @return a DataContext object that matches the request
*/
public static UpdateableDataContext createJdbcDataContext(Connection connection, String catalogName) {
return new JdbcDataContext(connection, TableType.DEFAULT_TABLE_TYPES, catalogName);
}
/**
* Creates a DataContext based on a JDBC connection
*
* @param connection
* a JDBC connection
* @param tableTypes
* the types of tables to include in the generated schemas
* @return a DataContext object that matches the request
*/
public static UpdateableDataContext createJdbcDataContext(Connection connection, TableType... tableTypes) {
return new JdbcDataContext(connection, tableTypes, null);
}
/**
* Creates a DataContext based on a JDBC connection
*
* @param connection
* a JDBC connection
* @param catalogName
* a catalog name to use
* @param tableTypes
* the types of tables to include in the generated schemas
* @return a DataContext object that matches the request
*/
public static UpdateableDataContext createJdbcDataContext(Connection connection, String catalogName,
TableType[] tableTypes) {
return new JdbcDataContext(connection, tableTypes, catalogName);
}
/**
* Creates a DataContext based on a JDBC datasource
*
* @param ds
* a JDBC datasource
* @param tableTypes
* the types of tables to include in the generated schemas
* @return a DataContext object that matches the request
*/
public static UpdateableDataContext createJdbcDataContext(DataSource ds, TableType... tableTypes) {
return new JdbcDataContext(ds, tableTypes, null);
}
/**
* Creates a DataContext based on a JDBC datasource
*
* @param ds
* a JDBC datasource
* @param catalogName
* a catalog name to use
* @param tableTypes
* the types of tables to include in the generated schemas
* @return a DataContext object that matches the request
*/
public static UpdateableDataContext createJdbcDataContext(DataSource ds, String catalogName,
TableType[] tableTypes) {
return new JdbcDataContext(ds, tableTypes, catalogName);
}
/**
* Creates a DataContext based on a JDBC datasource
*
* @param ds
* a JDBC datasource
* @param catalogName
* a catalog name to use
* @return a DataContext object that matches the request
*/
public static UpdateableDataContext createJdbcDataContext(DataSource ds, String catalogName) {
return new JdbcDataContext(ds, TableType.DEFAULT_TABLE_TYPES, catalogName);
}
/**
* Creates a new MongoDB datacontext.
*
* @param hostname
* The hostname of the MongoDB instance
* @param port
* the port of the MongoDB instance, or null if the default port
* should be used.
* @param databaseName
* the name of the database
* @param username
* the username, or null if unauthenticated access should be used
* @param password
* the password, or null if unauthenticated access should be used
* @param tableDefs
* an array of table definitions, or null if table definitions
* should be autodetected.
* @return a DataContext object that matches the request
*/
@SuppressWarnings("resource")
public static UpdateableDataContext createMongoDbDataContext(String hostname, Integer port, String databaseName,
String username, char[] password, SimpleTableDef[] tableDefs) {
try {
final ServerAddress serverAddress;
if (port == null) {
serverAddress = new ServerAddress(hostname);
} else {
serverAddress = new ServerAddress(hostname, port);
}
final MongoClient mongoClient;
final MongoDatabase mongoDb;
if (Strings.isNullOrEmpty(username)) {
mongoClient = new MongoClient(serverAddress);
} else {
final MongoCredential credential = MongoCredential.createCredential(username, databaseName, password);
mongoClient = new MongoClient(serverAddress, Arrays.asList(credential));
}
mongoDb = mongoClient.getDatabase(databaseName);
if (tableDefs == null || tableDefs.length == 0) {
return new MongoDbDataContext(mongoDb);
}
return new MongoDbDataContext(mongoDb, tableDefs);
} catch (Exception e) {
if (e instanceof RuntimeException) {
throw (RuntimeException) e;
}
throw new IllegalStateException(e);
}
}
/**
* Creates a new MongoDB datacontext.
*
* @param hostname
* The hostname of the MongoDB instance
* @param port
* the port of the MongoDB instance, or null if the default port
* should be used.
* @param databaseName
* the name of the database
* @param username
* the username, or null if unauthenticated access should be used
* @param password
* the password, or null if unauthenticated access should be used
* @return a DataContext object that matches the request
*/
public static UpdateableDataContext createMongoDbDataContext(String hostname, Integer port, String databaseName,
String username, char[] password) {
return createMongoDbDataContext(hostname, port, databaseName, username, password, null);
}
/**
* Creates a new CouchDB datacontext.
*
* @param hostname
* The hostname of the CouchDB instance
* @param port
* the port of the CouchDB instance, or null if the default port
* should be used.
* @param username
* the username, or null if unauthenticated access should be used
* @param password
* the password, or null if unauthenticated access should be used
* @return a DataContext object that matches the request
*/
public static UpdateableDataContext createCouchDbDataContext(String hostname, Integer port, String username,
String password) {
return createCouchDbDataContext(hostname, port, username, password, null);
}
/**
* Creates a new CouchDB datacontext.
*
* @param hostname
* The hostname of the CouchDB instance
* @param port
* the port of the CouchDB instance, or null if the default port
* should be used.
* @param username
* the username, or null if unauthenticated access should be used
* @param password
* the password, or null if unauthenticated access should be used
* @param tableDefs
* an array of table definitions, or null if table definitions
* should be autodetected.
* @return a DataContext object that matches the request
*/
public static UpdateableDataContext createCouchDbDataContext(String hostname, Integer port, String username,
String password, SimpleTableDef[] tableDefs) {
final Builder httpClientBuilder = new Builder();
httpClientBuilder.host(hostname);
if (port != null) {
httpClientBuilder.port(port);
}
if (username != null) {
httpClientBuilder.username(username);
}
if (password != null) {
httpClientBuilder.password(password);
}
// increased timeouts (20 sec) - metamodel typically does quite some
// batching so it might take a bit of time to provide a connection.
httpClientBuilder.connectionTimeout(20000);
httpClientBuilder.socketTimeout(20000);
if (tableDefs == null || tableDefs.length == 0) {
return new CouchDbDataContext(httpClientBuilder);
}
return new CouchDbDataContext(httpClientBuilder, tableDefs);
}
/**
* Creates a new JSON-based ElasticSearch datacontext.
* @param client
* The Jest client
* @param indexName
* The ElasticSearch index name
* @return a DataContext object that matches the request
*/
public static UpdateableDataContext createElasticSearchDataContext(final ElasticSearchRestClient client,
final String indexName) {
return new ElasticSearchRestDataContext(client, indexName);
}
/**
* Creates a new ElasticSearch datacontext.
*
* @param client
* The ElasticSearch client
* @param indexName
* The ElasticSearch index name
* @return a DataContext object that matches the request
*/
public static UpdateableDataContext createElasticSearchDataContext(Client client, String indexName) {
return new ElasticSearchDataContext(client, indexName);
}
/**
* Creates a new Cassandra datacontext.
*
* @param cluster
* The Cassandra client
* @param keySpaceName
* The Cassandra key space name
* @return a DataContext object that matches the request
*/
public static DataContext createCassandraDataContext(Cluster cluster, String keySpaceName) {
return new CassandraDataContext(cluster, keySpaceName);
}
/**
* Creates a new HBase datacontext.
*
* @param configuration
* {@code HBaseConfiguration} object containing detailed HBase
* configuration properties.
*
* @return a DataContext object that matches the request
*/
public static DataContext createHBaseDataContext(HBaseConfiguration configuration){
return new HBaseDataContext(configuration);
}
/**
* Creates a new HBase datacontext.
*
* @param configuration
* {@code HBaseConfiguration} object containing detailed HBase
* configuration properties.
*
* @param connection
* A cluster connection encapsulating lower level individual
* connections to actual servers and a connection to zookeeper.
*
* @return a DataContext object that matches the request
*/
public static DataContext createHBaseDataContext(HBaseConfiguration configuration,org.apache.hadoop.hbase.client.Connection connection) {
return new HBaseDataContext(configuration, connection);
}
/**
* Creates a new POJO data context that is empty but can be populated at
* will.
*
* @return a DataContext object that matches the request
*
*/
public static DataContext createPojoDataContext() {
return new PojoDataContext();
}
/**
* Creates a new POJO data context based on the provided
* {@link TableDataProvider}s.
*
* @param tables
* list of tables
*
* @return DataContext object that matches the request
*/
public static DataContext createPojoDataContext(List<TableDataProvider<?>> tables) {
return new PojoDataContext(tables);
}
/**
* Creates a new POJO data context based on the provided
* {@link TableDataProvider}s.
*
* @param schemaName
* the name of the created schema
*
* @param tables
* table information
*
* @return DataContext object that matches the request
*
*/
public static DataContext createPojoDataContext(String schemaName,TableDataProvider<?>[] tables) {
return new PojoDataContext(schemaName, tables);
}
/**
* Creates a new POJO data context based on the provided
* {@link TableDataProvider}s.
*
* @param schemaName
* the name of the created schema
*
* @param tables
* list of tables
*
* @return DataContext object that matches the request
*/
public static DataContext createPojoDataContext(String schemaName,List<TableDataProvider<?>> tables) {
return new PojoDataContext(schemaName, tables);
}
}
| |
/*
* Copyright 2014 Groupon, Inc
* Copyright 2014 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.jaxrs.resources;
import java.util.List;
import java.util.UUID;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.killbill.billing.ObjectType;
import org.killbill.billing.account.api.Account;
import org.killbill.billing.account.api.AccountApiException;
import org.killbill.billing.account.api.AccountUserApi;
import org.killbill.billing.jaxrs.json.CustomFieldJson;
import org.killbill.billing.jaxrs.json.PaymentJson;
import org.killbill.billing.jaxrs.json.PaymentTransactionJson;
import org.killbill.billing.jaxrs.json.TagJson;
import org.killbill.billing.jaxrs.util.Context;
import org.killbill.billing.jaxrs.util.JaxrsUriBuilder;
import org.killbill.billing.payment.api.Payment;
import org.killbill.billing.payment.api.PaymentApi;
import org.killbill.billing.payment.api.PaymentApiException;
import org.killbill.billing.payment.api.PaymentOptions;
import org.killbill.billing.payment.api.PluginProperty;
import org.killbill.billing.payment.api.TransactionStatus;
import org.killbill.billing.util.api.AuditUserApi;
import org.killbill.billing.util.api.CustomFieldApiException;
import org.killbill.billing.util.api.CustomFieldUserApi;
import org.killbill.billing.util.api.TagApiException;
import org.killbill.billing.util.api.TagDefinitionApiException;
import org.killbill.billing.util.api.TagUserApi;
import org.killbill.billing.util.audit.AccountAuditLogs;
import org.killbill.billing.util.callcontext.CallContext;
import org.killbill.billing.util.callcontext.TenantContext;
import org.killbill.clock.Clock;
import org.killbill.commons.metrics.TimedResource;
import com.google.common.collect.ImmutableList;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
@Path(JaxrsResource.PAYMENT_TRANSACTIONS_PATH)
@Api(value = JaxrsResource.PAYMENT_TRANSACTIONS_PATH, description = "Operations on payment transactions")
public class TransactionResource extends JaxRsResourceBase {

    private static final String ID_PARAM_NAME = "transactionId";

    @Inject
    public TransactionResource(final JaxrsUriBuilder uriBuilder,
                               final TagUserApi tagUserApi,
                               final CustomFieldUserApi customFieldUserApi,
                               final AuditUserApi auditUserApi,
                               final AccountUserApi accountUserApi,
                               final PaymentApi paymentApi,
                               final Clock clock,
                               final Context context) {
        // No subscription API is needed by this resource, hence the null argument.
        super(uriBuilder, tagUserApi, customFieldUserApi, auditUserApi, accountUserApi, paymentApi, null, clock, context);
    }

    /**
     * Retrieves the full payment (with all of its transactions) that contains the given transaction id.
     */
    @TimedResource(name = "getPaymentByTransactionId")
    @GET
    @Path("/{transactionId:" + UUID_PATTERN + "}/")
    @Produces(APPLICATION_JSON)
    @ApiOperation(value = "Retrieve a payment by transaction id", response = PaymentJson.class)
    @ApiResponses(value = {@ApiResponse(code = 404, message = "Payment not found")})
    public Response getPaymentByTransactionId(@PathParam("transactionId") final String transactionIdStr,
                                              @QueryParam(QUERY_WITH_PLUGIN_INFO) @DefaultValue("false") final Boolean withPluginInfo,
                                              @QueryParam(QUERY_WITH_ATTEMPTS) @DefaultValue("false") final Boolean withAttempts,
                                              @QueryParam(QUERY_PLUGIN_PROPERTY) final List<String> pluginPropertiesString,
                                              @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                                              @javax.ws.rs.core.Context final HttpServletRequest request) throws PaymentApiException {
        final Iterable<PluginProperty> pluginProperties = extractPluginProperties(pluginPropertiesString);
        final UUID transactionId = UUID.fromString(transactionIdStr);
        final TenantContext tenantContext = context.createContext(request);
        final Payment payment = paymentApi.getPaymentByTransactionId(transactionId, withPluginInfo, withAttempts, pluginProperties, tenantContext);
        // Audit logs are fetched at the account level and attached to the JSON view.
        final AccountAuditLogs accountAuditLogs = auditUserApi.getAccountAuditLogs(payment.getAccountId(), auditMode.getLevel(), tenantContext);
        final PaymentJson result = new PaymentJson(payment, accountAuditLogs);
        return Response.status(Response.Status.OK).entity(result).build();
    }

    /**
     * Marks a pending payment transaction as succeeded or failed. Any status other than
     * {@code SUCCESS} in the request body is treated as a failure.
     */
    @TimedResource
    @POST
    @Path("/{transactionId:" + UUID_PATTERN + "}/")
    @Consumes(APPLICATION_JSON)
    @Produces(APPLICATION_JSON)
    @ApiOperation(value = "Mark a pending payment transaction as succeeded or failed")
    @ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid paymentId supplied"),
                           @ApiResponse(code = 404, message = "Account or Payment not found")})
    public Response notifyStateChanged(final PaymentTransactionJson json,
                                       @PathParam("transactionId") final String transactionIdStr,
                                       @QueryParam(QUERY_PAYMENT_CONTROL_PLUGIN_NAME) final List<String> paymentControlPluginNames,
                                       @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                       @HeaderParam(HDR_REASON) final String reason,
                                       @HeaderParam(HDR_COMMENT) final String comment,
                                       @javax.ws.rs.core.Context final UriInfo uriInfo,
                                       @javax.ws.rs.core.Context final HttpServletRequest request) throws PaymentApiException, AccountApiException {
        verifyNonNullOrEmpty(json, "PaymentTransactionJson body should be specified");
        verifyNonNullOrEmpty(json.getPaymentId(), "PaymentTransactionJson paymentId needs to be set",
                             json.getStatus(), "PaymentTransactionJson status needs to be set");
        final PaymentOptions paymentOptions = createControlPluginApiPaymentOptions(paymentControlPluginNames);
        final CallContext callContext = context.createContext(createdBy, reason, comment, request);
        final UUID paymentId = UUID.fromString(json.getPaymentId());
        // Resolve the payment first so we can load the owning account for the state change.
        final Payment payment = paymentApi.getPayment(paymentId, false, false, ImmutableList.<PluginProperty>of(), callContext);
        final Account account = accountUserApi.getAccountById(payment.getAccountId(), callContext);
        final boolean success = TransactionStatus.SUCCESS.name().equals(json.getStatus());
        final Payment result = paymentApi.notifyPendingTransactionOfStateChangedWithPaymentControl(account, UUID.fromString(transactionIdStr), success, paymentOptions, callContext);
        return uriBuilder.buildResponse(uriInfo, PaymentResource.class, "getPayment", result.getId(), request);
    }

    /** Retrieves the custom fields attached to a payment transaction. */
    @TimedResource
    @GET
    @Path("/{transactionId:" + UUID_PATTERN + "}/" + CUSTOM_FIELDS)
    @Produces(APPLICATION_JSON)
    @ApiOperation(value = "Retrieve payment transaction custom fields", response = CustomFieldJson.class, responseContainer = "List")
    @ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid transaction id supplied")})
    public Response getCustomFields(@PathParam(ID_PARAM_NAME) final String id,
                                    @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                                    @javax.ws.rs.core.Context final HttpServletRequest request) {
        return super.getCustomFields(UUID.fromString(id), auditMode, context.createContext(request));
    }

    /** Attaches custom fields to a payment transaction. */
    @TimedResource
    @POST
    @Path("/{transactionId:" + UUID_PATTERN + "}/" + CUSTOM_FIELDS)
    @Consumes(APPLICATION_JSON)
    @Produces(APPLICATION_JSON)
    @ApiOperation(value = "Add custom fields to payment transaction")
    @ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid transaction id supplied")})
    public Response createCustomFields(@PathParam(ID_PARAM_NAME) final String id,
                                       final List<CustomFieldJson> customFields,
                                       @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                       @HeaderParam(HDR_REASON) final String reason,
                                       @HeaderParam(HDR_COMMENT) final String comment,
                                       @javax.ws.rs.core.Context final HttpServletRequest request,
                                       @javax.ws.rs.core.Context final UriInfo uriInfo) throws CustomFieldApiException {
        return super.createCustomFields(UUID.fromString(id), customFields,
                                        context.createContext(createdBy, reason, comment, request), uriInfo, request);
    }

    /** Removes the listed custom fields from a payment transaction. */
    @TimedResource
    @DELETE
    @Path("/{transactionId:" + UUID_PATTERN + "}/" + CUSTOM_FIELDS)
    @Consumes(APPLICATION_JSON)
    @Produces(APPLICATION_JSON)
    @ApiOperation(value = "Remove custom fields from payment transaction")
    @ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid transaction id supplied")})
    public Response deleteCustomFields(@PathParam(ID_PARAM_NAME) final String id,
                                       @QueryParam(QUERY_CUSTOM_FIELDS) final String customFieldList,
                                       @HeaderParam(HDR_CREATED_BY) final String createdBy,
                                       @HeaderParam(HDR_REASON) final String reason,
                                       @HeaderParam(HDR_COMMENT) final String comment,
                                       @javax.ws.rs.core.Context final HttpServletRequest request) throws CustomFieldApiException {
        return super.deleteCustomFields(UUID.fromString(id), customFieldList,
                                        context.createContext(createdBy, reason, comment, request));
    }

    /** Retrieves the tags attached to a payment transaction. */
    @TimedResource
    @GET
    @Path("/{transactionId:" + UUID_PATTERN + "}/" + TAGS)
    @Produces(APPLICATION_JSON)
    @ApiOperation(value = "Retrieve payment transaction tags", response = TagJson.class, responseContainer = "List")
    @ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid transaction id supplied"),
                           // Fixed copy-paste: this endpoint deals with payments, not invoices.
                           @ApiResponse(code = 404, message = "Payment not found")})
    public Response getTags(@PathParam(ID_PARAM_NAME) final String id,
                            @QueryParam(QUERY_AUDIT) @DefaultValue("NONE") final AuditMode auditMode,
                            @QueryParam(QUERY_TAGS_INCLUDED_DELETED) @DefaultValue("false") final Boolean includedDeleted,
                            @javax.ws.rs.core.Context final HttpServletRequest request) throws TagDefinitionApiException, PaymentApiException {
        final TenantContext tenantContext = context.createContext(request);
        // The payment is looked up only to resolve the owning account for the tag query.
        final Payment payment = paymentApi.getPaymentByTransactionId(UUID.fromString(id), false, false, ImmutableList.<PluginProperty>of(), tenantContext);
        return super.getTags(payment.getAccountId(), UUID.fromString(id), auditMode, includedDeleted, tenantContext);
    }

    /** Attaches the listed tags to a payment transaction. */
    @TimedResource
    @POST
    @Path("/{transactionId:" + UUID_PATTERN + "}/" + TAGS)
    @Consumes(APPLICATION_JSON)
    @Produces(APPLICATION_JSON)
    @ApiOperation(value = "Add tags to payment transaction")
    @ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid transaction id supplied")})
    public Response createTags(@PathParam(ID_PARAM_NAME) final String id,
                               @QueryParam(QUERY_TAGS) final String tagList,
                               @HeaderParam(HDR_CREATED_BY) final String createdBy,
                               @HeaderParam(HDR_REASON) final String reason,
                               @HeaderParam(HDR_COMMENT) final String comment,
                               @javax.ws.rs.core.Context final UriInfo uriInfo,
                               @javax.ws.rs.core.Context final HttpServletRequest request) throws TagApiException {
        return super.createTags(UUID.fromString(id), tagList, uriInfo,
                                context.createContext(createdBy, reason, comment, request), request);
    }

    /** Removes the listed tags from a payment transaction. */
    @TimedResource
    @DELETE
    @Path("/{transactionId:" + UUID_PATTERN + "}/" + TAGS)
    @Consumes(APPLICATION_JSON)
    @Produces(APPLICATION_JSON)
    @ApiOperation(value = "Remove tags from payment transaction")
    @ApiResponses(value = {@ApiResponse(code = 400, message = "Invalid transaction id supplied")})
    public Response deleteTags(@PathParam(ID_PARAM_NAME) final String id,
                               @QueryParam(QUERY_TAGS) final String tagList,
                               @HeaderParam(HDR_CREATED_BY) final String createdBy,
                               @HeaderParam(HDR_REASON) final String reason,
                               @HeaderParam(HDR_COMMENT) final String comment,
                               @javax.ws.rs.core.Context final HttpServletRequest request) throws TagApiException {
        return super.deleteTags(UUID.fromString(id), tagList,
                                context.createContext(createdBy, reason, comment, request));
    }

    @Override
    protected ObjectType getObjectType() {
        return ObjectType.TRANSACTION;
    }
}
| |
/*
*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.wso2.carbon.apimgt.rest.api.store.impl;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.simple.JSONObject;
import org.wso2.carbon.apimgt.api.APIConsumer;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.model.Application;
import org.wso2.carbon.apimgt.api.model.ApplicationConstants;
import org.wso2.carbon.apimgt.api.model.Subscriber;
import org.wso2.carbon.apimgt.api.model.Tier;
import org.wso2.carbon.apimgt.impl.APIConstants;
import org.wso2.carbon.apimgt.impl.APIManagerFactory;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import org.wso2.carbon.apimgt.rest.api.store.ApplicationsApiService;
import org.wso2.carbon.apimgt.rest.api.store.dto.ApplicationDTO;
import org.wso2.carbon.apimgt.rest.api.store.dto.ApplicationKeyDTO;
import org.wso2.carbon.apimgt.rest.api.store.dto.ApplicationKeyGenerateRequestDTO;
import org.wso2.carbon.apimgt.rest.api.store.dto.ApplicationListDTO;
import org.wso2.carbon.apimgt.rest.api.store.utils.RestAPIStoreUtils;
import org.wso2.carbon.apimgt.rest.api.store.utils.mappings.ApplicationKeyMappingUtil;
import org.wso2.carbon.apimgt.rest.api.store.utils.mappings.ApplicationMappingUtil;
import org.wso2.carbon.apimgt.rest.api.util.RestApiConstants;
import org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil;
import javax.ws.rs.core.Response;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Map;
/**
* This is the service implementation class for Store application related operations
*/
public class ApplicationsApiServiceImpl extends ApplicationsApiService {
private static final Log log = LogFactory.getLog(ApplicationsApiServiceImpl.class);
/**
* Retrieves all the applications that the user has access to
*
* @param groupId group Id
* @param query search condition
* @param limit max number of objects returns
* @param offset starting index
* @param accept accepted media type of the client
* @param ifNoneMatch If-None-Match header value
* @return Response object containing resulted applications
*/
@Override
public Response applicationsGet(String groupId, String query, Integer limit, Integer offset, String accept,
String ifNoneMatch) {
String username = RestApiUtil.getLoggedInUsername();
// currently groupId is taken from the user so that groupId coming as a query parameter is not honored.
// As a improvement, we can check admin privileges of the user and honor groupId.
groupId = RestApiUtil.getLoggedInUserGroupId();
limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT;
offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT;
ApplicationListDTO applicationListDTO;
try {
APIConsumer apiConsumer = APIManagerFactory.getInstance().getAPIConsumer(username);
Application[] allMatchedApps = new Application[0];
if (StringUtils.isBlank(query)) {
allMatchedApps = apiConsumer.getApplications(new Subscriber(username), groupId);
} else {
Application application = apiConsumer.getApplicationsByName(username, query, groupId);
if (application != null) {
allMatchedApps = new Application[1];
allMatchedApps[0] = application;
}
}
//allMatchedApps are already sorted to application name
applicationListDTO = ApplicationMappingUtil.fromApplicationsToDTO(allMatchedApps, limit, offset);
ApplicationMappingUtil.setPaginationParams(applicationListDTO, groupId, limit, offset,
allMatchedApps.length);
return Response.ok().entity(applicationListDTO).build();
} catch (APIManagementException e) {
RestApiUtil
.handleInternalServerError("Error while retrieving applications of the user " + username, e, log);
}
return null;
}
    /**
     * Creates a new application for the logged-in user.
     *
     * Validates the requested throttling tier, forces the subscriber and groupId from the
     * logged-in context (the corresponding body fields are ignored), persists the
     * application and returns the created entity with a Location header.
     *
     * @param body request body containing application details
     * @param contentType Content-Type header value
     * @return 201 response if successful
     */
    @Override
    public Response applicationsPost(ApplicationDTO body, String contentType) {
        String username = RestApiUtil.getLoggedInUsername();
        try {
            APIConsumer apiConsumer = APIManagerFactory.getInstance().getAPIConsumer(username);
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            //validate the tier specified for the application
            String tierName = body.getThrottlingTier();
            if (tierName != null) {
                // The tier must exist among the tenant's application-level tiers.
                Map<String, Tier> appTierMap = APIUtil.getTiers(APIConstants.TIER_APPLICATION_TYPE, tenantDomain);
                if (appTierMap == null || RestApiUtil.findTier(appTierMap.values(), tierName) == null) {
                    RestApiUtil.handleBadRequest("Specified tier " + tierName + " is invalid", log);
                }
            } else {
                RestApiUtil.handleBadRequest("Throttling tier cannot be null", log);
            }
            //subscriber field of the body is not honored. It is taken from the context
            Application application = ApplicationMappingUtil.fromDTOtoApplication(body, username);
            //setting the proper groupId. This is not honored for now.
            // Later we can honor it by checking admin privileges of the user.
            String groupId = RestApiUtil.getLoggedInUserGroupId();
            application.setGroupId(groupId);
            int applicationId = apiConsumer.addApplication(application, username);
            //retrieves the created application and send as the response
            Application createdApplication = apiConsumer.getApplicationById(applicationId);
            ApplicationDTO createdApplicationDTO = ApplicationMappingUtil.fromApplicationtoDTO(createdApplication);
            //to be set as the Location header
            URI location = new URI(RestApiConstants.RESOURCE_PATH_APPLICATIONS + "/" +
                    createdApplicationDTO.getApplicationId());
            // NOTE(review): the ETag is set to the creation time — confirm this is the
            // intended entity-tag scheme for applications.
            return Response.created(location).entity(createdApplicationDTO).header("ETag",createdApplication.getCreatedTime()).build();
        } catch (APIManagementException | URISyntaxException e) {
            if (RestApiUtil.isDueToResourceAlreadyExists(e)) {
                // Application names must be unique per subscriber.
                RestApiUtil.handleResourceAlreadyExistsError(
                        "An application already exists with name " + body.getName(), e,
                        log);
            } else {
                RestApiUtil.handleInternalServerError("Error while adding a new application for the user " + username,
                        e, log);
            }
        }
        // Presumably unreachable when the error handlers above throw — TODO confirm.
        return null;
    }
/**
* Generate keys for a application
*
* @param applicationId application identifier
* @param body request body
* @param contentType Content-Type header value
* @param ifMatch If-Match header value
* @param ifUnmodifiedSince If-Unmodified-Since header value
* @return A response object containing application keys
*/
@Override
@SuppressWarnings("unchecked")
public Response applicationsGenerateKeysPost(String applicationId, ApplicationKeyGenerateRequestDTO body,
String contentType, String ifMatch, String ifUnmodifiedSince) {
String username = RestApiUtil.getLoggedInUsername();
try {
APIConsumer apiConsumer = APIManagerFactory.getInstance().getAPIConsumer(username);
Application application = apiConsumer.getApplicationByUUID(applicationId);
if (application != null) {
if (RestAPIStoreUtils.isUserAccessAllowedForApplication(application)) {
String[] accessAllowDomainsArray = body.getAccessAllowDomains().toArray(new String[1]);
JSONObject jsonParamObj = new JSONObject();
jsonParamObj.put(ApplicationConstants.OAUTH_CLIENT_USERNAME, username);
String jsonParams = jsonParamObj.toString();
String tokenScopes = StringUtils.join(body.getScopes(), " ");
Map<String, Object> keyDetails = apiConsumer.requestApprovalForApplicationRegistration(
username, application.getName(), body.getKeyType().toString(), body.getCallbackUrl(),
accessAllowDomainsArray, body.getValidityTime(), tokenScopes, application.getGroupId(),
jsonParams);
ApplicationKeyDTO applicationKeyDTO =
ApplicationKeyMappingUtil.fromApplicationKeyToDTO(keyDetails, body.getKeyType().toString());
return Response.ok().entity(applicationKeyDTO).build();
} else {
RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_APPLICATION, applicationId, log);
}
} else {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_APPLICATION, applicationId, log);
}
} catch (APIManagementException e) {
if (RestApiUtil.rootCauseMessageMatches(e, "primary key violation")) {
RestApiUtil
.handleResourceAlreadyExistsError("Keys already generated for the application " + applicationId,
e,
log);
} else {
RestApiUtil.handleInternalServerError("Error while generating keys for application " + applicationId, e,
log);
}
}
return null;
}
/**
* Get an application by Id
*
* @param applicationId application identifier
* @param accept accepted media type of the client
* @param ifNoneMatch If-None-Match header value
* @param ifModifiedSince If-Modified-Since header value
* @return response containing the required application object
*/
@Override
public Response applicationsApplicationIdGet(String applicationId, String accept, String ifNoneMatch,
String ifModifiedSince) {
String username = RestApiUtil.getLoggedInUsername();
try {
APIConsumer apiConsumer = APIManagerFactory.getInstance().getAPIConsumer(username);
Application application = apiConsumer.getApplicationByUUID(applicationId);
if (application != null) {
if (RestAPIStoreUtils.isUserAccessAllowedForApplication(application)) {
ApplicationDTO applicationDTO = ApplicationMappingUtil.fromApplicationtoDTO(application);
return Response.ok().entity(applicationDTO).build();
} else {
RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_APPLICATION, applicationId, log);
}
} else {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_APPLICATION, applicationId, log);
}
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError("Error while retrieving application " + applicationId, e, log);
}
return null;
}
/**
* Update an application by Id
*
* @param applicationId application identifier
* @param body request body containing application details
* @param contentType Content-Type header value
* @param ifMatch If-Match header value
* @param ifUnmodifiedSince If-Unmodified-Since header value
* @return response containing the updated application object
*/
@Override
public Response applicationsApplicationIdPut(String applicationId, ApplicationDTO body, String contentType,
String ifMatch, String ifUnmodifiedSince) {
String username = RestApiUtil.getLoggedInUsername();
try {
APIConsumer apiConsumer = APIManagerFactory.getInstance().getAPIConsumer(username);
Application oldApplication = apiConsumer.getApplicationByUUID(applicationId);
if (oldApplication != null) {
if (RestAPIStoreUtils.isUserAccessAllowedForApplication(oldApplication)) {
//we do not honor the subscriber coming from the request body as we can't change the subscriber of the application
Application application = ApplicationMappingUtil.fromDTOtoApplication(body, username);
//groupId of the request body is not honored for now.
// Later we can improve by checking admin privileges of the user.
application.setGroupId(oldApplication.getGroupId());
//we do not honor the application id which is sent via the request body
application.setUUID(oldApplication.getUUID());
apiConsumer.updateApplication(application);
//retrieves the updated application and send as the response
Application updatedApplication = apiConsumer.getApplicationByUUID(applicationId);
ApplicationDTO updatedApplicationDTO = ApplicationMappingUtil
.fromApplicationtoDTO(updatedApplication);
return Response.ok().entity(updatedApplicationDTO).header("ETag",updatedApplication.getLastUpdatedTime()).build();
} else {
RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_APPLICATION, applicationId, log);
}
} else {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_APPLICATION, applicationId, log);
}
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError("Error while updating application " + applicationId, e, log);
}
return null;
}
/**
* Deletes an application by id
*
* @param applicationId application identifier
* @param ifMatch If-Match header value
* @param ifUnmodifiedSince If-Unmodified-Since header value
* @return 200 Response if successfully deleted the application
*/
@Override
@SuppressWarnings("unchecked")
public Response applicationsApplicationIdDelete(String applicationId, String ifMatch,
String ifUnmodifiedSince) {
String username = RestApiUtil.getLoggedInUsername();
try {
APIConsumer apiConsumer = APIManagerFactory.getInstance().getAPIConsumer(username);
Application application = apiConsumer.getApplicationByUUID(applicationId);
if (application != null) {
if (RestAPIStoreUtils.isUserAccessAllowedForApplication(application)) {
apiConsumer.removeApplication(application);
return Response.ok().build();
} else {
RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_APPLICATION, applicationId, log);
}
} else {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_APPLICATION, applicationId, log);
}
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError("Error while deleting application " + applicationId, e, log);
}
return null;
}
    /**
     * Gets the last-updated time of the resource addressed by an application DELETE request.
     *
     * @param applicationId     application identifier
     * @param ifMatch           If-Match header value
     * @param ifUnmodifiedSince If-Unmodified-Since header value
     * @return the application's last-updated timestamp, looked up by its id
     */
    @Override
    public String applicationsApplicationIdDeleteGetLastUpdatedTime(String applicationId, String ifMatch, String ifUnmodifiedSince) {
        return RestAPIStoreUtils.getLastUpdatedTimeByApplicationId(applicationId);
    }
    /**
     * Gets the last-updated time of the resource addressed by an application GET request.
     *
     * @param applicationId   application identifier
     * @param accept          accepted media type of the client
     * @param ifNoneMatch     If-None-Match header value
     * @param ifModifiedSince If-Modified-Since header value
     * @return the application's last-updated timestamp, looked up by its id
     */
    @Override
    public String applicationsApplicationIdGetGetLastUpdatedTime(String applicationId, String accept, String ifNoneMatch, String ifModifiedSince) {
        return RestAPIStoreUtils.getLastUpdatedTimeByApplicationId(applicationId);
    }
    /**
     * Gets the last-updated time of the resource addressed by an application PUT request.
     *
     * @param applicationId     application identifier
     * @param body              request body containing application details (unused for the lookup)
     * @param contentType       Content-Type header value
     * @param ifMatch           If-Match header value
     * @param ifUnmodifiedSince If-Unmodified-Since header value
     * @return the application's last-updated timestamp, looked up by its id
     */
    @Override
    public String applicationsApplicationIdPutGetLastUpdatedTime(String applicationId, ApplicationDTO body, String contentType, String ifMatch, String ifUnmodifiedSince) {
        return RestAPIStoreUtils.getLastUpdatedTimeByApplicationId(applicationId);
    }
    /**
     * Gets the last-updated time for the key-generation POST resource.
     * Always returns null: no last-updated timestamp is tracked for key generation.
     */
    @Override
    public String applicationsGenerateKeysPostGetLastUpdatedTime(String applicationId, ApplicationKeyGenerateRequestDTO body, String contentType, String ifMatch, String ifUnmodifiedSince) {
        return null;
    }
    /**
     * Gets the last-updated time for the applications collection GET resource.
     * Always returns null: no last-updated timestamp is tracked for the collection.
     */
    @Override
    public String applicationsGetGetLastUpdatedTime(String groupId, String query, Integer limit, Integer offset, String accept, String ifNoneMatch) {
        return null;
    }
    /**
     * Gets the last-updated time for the application-creation POST resource.
     * Always returns null: a resource being created has no last-updated timestamp.
     */
    @Override
    public String applicationsPostGetLastUpdatedTime(ApplicationDTO body, String contentType) {
        return null;
    }
}
| |
/*
* Copyright 2005-2006 Olivier Descout
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.idea.lang.javascript.psiutil;
import org.jetbrains.annotations.NonNls;
import com.intellij.lang.javascript.JSTokenTypes;
import com.intellij.lang.javascript.psi.JSAssignmentExpression;
import com.intellij.lang.javascript.psi.JSBinaryExpression;
import com.intellij.lang.javascript.psi.JSBlockStatement;
import com.intellij.lang.javascript.psi.JSConditionalExpression;
import com.intellij.lang.javascript.psi.JSExpression;
import com.intellij.lang.javascript.psi.JSExpressionStatement;
import com.intellij.lang.javascript.psi.JSReturnStatement;
import com.intellij.lang.javascript.psi.JSStatement;
import com.intellij.lang.javascript.psi.JSIfStatement;
import com.intellij.lang.javascript.psi.JSElement;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.PsiElement;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
/**
 * Utility methods for recognising and rewriting JavaScript conditionals that
 * merely select between the boolean literals (e.g. {@code if (c) return true;
 * else return false;}), plus a helper for expanding {@code ?:} expressions
 * into an explicit {@code if}/{@code else} statement.
 *
 * <p>All methods are static; the class is not instantiable.</p>
 */
public class ConditionalUtils {

    private ConditionalUtils() {}

    /**
     * Unwraps a single-statement block: <code>{ s; }</code> becomes {@code s}.
     * Blocks containing zero or several statements, non-block statements and
     * {@code null} are returned unchanged.
     */
    public static JSStatement stripBraces(JSStatement branch) {
        // instanceof is false for null, so no separate null check is needed.
        if (branch instanceof JSBlockStatement) {
            final JSStatement[] statements = ((JSBlockStatement) branch).getStatements();
            if (statements.length == 1) {
                return statements[0];
            }
        }
        return branch;
    }

    /**
     * Returns true if {@code statement} is {@code return <expr>;} and the
     * returned expression's text equals {@code value}.
     */
    public static boolean isReturn(JSStatement statement, String value) {
        if (!(statement instanceof JSReturnStatement)) {
            return false;
        }
        final JSExpression returnExpression = ((JSReturnStatement) statement).getExpression();
        return (returnExpression != null && value.equals(returnExpression.getText()));
    }

    /**
     * Returns true if {@code statement} is an assignment whose right-hand
     * side's text equals {@code value}.
     */
    public static boolean isAssignment(JSStatement statement, String value) {
        if (!(statement instanceof JSExpressionStatement)) {
            return false;
        }
        final JSExpression expression = ((JSExpressionStatement) statement).getExpression();
        if (!(expression instanceof JSAssignmentExpression)) {
            return false;
        }
        final JSExpression rhs = ((JSAssignmentExpression) expression).getROperand();
        return (rhs != null && rhs.getText().equals(value));
    }

    /** Returns true if {@code statement} is an assignment expression statement. */
    public static boolean isAssignment(JSStatement statement) {
        return (statement instanceof JSExpressionStatement &&
                ((JSExpressionStatement) statement).getExpression() instanceof JSAssignmentExpression);
    }

    /**
     * Returns true if {@code expression} is boolean-valued: a {@code ?:}
     * expression, a boolean literal, a comparison/equality operator, a
     * logical and/or, or a XOR whose both operands are themselves boolean.
     */
    public static boolean isConditional(JSExpression expression) {
        expression = ParenthesesUtils.stripParentheses(expression);
        if (expression == null) {
            return false;
        }
        if (expression instanceof JSConditionalExpression) {
            return true;
        }
        if (expression instanceof JSBinaryExpression) {
            final JSBinaryExpression binaryExpression = (JSBinaryExpression) expression;
            final IElementType sign = binaryExpression.getOperationSign();
            if (JSTokenTypes.XOR.equals(sign)) {
                // XOR is boolean only when both operands are boolean.
                return (isConditional(binaryExpression.getLOperand()) &&
                        isConditional(binaryExpression.getROperand()));
            }
            return (JSTokenTypes.ANDAND.equals(sign) || JSTokenTypes.OROR.equals(sign) ||
                    JSTokenTypes.EQ    .equals(sign) || JSTokenTypes.NE  .equals(sign) ||
                    JSTokenTypes.LT    .equals(sign) || JSTokenTypes.LE  .equals(sign) ||
                    JSTokenTypes.GT    .equals(sign) || JSTokenTypes.GE  .equals(sign));
        }
        return BoolUtils.isBooleanLiteral(expression);
    }

    /**
     * Returns true when the branches return the complementary boolean pair:
     * then-branch {@code true} / else-branch {@code false}, or the reverse
     * when {@code negated} is set.
     */
    private static boolean branchesReturnBooleanPair(JSStatement thenBranch, JSStatement elseBranch,
                                                     boolean negated) {
        final String thenValue = (negated ? BoolUtils.FALSE : BoolUtils.TRUE);
        final String elseValue = (negated ? BoolUtils.TRUE : BoolUtils.FALSE);
        return (isReturn(thenBranch, thenValue) && isReturn(elseBranch, elseValue));
    }

    /**
     * Returns true when the branches assign the complementary boolean pair:
     * then-branch {@code true} / else-branch {@code false}, or the reverse
     * when {@code negated} is set.
     */
    private static boolean branchesAssignBooleanPair(JSStatement thenBranch, JSStatement elseBranch,
                                                     boolean negated) {
        final String thenValue = (negated ? BoolUtils.FALSE : BoolUtils.TRUE);
        final String elseValue = (negated ? BoolUtils.TRUE : BoolUtils.FALSE);
        return (isAssignment(thenBranch, thenValue) && isAssignment(elseBranch, elseValue));
    }

    /**
     * Returns true when both branches assign complementary boolean literals to
     * equivalent left-hand sides with the same assignment operator.
     * Shared by {@link #isSimplifiableAssignment} and
     * {@link #isSimplifiableImplicitAssignment}.
     */
    private static boolean isSimplifiableAssignmentPair(JSStatement thenBranch, JSStatement elseBranch,
                                                        boolean negated) {
        if (!branchesAssignBooleanPair(thenBranch, elseBranch, negated)) {
            return false;
        }
        final JSAssignmentExpression thenExpression =
                (JSAssignmentExpression) ((JSExpressionStatement) thenBranch).getExpression();
        final JSAssignmentExpression elseExpression =
                (JSAssignmentExpression) ((JSExpressionStatement) elseBranch).getExpression();
        if (!thenExpression.getOperationSign().equals(elseExpression.getOperationSign())) {
            return false;
        }
        return EquivalenceChecker.expressionsAreEquivalent(thenExpression.getLOperand(),
                                                           elseExpression.getLOperand());
    }

    /**
     * Returns true for the pattern {@code if (c) return b; return !b;} where
     * the second return is the statement following the {@code if}.
     */
    public static boolean isSimplifiableImplicitReturn(JSIfStatement ifStatement, boolean negated) {
        final JSStatement thenBranch = stripBraces(ifStatement.getThen());
        final PsiElement nextStatement = JSElementFactory.getNonWhiteSpaceSibling(ifStatement, true);
        if (!(nextStatement instanceof JSStatement)) {
            return false;
        }
        return branchesReturnBooleanPair(thenBranch, (JSStatement) nextStatement, negated);
    }

    /** Returns true for the pattern {@code if (c) return b; else return !b;}. */
    public static boolean isSimplifiableReturn(JSIfStatement ifStatement, boolean negated) {
        return branchesReturnBooleanPair(stripBraces(ifStatement.getThen()),
                                         stripBraces(ifStatement.getElse()),
                                         negated);
    }

    /** Returns true for the pattern {@code if (c) x = b; else x = !b;}. */
    public static boolean isSimplifiableAssignment(JSIfStatement ifStatement, boolean negated) {
        return isSimplifiableAssignmentPair(stripBraces(ifStatement.getThen()),
                                            stripBraces(ifStatement.getElse()),
                                            negated);
    }

    /**
     * Returns true for the pattern {@code x = !b; if (c) x = b;} where the
     * initial assignment is the statement preceding the (else-less) {@code if}.
     */
    public static boolean isSimplifiableImplicitAssignment(JSIfStatement ifStatement, boolean negated) {
        if (ifStatement.getElse() != null) {
            return false;
        }
        final JSStatement thenBranch = stripBraces(ifStatement.getThen());
        final PsiElement nextStatement = JSElementFactory.getNonWhiteSpaceSibling(ifStatement, false);
        if (!(nextStatement instanceof JSStatement)) {
            return false;
        }
        final JSStatement elseBranch = stripBraces((JSStatement) nextStatement);
        return isSimplifiableAssignmentPair(thenBranch, elseBranch, negated);
    }

    /** Builds the text of the condition, negating it when requested. */
    private static String conditionText(JSIfStatement statement, boolean negated) {
        final JSExpression condition = statement.getCondition();
        return (negated ? BoolUtils.getNegatedExpressionText(condition) : condition.getText());
    }

    /**
     * Builds the {@code <lhs><op><condition>;} replacement text from the
     * then-branch assignment of a simplifiable {@code if} statement.
     */
    private static String assignmentText(JSIfStatement statement, boolean negated) {
        final JSExpressionStatement assignmentStatement =
                (JSExpressionStatement) stripBraces(statement.getThen());
        final JSAssignmentExpression assignmentExpression =
                (JSAssignmentExpression) assignmentStatement.getExpression();
        final String operand = BinaryOperatorUtils.getOperatorText(assignmentExpression.getOperationSign());
        return assignmentExpression.getLOperand().getText() + operand + conditionText(statement, negated) + ';';
    }

    /**
     * Rewrites {@code if (c) return b; return !b;} into {@code return c;}
     * (or {@code return !c;} when {@code negated}), removing the trailing return.
     */
    public static void replaceSimplifiableImplicitReturn(JSIfStatement statement, boolean negated)
            throws IncorrectOperationException {
        // Capture the trailing return before the replacement invalidates siblings.
        final JSElement nextStatement = (JSElement) JSElementFactory.getNonWhiteSpaceSibling(statement, true);
        @NonNls final String newStatement = "return " + conditionText(statement, negated) + ';';
        JSElementFactory.replaceStatement(statement, newStatement);
        assert (nextStatement != null);
        JSElementFactory.removeElement(nextStatement);
    }

    /** Rewrites {@code if (c) return b; else return !b;} into {@code return c;}. */
    public static void replaceSimplifiableReturn(JSIfStatement statement, boolean negated)
            throws IncorrectOperationException {
        @NonNls final String newStatement = "return " + conditionText(statement, negated) + ';';
        JSElementFactory.replaceStatement(statement, newStatement);
    }

    /** Rewrites {@code if (c) x = b; else x = !b;} into {@code x = c;}. */
    public static void replaceSimplifiableAssignment(JSIfStatement statement, boolean negated)
            throws IncorrectOperationException {
        JSElementFactory.replaceStatement(statement, assignmentText(statement, negated));
    }

    /**
     * Rewrites {@code x = !b; if (c) x = b;} into {@code x = c;}, removing the
     * preceding initial assignment.
     */
    public static void replaceSimplifiableImplicitAssignment(JSIfStatement statement, boolean negated)
            throws IncorrectOperationException {
        // Capture the preceding assignment before the replacement invalidates siblings.
        final JSElement prevStatement = (JSElement) JSElementFactory.getNonWhiteSpaceSibling(statement, false);
        JSElementFactory.replaceStatement(statement, assignmentText(statement, negated));
        assert (prevStatement != null);
        JSElementFactory.removeElement(prevStatement);
    }

    /**
     * Applies the first applicable simplification to {@code statement}, trying
     * explicit assignments, explicit returns, implicit returns and implicit
     * assignments, each in direct then negated form.
     */
    public static void replaceAssignmentOrReturnIfSimplifiable(JSIfStatement statement)
            throws IncorrectOperationException {
        if (isSimplifiableAssignment(statement, false)) {
            replaceSimplifiableAssignment(statement, false);
        } else if (isSimplifiableAssignment(statement, true)) {
            replaceSimplifiableAssignment(statement, true);
        } else if (isSimplifiableReturn(statement, false)) {
            replaceSimplifiableReturn(statement, false);
        } else if (isSimplifiableReturn(statement, true)) {
            replaceSimplifiableReturn(statement, true);
        } else if (isSimplifiableImplicitReturn(statement, false)) {
            replaceSimplifiableImplicitReturn(statement, false);
        } else if (isSimplifiableImplicitReturn(statement, true)) {
            replaceSimplifiableImplicitReturn(statement, true);
        } else if (isSimplifiableImplicitAssignment(statement, false)) {
            replaceSimplifiableImplicitAssignment(statement, false);
        } else if (isSimplifiableImplicitAssignment(statement, true)) {
            replaceSimplifiableImplicitAssignment(statement, true);
        }
    }

    /**
     * Expands a {@code ?:} expression into an {@code if}/{@code else} statement,
     * duplicating the surrounding statement text around each branch value.
     * E.g. {@code y = c ? a : b;} becomes
     * {@code if (c) { y = a; } else { y = b; }}.
     */
    public static void replaceConditionalWithIf(JSConditionalExpression conditional)
            throws IncorrectOperationException {
        final JSStatement statement = PsiTreeUtil.getParentOfType(conditional, JSStatement.class);
        assert (statement != null);
        // Split the enclosing statement's text around the conditional expression
        // (including any parentheses wrapped around it).
        final String statementText = statement.getText();
        final String conditionalText = ParenthesesUtils.unstripParentheses(conditional).getText();
        final int conditionalIndex = statementText.indexOf(conditionalText);
        final String statementStart = statementText.substring(0, conditionalIndex);
        final String statementEnd = statementText.substring(conditionalIndex + conditionalText.length());
        final JSExpression condition = ParenthesesUtils.stripParentheses(conditional.getCondition());
        final JSExpression thenExpression = ParenthesesUtils.stripParentheses(conditional.getThen());
        final JSExpression elseExpression = ParenthesesUtils.stripParentheses(conditional.getElse());
        @NonNls final String ifStatementText = "if (" + condition.getText() + ") {" +
                                               statementStart + thenExpression.getText() + statementEnd +
                                               "} else {" +
                                               statementStart + elseExpression.getText() + statementEnd +
                                               '}';
        JSElementFactory.replaceStatement(statement, ifStatementText);
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: GetHashmapResponse.proto
package com.alachisoft.tayzgrid.common.protobuf;
public final class GetHashmapResponseProtocol {
private GetHashmapResponseProtocol() {}
  // No extensions are declared in GetHashmapResponse.proto, so there is nothing to register.
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
public static final class GetHashmapResponse extends
com.google.protobuf.GeneratedMessage {
// Use GetHashmapResponse.newBuilder() to construct.
private GetHashmapResponse() {
initFields();
}
private GetHashmapResponse(boolean noInit) {}
private static final GetHashmapResponse defaultInstance;
public static GetHashmapResponse getDefaultInstance() {
return defaultInstance;
}
public GetHashmapResponse getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.internal_static_com_alachisoft_tayzgrid_common_protobuf_GetHashmapResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.internal_static_com_alachisoft_tayzgrid_common_protobuf_GetHashmapResponse_fieldAccessorTable;
}
// optional int64 viewId = 1;
public static final int VIEWID_FIELD_NUMBER = 1;
private boolean hasViewId;
private long viewId_ = 0L;
public boolean hasViewId() { return hasViewId; }
public long getViewId() { return viewId_; }
// repeated string members = 2;
public static final int MEMBERS_FIELD_NUMBER = 2;
private java.util.List<java.lang.String> members_ =
java.util.Collections.emptyList();
public java.util.List<java.lang.String> getMembersList() {
return members_;
}
public int getMembersCount() { return members_.size(); }
public java.lang.String getMembers(int index) {
return members_.get(index);
}
// repeated .com.alachisoft.tayzgrid.common.protobuf.KeyValuePair keyValuePair = 3;
public static final int KEYVALUEPAIR_FIELD_NUMBER = 3;
private java.util.List<com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair> keyValuePair_ =
java.util.Collections.emptyList();
public java.util.List<com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair> getKeyValuePairList() {
return keyValuePair_;
}
public int getKeyValuePairCount() { return keyValuePair_.size(); }
public com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair getKeyValuePair(int index) {
return keyValuePair_.get(index);
}
// optional int32 bucketSize = 4;
public static final int BUCKETSIZE_FIELD_NUMBER = 4;
private boolean hasBucketSize;
private int bucketSize_ = 0;
public boolean hasBucketSize() { return hasBucketSize; }
public int getBucketSize() { return bucketSize_; }
private void initFields() {
}
public final boolean isInitialized() {
return true;
}
    // Generated serializer: writes each set field in ascending field-number
    // order, then appends any unknown fields preserved from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Forces the serialized size to be computed (and memoized) up front.
      getSerializedSize();
      if (hasViewId()) {
        output.writeInt64(1, getViewId());
      }
      for (java.lang.String element : getMembersList()) {
        output.writeString(2, element);
      }
      for (com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair element : getKeyValuePairList()) {
        output.writeMessage(3, element);
      }
      if (hasBucketSize()) {
        output.writeInt32(4, getBucketSize());
      }
      getUnknownFields().writeTo(output);
    }
    // Cached result of getSerializedSize(); -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    // Computes (and memoizes) the total byte size of the serialized message.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (hasViewId()) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(1, getViewId());
      }
      {
        int dataSize = 0;
        for (java.lang.String element : getMembersList()) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeStringSizeNoTag(element);
        }
        size += dataSize;
        // One byte of tag overhead per element (field 2 fits in a single-byte tag).
        size += 1 * getMembersList().size();
      }
      for (com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair element : getKeyValuePairList()) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, element);
      }
      if (hasBucketSize()) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(4, getBucketSize());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
public static com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse result;
// Construct using com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse.newBuilder()
private Builder() {}
private static Builder create() {
Builder builder = new Builder();
builder.result = new com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse();
return builder;
}
protected com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse internalGetResult() {
return result;
}
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse();
return this;
}
public Builder clone() {
return create().mergeFrom(result);
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse.getDescriptor();
}
public com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse getDefaultInstanceForType() {
return com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse.getDefaultInstance();
}
public boolean isInitialized() {
return result.isInitialized();
}
public com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse build() {
if (result != null && !isInitialized()) {
throw newUninitializedMessageException(result);
}
return buildPartial();
}
private com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();
}
      // Hands off the message under construction; the Builder becomes unusable
      // afterwards (result is nulled, so a second build() call throws).
      public com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse buildPartial() {
        if (result == null) {
          throw new IllegalStateException(
            "build() has already been called on this Builder.");
        }
        // Populated repeated fields are made unmodifiable before hand-off.
        if (result.members_ != java.util.Collections.EMPTY_LIST) {
          result.members_ =
            java.util.Collections.unmodifiableList(result.members_);
        }
        if (result.keyValuePair_ != java.util.Collections.EMPTY_LIST) {
          result.keyValuePair_ =
            java.util.Collections.unmodifiableList(result.keyValuePair_);
        }
        com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse returnMe = result;
        result = null;
        return returnMe;
      }
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse) {
return mergeFrom((com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
      // Merges every set field of the given message into the one under
      // construction: scalar fields are overwritten, repeated fields appended.
      public Builder mergeFrom(com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse other) {
        if (other == com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse.getDefaultInstance()) return this;
        if (other.hasViewId()) {
          setViewId(other.getViewId());
        }
        if (!other.members_.isEmpty()) {
          // Swap the shared immutable empty list for a mutable one before appending.
          if (result.members_.isEmpty()) {
            result.members_ = new java.util.ArrayList<java.lang.String>();
          }
          result.members_.addAll(other.members_);
        }
        if (!other.keyValuePair_.isEmpty()) {
          if (result.keyValuePair_.isEmpty()) {
            result.keyValuePair_ = new java.util.ArrayList<com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair>();
          }
          result.keyValuePair_.addAll(other.keyValuePair_);
        }
        if (other.hasBucketSize()) {
          setBucketSize(other.getBucketSize());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // Parses fields from the wire until end of input (tag 0). Recognized tags
      // update the corresponding field; anything else goes to the unknown-field set.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // End of stream: commit accumulated unknown fields and return.
              this.setUnknownFields(unknownFields.build());
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                // parseUnknownField reports false when the tag ends a group (per protobuf API).
                this.setUnknownFields(unknownFields.build());
                return this;
              }
              break;
            }
            case 8: {
              // viewId = 1, varint wire type.
              setViewId(input.readInt64());
              break;
            }
            case 18: {
              // members = 2, length-delimited wire type.
              addMembers(input.readString());
              break;
            }
            case 26: {
              // keyValuePair = 3, embedded message.
              com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair.Builder subBuilder = com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addKeyValuePair(subBuilder.buildPartial());
              break;
            }
            case 32: {
              // bucketSize = 4, varint wire type.
              setBucketSize(input.readInt32());
              break;
            }
          }
        }
      }
// optional int64 viewId = 1;
public boolean hasViewId() {
return result.hasViewId();
}
public long getViewId() {
return result.getViewId();
}
public Builder setViewId(long value) {
result.hasViewId = true;
result.viewId_ = value;
return this;
}
public Builder clearViewId() {
result.hasViewId = false;
result.viewId_ = 0L;
return this;
}
// repeated string members = 2;

/** Returns an unmodifiable view of the members list being built. */
public java.util.List<java.lang.String> getMembersList() {
  return java.util.Collections.unmodifiableList(result.members_);
}

/** Returns the number of members entries. */
public int getMembersCount() {
  return result.getMembersCount();
}

/** Returns the members entry at the given index. */
public java.lang.String getMembers(int index) {
  return result.getMembers(index);
}

/** Replaces the members entry at the given index; value must not be null. */
public Builder setMembers(int index, java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  result.members_.set(index, value);
  return this;
}

/** Appends one member; value must not be null. */
public Builder addMembers(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  if (result.members_.isEmpty()) {
    // The default is the shared immutable empty list; swap in a mutable
    // list before the first mutation.
    result.members_ = new java.util.ArrayList<java.lang.String>();
  }
  result.members_.add(value);
  return this;
}

/** Appends every member produced by the given iterable. */
public Builder addAllMembers(
    java.lang.Iterable<? extends java.lang.String> values) {
  if (result.members_.isEmpty()) {
    // See addMembers: replace the immutable default before mutating.
    result.members_ = new java.util.ArrayList<java.lang.String>();
  }
  super.addAll(values, result.members_);
  return this;
}

/** Resets members to the shared immutable empty list. */
public Builder clearMembers() {
  result.members_ = java.util.Collections.emptyList();
  return this;
}
// repeated .com.alachisoft.tayzgrid.common.protobuf.KeyValuePair keyValuePair = 3;

/** Returns an unmodifiable view of the keyValuePair list being built. */
public java.util.List<com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair> getKeyValuePairList() {
  return java.util.Collections.unmodifiableList(result.keyValuePair_);
}

/** Returns the number of keyValuePair entries. */
public int getKeyValuePairCount() {
  return result.getKeyValuePairCount();
}

/** Returns the keyValuePair entry at the given index. */
public com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair getKeyValuePair(int index) {
  return result.getKeyValuePair(index);
}

/** Replaces the entry at the given index; value must not be null. */
public Builder setKeyValuePair(int index, com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair value) {
  if (value == null) {
    throw new NullPointerException();
  }
  result.keyValuePair_.set(index, value);
  return this;
}

/** Replaces the entry at the given index with the built message. */
public Builder setKeyValuePair(int index, com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair.Builder builderForValue) {
  result.keyValuePair_.set(index, builderForValue.build());
  return this;
}

/** Appends one entry; value must not be null. */
public Builder addKeyValuePair(com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair value) {
  if (value == null) {
    throw new NullPointerException();
  }
  if (result.keyValuePair_.isEmpty()) {
    // The default is the shared immutable empty list; swap in a mutable
    // list before the first mutation.
    result.keyValuePair_ = new java.util.ArrayList<com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair>();
  }
  result.keyValuePair_.add(value);
  return this;
}

/** Appends the message produced by the given builder. */
public Builder addKeyValuePair(com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair.Builder builderForValue) {
  if (result.keyValuePair_.isEmpty()) {
    result.keyValuePair_ = new java.util.ArrayList<com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair>();
  }
  result.keyValuePair_.add(builderForValue.build());
  return this;
}

/** Appends every entry produced by the given iterable. */
public Builder addAllKeyValuePair(
    java.lang.Iterable<? extends com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair> values) {
  if (result.keyValuePair_.isEmpty()) {
    result.keyValuePair_ = new java.util.ArrayList<com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.KeyValuePair>();
  }
  super.addAll(values, result.keyValuePair_);
  return this;
}

/** Resets keyValuePair to the shared immutable empty list. */
public Builder clearKeyValuePair() {
  result.keyValuePair_ = java.util.Collections.emptyList();
  return this;
}
// optional int32 bucketSize = 4;

/** Returns true if bucketSize has been set on the message being built. */
public boolean hasBucketSize() {
  return result.hasBucketSize();
}

/** Returns the bucketSize value (the cleared value is 0). */
public int getBucketSize() {
  return result.getBucketSize();
}

/** Sets bucketSize and marks the field as present. */
public Builder setBucketSize(int value) {
  result.hasBucketSize = true;
  result.bucketSize_ = value;
  return this;
}

/** Clears bucketSize back to its default of 0 and marks the field absent. */
public Builder clearBucketSize() {
  result.hasBucketSize = false;
  result.bucketSize_ = 0;
  return this;
}
// @@protoc_insertion_point(builder_scope:com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponse)
}
// Eagerly builds the shared default instance, forcing the enclosing
// protocol class (and therefore the file descriptor) to initialize first.
static {
  defaultInstance = new GetHashmapResponse(true);
  com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.internalForceInit();
  defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponse)
}
// Descriptor bookkeeping generated by the protocol buffer compiler.

// Message descriptor for GetHashmapResponse; assigned once the file
// descriptor below has been built.
private static com.google.protobuf.Descriptors.Descriptor
  internal_static_com_alachisoft_tayzgrid_common_protobuf_GetHashmapResponse_descriptor;

// Reflection table mapping the field names to the message/builder accessors.
private static
  com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internal_static_com_alachisoft_tayzgrid_common_protobuf_GetHashmapResponse_fieldAccessorTable;

/** Returns the file descriptor for this generated protocol file. */
public static com.google.protobuf.Descriptors.FileDescriptor
    getDescriptor() {
  return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
    descriptor;

static {
  // Serialized form of GetHashmapResponse.proto, embedded as string data.
  java.lang.String[] descriptorData = {
    "\n\030GetHashmapResponse.proto\022\'com.alachiso" +
    "ft.tayzgrid.common.protobuf\032\022KeyValuePai" +
    "r.proto\"\226\001\n\022GetHashmapResponse\022\016\n\006viewId" +
    "\030\001 \001(\003\022\017\n\007members\030\002 \003(\t\022K\n\014keyValuePair\030" +
    "\003 \003(\01325.com.alachisoft.tayzgrid.common.p" +
    "rotobuf.KeyValuePair\022\022\n\nbucketSize\030\004 \001(\005" +
    "B\034B\032GetHashmapResponseProtocol"
  };
  // Callback invoked once the descriptor is built: captures it and wires up
  // the reflection accessor table for the message and its builder.
  com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
    new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
      public com.google.protobuf.ExtensionRegistry assignDescriptors(
          com.google.protobuf.Descriptors.FileDescriptor root) {
        descriptor = root;
        internal_static_com_alachisoft_tayzgrid_common_protobuf_GetHashmapResponse_descriptor =
          getDescriptor().getMessageTypes().get(0);
        internal_static_com_alachisoft_tayzgrid_common_protobuf_GetHashmapResponse_fieldAccessorTable = new
          com.google.protobuf.GeneratedMessage.FieldAccessorTable(
            internal_static_com_alachisoft_tayzgrid_common_protobuf_GetHashmapResponse_descriptor,
            new java.lang.String[] { "ViewId", "Members", "KeyValuePair", "BucketSize", },
            com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse.class,
            com.alachisoft.tayzgrid.common.protobuf.GetHashmapResponseProtocol.GetHashmapResponse.Builder.class);
        return null;
      }
    };
  com.google.protobuf.Descriptors.FileDescriptor
    .internalBuildGeneratedFileFrom(descriptorData,
      new com.google.protobuf.Descriptors.FileDescriptor[] {
        com.alachisoft.tayzgrid.common.protobuf.KeyValuePairProtocol.getDescriptor(),
      }, assigner);
}

/** No-op; calling it forces this class and its static initializer to load. */
public static void internalForceInit() {}
// @@protoc_insertion_point(outer_class_scope)
}
| |
package net.ihiroky.niotty;
import net.ihiroky.niotty.util.Arguments;
import net.ihiroky.niotty.util.MPSCArrayQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.TimeUnit;
/**
* Provides an dispatcher to process events which is queued in a event queue.
* <p>
* The event, which implements {@link Event}, is queued by
* {@link #offer(Event)}. It is processed by a dedicated
* thread in queued (FIFO) order. A queue blocking strategy is determined by
* {@link #poll(long)} and {@link #wakeUp()} of this sub class, this class provides
* the queue only.
* </p>
* <p>
* This class has a timer to process a event with some delay.
* {@link #schedule(Event, long, java.util.concurrent.TimeUnit)}
* is used to register a event to the timer. If a event returns a positive value, the event is
* registered to the timer implicitly to be processed after the returned value. If returns
* zero, the event is inserted to the event queue to processed again immediately.
* </p>
* <p>
* This class holds a set of {@link EventDispatcherSelection}. The selection shows
* a object which is associated with this event dispatcher. The number of selections can be used
* to control the balancing of the association.
* </p>
*/
public abstract class EventDispatcher implements Runnable, Comparable<EventDispatcher> {
private final Queue<Event> eventQueue_;
private final Queue<EventFuture> delayQueue_;
private volatile Thread thread_;
private final Map<EventDispatcherSelection, Integer> selectionCountMap_;
private Logger logger_ = LoggerFactory.getLogger(EventDispatcher.class);
private static final int INITIAL_EVENT_BUFFER_SIZE = 1024;
private static final int INITIAL_DELAY_QUEUE_SIZE = 1024;
/**
* Creates a new instance.
* An invocation of this constructor behaves in exactly the same way as the invocation
* {@code EventDispatcher(0)}.
*
* @param eventQueueCapacity a size of the event queue to buffer events;
* less than or equal 0 to use unbounded queue
*/
protected EventDispatcher(int eventQueueCapacity) {
eventQueue_ = (eventQueueCapacity <= 0)
? new ConcurrentLinkedQueue<Event>()
: new MPSCArrayQueue<Event>(eventQueueCapacity);
delayQueue_ = new PriorityQueue<EventFuture>(INITIAL_DELAY_QUEUE_SIZE);
selectionCountMap_ = new HashMap<EventDispatcherSelection, Integer>();
}
/**
* Creates a new instance with unbounded event queue.
*/
protected EventDispatcher() {
this(0);
}
void close() {
Thread t = thread_;
if (t != null) {
t.interrupt();
thread_ = null;
}
}
protected Thread thread() {
return thread_;
}
/**
* Inserts a specified event to the event queue.
* @param event the event to be inserted to the event queue
* @throws NullPointerException the event is null
*/
public void offer(Event event) {
eventQueue_.offer(event);
wakeUp();
}
/**
* If a caller is executed in the dispatcher thread, run the event immediately.
* Otherwise, inserts the event to the event queue.
* @param event the event
* @return a future representing pending completion of the event, not cancellable.
* @throws NullPointerException if the event is null
*/
public EventFuture schedule(final Event event) {
return schedule(event, 0, TimeUnit.NANOSECONDS);
}
/**
* Registers a specified event to the timer with specified delay time.
* @param event the event to be registered to the timer
* @param delay the delay of event execution
* @param timeUnit unit of the delay
* @return a future representing pending completion of the event
* @throws NullPointerException if event or timeUnit is null
*/
public EventFuture schedule(final Event event, long delay, TimeUnit timeUnit) {
Arguments.requireNonNull(event, "event");
Arguments.requireNonNull(timeUnit, "timeUnit");
if (delay == 0) {
EventFuture future = new EventFuture(System.nanoTime(), event);
execute(future);
return future;
}
long expire = System.nanoTime() + timeUnit.toNanos(delay);
final EventFuture future = new EventFuture(expire, event);
if (isInDispatcherThread()) {
delayQueue_.offer(future);
wakeUp();
return future;
}
eventQueue_.offer(new Event() {
@Override
public long execute() throws Exception {
delayQueue_.offer(future);
return DONE;
}
});
wakeUp();
return future;
}
/**
* If a caller is executed in the dispatcher thread, run the event immediately.
* Otherwise, inserts the event to the event queue.
* @param event the event
* @throws NullPointerException if the event is null
*/
public void execute(Event event) {
if (isInDispatcherThread()) {
try {
long waitTimeNanos = event.execute();
if (waitTimeNanos == Event.DONE) {
return;
}
if (waitTimeNanos > 0) {
long expire = System.nanoTime() + waitTimeNanos;
if (expire < 0) {
logger_.warn("[execute] The expire for {} is overflowed. Skip to schedule.", event);
return;
}
delayQueue_.offer(eventFuture(event, expire));
wakeUp();
} else {
eventQueue_.offer(event);
wakeUp();
}
} catch (Exception e) {
logger_.warn("[execute] Unexpected exception.", e);
}
} else {
eventQueue_.offer(event);
wakeUp();
}
}
private static EventFuture eventFuture(Event event, long expire) {
return (event instanceof EventFuture)
? ((EventFuture) event).setExpire(expire)
: new EventFuture(expire, event);
}
void waitUntilStarted() throws InterruptedException {
synchronized (this) {
while (thread_ == null) {
wait();
}
}
}
/**
* Executes the dispatcher on a thread provided by {@link EventDispatcherGroup}.
*/
public void run() {
Deque<Event> eventBuffer = new ArrayDeque<Event>(INITIAL_EVENT_BUFFER_SIZE);
Queue<Event> eventQueue = eventQueue_;
Queue<EventFuture> delayQueue = delayQueue_;
try {
synchronized (this) {
thread_ = Thread.currentThread();
onOpen();
notifyAll(); // Counter part: waitUntilStarted()
}
long delayNanos = Long.MAX_VALUE;
while (thread_ != null) {
try {
poll(eventQueue.isEmpty() ? delayNanos : Event.RETRY_IMMEDIATELY);
processEvents(eventQueue, eventBuffer, delayQueue);
delayNanos = processDelayedEvent(eventQueue, delayQueue);
} catch (InterruptedException ie) {
logger_.debug("[run] Interrupted.", ie);
break;
} catch (Exception e) {
if (thread_ != null) {
logger_.warn("[run] Unexpected exception.", e);
}
}
while (!eventBuffer.isEmpty()) {
eventQueue.offer(eventBuffer.pollFirst());
}
}
} finally {
onClose();
eventQueue.clear();
synchronized (selectionCountMap_) {
selectionCountMap_.clear();
}
thread_ = null;
}
}
private void processEvents(
Queue<Event> eventQueue, Deque<Event> buffer, Queue<EventFuture> delayQueue) throws Exception {
Event event;
for (;;) {
event = eventQueue.poll();
if (event == null) {
break;
}
long retryDelay = event.execute();
if (retryDelay == Event.DONE) {
continue;
}
if (retryDelay > 0) {
long expire = System.nanoTime() + retryDelay;
if (expire < 0) {
logger_.warn("[processEvent] The expire for {} is overflowed. Skip to schedule.", event);
continue;
}
delayQueue.offer(eventFuture(event, expire));
} else {
buffer.offerLast(event);
}
}
}
private long processDelayedEvent(Queue<Event> eventQueue, Queue<EventFuture> delayQueue) throws Exception {
long now = System.nanoTime();
EventFuture f;
for (;;) {
f = delayQueue.peek();
if (f == null || f.expire() > now) {
break;
}
if (!f.readyToDispatch()) {
delayQueue.poll();
continue;
}
try {
delayQueue.poll();
long waitTimeNanos = f.event_.execute();
if (waitTimeNanos == Event.DONE) {
f.dispatched();
continue;
}
if (waitTimeNanos > 0) {
long expire = now + waitTimeNanos;
if (expire > 0) {
f.setExpire(expire);
delayQueue.offer(f);
continue;
}
logger_.warn("[processDelayedEvent] The expire for {} is overflowed. Skip to schedule.", f.event_);
} else {
eventQueue.offer(f.event_);
}
f.dispatched();
} catch (Exception ex) {
logger_.warn("[execute] Unexpected exception.", ex);
}
}
while (f != null && f.isCancelled()) {
delayQueue.poll();
f = delayQueue.peek();
}
return (f != null) ? f.expire() - now : Long.MAX_VALUE;
}
/**
* Returns true if the caller is executed on the thread which executes this dispatcher.
* @return true if the caller is executed on the thread which executes this dispatcher
*/
public boolean isInDispatcherThread() {
return Thread.currentThread() == thread_;
}
/**
* Returns true if the given thread is this dispatcher thread.
* @param thread the thread
* @return true if the given thread is this dispatcher thread
*/
public boolean isAssigned(Thread thread) {
return thread == thread_;
}
/**
* Returns true if the thread which executes this dispatcher is alive.
* @return true if the thread which executes this dispatcher is alive
*/
public boolean isAlive() {
Thread t = thread_;
return (t != null) && t.isAlive();
}
@Override
public String toString() {
return (thread_ != null) ? thread_.toString() : super.toString();
}
@Override
public int compareTo(EventDispatcher that) {
return selectionCount() - that.selectionCount();
}
int selectionCount() {
synchronized (selectionCountMap_) {
return selectionCountMap_.size();
}
}
int duplicationCountFor(EventDispatcherSelection selection) {
Integer count;
synchronized (selectionCountMap_) {
count = selectionCountMap_.get(selection);
}
return (count != null) ? count : 0;
}
/**
* Counts the duplication counter by one if a object specified by the selection is associated with this object.
* @param selection the object to test
* @return true if a object specified by the selection is associated with this object
*/
boolean countUpDuplication(EventDispatcherSelection selection) {
Integer count;
synchronized (selectionCountMap_) {
count = selectionCountMap_.get(selection);
if (count != null) {
selectionCountMap_.put(selection, count + 1);
}
}
return count != null;
}
/**
* Associates a object specified by the selection with this object.
* @param selection the object to be associated
* @return the number of the selections associated with this object
* @throws NullPointerException if selection is null
*/
protected int accept(EventDispatcherSelection selection) {
Arguments.requireNonNull(selection, "selection");
int size;
synchronized (selectionCountMap_) {
Integer count = selectionCountMap_.get(selection);
if (count == null) {
count = 0;
}
selectionCountMap_.put(selection, count + 1);
size = selectionCountMap_.size();
}
return size;
}
/**
* Dissociate a object specified by the selection from this object.
* @param selection the object to be dissociate
* @return the number of the selections weight associated with this object, exclude the selection
*/
public int reject(EventDispatcherSelection selection) {
Arguments.requireNonNull(selection, "selection");
int size;
synchronized (selectionCountMap_) {
Integer count = selectionCountMap_.get(selection);
if (count != null) {
count = count - 1;
if (count != 0) {
selectionCountMap_.put(selection, count);
} else {
selectionCountMap_.remove(selection);
}
}
size = selectionCountMap_.size();
}
return size;
}
/**
* This method is called once when the dispatcher is initialized.
*/
protected abstract void onOpen();
/**
* This method is called once when the dispatcher is ready to terminate.
*/
protected abstract void onClose();
/**
* Executes any procedure.
* This method returns when the procedure was executed, the method {@link #wakeUp()} is invoked,
* the current thread is interrupted, or the given timeout period expires, whichever comes first.
*
* @param timeoutNanos a time to block for up to timeout by nanoseconds, more or less,
* while waiting for a channel to become ready; if zero, block indefinitely;
* if negative, returns immediately
* @throws Exception if some error occurs
*/
protected abstract void poll(long timeoutNanos) throws Exception;
/**
* This method is called when a new event is inserted to the event queue.
* The implementation is required to wake up the thread executing
* {@link #poll(long)} on waiting timeout.
*/
protected abstract void wakeUp();
}
| |
/*
* Copyright 2016 Elmar Rhex Gomez.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.elmargomez.dominohttp.request;
import android.support.annotation.NonNull;
import android.support.annotation.StringDef;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
* The http request.
*
* @param <I> is the specific body Data.
* @param <R> is the success listener Object.
*/
/**
 * The http request.
 *
 * @param <I> is the specific body Data.
 * @param <R> is the success listener Object.
 */
public abstract class Request<I, R> implements Comparable {

    public static final String GET = "GET";
    public static final String PUT = "PUT";
    public static final String POST = "POST";

    /** Restricts method strings to the supported HTTP verbs at compile time. */
    @Retention(RetentionPolicy.SOURCE)
    @StringDef({GET, PUT, POST})
    public @interface Method {
    }

    public static final String APPLICATION_JSON = "application/json";
    public static final String TEXT_PLAIN = "text/plain";
    public static final String IMAGE_JPEG = "image/jpeg";

    /** Restricts content-type strings to the supported MIME types at compile time. */
    @Retention(RetentionPolicy.SOURCE)
    @StringDef({APPLICATION_JSON, TEXT_PLAIN, IMAGE_JPEG})
    public @interface ContentType {
    }

    /** Tags attached to this request, e.g. for bulk lookup or cancellation. */
    public final ArrayList<String> tagHolder = new ArrayList<>();

    private SuccessListener<R> successListener;
    private FailedListeners failedListenersListener;

    // Lazily computed identity key derived from name, URL, headers and body size.
    private String requestKey;

    private String url;
    private String method;
    private String contentType;
    private final HashMap<String, String> header = new HashMap<>();

    /** Raw body bytes, if any; contributes its length to the request key. */
    protected byte[] data;
    private I input;

    // todo change to private
    public String requestName;

    private int retryCount;
    private boolean shouldCached = true;
    private boolean isCanceled;
    private Object optionalTag;

    /**
     * Creates a request.
     *
     * @param requestName            a human-readable name, part of the request key
     * @param successListener        invoked when the request succeeds
     * @param failedListenersListener invoked when the request fails
     */
    public Request(String requestName, SuccessListener<R> successListener,
                   FailedListeners failedListenersListener) {
        this.requestName = requestName;
        this.successListener = successListener;
        this.failedListenersListener = failedListenersListener;
    }

    public void setURL(@NonNull String url) {
        this.url = url;
    }

    public String getURL() {
        return url;
    }

    public void setMethod(@NonNull @Method String method) {
        this.method = method;
    }

    public String getMethod() {
        return method;
    }

    public void setContentType(@NonNull @ContentType String ct) {
        this.contentType = ct;
    }

    public String getContentType() {
        return this.contentType;
    }

    /**
     * Returns the cached identity key for this request, computing it on first
     * use from the name, URL, headers and body length.
     */
    public String getRequestKey() {
        if (requestKey == null) {
            StringBuilder builder = new StringBuilder();
            builder.append(requestName);
            builder.append("|");
            builder.append(url);
            builder.append("|");
            // Iterate the headers in sorted key order: HashMap iteration order
            // is unspecified, so two otherwise-identical requests could compute
            // different keys and defeat key-based caching/lookup.
            for (Map.Entry<String, String> entry
                    : new java.util.TreeMap<String, String>(header).entrySet()) {
                builder.append(entry.getKey());
                builder.append(entry.getValue());
                builder.append("|");
            }
            if (data != null) {
                builder.append(data.length);
            }
            requestKey = builder.toString();
        }
        return requestKey;
    }

    /** Copies all entries of the given map into this request's headers. */
    public void addHeaders(Map<String, String> s) {
        header.putAll(s);
    }

    public void addHeader(String key, String val) {
        header.put(key, val);
    }

    /** Returns the live (mutable) header map backing this request. */
    public Map<String, String> getHeaders() {
        return header;
    }

    public void setShouldCached(boolean v) {
        shouldCached = v;
    }

    public boolean shouldCached() {
        return shouldCached;
    }

    public void setCanceled(boolean v) {
        isCanceled = v;
    }

    public boolean isCanceled() {
        return isCanceled;
    }

    public void setRetryCount(int r) {
        retryCount = r;
    }

    public int getRetryCount() {
        return retryCount;
    }

    /** Decrements the remaining retry budget by one. */
    public void decRetryCount() {
        retryCount--;
    }

    protected I getBody() {
        return input;
    }

    public void setBody(I i) {
        this.input = i;
    }

    /** Serializes the body to the bytes that will be sent over the wire. */
    public abstract byte[] getByteData();

    /** Converts the raw response bytes into the success listener's type. */
    public abstract R generateResponse(byte[] b);

    public void setTag(Object tag) {
        optionalTag = tag;
    }

    public Object getTag() {
        return optionalTag;
    }

    public SuccessListener getSuccessListener() {
        return successListener;
    }

    public FailedListeners getErrorListener() {
        return failedListenersListener;
    }

    /**
     * The Success Listener for the Current Request.
     *
     * @param <T>
     */
    public interface SuccessListener<T> {
        void response(Request request, T t);
    }

    /**
     * The Failed Listener for the Current Request.
     */
    public interface FailedListeners {
        void error(Request request, String string);
    }
}
| |
package gui;
import face.FaceManager;
import face.IconEnum;
import face.Icons;
import gui.GUIMain;
import gui.listeners.ListenerFace;
import gui.listeners.ListenerName;
import gui.listeners.ListenerURL;
import irc.Donor;
import irc.message.Message;
import irc.message.MessageQueue;
import irc.message.MessageWrapper;
import lib.pircbot.org.jibble.pircbot.User;
import lib.pircbot.org.jibble.pircbot.Channel;
import util.Constants;
import util.Utils;
import util.misc.Donation;
import util.settings.Settings;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.DefaultCaret;
import javax.swing.text.SimpleAttributeSet;
import javax.swing.text.StyleConstants;
import javax.swing.text.StyledDocument;
import javax.swing.text.html.HTML;
import java.awt.*;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.text.SimpleDateFormat;
import java.util.*;
/**
* All channels are stored in this format.
*/
public class ChatPane implements DocumentListener {
// The frame this pane is popped out into; null while docked in the main window.
private JFrame poppedOutPane = null;

// The timestamp of when we decided to wait to scroll back down
private long scrollbarTimestamp = -1;

/** Records (or clears, with null) the frame this chat pane is popped out into. */
public void setPoppedOutPane(JFrame pane) {
    poppedOutPane = pane;
}

/** Returns the pop-out frame, or null if this pane is docked. */
public JFrame getPoppedOutPane() {
    return poppedOutPane;
}
/**
 * Pops this chat pane out into its own frame, unless it already is popped
 * out. The text pane is moved into the new frame's scroll pane; when the
 * frame is closed, the text pane is handed back to the original scroll
 * pane and scrolled to the bottom.
 */
public void createPopOut() {
    if (poppedOutPane == null) {
        JFrame frame = new JFrame(getPoppedOutTitle());
        frame.addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosed(WindowEvent e) {
                // Re-dock the text pane into the main window's scroll pane.
                getScrollPane().setViewportView(getTextPane());
                scrollToBottom();
                setPoppedOutPane(null);
            }
        });
        JScrollPane pane = new JScrollPane();
        frame.setIconImage(new ImageIcon(getClass().getResource("/image/icon.png")).getImage());
        pane.setViewportView(getTextPane());
        pane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
        // Zero preferred width hides the vertical scroll bar while keeping scrolling functional.
        pane.getVerticalScrollBar().setPreferredSize(new Dimension(0, 0));
        frame.add(pane);
        frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
        frame.pack();
        frame.setSize(750, 420);
        frame.setVisible(true);
        setPoppedOutPane(frame);
    }
}
/**
 * Keeps track of how many subs this channel gets.
 * TODO: make this a statistic that the user can output to a file ("yesterday sub #")
 */
private int subCount = 0;

// -1 means offline / no viewer count received yet.
private int viewerCount = -1;
private int viewerPeak = 0;

/**
 * Updates the current viewer count, tracking the session peak, and refreshes
 * the pop-out frame title and (when this tab is selected) the main window title.
 */
public void setViewerCount(int newCount) {
    if (newCount > viewerPeak) viewerPeak = newCount;
    viewerCount = newCount;
    if (getPoppedOutPane() != null) poppedOutPane.setTitle(getPoppedOutTitle());
    if (GUIMain.channelPane.getSelectedIndex() == index) GUIMain.updateTitle(getViewerCountString());
}

/** Builds the pop-out frame title: "channel | viewer count string". */
public String getPoppedOutTitle() {
    return chan + " | " + getViewerCountString();
}

/**
 * Returns a printable viewer count, "Viewer count: current (peak)".
 * Returns null for the "system logs" tab (no viewer count applies there;
 * callers such as setViewerCount pass it straight to title setters) and an
 * "Offline" message while no count has been received.
 */
public String getViewerCountString() {
    if (chan.equalsIgnoreCase("system logs")) return null;
    if (viewerCount == -1) return "Viewer count: Offline";
    return String.format("Viewer count: %d (%d)", viewerCount, viewerPeak);
}
/**
 * Renders a whisper as "sender (whisper)-> receiver: message", coloring each
 * name with the corresponding user's attributes and applying name faces.
 *
 * @param m the wrapped whisper; its extra field carries the receiver's name.
 */
public void onWhisper(MessageWrapper m) {
    SimpleAttributeSet senderSet, receiverSet;
    String sender = m.getLocal().getSender();
    String receiver = (String) m.getLocal().getExtra();
    print(m, "\n" + getTime(), GUIMain.norm);
    User senderUser = GUIMain.currentSettings.channelManager.getUser(sender, true);
    User receiverUser = GUIMain.currentSettings.channelManager.getUser(receiver, true);
    senderSet = getUserSet(senderUser);
    receiverSet = getUserSet(receiverUser);
    //name stuff
    print(m, " ", GUIMain.norm);
    FaceManager.handleNameFaces(sender, senderSet);
    FaceManager.handleNameFaces(receiverUser.getNick(), receiverSet);
    print(m, senderUser.getDisplayName(), senderSet);
    print(m, " (whisper)-> ", GUIMain.norm);
    print(m, receiverUser.getDisplayName(), receiverSet);
    print(m, ": ", GUIMain.norm);
    printMessage(m, m.getLocal().getContent(), GUIMain.norm, senderUser);
}
/**
 * Builds the text attribute set used to render a user's name: the current
 * chat font and the user's color. The display name is also stored under
 * HTML.Attribute.NAME — presumably so the name can be located in the
 * document later; TODO(review) confirm against the consumers of this set.
 */
private SimpleAttributeSet getUserSet(User u) {
    SimpleAttributeSet user = new SimpleAttributeSet();
    StyleConstants.setFontFamily(user, GUIMain.currentSettings.font.getFamily());
    StyleConstants.setFontSize(user, GUIMain.currentSettings.font.getSize());
    StyleConstants.setForeground(user, Utils.getColorFromUser(u));
    user.addAttribute(HTML.Attribute.NAME, u.getDisplayName());
    return user;
}
/**
 * This is the main boolean to check to see if this tab should pulse.
 * <p>
 * This boolean checks to see if the tab wasn't toggled, if it's visible (not in a combined tab),
 * and if it's not selected. TODO check for global setting of pulsing tabs
 *
 * @return True if this tab should pulse, else false.
 */
public boolean shouldPulse() {
    // Combined panes delegate to their currently active child pane's toggle.
    boolean shouldPulseLocal = (this instanceof CombinedChatPane) ?
            ((CombinedChatPane) this).getActiveChatPane().shouldPulseLoc() : shouldPulseLoc;
    // Never pulse the selected tab or tab 0 (the system/all-chats tab at index 0).
    return shouldPulseLocal && isTabVisible() && GUIMain.channelPane.getSelectedIndex() != index && index != 0;
}

// Per-tab toggle; true (default) means pulsing is allowed for this tab.
private boolean shouldPulseLoc = true;

/**
 * Determines if this tab should pulse.
 *
 * @return True if this tab is not toggled off, else false. ("Tab Pulsing OFF")
 */
public boolean shouldPulseLoc() {
    return shouldPulseLoc;
}

/**
 * Sets the value for if this tab should pulse or not.
 *
 * @param newBool True (default) if tab pulsing should happen, else false if you wish to
 *                toggle tab pulsing off.
 */
public void setShouldPulse(boolean newBool) {
    shouldPulseLoc = newBool;
}

/**
 * Sets the pulsing boolean if this tab is starting to pulse.
 * <p>
 * Used by the TabPulse class.
 *
 * @param isPulsing True if the tab is starting to pulse, else false to stop pulsing.
 */
public void setPulsing(boolean isPulsing) {
    this.isPulsing = isPulsing;
}

/**
 * Used by the TabPulse class.
 *
 * @return true if the chat pane is currently pulsing, else false.
 */
public boolean isPulsing() {
    return isPulsing;
}

// Whether a pulse animation is currently running for this tab.
private boolean isPulsing = false;
//credit to http://stackoverflow.com/a/4047794 for the below

/**
 * Returns true when the given vertical scroll bar is scrolled all the way
 * to the bottom of its range.
 */
public boolean isScrollBarFullyExtended(JScrollBar vScrollBar) {
    BoundedRangeModel m = vScrollBar.getModel();
    int bottomEdge = m.getValue() + m.getExtent();
    return bottomEdge == m.getMaximum();
}
/**
 * Immediately scrolls the chat text pane to its bottom. While the pane is
 * visible the viewport is moved so the last line shows; otherwise the caret
 * is simply placed at the end of the document.
 */
public void doScrollToBottom() {
    if (!textPane.isVisible()) {
        textPane.setCaretPosition(textPane.getDocument().getLength());
        return;
    }
    Rectangle target = textPane.getVisibleRect();
    target.y = textPane.getHeight() - target.height;
    textPane.scrollRectToVisible(target);
}
// Guards against queueing more than one CLEAR_TEXT cleanup message at a time.
private boolean messageOut = false;

/**
 * DocumentListener callback fired when text is inserted. Re-evaluates
 * scrolling and, when chat cleanup is enabled, counts inserted lines and
 * queues a single clear-text message once the configured maximum is exceeded.
 */
@Override
public void insertUpdate(DocumentEvent e) {
    maybeScrollToBottom();
    if (GUIMain.currentSettings.cleanupChat) {
        try {
            // Each newline in the inserted text counts as one chat line.
            if (e.getDocument().getText(e.getOffset(), e.getLength()).contains("\n")) {
                cleanupCounter++;
            }
        } catch (Exception ignored) {
            // Bad offset/length from the document is harmless; skip counting.
        }
        if (cleanupCounter > GUIMain.currentSettings.chatMax) {
            /* cleanup every n messages */
            if (!messageOut) {
                MessageQueue.addMessage(new Message().setType(Message.MessageType.CLEAR_TEXT).setExtra(this));
                messageOut = true;
            }
        }
    }
}
/** DocumentListener callback: a removal changes the layout, so re-evaluate scrolling. */
@Override
public void removeUpdate(DocumentEvent e) {
    maybeScrollToBottom();
}

/** DocumentListener callback: an attribute change may reflow text, so re-evaluate scrolling. */
@Override
public void changedUpdate(DocumentEvent e) {
    maybeScrollToBottom();
}
/**
 * Queues a scroll-to-bottom when appropriate: either the scroll bar is
 * already at the bottom, or messages have kept arriving for more than ten
 * seconds while the user was scrolled up.
 */
private void maybeScrollToBottom() {
    JScrollBar vertical = scrollPane.getVerticalScrollBar();
    if (isScrollBarFullyExtended(vertical)) {
        // Already at the bottom: clear any pending grace timer and follow the text.
        scrollbarTimestamp = -1;
        scrollToBottom();
        return;
    }
    if (scrollbarTimestamp == -1) {
        // First insert while scrolled up: start the 10-second grace timer.
        scrollbarTimestamp = System.currentTimeMillis();
        return;
    }
    long elapsed = System.currentTimeMillis() - scrollbarTimestamp;
    if (elapsed >= 10 * 1000L) {
        // Grace period expired: reset the timer and snap back to the bottom anyway.
        scrollbarTimestamp = -1;
        scrollToBottom();
    }
}
/**
 * Schedules a scroll to the bottom of the pane. The call is pushed TWO
 * places back on the AWT-EDT queue (a nested invokeLater) so that it runs
 * only after Swing has revalidated the layout for newly appended text and
 * the true bottom position has been recalculated.
 */
public void scrollToBottom() {
    Runnable scroll = this::doScrollToBottom;
    EventQueue.invokeLater(() -> EventQueue.invokeLater(scroll));
}
// The channel ("name") this pane shows, e.g. "System Logs" or "#gocnak".
private String chan;

/** Returns the channel name of this pane. */
public String getChannel() {
    return chan;
}

// Position of this pane's tab in the main channel pane.
private int index;

public void setIndex(int newIndex) {
    index = newIndex;
}

public int getIndex() {
    return index;
}

// The text pane holding the rendered chat messages.
private JTextPane textPane;

public JTextPane getTextPane() {
    return textPane;
}

// The scroll pane hosting textPane while docked in the main window.
private JScrollPane scrollPane;

public JScrollPane getScrollPane() {
    return scrollPane;
}

public void setScrollPane(JScrollPane pane) {
    scrollPane = pane;
}

// False while this pane is hidden inside a combined tab.
private boolean isTabVisible = true;

public boolean isTabVisible() {
    return isTabVisible;
}

public void setTabVisible(boolean newBool) {
    isTabVisible = newBool;
}

// Lines inserted since the last cleanup; compared against chatMax in insertUpdate().
private int cleanupCounter = 0;

/** Resets the cleanup line counter, e.g. after the pane has been cleared. */
public void resetCleanupCounter() {
    cleanupCounter = 0;
}
//TODO make this be in 24 hour if they want
final SimpleDateFormat format = new SimpleDateFormat("[h:mm a]", Locale.getDefault());

/**
 * Formats the current time as a chat timestamp, e.g. "[3:42 PM]".
 * SimpleDateFormat is NOT thread-safe, so formatting is synchronized on the
 * shared formatter. NOTE(review): if getTime is only ever called on the EDT
 * the lock is uncontended and effectively free — confirm the calling threads.
 */
public String getTime() {
    synchronized (format) {
        return format.format(new Date(System.currentTimeMillis()));
    }
}
/**
 * You initialize this class with the channel it's for and the text pane you'll be editing.
 *
 * @param channel The channel ("name") of this chat pane. Ex: "System Logs" or "#gocnak"
 * @param scrollPane The scroll pane for the tab.
 * @param pane The text pane that shows the messages for the given channel.
 * @param index The index of the pane in the main GUI.
 */
public ChatPane(String channel, JScrollPane scrollPane, JTextPane pane, int index) {
    chan = channel;
    textPane = pane;
    // Disable caret-driven auto-scrolling; scrolling is managed manually via
    // maybeScrollToBottom()/scrollToBottom().
    ((DefaultCaret) textPane.getCaret()).setUpdatePolicy(DefaultCaret.NEVER_UPDATE);
    this.index = index;
    this.scrollPane = scrollPane;
    textPane.getDocument().addDocumentListener(this);
}

public ChatPane() {
    //Used by the CombinedChatPane class, which calls its super anyways.
}
/**
 * This is the main message method when somebody sends a message to the channel.
 * Prints a timestamp, the sender's badge icons, the colored sender name, and
 * the message body (with URL/emoticon rendering).
 *
 * @param m           The message from the chat.
 * @param showChannel Whether to show the source channel after the sender's
 *                    name (used when multiple channels share one pane).
 */
public void onMessage(MessageWrapper m, boolean showChannel) {
    if (textPane == null) return;
    Message message = m.getLocal();
    // Base attribute set with the configured font; foreground color set below.
    SimpleAttributeSet user = new SimpleAttributeSet();
    StyleConstants.setFontFamily(user, GUIMain.currentSettings.font.getFamily());
    StyleConstants.setFontSize(user, GUIMain.currentSettings.font.getSize());
    String sender = message.getSender().toLowerCase();
    String channel = message.getChannel();
    String mess = message.getContent();
    boolean isMe = (message.getType() == Message.MessageType.ACTION_MESSAGE);
    try {
        // Timestamp starts a new line for this message.
        print(m, "\n" + getTime(), GUIMain.norm);
        User u = GUIMain.currentSettings.channelManager.getUser(sender, true);
        // Resolve the name color: explicit per-user override first, then the
        // user's own color (validated), else one derived from the name hash.
        Color c;
        if (u.getColor() != null) {
            if (GUIMain.userColMap.containsKey(sender)) {
                c = GUIMain.userColMap.get(sender);
            } else {
                c = u.getColor();
                if (!Utils.checkColor(c)) {
                    // Color failed validation — fall back to a hash-derived one.
                    c = Utils.getColorFromHashcode(sender.hashCode());
                }
            }
        } else {//temporarily assign their color as randomly generated
            c = Utils.getColorFromHashcode(sender.hashCode());
        }
        StyleConstants.setForeground(user, c);
        // Badge icons in fixed order; channel.substring(1) strips the '#'.
        if (channel.substring(1).equals(sender)) {
            insertIcon(m, IconEnum.BROADCASTER, null);
        }
        if (u.isOp(channel)) {
            if (!channel.substring(1).equals(sender) && !u.isStaff() && !u.isAdmin() && !u.isGlobalMod()) {//not the broadcaster again
                insertIcon(m, IconEnum.MOD, null);
            }
        }
        if (u.isGlobalMod()) {
            insertIcon(m, IconEnum.GLOBALMOD, null);
        }
        //TODO if GUIMain.currentSettings.donorsEnabled
        if (u.isDonor()) {
            insertIcon(m, u.getDonationStatus(), null);
        }
        if (u.isStaff()) {
            insertIcon(m, IconEnum.STAFF, null);
        }
        if (u.isAdmin()) {
            insertIcon(m, IconEnum.ADMIN, null);
        }
        boolean isSubscriber = u.isSubscriber(channel);
        if (isSubscriber) {
            // The sub badge carries "channel/length" — presumably the sub
            // duration used to pick the badge image; TODO confirm.
            Channel ch = GUIMain.currentSettings.channelManager.getChannel(channel);
            int length = ch.isSubscriber(u);
            insertIcon(m, IconEnum.SUBSCRIBER, channel + "/" + length);
        }
        if (u.isTurbo()) {
            insertIcon(m, IconEnum.TURBO, null);
        }
        if (u.isPrime()) {
            insertIcon(m, IconEnum.PRIME, null);
        }
        //name stuff
        print(m, " ", GUIMain.norm);
        // Tag the run with the sender's name so click listeners can find it.
        user.addAttribute(HTML.Attribute.NAME, sender);
        // Keep a copy of the colored set before name-face substitution.
        SimpleAttributeSet userColor = new SimpleAttributeSet(user);
        FaceManager.handleNameFaces(sender, user);
        if (showChannel) {
            print(m, u.getDisplayName(), user);
            print(m, " (" + channel.substring(1) + ")" + (isMe ? " " : ": "), GUIMain.norm);
        } else {
            print(m, u.getDisplayName(), user);
            // /me actions get a space instead of ": " and keep the name color.
            print(m, (!isMe ? ": " : " "), userColor);
        }
        //keyword?
        // Keyword mentions override the body style (highlighting).
        SimpleAttributeSet set;
        if (Utils.mentionsKeyword(mess)) {
            set = Utils.getSetForKeyword(mess);
        } else {
            set = (isMe ? userColor : GUIMain.norm);
        }
        //URL, Faces, rest of message
        printMessage(m, mess, set, u);
        // Only track sub status in the bot's own channel.
        if (channel.substring(1).equalsIgnoreCase(GUIMain.currentSettings.accountManager.getUserAccount().getName()))
            //check status of the sub, has it been a month?
            GUIMain.currentSettings.subscriberManager.updateSubscriber(u, channel, isSubscriber);
        if (shouldPulse())
            GUIMain.instance.pulseTab(this);
    } catch (Exception e) {
        GUIMain.log(e);
    }
}
/**
 * Credit: TDuva
 * <p>
 * Prints a message body, rendering specially-tagged spans (URLs, emoticons)
 * with their own styles and everything in between with the default style.
 *
 * @param m     The wrapper collecting the print operations.
 * @param text  The message text.
 * @param style The default message style to use.
 * @param u     The sending user (supplies per-user emotes); may be null.
 */
protected void printMessage(MessageWrapper m, String text, SimpleAttributeSet style, User u) {
    // Special spans, start index -> inclusive end index, in order.
    TreeMap<Integer, Integer> ranges = new TreeMap<>();
    // Attribute set for each span, keyed by its start index.
    HashMap<Integer, SimpleAttributeSet> rangesStyle = new HashMap<>();
    findLinks(text, ranges, rangesStyle);
    findEmoticons(text, ranges, rangesStyle, u, m.getLocal().getChannel());
    // Walk the text, alternating plain segments and special spans.
    int cursor = 0;
    for (Map.Entry<Integer, Integer> span : ranges.entrySet()) {
        int spanStart = span.getKey();
        int spanEnd = span.getValue();
        if (cursor < spanStart) {
            // Plain text between the previous span and this one.
            print(m, text.substring(cursor, spanStart), style);
        }
        print(m, text.substring(spanStart, spanEnd + 1), rangesStyle.get(spanStart));
        cursor = spanEnd + 1;
    }
    // Any trailing plain text after the last span.
    if (cursor < text.length()) {
        print(m, text.substring(cursor), style);
    }
}
/**
 * Scans the message for URLs and records each validated match as a styled
 * span (inclusive start/end indices) for printMessage to render.
 */
private void findLinks(String text, Map<Integer, Integer> ranges, Map<Integer, SimpleAttributeSet> rangesStyle) {
    // Find links
    // NOTE(review): Constants.urlMatcher is a shared Matcher being reset here,
    // which is only safe if all printing happens on one thread — TODO confirm.
    Constants.urlMatcher.reset(text);
    while (Constants.urlMatcher.find()) {
        int start = Constants.urlMatcher.start();
        // end() is exclusive; the ranges map uses inclusive end indices.
        int end = Constants.urlMatcher.end() - 1;
        // Skip the match if either edge overlaps an already-tagged span.
        if (!Utils.inRanges(start, ranges) && !Utils.inRanges(end, ranges)) {
            String foundUrl = Constants.urlMatcher.group();
            if (Utils.checkURL(foundUrl)) {
                ranges.put(start, end);
                rangesStyle.put(start, Utils.URLStyle(foundUrl));
            }
        }
    }
}
/**
 * Tags emoticon spans in the message: global faces, the sender's Twitch
 * emotes (when known), and FrankerFaceZ channel emotes (when enabled).
 *
 * @param text        The message text to scan.
 * @param ranges      Inclusive start-&gt;end spans found so far; new spans are added.
 * @param rangesStyle Style for each span, keyed by its start index.
 * @param u           The sending user, or null; supplies the Twitch emote set.
 * @param channel     The channel name (possibly '#'-prefixed), or null.
 */
private void findEmoticons(String text, Map<Integer, Integer> ranges, Map<Integer, SimpleAttributeSet> rangesStyle, User u, String channel) {
    FaceManager.handleFaces(ranges, rangesStyle, text, FaceManager.FACE_TYPE.NORMAL_FACE, null, null);
    if (u != null && u.getEmotes() != null) {
        FaceManager.handleFaces(ranges, rangesStyle, text, FaceManager.FACE_TYPE.TWITCH_FACE, null, u.getEmotes());
    }
    //TODO if (currentSettings.FFZFaceEnabled)
    if (GUIMain.currentSettings.ffzEmotes && channel != null) {
        // Literal replacement — replaceAll compiles its argument as a regex,
        // which is unnecessary for the plain '#' character.
        channel = channel.replace("#", "");
        FaceManager.handleFaces(ranges, rangesStyle, text, FaceManager.FACE_TYPE.FRANKER_FACE, channel, null);
    }
}
/**
 * Queues an insertion of the given styled string at the end of this pane's
 * document. The actual insert runs later, when the wrapper's queued print
 * operations are executed.
 */
protected void print(MessageWrapper wrapper, String string, SimpleAttributeSet set) {
    if (textPane == null) return;
    wrapper.addPrint(() -> {
        StyledDocument doc = textPane.getStyledDocument();
        try {
            doc.insertString(doc.getLength(), string, set);
        } catch (Exception e) {
            GUIMain.log(e);
        }
    });
}
/**
 * Handles inserting icons before and after the message.
 *
 * @param m      The message itself.
 * @param status IconEnum.Subscriber for sub message, else pass Donor#getDonationStatus(d#getAmount())
 */
public void onIconMessage(MessageWrapper m, IconEnum status) {
    try {
        Message message = m.getLocal();
        print(m, "\n", GUIMain.norm);
        // Five icons flank each side of the message content.
        for (int i = 0; i < 5; i++) {
            insertIcon(m, status, (status == IconEnum.SUBSCRIBER ? message.getChannel() : null));
        }
        // Sub messages also show the running sub count (subCount is a field
        // declared elsewhere in this class).
        print(m, " " + message.getContent() + (status == IconEnum.SUBSCRIBER ? (" (" + (subCount + 1) + ") ") : " "), GUIMain.norm);
        for (int i = 0; i < 5; i++) {
            insertIcon(m, status, (status == IconEnum.SUBSCRIBER ? message.getChannel() : null));
        }
    } catch (Exception e) {
        GUIMain.log(e);
    }
    // Only count a genuine new sub (extra == null means not a repeat message).
    boolean shouldIncrement = ((status == IconEnum.SUBSCRIBER) && (m.getLocal().getExtra() == null));//checking for repeat messages
    if (shouldIncrement) subCount++;
}
/** Prints a new-subscriber banner for the given message. */
public void onSub(MessageWrapper m) {
    onIconMessage(m, IconEnum.SUBSCRIBER);
}
/**
 * Prints a donation banner: looks up the donor status icon for the donated
 * amount and renders the icon message for it.
 *
 * @param m The wrapper whose local message carries a {@link Donation} as its extra.
 */
public void onDonation(MessageWrapper m) {
    Donation d = (Donation) m.getLocal().getExtra();
    if (d == null) {
        // Defensive: a donation message without its Donation payload would
        // otherwise NPE on getAmount(); log and skip instead of crashing.
        GUIMain.log("onDonation called with no Donation attached to the message!");
        return;
    }
    onIconMessage(m, Donor.getDonationStatus(d.getAmount()));
}
/**
 * Queues an icon insertion for the message: a leading space, then a run of
 * the icon's type text carrying the icon as its style attribute.
 *
 * @param m       The message being printed.
 * @param type    Which icon to insert.
 * @param channel Channel qualifier for channel-specific icons, or null.
 */
public void insertIcon(MessageWrapper m, IconEnum type, String channel) {
    Icons.BotnakIcon icon = Icons.getIcon(type, channel);
    SimpleAttributeSet iconAttrs = new SimpleAttributeSet();
    StyleConstants.setIcon(iconAttrs, icon.getImage());
    try {
        print(m, " ", null);
        print(m, icon.getType().type, iconAttrs);
    } catch (Exception e) {
        GUIMain.log("INSERT ICON: ");
        GUIMain.log(e);
    }
}
/**
 * @return The full text of this pane's document, or an empty string if the
 *         pane (or its content) is unavailable. The text is fetched once —
 *         the original called getText() twice, rebuilding the document
 *         string a second time for the return.
 */
public String getText() {
    if (textPane == null) return "";
    String text = textPane.getText();
    return (text != null) ? text : "";
}
// Source: http://stackoverflow.com/a/4628879
// by http://stackoverflow.com/users/131872/camickr & Community
/**
 * Trims old text from the top of the document so the pane doesn't grow
 * unbounded: everything above the current viewport position is removed,
 * after being written to the chat log when logging is enabled.
 */
public void cleanupChat() {
    if (textPane == null || textPane.getParent() == null) return;
    if (!(textPane.getParent() instanceof JViewport)) {
        return;
    }
    JViewport viewport = ((JViewport) textPane.getParent());
    Point startPoint = viewport.getViewPosition();
    // we are not deleting right before the visible area, but one screen behind
    // for convenience, otherwise flickering.
    if (startPoint == null) return;
    // Document offset corresponding to the top-left of the visible area.
    final int start = textPane.viewToModel(startPoint);
    if (start > 0) // not equal zero, because then we don't have to delete anything
    {
        final StyledDocument doc = textPane.getStyledDocument();
        try {
            if (GUIMain.currentSettings.logChat && chan != null) {
                // Persist the lines being discarded. Mode 1 — presumably
                // "append during cleanup"; TODO confirm against Utils.logChat.
                String[] toRemove = doc.getText(0, start).split("\\n");
                Utils.logChat(toRemove, chan, 1);
            }
            doc.remove(0, start);
            resetCleanupCounter();
        } catch (Exception e) {
            GUIMain.log("Failed clearing chat: ");
            GUIMain.log(e);
        }
    }
    // messageOut is a field declared elsewhere in this class.
    messageOut = false;
}
/**
 * Creates a pane of the given channel.
 *
 * @param channel The channel, also used as the key for the hashmap.
 * @return The created ChatPane.
 */
public static ChatPane createPane(String channel) {
    // Build and configure the text pane first.
    JTextPane textPane = new JTextPane();
    textPane.setEditorKit(new WrapEditorKit());
    textPane.setEditable(false);
    textPane.setFocusable(false);
    textPane.setMargin(new Insets(0, 0, 0, 0));
    textPane.setBackground(Color.black);
    textPane.setFont(GUIMain.currentSettings.font);
    textPane.addMouseListener(new ListenerURL());
    textPane.addMouseListener(new ListenerName());
    textPane.addMouseListener(new ListenerFace());
    // Wrap it in a scroll pane with horizontal scrolling disabled.
    JScrollPane scroller = new JScrollPane(textPane);
    scroller.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
    return new ChatPane(channel, scroller, textPane, GUIMain.channelPane.getTabCount() - 1);
}
/**
 * Deletes the pane and removes the tab from the tabbed pane. Flushes the
 * pane's remaining text to the chat log (if logging is on) and disposes
 * any popped-out window for it first.
 */
public void deletePane() {
    if (GUIMain.currentSettings.logChat) {
        // Mode 2 — presumably "final flush on close"; TODO confirm against Utils.logChat.
        Utils.logChat(getText().split("\\n"), chan, 2);
    }
    //TODO ensure the viewer list & other popped out GUIs are deleted
    if (getPoppedOutPane() != null) {
        getPoppedOutPane().dispose();
    }
    GUIMain.channelPane.removeTabAt(index);
    // Select the tab to the left of the removed one.
    GUIMain.channelPane.setSelectedIndex(index - 1);
}
/**
 * Logs a message to this chat pane, prefixed with a timestamp and, for
 * system messages, a "SYS: " marker.
 *
 * @param message  The message itself.
 * @param isSystem Whether the message is a system log message or not.
 */
public void log(MessageWrapper message, boolean isSystem) {
    String prefix = isSystem ? "SYS: " : "";
    String content = message.getLocal().getContent();
    print(message, "\n" + getTime(), GUIMain.norm);
    print(message, " " + prefix + content, GUIMain.norm);
}
}
| |
package org.xtuml.bp.core.test;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.resources.IFileState;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.viewers.CellEditor;
import org.eclipse.jface.viewers.ComboBoxCellEditor;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.jface.viewers.ViewerCell;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.TableItem;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.xtuml.bp.core.Association_c;
import org.xtuml.bp.core.ClassAsAssociatedOneSide_c;
import org.xtuml.bp.core.ClassAsAssociatedOtherSide_c;
import org.xtuml.bp.core.ClassAsLink_c;
import org.xtuml.bp.core.ClassAsSimpleFormalizer_c;
import org.xtuml.bp.core.ClassAsSimpleParticipant_c;
import org.xtuml.bp.core.ClassAsSubtype_c;
import org.xtuml.bp.core.CorePlugin;
import org.xtuml.bp.core.ImportedClass_c;
import org.xtuml.bp.core.LinkedAssociation_c;
import org.xtuml.bp.core.Ooaofooa;
import org.xtuml.bp.core.Package_c;
import org.xtuml.bp.core.PackageableElement_c;
import org.xtuml.bp.core.SimpleAssociation_c;
import org.xtuml.bp.core.SubtypeSupertypeAssociation_c;
import org.xtuml.bp.core.common.BridgePointPreferencesStore;
import org.xtuml.bp.core.common.ClassQueryInterface_c;
import org.xtuml.bp.core.common.NonRootModelElement;
import org.xtuml.bp.core.common.TransactionManager;
import org.xtuml.bp.core.editors.association.AssociationEditorTab;
import org.xtuml.bp.core.editors.association.dialogs.AssociationTableDialog;
import org.xtuml.bp.core.editors.association.editing.AssociationEditingSupport;
import org.xtuml.bp.core.util.UIUtil;
import org.xtuml.bp.test.TestUtil;
import org.xtuml.bp.test.common.BaseTest;
import org.xtuml.bp.test.common.TestingUtilities;
import org.xtuml.bp.test.common.UITestingUtilities;
import org.xtuml.bp.ui.graphics.editor.GraphicalEditor;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
@FixMethodOrder
public class AssociationTableEditorTests extends BaseTest {
static private Package_c pkg;
static private GraphicalEditor editor;
/**
 * One-time setup: imports the AssociationEditing test project into the
 * workspace, resolves its system model and package, and opens the graphical
 * editor the tests drive. (Also drops the stray ';' that followed the
 * method's closing brace — an empty declaration.)
 */
@Override
public void initialSetup() throws Exception {
    String testElementName = "AssociationEditing";
    TestingUtilities.importTestingProjectIntoWorkspace(testElementName);
    UIUtil.dispatchAll();
    m_sys = getSystemModel(testElementName);
    modelRoot = Ooaofooa.getInstance("/AssociationEditing/models/AssociationEditing/AssociationEditing/AssociationEditing.xtuml");
    pkg = Package_c.getOneEP_PKGOnR1401(m_sys,
            candidate -> ((Package_c) candidate).getName().equals(testElementName));
    editor = UITestingUtilities.getGraphicalEditorFor(pkg, true);
}
/** Per-test setup: turns the table-based association editor preference on. */
@Override
public void setUp() throws Exception {
    super.setUp();
    // enable association editor
    CorePlugin.getDefault().getPreferenceStore()
            .setValue(BridgePointPreferencesStore.ENABLE_TABLE_BASED_ASSOCIATION_EDITING, true);
}

/** Per-test teardown: restores the preference and drains pending UI events. */
@Override
public void tearDown() throws Exception {
    super.tearDown();
    CorePlugin.getDefault().getPreferenceStore()
            .setValue(BridgePointPreferencesStore.ENABLE_TABLE_BASED_ASSOCIATION_EDITING, false);
    BaseTest.dispatchEvents();
}
/**
 * Opening the dialog with an association selected should select its row,
 * place it first, and start a cell editor immediately.
 */
@Test
public void testFirstElementSelectedAndEditorStarted() {
    selectElement(getAssociation(1));
    testDialog(tab -> {
        TableViewer viewer = tab.getTableViewer();
        IStructuredSelection ss = viewer.getStructuredSelection();
        assertTrue("Incorrect selection found", ss.size() == 1);
        Association_c element = (Association_c) ss.getFirstElement();
        assertTrue("Selection was not at the proper location", viewer.getElementAt(0) == element);
        assertTrue("Found the incorrect table item at the first location.", String.valueOf(element.getNumb()).equals("1"));
        assertTrue("Editor was not enabled on initial start", viewer.isCellEditorActive());
    });
}

/** The Formalize button should be enabled for a formalized association. */
@Test
public void testFormalizedButtonStateMatchesModel() {
    selectElement(getAssociation(1));
    testDialog(tab -> assertTrue("Formalize button was not checked when a formal association was selected.", tab.getFormalizeButton().isEnabled()));
}

/** A non-numeric value must be rejected by the Number column's cell editor. */
@Test
public void testBadValueForNumber() {
    selectElement(getAssociation(1));
    testDialog(tab -> {
        CellEditor cellEditor = tab.getEditingSupport()[0].getActiveCellEditor();
        cellEditor.setValue("x");
        assertFalse("Invalid value was allowed to be entered for Number.", cellEditor.isValueValid());
    });
}

/** With MASL identifier checking on, '!' must be rejected as a text phrase. */
@Test
public void testBadValueForTextPhrase() {
    // this test needs the MASL char checking enabled
    CorePlugin.getDefault().getPreferenceStore().putValue(BridgePointPreferencesStore.REQUIRE_MASL_STYLE_IDENTIFIERS, "true");
    selectElement(getAssociation(1));
    testDialog(tab -> {
        // Column 3 is the one-side text phrase.
        tab.getTableViewer().editElement(tab.getTableViewer().getElementAt(0), 3);
        CellEditor cellEditor = tab.getEditingSupport()[3].getActiveCellEditor();
        cellEditor.setValue("!");
        tab.getTableViewer().applyEditorValue();
        // Restore the preference before asserting so a failure doesn't leak it.
        CorePlugin.getDefault().getPreferenceStore().putValue(BridgePointPreferencesStore.REQUIRE_MASL_STYLE_IDENTIFIERS, "false");
        assertFalse("Invalid value was allowed to be entered for Phrase.", cellEditor.isValueValid());
    });
}
/** Read-only columns must not get cell editors: one-side class of a linked association. */
@Test
public void testNoEditorForOneSide() {
    selectElement(getAssociation(1));
    testDialog(tab -> {
        tab.getTableViewer().editElement(tab.getTableViewer().getElementAt(0), 1);
        AssociationEditingSupport editingSupport = tab.getEditingSupport()[1];
        assertNull("Cell editor was created for ClassAsAssociatedOneSide", editingSupport);
    });
}

/** Read-only column: other-side class of a linked association. */
@Test
public void testNoEditorForOtherSide() {
    selectElement(getAssociation(1));
    testDialog(tab -> {
        tab.getTableViewer().editElement(tab.getTableViewer().getElementAt(0), 4);
        AssociationEditingSupport editingSupport = tab.getEditingSupport()[4];
        assertNull("Cell editor was created for ClassAsAssociatedOtherSide", editingSupport);
    });
}

/** Read-only column: the association's link class. */
@Test
public void testNoEditorForLink() {
    selectElement(getAssociation(1));
    testDialog(tab -> {
        tab.getTableViewer().editElement(tab.getTableViewer().getElementAt(0), 8);
        AssociationEditingSupport editingSupport = tab.getEditingSupport()[8];
        assertNull("Cell editor was created for ClassAsLink", editingSupport);
    });
}

/** Read-only column: formalizer side of a simple binary association (row 1). */
@Test
public void testNoEditorForOneSideBinary() {
    selectElement(getAssociation(1));
    testDialog(tab -> {
        tab.getTableViewer().editElement(tab.getTableViewer().getElementAt(1), 1);
        AssociationEditingSupport editingSupport = tab.getEditingSupport()[1];
        assertNull("Cell editor was created for ClassAsSimpleFormalizer", editingSupport);
    });
}

/** Read-only column: participant side of a simple binary association (row 1). */
@Test
public void testNoEditorForOtherSideBinary() {
    selectElement(getAssociation(1));
    testDialog(tab -> {
        tab.getTableViewer().editElement(tab.getTableViewer().getElementAt(1), 4);
        AssociationEditingSupport editingSupport = tab.getEditingSupport()[4];
        assertNull("Cell editor was created for ClassAsSimpleParticipant", editingSupport);
    });
}

/** TAB should traverse through each of the editable columns in turn. */
@Test
public void testTabTraversal() {
    selectElement(pkg);
    testDialog(tab -> {
        TableViewer viewer = tab.getTableViewer();
        // Editable columns, in traversal order.
        testTabTraversal(viewer, 2);
        testTabTraversal(viewer, 3);
        testTabTraversal(viewer, 5);
        testTabTraversal(viewer, 6);
        testTabTraversal(viewer, 7);
    });
}
/**
 * Toggling the Formalize button on a formalized binary association (R7)
 * should unformalize it and persist the change; the change is undone at
 * the end to restore the test model.
 */
@Test
public void testUnformalizeBinary() {
    Association_c r7 = getAssociation(7);
    selectElement(r7);
    testDialog(tab -> {
        tab.getTableViewer().setSelection(new StructuredSelection(getAssociation(7)));
        Button formalizeButton = tab.getFormalizeButton();
        formalizeButton.setSelection(!formalizeButton.getSelection());
        formalizeButton.notifyListeners(SWT.Selection, null);
        UIUtil.dispatchAll();
        checkPersistence(true);
    });
    assertFalse("Configuration dialog did not unformalize association.", r7.Isformalized());
    TransactionManager.getSingleton().getUndoAction().run();
}

/**
 * Same toggle on an association with a link class (R1).
 * NOTE(review): unlike the binary variant this one never runs undo, so the
 * model change leaks into later tests — presumably intentional given the
 * @FixMethodOrder ordering; TODO confirm.
 */
@Test
public void testUnformalizeLinked() {
    Association_c r1 = getAssociation(1);
    selectElement(r1);
    testDialog(tab -> {
        tab.getTableViewer().setSelection(new StructuredSelection(getAssociation(1)));
        Button formalizeButton = tab.getFormalizeButton();
        formalizeButton.setSelection(!formalizeButton.getSelection());
        formalizeButton.notifyListeners(SWT.Selection, null);
        UIUtil.dispatchAll();
        checkPersistence(true);
    });
    assertFalse("Configuration dialog did not unformalize association.", r1.Isformalized());
}
// The following tests verify the "Configure Associations" context menu entry
// (CME): for each selection they assert whether the entry appears and which
// association numbers the resulting table should show.

/** Single formalized association: entry present, shows just "1". */
@Test
public void testCMEAssociation() {
    selectElement(getAssociation(1));
    testContextMenu("Configure Associations", new String[] { "1" }, true);
}

/** Two associations selected: both appear in the table. */
@Test
public void testCMEMultipleAssociation() {
    selectElement(getAssociation(1), getAssociation(2));
    testContextMenu("Configure Associations", new String[] { "1", "2" }, true);
}

/** Association plus a class: the class contributes its own associations. */
@Test
public void testCMEAssociationAndClass() {
    selectElement(getAssociation(1), getModelClass("D"));
    testContextMenu("Configure Associations", new String[] { "1", "2", "5" }, true);
}

/** Subtype participation is not editable here, so no entry is offered. */
@Test
public void testCMESimpleAssociationAndSubtype() {
    Association_c superSub = getAssociation(3);
    selectElement(getAssociation(1),
            ClassAsSubtype_c.getOneR_SUBOnR213(SubtypeSupertypeAssociation_c.getManyR_SUBSUPsOnR206(superSub)));
    testContextMenu(null, false);
}

/** Mixing in a supertype association (R3) suppresses the entry. */
@Test
public void testCMESimpleAssociationSupertype() {
    selectElement(getAssociation(1), getAssociation(3));
    testContextMenu(null, false);
}

/** A supertype association alone gets no entry. */
@Test
public void testCMESupertype() {
    selectElement(getAssociation(3));
    testContextMenu(null, false);
}

/** Selecting the link class of R1 offers the entry for R1. */
@Test
public void testCMELink() {
    Association_c assoc = getAssociation(1);
    ClassAsLink_c link = ClassAsLink_c.getOneR_ASSROnR211(LinkedAssociation_c.getManyR_ASSOCsOnR206(assoc));
    selectElement(link);
    testContextMenu("Configure Associations", new String[] { "1" }, true);
}

/** Selecting a subtype role gets no entry. */
@Test
public void testCMESubtype() {
    Association_c superSub = getAssociation(3);
    ClassAsSubtype_c subtype = ClassAsSubtype_c.getOneR_SUBOnR213(SubtypeSupertypeAssociation_c.getManyR_SUBSUPsOnR206(superSub));
    selectElement(subtype);
    testContextMenu(null, false);
}

/** A class shows every association it participates in. */
@Test
public void testCMEClass() {
    selectElement(getModelClass("B"));
    testContextMenu(new String[] { "1", "2", "4" }, true);
}

/** Imported classes contribute their associations too. */
@Test
public void testCMEImportedClass() {
    selectElement(ImportedClass_c.getOneO_IOBJOnR8001(PackageableElement_c.getManyPE_PEsOnR8000(pkg)));
    testContextMenu(new String[] { "4" }, true);
}

/** Two classes: union of their associations. */
@Test
public void testCMEClasses() {
    selectElement(getModelClass("A"), getModelClass("D"));
    testContextMenu(new String[] { "1", "2" }, true);
}

/** A class plus an imported class: union of both sets. */
@Test
public void testCMEClassImportClass() {
    selectElement(ImportedClass_c.getOneO_IOBJOnR8001(PackageableElement_c.getManyPE_PEsOnR8000(pkg)),
            getModelClass("A"));
    testContextMenu("Configure Associations", new String[] { "1", "4", "6" }, true);
}

/** Two imported classes: union of their associations. */
@Test
public void testCMEImportedClasses() {
    ImportedClass_c[] imports = ImportedClass_c
            .getManyO_IOBJsOnR8001(PackageableElement_c.getManyPE_PEsOnR8000(pkg));
    selectElement(imports[0], imports[1]);
    testContextMenu(new String[] { "4", "5", "6" }, true);
}

/** Classes sharing an association list it only once. */
@Test
public void testCMEClassesWithSameAssociation() {
    selectElement(getModelClass("A"), getModelClass("B"));
    testContextMenu(new String[] { "1", "2", "4" }, true);
}

/** An association plus one of its own participating classes shows just it. */
@Test
public void testCMEClassAndAttachedAssociation() {
    selectElement(getAssociation(1), getModelClass("A"));
    testContextMenu("Configure Associations", new String[] { "1" }, true);
}

/** Selecting the whole diagram shows every eligible association. */
@Test
public void testCMEDiagram() {
    selectElement(pkg);
    testContextMenu(new String[] { "1", "2", "4", "5", "6" }, true);
}

/** Reflexive association (R7) gets its own entry. */
@Test
public void testCMEReflexive() {
    selectElement(getAssociation(7));
    testContextMenu("Configure Associations", new String[] { "7" }, true);
}

/** The class participating in the reflexive association shows it once. */
@Test
public void testCMEReflexiveClass() {
    selectElement(getModelClass("E"));
    testContextMenu(new String[] { "7" }, true);
}
/**
 * Edits the Number column (column 0) of the given association to "9",
 * commits it with an ENTER key event, verifies the model change and
 * persistence, then undoes the transaction to restore the test model.
 * (Extracted from two byte-duplicate tests that differed only in which
 * association they edited.)
 */
private void checkNumberModification(AssociationEditorTab tab, Association_c association) {
    tab.getTableViewer().setSelection(new StructuredSelection(association));
    tab.getTableViewer().editElement(association, 0);
    CellEditor cellEditor = tab.getEditingSupport()[0].getActiveCellEditor();
    cellEditor.setValue("9");
    // Commit the edit as the user would: press ENTER in the table.
    Event event = new Event();
    event.character = SWT.CR;
    event.keyCode = SWT.CR;
    tab.getTableViewer().getTable().notifyListeners(SWT.KeyDown, event);
    tab.getTableViewer().applyEditorValue();
    assertTrue("Association number change was not made.", association.getNumb() == 9);
    checkPersistence();
    UIUtil.dispatchAll();
    // Restore the model for subsequent tests.
    TransactionManager.getSingleton().getUndoAction().run();
    UIUtil.dispatchAll();
}

/** Renumbering a simple binary association (R2) through the table. */
@Test
public void testModifyAssociationNumberBinary() {
    testDialog(tab -> checkNumberModification(tab, getAssociation(2)));
}

/** Renumbering an association with a link class (R1) through the table. */
@Test
public void testModifyAssociationNumberLinked() {
    testDialog(tab -> checkNumberModification(tab, getAssociation(1)));
}
/**
 * Edits a text-phrase column of the given association to "new_phrase",
 * commits with an ENTER key event, and verifies the model change and
 * persistence. (Extracted from five near-duplicate tests that differed
 * only in the association, the column, and which side to verify.)
 *
 * @param tab         The open association editor tab.
 * @param association The association whose phrase is edited.
 * @param column      Phrase column to edit: 3 = one side, 6 = other side.
 * @param oneside     Which side getPhrase should verify.
 */
private void checkPhraseModification(AssociationEditorTab tab, Association_c association, int column, boolean oneside) {
    tab.getTableViewer().setSelection(new StructuredSelection(association));
    tab.getTableViewer().editElement(association, column);
    CellEditor cellEditor = tab.getEditingSupport()[column].getActiveCellEditor();
    cellEditor.setValue("new_phrase");
    Event event = new Event();
    event.character = SWT.CR;
    event.keyCode = SWT.CR;
    tab.getTableViewer().getTable().notifyListeners(SWT.KeyDown, event);
    assertTrue("Association phrase change was not made.", getPhrase(association, oneside).equals("new_phrase"));
    checkPersistence();
}

/** One-side phrase of an unformalized binary association (R2). */
@Test
public void testModifyOneSideTextPhraseBinary() {
    testDialog(tab -> checkPhraseModification(tab, getAssociation(2), 3, true));
}

/** Other-side phrase of an unformalized binary association (R2). */
@Test
public void testModifyOtherSideTextPhraseBinary() {
    testDialog(tab -> checkPhraseModification(tab, getAssociation(2), 6, false));
}

/** Other-side phrase of a formalized binary association (R7). */
@Test
public void testModifyOtherSideTextPhraseBinaryFormalized() {
    testDialog(tab -> checkPhraseModification(tab, getAssociation(7), 6, false));
}

/** One-side phrase of an association with a link class (R1). */
@Test
public void testModifyOneSideTextPhraseLinked() {
    testDialog(tab -> checkPhraseModification(tab, getAssociation(1), 3, true));
}

/** Other-side phrase of an association with a link class (R1). */
@Test
public void testModifyOtherSideTextPhraseLinked() {
    testDialog(tab -> checkPhraseModification(tab, getAssociation(1), 6, false));
}
/**
 * Verifies undo/redo integration in the dialog: context menu entries and
 * keyboard shortcuts. With empty stacks both entries are disabled; after an
 * edit Undo enables; Cmd+Z undoes (Redo enables); Cmd+Shift+Z redoes.
 */
@Test
public void testUndoRedo() {
    // only need to test that CMEs are present
    // and that keyboard shortcuts work all
    // other aspects of undo/redo are already
    // tested
    testDialog(tab -> {
        TransactionManager.getSingleton().clearStacks();
        TransactionManager.getSingleton().setUndoRedoActionsState();
        // With empty stacks both menu entries should be disabled (last arg).
        assertTrue(UITestingUtilities.checkItemStatusInContextMenu(tab.getMenu(), "Undo", "", true));
        assertTrue(UITestingUtilities.checkItemStatusInContextMenu(tab.getMenu(), "Redo", "", true));
        Association_c association = getAssociation(1);
        tab.getTableViewer().setSelection(new StructuredSelection(association));
        tab.getTableViewer().editElement(association, 6);
        CellEditor cellEditor = tab.getEditingSupport()[6].getActiveCellEditor();
        cellEditor.setValue("undo_test");
        Event event = new Event();
        event.character = SWT.CR;
        event.keyCode = SWT.CR;
        tab.getTableViewer().getTable().notifyListeners(SWT.KeyDown, event);
        UIUtil.dispatchAll();
        // After the edit: Undo enabled, Redo still disabled.
        assertTrue(UITestingUtilities.checkItemStatusInContextMenu(tab.getMenu(), "Undo", "", false));
        assertTrue(UITestingUtilities.checkItemStatusInContextMenu(tab.getMenu(), "Redo", "", true));
        // perform undo with keys
        // NOTE(review): SWT.COMMAND is the macOS modifier — presumably mapped
        // appropriately on other platforms by the handler; TODO confirm.
        event.character = 'z';
        event.keyCode = 'z';
        event.stateMask = SWT.COMMAND;
        tab.getTableViewer().getTable().notifyListeners(SWT.KeyDown, event);
        UIUtil.dispatchAll();
        assertTrue(UITestingUtilities.checkItemStatusInContextMenu(tab.getMenu(), "Undo", "", true));
        assertTrue(UITestingUtilities.checkItemStatusInContextMenu(tab.getMenu(), "Redo", "", false));
        // Redo via Cmd+Shift+Z.
        event.character = 'z';
        event.keyCode = 'z';
        event.stateMask = SWT.COMMAND | SWT.SHIFT;
        tab.getTableViewer().getTable().notifyListeners(SWT.KeyDown, event);
        UIUtil.dispatchAll();
        assertTrue(UITestingUtilities.checkItemStatusInContextMenu(tab.getMenu(), "Undo", "", false));
        assertTrue(UITestingUtilities.checkItemStatusInContextMenu(tab.getMenu(), "Redo", "", true));
    });
}
/** Cycles the one-side rule (column 2) of simple binary R2 through all values. */
@Test
public void testModificationRuleSimpleOne() {
    testDialog(tab -> {
        Association_c association = getAssociation(2);
        checkRuleModification(tab, association, true, 2);
    });
}

/** Cycles the other-side rule (column 5) of simple binary R2. */
@Test
public void testModificationRuleSimpleOther() {
    testDialog(tab -> {
        Association_c association = getAssociation(2);
        checkRuleModification(tab, association, false, 5);
    });
}

/**
 * Cycles the one-side rule of a linked association.
 * NOTE(review): uses association 21 while the other linked tests use 1 —
 * presumably a dedicated linked association in the test model; TODO confirm
 * it is not a typo for 1.
 */
@Test
public void testModificationRuleLinkedOne() {
    testDialog(tab -> {
        Association_c association = getAssociation(21);
        checkRuleModification(tab, association, true, 2);
    });
}

/** Cycles the other-side rule of linked association R1. */
@Test
public void testModificationRuleLinkedOther() {
    testDialog(tab -> {
        Association_c association = getAssociation(1);
        checkRuleModification(tab, association, false, 5);
    });
}

/** Cycles the link-class multiplicity (column 7) of linked association R1. */
@Test
public void testModificationRuleLinkedLink() {
    testDialog(tab -> {
        Association_c association = getAssociation(1);
        ClassAsLink_c link = ClassAsLink_c
                .getOneR_ASSROnR211(LinkedAssociation_c.getManyR_ASSOCsOnR206(association));
        checkRuleModification(tab, link, true, 7);
    });
}
/**
 * Cycles a multiplicity/conditionality ("rule") combo cell through each of
 * its choices and verifies the resulting model values.
 *
 * Combo index to rule mapping for associations:
 * 1 (unconditional one) mult = 0 cond = 0
 * 0..1 (conditional one) mult = 0 cond = 1
 * 1..* (unconditional many) mult = 1, cond = 0
 * * (conditional many) mult = 1, cond = 1
 *
 * Link classes have only two choices (1 and 1..*).
 *
 * @param tab     The open association editor tab.
 * @param element The association, or a link class, whose rule is cycled.
 * @param oneside Which side of the association to verify.
 * @param column  The table column holding the rule combo.
 */
private void checkRuleModification(AssociationEditorTab tab, NonRootModelElement element, boolean oneside, int column) {
    int count = 4;
    NonRootModelElement association = element;
    if(association instanceof ClassAsLink_c) {
        // The table row belongs to the owning association, not the link class,
        // and link multiplicity has only two combo choices.
        association = Association_c.getOneR_RELOnR206(LinkedAssociation_c.getManyR_ASSOCsOnR211((ClassAsLink_c) association));
        count = 2;
    }
    for(int i = 0; i < count; i++) {
        tab.getTableViewer().editElement(association, column);
        ComboBoxCellEditor cellEditor = (ComboBoxCellEditor) tab.getEditingSupport()[column].getActiveCellEditor();
        Object currentValue = cellEditor.getValue();
        cellEditor.setValue(Integer.valueOf(i));
        // Commit via ENTER, as a user would.
        Event event = new Event();
        event.character = SWT.CR;
        event.keyCode = SWT.CR;
        tab.getTableViewer().getTable().notifyListeners(SWT.KeyDown, event);
        tab.getTableViewer().applyEditorValue();
        UIUtil.dispatchAll();
        if(currentValue.equals(Integer.valueOf(i))) {
            // Selecting the already-current value must be a no-op:
            // verify that no persistence occurred.
            checkPersistence(false);
        } else {
            // Translate combo index i to the expected mult/cond pair
            // (index 1 means "many" for link classes but "conditional one"
            // for associations — see the table above).
            int cond = 0;
            int mult = 0;
            if(i == 1 && element instanceof ClassAsLink_c) {
                cond = 0;
                mult = 1;
            }
            if(i == 1 && element instanceof Association_c) {
                cond = 1;
                mult = 0;
            }
            if(i == 2) {
                cond = 0;
                mult = 1;
            }
            if(i == 3) {
                cond = 1;
                mult = 1;
            }
            verifyRule(element, oneside, cond, mult);
        }
    }
}
/**
 * Asserts that the model reflects the expected multiplicity/conditionality
 * after a rule edit, dispatching on the association's formalization shape:
 * link class, simple (participant/formalizer), or linked (one/other side).
 *
 * @param association The edited element (Association_c or ClassAsLink_c).
 * @param oneside     Which side of the association to check.
 * @param cond        Expected conditionality (0 or 1).
 * @param mult        Expected multiplicity (0 = one, 1 = many).
 */
private void verifyRule(NonRootModelElement association, boolean oneside, int cond, int mult) {
    if(association instanceof ClassAsLink_c) {
        // Link classes only carry multiplicity.
        assertTrue("Multiplicity was not set for association.", ((ClassAsLink_c) association).getMult() == mult);
    } else {
        SimpleAssociation_c simp = SimpleAssociation_c.getOneR_SIMPOnR206((Association_c) association);
        LinkedAssociation_c linked = LinkedAssociation_c.getOneR_ASSOCOnR206((Association_c) association);
        if(simp != null) {
            ClassAsSimpleParticipant_c[] parts = ClassAsSimpleParticipant_c.getManyR_PARTsOnR207(simp);
            if(parts.length > 1) {
                // Unformalized simple association: both sides are participants.
                if(oneside) {
                    assertTrue("Multiplicity was not set correctly for rule.", parts[0].getMult() == mult);
                    assertTrue("Conditionality was not set correctly for rule.", parts[0].getCond() == cond);
                } else {
                    assertTrue("Multiplicity was not set correctly for rule.", parts[1].getMult() == mult);
                    assertTrue("Conditionality was not set correctly for rule.", parts[1].getCond() == cond);
                }
            } else {
                // Formalized: one participant plus a formalizer on the other side.
                // NOTE(review): assumes parts is non-empty whenever simp exists —
                // parts[0] would throw otherwise; TODO confirm.
                if(oneside) {
                    assertTrue("Multiplicity was not set correctly for rule.", parts[0].getMult() == mult);
                    assertTrue("Conditionality was not set correctly for rule.", parts[0].getCond() == cond);
                } else {
                    ClassAsSimpleFormalizer_c form = ClassAsSimpleFormalizer_c.getOneR_FORMOnR208(simp);
                    assertTrue("Multiplicity was not set correctly for rule.", form.getMult() == mult);
                    assertTrue("Conditionality was not set correctly for rule.", form.getCond() == cond);
                }
            }
        } else {
            // Linked association: check the requested associated side.
            if(oneside) {
                ClassAsAssociatedOneSide_c one = ClassAsAssociatedOneSide_c.getOneR_AONEOnR209(linked);
                assertTrue("Multiplicity was not set correctly for rule.", one.getMult() == mult);
                assertTrue("Conditionality was not set correctly for rule.", one.getCond() == cond);
            } else {
                ClassAsAssociatedOtherSide_c oth = ClassAsAssociatedOtherSide_c.getOneR_AOTHOnR210(linked);
                assertTrue("Multiplicity was not set correctly for rule.", oth.getMult() == mult);
                assertTrue("Conditionality was not set correctly for rule.", oth.getCond() == cond);
            }
        }
    }
}
/**
 * Look up the text phrase recorded for one end of the given association.
 *
 * @param association the association whose phrase is wanted
 * @param oneside true for the "one" side phrase, false for the other side
 * @return the text phrase, or the empty string when no association end exists
 */
private String getPhrase(Association_c association, boolean oneside) {
    // Simple associations keep their phrases on participants/formalizer.
    ClassAsSimpleParticipant_c[] participants = ClassAsSimpleParticipant_c
            .getManyR_PARTsOnR207(SimpleAssociation_c.getManyR_SIMPsOnR206(association));
    if(participants.length != 0) {
        if(oneside) {
            return participants[0].getTxt_phrs();
        }
        if(participants.length > 1) {
            // Two participants: the second one is the other side.
            return participants[1].getTxt_phrs();
        }
        // Single participant: the other side is the formalizer (R208).
        return ClassAsSimpleFormalizer_c
                .getOneR_FORMOnR208(SimpleAssociation_c.getManyR_SIMPsOnR206(association)).getTxt_phrs();
    }
    // Linked (associative) associations keep their phrases on the
    // associated one side (R209) and other side (R210).
    ClassAsAssociatedOneSide_c oneSideEnd = ClassAsAssociatedOneSide_c
            .getOneR_AONEOnR209(LinkedAssociation_c.getManyR_ASSOCsOnR206(association));
    if(oneSideEnd == null) {
        // Neither a simple nor a linked association end was found.
        return "";
    }
    if(oneside) {
        return oneSideEnd.getTxt_phrs();
    }
    return ClassAsAssociatedOtherSide_c
            .getOneR_AOTHOnR210(LinkedAssociation_c.getManyR_ASSOCsOnR206(association)).getTxt_phrs();
}
/**
 * Replace the current graphical selection with exactly the given elements.
 *
 * @param elements the model elements to select on the diagram
 */
public void selectElement(NonRootModelElement... elements) {
    UITestingUtilities.clearGraphicalSelection();
    for(int i = 0; i < elements.length; i++) {
        UITestingUtilities.addElementToGraphicalSelection(elements[i]);
    }
    // Let pending UI events settle so the selection takes effect everywhere.
    UIUtil.dispatchAll();
}
/**
 * Find the association in the test package with the given association number.
 *
 * @param numb the association number (e.g. 1 for R1)
 * @return the matching association, or null when none matches
 */
public Association_c getAssociation(int numb) {
    // Delegate to the query-based lookup, matching on the number.
    ClassQueryInterface_c byNumber = candidate -> numb == ((Association_c) candidate).getNumb();
    return getAssociation(byNumber);
}
/**
 * Find an association in the test package matching the given query.
 *
 * @param query selection predicate applied to each candidate association
 * @return the first matching association, or null when none matches
 */
public Association_c getAssociation(ClassQueryInterface_c query) {
    return Association_c.getOneR_RELOnR8001(PackageableElement_c.getManyPE_PEsOnR8000(pkg), query);
}
/**
 * Activate the default "Configure Associations" context menu entry and verify
 * the resulting dialog contents.
 *
 * @param expectedResults the table item labels expected in the dialog
 * @param exists whether the menu entry is expected to be present at all
 */
public void testContextMenu(String[] expectedResults, boolean exists) {
    testContextMenu("Configure Associations", expectedResults, exists);
}
/**
 * Simulate a TAB traversal in the given table viewer and verify that cell
 * editing stays active and the focus cell lands on the expected column.
 *
 * @param viewer the table viewer under test
 * @param expectedColumn the column index expected to hold the focus cell
 *            after the traversal
 */
public void testTabTraversal(TableViewer viewer, int expectedColumn) {
    viewer.getTable().setFocus();
    // Build a synthetic TAB traversal key event.
    Event tabEvent = new Event();
    tabEvent.detail = SWT.TRAVERSE_TAB_NEXT;
    tabEvent.character = SWT.TAB;
    tabEvent.doit = true;
    tabEvent.keyCode = SWT.TAB;
    // The last child of the table is expected to be the active cell editor
    // control — NOTE(review): confirm this holds for all editor layouts.
    Control[] children = viewer.getTable().getChildren();
    Control child = children[children.length - 1];
    child.notifyListeners(SWT.Traverse, tabEvent);
    assertTrue("After tab traversal the next editor was not active.", viewer.isCellEditorActive());
    ViewerCell focusCell = viewer.getColumnViewerEditor().getFocusCell();
    assertTrue("Tab traversal did not move to the next editable cell.",
            focusCell.getColumnIndex() == expectedColumn);
    UIUtil.dispatchAll();
}
// Size of the file's local history at the previous check; shared across calls
// so each checkPersistence() compares against the prior state.
static int previousFileHistoryCount = 0;
/**
 * Convenience overload: verify that the latest change was persisted.
 */
public void checkPersistence() {
    checkPersistence(true);
}
/**
 * Use the workspace local file history to verify whether the model file was
 * (or was not) written since the previous call to this method.
 *
 * @param changeExpected true if a new history entry is expected; false if
 *            the file should not have been persisted
 */
public void checkPersistence(boolean changeExpected) {
    try {
        IFileState[] history = pkg.getFile().getHistory(new NullProgressMonitor());
        if(changeExpected) {
            assertTrue("Change was not persisted.", history.length > previousFileHistoryCount);
        } else {
            assertFalse("Change was persisted.", history.length > previousFileHistoryCount);
        }
        // Remember the current history size so the next call compares
        // incrementally instead of against the start of the test run.
        previousFileHistoryCount = history.length;
    } catch (CoreException e) {
        TestCase.fail("Unable to verify persistence of change.");
    }
}
/**
 * Callback carrying the body of a test to run against an open association
 * editor tab while the configuration dialog is showing.
 */
interface DialogTest {
    /**
     * Run the test against the given editor tab.
     *
     * @param tab the association editor tab hosted by the open dialog
     */
    public void doTest(AssociationEditorTab tab);
}
// Holds an assertion failure raised inside the asynchronous dialog callback so
// it can be re-thrown on the test thread after the dialog has been dismissed.
Throwable error = null;
/**
 * Open the "Configure Associations" dialog, run the given test against the
 * association editor tab it hosts, and guarantee the dialog is closed again
 * regardless of the outcome.
 *
 * @param test the test body to execute while the dialog is open
 */
public void testDialog(DialogTest test) {
    // all tests can work against the diagram cme
    selectElement(pkg);
    // Register a callback that runs once the dialog shell appears.
    TestUtil.dismissShell(shell -> {
        if(shell != null && shell.getData() instanceof AssociationTableDialog) {
            AssociationTableDialog tableDialog = (AssociationTableDialog) shell.getData();
            AssociationEditorTab tab = (AssociationEditorTab) tableDialog.createdTab;
            try {
                test.doTest(tab);
                Button close = TestUtil.findButton(shell, "Close");
                close.notifyListeners(SWT.Selection, new Event());
                return true;
            } catch (AssertionFailedError e) {
                // Capture the failure; it is re-thrown on the test thread below.
                error = e;
                Button close = TestUtil.findButton(shell, "Close");
                close.notifyListeners(SWT.Selection, new Event());
                return true;
            } finally {
                // Safety net: close the dialog even if the earlier Close
                // notifications did not dispose the shell.
                if(!shell.isDisposed()) {
                    Button close = TestUtil.findButton(shell, "Close");
                    close.notifyListeners(SWT.Selection, new Event());
                }
            }
        }
        return false;
    });
    UITestingUtilities.activateMenuItem(editor.getCanvas().getMenu(), "Configure Associations");
    if(error != null) {
        Throwable t = error;
        error = null;
        // re-throw errors on the correct thread
        throw new AssertionFailedError(t.getMessage());
    }
}
// Table item labels collected from the most recently opened dialog.
List<String> tableItems = new ArrayList<String>();
/**
 * Activate the named context menu entry, collect the table items from the
 * dialog it opens, and verify them against the expected labels.
 *
 * @param name the context menu entry to activate
 * @param expectedResults the table item labels expected in the dialog
 * @param exists whether the menu entry is expected to be present at all;
 *            when false the method only asserts absence and returns
 */
public void testContextMenu(String name, String[] expectedResults, boolean exists) {
    tableItems.clear();
    if(exists) {
        assertTrue("Unable to find menu entry", UITestingUtilities.checkItemStatusInContextMenu(editor.getCanvas().getMenu(), name, "", false));
    } else {
        assertTrue("Found the menu entry when it should not exist", UITestingUtilities.getMenuItem(editor.getCanvas().getMenu(), name) == null);
        return;
    }
    // Collect the dialog's table item labels, then close it.
    TestUtil.dismissShell(shell -> {
        if (shell != null && shell.getData() instanceof Dialog) {
            Control[] children = shell.getChildren();
            TableItem[] items = TestUtil.getTableItems(shell);
            for(TableItem item : items) {
                tableItems.add(item.getText());
            }
            Button close = TestUtil.findButton((Composite) children[0], "Close");
            close.notifyListeners(SWT.Selection, new Event());
            return true;
        }
        return false;
    });
    UITestingUtilities.activateMenuItem(editor.getCanvas().getMenu(), name);
    // Fail with a clear message when fewer items were collected than expected,
    // instead of raising IndexOutOfBoundsException from tableItems.get(i).
    assertTrue("Expected " + expectedResults.length + " table items but found " + tableItems.size(),
            tableItems.size() >= expectedResults.length);
    for(int i = 0; i < expectedResults.length; i++) {
        assertEquals("Found unexpected table item.", expectedResults[i], tableItems.get(i));
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.experimental.logical.rules;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.pig.data.DataType;
import org.apache.pig.experimental.logical.expression.LogicalExpressionPlan;
import org.apache.pig.experimental.logical.expression.LogicalExpressionVisitor;
import org.apache.pig.experimental.logical.expression.MapLookupExpression;
import org.apache.pig.experimental.logical.optimizer.AllExpressionVisitor;
import org.apache.pig.experimental.logical.relational.LOFilter;
import org.apache.pig.experimental.logical.relational.LOGenerate;
import org.apache.pig.experimental.logical.relational.LOJoin;
import org.apache.pig.experimental.logical.relational.LOLoad;
import org.apache.pig.experimental.logical.relational.LogicalRelationalOperator;
import org.apache.pig.experimental.logical.relational.LogicalSchema;
import org.apache.pig.experimental.logical.relational.LogicalSchema.LogicalFieldSchema;
import org.apache.pig.experimental.plan.DependencyOrderWalker;
import org.apache.pig.experimental.plan.Operator;
import org.apache.pig.experimental.plan.OperatorPlan;
import org.apache.pig.experimental.plan.OperatorSubPlan;
import org.apache.pig.experimental.plan.ReverseDependencyOrderWalker;
/**
* This filter Marks every Load Operator which has a Map
* with MAP_MARKER_ANNOTATION. The annotation value is
* <code>Map<Integer,Set<String>></code> where Integer is the column number
* of the field and Set is the set of Keys in this field ( field is a map field only ).
*
* It does this for only the top level schema in load.
*
* Algorithm:
* Traverse the Plan in ReverseDependency order ( ie. Sink to Source )
* For LogicalRelationalOperators having MapLookupExpression in their
* expressionPlan collect uid and keys related to it. This is
* retained in the visitor
* For ForEach having nested LogicalPlan use the same visitor hence
* there is no distinction required
* At Sources find all the uids provided by this source and annotate this
* LogicalRelationalOperator ( load ) with <code>Map<Integer,Set<String>></code>
* containing only the column numbers that this LogicalRelationalOperator generates
*
* NOTE: This is a simple Map Pruner. If a map key is mentioned in the script
* then this pruner assumes you need the key. This pruner is not as optimized
* as column pruner ( which removes a column if it is mentioned but never used )
*
*/
public class MapKeysPruneHelper {

    /** Annotation key under which the required map keys are attached to a load operator. */
    public static final String REQUIRED_MAPKEYS = "MapPruner:RequiredKeys";

    // The plan this helper analyzes.
    private OperatorPlan currentPlan;
    // Collects the sources that still carry prunable map keys; reported to the optimizer.
    private OperatorSubPlan subplan;

    public MapKeysPruneHelper(OperatorPlan currentPlan) {
        this.currentPlan = currentPlan;

        // Always build the report sub-plan over the base plan so changes are
        // reported against the full plan rather than a nested sub-plan.
        if (currentPlan instanceof OperatorSubPlan) {
            subplan = new OperatorSubPlan(((OperatorSubPlan)currentPlan).getBasePlan());
        } else {
            subplan = new OperatorSubPlan(currentPlan);
        }
    }

    /**
     * Analyze the plan and annotate each load that has prunable map keys.
     *
     * @return true if at least one source retained a non-empty set of
     *         required map keys (i.e. there is something to prune)
     * @throws IOException propagated from the expression visitors
     */
    @SuppressWarnings("unchecked")
    public boolean check() throws IOException {

        // First check if we have a load with a map in it or not
        List<Operator> sources = currentPlan.getSources();
        boolean hasMap = false;
        for( Operator source : sources ) {
            LogicalSchema schema = ((LogicalRelationalOperator)source).getSchema();
            // If any of the loads has a null schema we dont know the ramifications here
            // so we skip this optimization
            if( schema == null ) {
                return false;
            }
            if( hasMap( schema ) ) {
                hasMap = true;
            }
        }

        // We dont have any map in the first level of schema
        if( !hasMap ) {
            return false;
        }

        // Walk the plan from sinks to sources, collecting the map keys looked
        // up per uid; loads are annotated with REQUIRED_MAPKEYS as a side effect.
        MapMarker marker = new MapMarker(currentPlan);
        marker.visit();

        // Get all Uids of map-typed fields reaching the sinks; those maps flow
        // out whole, so their keys cannot be pruned at the source.
        List<Operator> sinks = currentPlan.getSinks();
        Set<Long> sinkMapUids = new HashSet<Long>();
        for( Operator sink : sinks ) {
            LogicalSchema schema = ((LogicalRelationalOperator)sink).getSchema();
            sinkMapUids.addAll( getMapUids( schema ) );
        }

        // If we have found specific keys which are needed then we return true;
        // Else if we dont have any specific keys we return false
        boolean hasAnnotation = false;
        for( Operator source : sources ) {
            Map<Integer,Set<String>> annotationValue =
                (Map<Integer, Set<String>>) ((LogicalRelationalOperator)source).getAnnotation(REQUIRED_MAPKEYS);

            // Now for all full maps found in sinks we cannot prune them at source
            if( ! sinkMapUids.isEmpty() && annotationValue != null &&
                    !annotationValue.isEmpty() ) {
                // Copy the key set to an array so we can remove entries from
                // the map while iterating.
                Integer[] annotationKeyArray = annotationValue.keySet().toArray( new Integer[0] );
                LogicalSchema sourceSchema = ((LogicalRelationalOperator)source).getSchema();
                for( Integer col : annotationKeyArray ) {
                    if( sinkMapUids.contains(sourceSchema.getField(col).uid)) {
                        annotationValue.remove( col );
                    }
                }
            }

            // Drop an annotation that became empty after the removal above.
            if ( annotationValue != null && annotationValue.isEmpty()) {
                ((LogicalRelationalOperator)source).removeAnnotation(REQUIRED_MAPKEYS);
                annotationValue = null;
            }

            // Can we still prune any keys
            if( annotationValue != null ) {
                hasAnnotation = true;
                subplan.add(source);
            }
        }

        // If no source kept a prunable-key annotation, there is nothing to optimize.
        return hasAnnotation;
    }

    /**
     * This function checks if the schema has a map.
     * We dont check for a nested structure.
     * @param schema Schema to be checked
     * @return true if it has a map, else false
     * @throws NullPointerException incase Schema is null
     */
    private boolean hasMap(LogicalSchema schema ) throws NullPointerException {
        for( LogicalFieldSchema field : schema.getFields() ) {
            if( field.type == DataType.MAP ) {
                return true;
            }
        }
        return false;
    }

    /**
     * This function returns a set of Uids corresponding to
     * map datatype in the first level of this schema
     * @param schema Schema having fields (may be null)
     * @return set of uids of top-level map fields; empty when schema is null
     */
    private Set<Long> getMapUids(LogicalSchema schema ) {
        Set<Long> uids = new HashSet<Long>();
        if( schema != null ) {
            for( LogicalFieldSchema field : schema.getFields() ) {
                if( field.type == DataType.MAP ) {
                    uids.add( field.uid );
                }
            }
        }
        return uids;
    }

    /** Returns the sub-plan containing the sources that can be pruned. */
    public OperatorPlan reportChanges() {
        return subplan;
    }

    /**
     * This class collects all the information required to create
     * the list of keys required for a map
     */
    static public class MapMarker extends AllExpressionVisitor {

        // Map from field uid to the set of map keys looked up on that field.
        Map<Long,Set<String>> inputUids = null;

        protected MapMarker(OperatorPlan plan) {
            // Sink-to-source traversal so lookups are seen before the loads.
            super(plan, new ReverseDependencyOrderWalker(plan));
            inputUids = new HashMap<Long,Set<String>>();
        }

        @Override
        public void visitLOLoad(LOLoad load) throws IOException {
            if( load.getSchema() != null ) {
                // Translate uid-keyed lookups into a column-number-keyed
                // annotation restricted to this load's own fields.
                Map<Integer,Set<String>> annotation = new HashMap<Integer,Set<String>>();
                for( int i=0; i<load.getSchema().size(); i++) {
                    LogicalFieldSchema field = load.getSchema().getField(i);
                    if( inputUids.containsKey( field.uid ) ) {
                        annotation.put(i, inputUids.get( field.uid ) );
                    }
                }
                load.annotate(REQUIRED_MAPKEYS, annotation);
            }
        }

        @Override
        public void visitLOFilter(LOFilter filter) throws IOException {
            currentOp = filter;
            MapExprMarker v = (MapExprMarker) getVisitor(filter.getFilterPlan());
            v.visit();
            mergeUidKeys( v.inputUids );
        }

        @Override
        public void visitLOJoin(LOJoin join) throws IOException {
            currentOp = join;
            // Each join has one expression plan per input.
            Collection<LogicalExpressionPlan> c = join.getExpressionPlans();
            for (LogicalExpressionPlan plan : c) {
                MapExprMarker v = (MapExprMarker) getVisitor(plan);
                v.visit();
                mergeUidKeys( v.inputUids );
            }
        }

        @Override
        public void visitLOGenerate(LOGenerate gen) throws IOException {
            currentOp = gen;
            Collection<LogicalExpressionPlan> plans = gen.getOutputPlans();
            for( LogicalExpressionPlan plan : plans ) {
                MapExprMarker v = (MapExprMarker) getVisitor(plan);
                v.visit();
                mergeUidKeys( v.inputUids );
            }
        }

        // Merge per-expression lookup results into the visitor-wide map,
        // unioning key sets when a uid was already seen.
        private void mergeUidKeys( Map<Long, Set<String> > inputMap ) {
            for( Map.Entry<Long, Set<String>> entry : inputMap.entrySet() ) {
                if( inputUids.containsKey(entry.getKey()) ) {
                    Set<String> mapKeySet = inputUids.get(entry.getKey());
                    mapKeySet.addAll(entry.getValue());
                } else {
                    inputUids.put(entry.getKey(), inputMap.get(entry.getKey()));
                }
            }
        }

        @Override
        protected LogicalExpressionVisitor getVisitor(LogicalExpressionPlan expr) {
            return new MapExprMarker(expr );
        }

        /** Records every map-key lookup found in a single expression plan. */
        static class MapExprMarker extends LogicalExpressionVisitor {

            // Map from field uid to the keys looked up on that field in this plan.
            Map<Long,Set<String>> inputUids = null;

            protected MapExprMarker(OperatorPlan p) {
                super(p, new DependencyOrderWalker(p));
                inputUids = new HashMap<Long,Set<String>>();
            }

            public void visitMapLookup(MapLookupExpression op) throws IOException {
                Long uid = op.getMap().getUid();
                String key = op.getLookupKey();

                HashSet<String> mapKeySet = null;
                if( inputUids.containsKey(uid) ) {
                    mapKeySet = (HashSet<String>) inputUids.get(uid);
                } else {
                    mapKeySet = new HashSet<String>();
                    inputUids.put(uid, mapKeySet);
                }
                mapKeySet.add(key);
            }
        }
    }
}
| |
/*
* Copyright 2002-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.web.servlet.tags.form;
import java.beans.PropertyEditor;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.jsp.tagext.BodyTag;
import jakarta.servlet.jsp.tagext.Tag;
import org.dom4j.Document;
import org.dom4j.Element;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;
import org.junit.jupiter.api.Test;
import org.springframework.beans.testfixture.beans.TestBean;
import org.springframework.validation.BeanPropertyBindingResult;
import org.springframework.validation.BindingResult;
import org.springframework.validation.Errors;
import org.springframework.web.servlet.support.BindStatus;
import org.springframework.web.servlet.support.RequestContext;
import org.springframework.web.servlet.tags.RequestContextAwareTag;
import org.springframework.web.testfixture.servlet.MockHttpServletRequest;
import org.springframework.web.testfixture.servlet.MockPageContext;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Rob Harrop
* @author Juergen Hoeller
* @author Scott Andrews
* @author Jeremy Grelle
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
class OptionsTagTests extends AbstractHtmlElementTagTests {

	// Name under which the command object is exposed to the form tags.
	private static final String COMMAND_NAME = "testBean";

	// Parent select tag; the options tag resolves its bind status through it.
	private SelectTag selectTag;

	private OptionsTag tag;

	@Override
	@SuppressWarnings("serial")
	protected void onSetUp() {
		// Anonymous subclasses route tag output into the test writer.
		this.tag = new OptionsTag() {
			@Override
			protected TagWriter createTagWriter() {
				return new TagWriter(getWriter());
			}
		};
		selectTag = new SelectTag() {
			@Override
			protected TagWriter createTagWriter() {
				return new TagWriter(getWriter());
			}
			@Override
			public String getName() {
				// Should not be used other than to delegate to
				// RequestDataValueDataProcessor
				return "testName";
			}
		};
		selectTag.setPageContext(getPageContext());
		this.tag.setParent(selectTag);
		this.tag.setPageContext(getPageContext());
	}

	// Renders options from a collection and checks selection plus attributes.
	@Test
	void withCollection() throws Exception {
		getPageContext().setAttribute(
				SelectTag.LIST_VALUE_PAGE_ATTRIBUTE, new BindStatus(getRequestContext(), "testBean.country", false));
		this.tag.setItems(Country.getCountries());
		this.tag.setItemValue("isoCode");
		this.tag.setItemLabel("name");
		this.tag.setId("myOption");
		this.tag.setCssClass("myClass");
		this.tag.setOnclick("CLICK");
		int result = this.tag.doStartTag();
		assertThat(result).isEqualTo(Tag.SKIP_BODY);
		String output = getOutput();
		// Wrap the rendered fragment in a root element so it parses as XML.
		output = "<doc>" + output + "</doc>";
		SAXReader reader = new SAXReader();
		Document document = reader.read(new StringReader(output));
		Element rootElement = document.getRootElement();
		List children = rootElement.elements();
		assertThat(children.size()).as("Incorrect number of children").isEqualTo(4);
		Element element = (Element) rootElement.selectSingleNode("option[@value = 'UK']");
		assertThat(element.attribute("selected").getValue()).as("UK node not selected").isEqualTo("selected");
		assertThat(element.attribute("id").getValue()).isEqualTo("myOption3");
		assertThat(element.attribute("class").getValue()).isEqualTo("myClass");
		assertThat(element.attribute("onclick").getValue()).isEqualTo("CLICK");
	}

	// Same as withCollection, plus dynamic attributes passed through to each option.
	@Test
	void withCollectionAndDynamicAttributes() throws Exception {
		String dynamicAttribute1 = "attr1";
		String dynamicAttribute2 = "attr2";

		getPageContext().setAttribute(
				SelectTag.LIST_VALUE_PAGE_ATTRIBUTE, new BindStatus(getRequestContext(), "testBean.country", false));
		this.tag.setItems(Country.getCountries());
		this.tag.setItemValue("isoCode");
		this.tag.setItemLabel("name");
		this.tag.setId("myOption");
		this.tag.setCssClass("myClass");
		this.tag.setOnclick("CLICK");
		this.tag.setDynamicAttribute(null, dynamicAttribute1, dynamicAttribute1);
		this.tag.setDynamicAttribute(null, dynamicAttribute2, dynamicAttribute2);

		int result = this.tag.doStartTag();
		assertThat(result).isEqualTo(Tag.SKIP_BODY);
		String output = getOutput();
		output = "<doc>" + output + "</doc>";
		SAXReader reader = new SAXReader();
		Document document = reader.read(new StringReader(output));
		Element rootElement = document.getRootElement();
		List children = rootElement.elements();
		assertThat(children.size()).as("Incorrect number of children").isEqualTo(4);
		Element element = (Element) rootElement.selectSingleNode("option[@value = 'UK']");
		assertThat(element.attribute("selected").getValue()).as("UK node not selected").isEqualTo("selected");
		assertThat(element.attribute("id").getValue()).isEqualTo("myOption3");
		assertThat(element.attribute("class").getValue()).isEqualTo("myClass");
		assertThat(element.attribute("onclick").getValue()).isEqualTo("CLICK");
		assertThat(element.attribute(dynamicAttribute1).getValue()).isEqualTo(dynamicAttribute1);
		assertThat(element.attribute(dynamicAttribute2).getValue()).isEqualTo(dynamicAttribute2);
	}

	// Verifies that values/labels are rendered through a registered PropertyEditor.
	@Test
	void withCollectionAndCustomEditor() throws Exception {
		PropertyEditor propertyEditor = new SimpleFloatEditor();

		TestBean target = new TestBean();
		target.setMyFloat(Float.valueOf("12.34"));

		BeanPropertyBindingResult errors = new BeanPropertyBindingResult(target, COMMAND_NAME);
		errors.getPropertyAccessor().registerCustomEditor(Float.class, propertyEditor);
		exposeBindingResult(errors);

		getPageContext().setAttribute(
				SelectTag.LIST_VALUE_PAGE_ATTRIBUTE, new BindStatus(getRequestContext(), "testBean.myFloat", false));

		List<Float> floats = new ArrayList<>();
		floats.add(Float.valueOf("12.30"));
		floats.add(Float.valueOf("12.31"));
		floats.add(Float.valueOf("12.32"));
		floats.add(Float.valueOf("12.33"));
		floats.add(Float.valueOf("12.34"));
		floats.add(Float.valueOf("12.35"));

		this.tag.setItems(floats);
		int result = this.tag.doStartTag();
		assertThat(result).isEqualTo(Tag.SKIP_BODY);
		String output = getOutput();
		output = "<doc>" + output + "</doc>";
		SAXReader reader = new SAXReader();
		Document document = reader.read(new StringReader(output));
		Element rootElement = document.getRootElement();
		List children = rootElement.elements();
		assertThat(children.size()).as("Incorrect number of children").isEqualTo(6);

		// The editor appends 'f', so the option text carries the suffix.
		Element element = (Element) rootElement.selectSingleNode("option[text() = '12.34f']");
		assertThat(element).as("Option node should not be null").isNotNull();
		assertThat(element.attribute("selected").getValue()).as("12.34 node not selected").isEqualTo("selected");
		assertThat(element.attribute("id")).as("No id rendered").isNull();

		element = (Element) rootElement.selectSingleNode("option[text() = '12.35f']");
		assertThat(element).as("Option node should not be null").isNotNull();
		assertThat(element.attribute("selected")).as("12.35 node incorrectly selected").isNull();
		assertThat(element.attribute("id")).as("No id rendered").isNull();
	}

	// An empty items collection must render no option elements.
	@Test
	void withItemsNullReference() throws Exception {
		getPageContext().setAttribute(
				SelectTag.LIST_VALUE_PAGE_ATTRIBUTE, new BindStatus(getRequestContext(), "testBean.country", false));
		this.tag.setItems(Collections.emptyList());
		this.tag.setItemValue("isoCode");
		this.tag.setItemLabel("name");
		int result = this.tag.doStartTag();
		assertThat(result).isEqualTo(Tag.SKIP_BODY);
		String output = getOutput();
		output = "<doc>" + output + "</doc>";
		SAXReader reader = new SAXReader();
		Document document = reader.read(new StringReader(output));
		Element rootElement = document.getRootElement();
		List children = rootElement.elements();
		assertThat(children.size()).as("Incorrect number of children").isEqualTo(0);
	}

	// Without items and with a non-enum path, no options are generated.
	@Test
	void withoutItems() throws Exception {
		this.tag.setItemValue("isoCode");
		this.tag.setItemLabel("name");
		this.selectTag.setPath("testBean");

		this.selectTag.doStartTag();
		int result = this.tag.doStartTag();
		assertThat(result).isEqualTo(Tag.SKIP_BODY);
		this.tag.doEndTag();
		this.selectTag.doEndTag();

		String output = getOutput();
		SAXReader reader = new SAXReader();
		Document document = reader.read(new StringReader(output));
		Element rootElement = document.getRootElement();
		List children = rootElement.elements();
		assertThat(children.size()).as("Incorrect number of children").isEqualTo(0);
	}

	// Without items but bound to an enum property, options come from the enum values.
	@Test
	void withoutItemsEnumParent() throws Exception {
		BeanWithEnum testBean = new BeanWithEnum();
		testBean.setTestEnum(TestEnum.VALUE_2);
		getPageContext().getRequest().setAttribute("testBean", testBean);

		this.selectTag.setPath("testBean.testEnum");

		this.selectTag.doStartTag();
		int result = this.tag.doStartTag();
		assertThat(result).isEqualTo(BodyTag.SKIP_BODY);
		result = this.tag.doEndTag();
		assertThat(result).isEqualTo(Tag.EVAL_PAGE);
		this.selectTag.doEndTag();

		String output = getWriter().toString();
		SAXReader reader = new SAXReader();
		Document document = reader.read(new StringReader(output));
		Element rootElement = document.getRootElement();

		assertThat(rootElement.elements().size()).isEqualTo(2);
		Node value1 = rootElement.selectSingleNode("option[@value = 'VALUE_1']");
		Node value2 = rootElement.selectSingleNode("option[@value = 'VALUE_2']");
		assertThat(value1.getText()).isEqualTo("TestEnum: VALUE_1");
		assertThat(value2.getText()).isEqualTo("TestEnum: VALUE_2");
		// The bound enum value must be the selected option.
		assertThat(rootElement.selectSingleNode("option[@selected]")).isEqualTo(value2);
	}

	// Enum-derived options honoring explicit itemLabel/itemValue properties.
	@Test
	void withoutItemsEnumParentWithExplicitLabelsAndValues() throws Exception {
		BeanWithEnum testBean = new BeanWithEnum();
		testBean.setTestEnum(TestEnum.VALUE_2);
		getPageContext().getRequest().setAttribute("testBean", testBean);

		this.selectTag.setPath("testBean.testEnum");
		this.tag.setItemLabel("enumLabel");
		this.tag.setItemValue("enumValue");

		this.selectTag.doStartTag();
		int result = this.tag.doStartTag();
		assertThat(result).isEqualTo(BodyTag.SKIP_BODY);
		result = this.tag.doEndTag();
		assertThat(result).isEqualTo(Tag.EVAL_PAGE);
		this.selectTag.doEndTag();

		String output = getWriter().toString();
		SAXReader reader = new SAXReader();
		Document document = reader.read(new StringReader(output));
		Element rootElement = document.getRootElement();

		assertThat(rootElement.elements().size()).isEqualTo(2);
		Node value1 = rootElement.selectSingleNode("option[@value = 'Value: VALUE_1']");
		Node value2 = rootElement.selectSingleNode("option[@value = 'Value: VALUE_2']");
		assertThat(value1.getText()).isEqualTo("Label: VALUE_1");
		assertThat(value2.getText()).isEqualTo("Label: VALUE_2");
		assertThat(rootElement.selectSingleNode("option[@selected]")).isEqualTo(value2);
	}

	// Exposes the command bean and a float list to every test's request.
	@Override
	protected void extendRequest(MockHttpServletRequest request) {
		TestBean bean = new TestBean();
		bean.setName("foo");
		bean.setCountry("UK");
		bean.setMyFloat(Float.valueOf("12.34"));
		request.setAttribute(COMMAND_NAME, bean);

		List floats = new ArrayList();
		floats.add(Float.valueOf("12.30"));
		floats.add(Float.valueOf("12.31"));
		floats.add(Float.valueOf("12.32"));
		floats.add(Float.valueOf("12.33"));
		floats.add(Float.valueOf("12.34"));
		floats.add(Float.valueOf("12.35"));

		request.setAttribute("floats", floats);
	}

	@Override
	protected void exposeBindingResult(Errors errors) {
		// wrap errors in a Model
		Map model = new HashMap();
		model.put(BindingResult.MODEL_KEY_PREFIX + COMMAND_NAME, errors);

		// replace the request context with one containing the errors
		MockPageContext pageContext = getPageContext();
		RequestContext context = new RequestContext((HttpServletRequest) pageContext.getRequest(), model);
		pageContext.setAttribute(RequestContextAwareTag.REQUEST_CONTEXT_PAGE_ATTRIBUTE, context);
	}

}
| |
// Copyright 2011 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.adwords.lib.client;
import com.google.api.ads.adwords.lib.client.reporting.ReportingConfiguration;
import com.google.api.ads.common.lib.auth.OAuth2Compatible;
import com.google.api.ads.common.lib.client.AdsSession;
import com.google.api.ads.common.lib.conf.ConfigurationHelper;
import com.google.api.ads.common.lib.conf.ConfigurationLoadException;
import com.google.api.ads.common.lib.exception.ValidationException;
import com.google.api.client.auth.oauth2.Credential;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import org.apache.commons.configuration.Configuration;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import javax.annotation.Nullable;
import javax.annotation.concurrent.ThreadSafe;
/**
* An {@code AdWordsSession} represents a single session of AdWords use.
*
* <p>
* Implementation is not thread-safe.
* </p>
*/
public class AdWordsSession implements AdsSession, OAuth2Compatible {
// Mutable per-request settings.
private String clientCustomerId;
private Long expressBusinessId;
private String expressPlusPageId;
private Boolean isValidateOnly;
private Boolean isPartialFailure;
private Credential oAuth2Credential;
private ReportingConfiguration reportingConfiguration;

// Session identity, fixed at construction time.
private final String userAgent;
private final String developerToken;
private final String endpoint;

/** Default AdWords API endpoint. */
public static final String DEFAULT_ENDPOINT = "https://adwords.google.com/";
// Placeholder user agent used when none was configured.
private static final String DEFAULT_USER_AGENT = "INSERT_USERAGENT_HERE";
/**
 * Private constructor.
 *
 * @param builder the AdWordsSession builder whose settings are copied into
 *     this session
 */
private AdWordsSession(Builder builder) {
  this.clientCustomerId = builder.clientCustomerId;
  this.developerToken = builder.developerToken;
  this.endpoint = builder.endpoint;
  this.isPartialFailure = builder.isPartialFailure;
  this.isValidateOnly = builder.isValidateOnly;
  this.oAuth2Credential = builder.oAuth2Credential;
  this.userAgent = builder.userAgent;
  this.reportingConfiguration = builder.reportingConfiguration;
  // Note: express business/page IDs are not builder settings; they are set
  // via the setters after construction.
}
/**
 * Gets the client customer ID.
 *
 * @return the client customer ID
 */
public String getClientCustomerId() {
  return clientCustomerId;
}
/**
 * Sets the client customer ID.
 *
 * @param clientCustomerId the client customer ID to use for subsequent calls
 */
public void setClientCustomerId(String clientCustomerId) {
  this.clientCustomerId = clientCustomerId;
}
/**
 * Gets the AdWords Express business ID used by AdWords Express PromotionService.
 *
 * @return the business ID, or {@code null} if not set
 */
@Nullable
public Long getExpressBusinessId() {
  return expressBusinessId;
}
/**
 * Sets the AdWords Express business ID used by AdWords Express PromotionService.
 *
 * <p>When using PromotionService, either set this value or the express plus page ID,
 * but not both.
 *
 * @param businessId the business ID, or {@code null} to clear it
 */
public void setExpressBusinessId(@Nullable Long businessId) {
  this.expressBusinessId = businessId;
}
/**
 * Gets the Google+ page ID for the Google My Business location used by AdWords Express
 * PromotionService.
 *
 * @return the Google+ page ID, or {@code null} if not set
 */
@Nullable
public String getExpressPlusPageId() {
  return expressPlusPageId;
}
/**
 * Sets the Google+ page ID for the Google My Business location used by AdWords Express
 * PromotionService.
 *
 * <p>When using PromotionService, either set this value or the express business ID,
 * but not both.
 *
 * @param expressPlusPageId the Google+ page ID, or {@code null} to clear it
 */
// Parameter is now annotated @Nullable for consistency with the @Nullable
// getter and the sibling setExpressBusinessId(@Nullable Long) setter.
public void setExpressPlusPageId(@Nullable String expressPlusPageId) {
  this.expressPlusPageId = expressPlusPageId;
}
/**
 * Returns {@code true} if the session should only validate the request.
 *
 * @return the validate-only flag, or {@code null} if not set
 */
@Nullable
public Boolean isValidateOnly() {
  return isValidateOnly;
}
/**
 * Sets whether this session should only validate the request.
 *
 * @param isValidateOnly the validate-only flag, or {@code null} to unset it
 */
public void setValidateOnly(@Nullable Boolean isValidateOnly) {
  this.isValidateOnly = isValidateOnly;
}
/**
 * Gets the user agent configured for this session.
 *
 * @return the userAgent
 */
public String getUserAgent() {
  return userAgent;
}
/**
 * Gets the developer token configured for this session.
 *
 * @return the developerToken
 */
public String getDeveloperToken() {
  return developerToken;
}
/**
 * Returns whether this session allows partial failure.
 *
 * @return the isPartialFailure flag, or {@code null} if not set
 */
@Nullable
public Boolean isPartialFailure() {
  return isPartialFailure;
}
/**
 * Sets whether this session should allow partial failure.
 *
 * @param isPartialFailure the partial-failure flag, or {@code null} to unset it
 */
public void setPartialFailure(@Nullable Boolean isPartialFailure) {
  this.isPartialFailure = isPartialFailure;
}
/**
 * Gets the OAuth2 credentials.
 *
 * @return the OAuth2 credential attached to this session
 */
@Override
public Credential getOAuth2Credential() {
  return oAuth2Credential;
}
/**
 * Sets the OAuth2 credential. Any other authentication credentials on the
 * session will be removed.
 *
 * @param oAuth2Credential the credential to use; must not be {@code null}
 * @throws NullPointerException if {@code oAuth2Credential} is {@code null}
 */
public void setOAuth2Credential(Credential oAuth2Credential) {
  Preconditions.checkNotNull(oAuth2Credential, "oAuth2Credential cannot be null.");
  // Clear existing credentials first so only one mechanism is ever active.
  clearAuthentication();
  this.oAuth2Credential = oAuth2Credential;
}
/**
 * Gets the reporting configuration.
 *
 * @return the reporting configuration, or {@code null} if not set
 */
@Nullable
public ReportingConfiguration getReportingConfiguration() {
  return reportingConfiguration;
}
/**
 * Sets the reporting configuration.
 *
 * @param reportingConfiguration the configuration to use, or {@code null} to clear it
 */
public void setReportingConfiguration(@Nullable ReportingConfiguration reportingConfiguration) {
  this.reportingConfiguration = reportingConfiguration;
}
/**
 * Clears all the authentication credentials from this session.
 */
private void clearAuthentication() {
  // OAuth2 is currently the only supported credential type.
  oAuth2Credential = null;
}
/**
 * Gets the service endpoint for this session.
 *
 * @return the endpoint
 */
@Override
public String getEndpoint() {
  return endpoint;
}
/**
 * Returns a new {@link Builder} with all settings copied from this session. This is <em>not</em>
 * thread-safe unless this session is an {@link ImmutableAdWordsSession}.
 *
 * @return a builder pre-populated from this session
 */
public Builder newBuilder() {
  return new Builder(this);
}
/**
 * Immutable, thread-safe implementation of AdWordsSession.
 *
 * <p>Every inherited mutator is overridden to throw {@link UnsupportedOperationException},
 * so a built instance can be shared freely across threads.</p>
 */
@ThreadSafe
public static final class ImmutableAdWordsSession extends AdWordsSession {

    private ImmutableAdWordsSession(Builder builder) {
        super(builder);
    }

    /**
     * Unconditionally rejects a mutation attempt on this immutable session.
     *
     * @param attributeName the attribute the caller attempted to set, named in the message
     */
    private void throwUnsupportedOperationException(String attributeName) {
        String message =
                String.format("Cannot set %s. ImmutableAdWordsSession is immutable.", attributeName);
        throw new UnsupportedOperationException(message);
    }

    @Override
    public final void setClientCustomerId(String clientCustomerId) {
        throwUnsupportedOperationException("clientCustomerId");
    }

    @Override
    public final void setExpressBusinessId(@Nullable Long businessId) {
        throwUnsupportedOperationException("businessId");
    }

    @Override
    public final void setExpressPlusPageId(String expressPlusPageId) {
        throwUnsupportedOperationException("expressPlusPageId");
    }

    @Override
    public final void setValidateOnly(@Nullable Boolean isValidateOnly) {
        throwUnsupportedOperationException("isValidateOnly");
    }

    @Override
    public final void setPartialFailure(@Nullable Boolean isPartialFailure) {
        throwUnsupportedOperationException("isPartialFailure");
    }

    @Override
    public final void setOAuth2Credential(Credential oAuth2Credential) {
        throwUnsupportedOperationException("oAuth2Credential");
    }

    @Override
    public final void setReportingConfiguration(
            @Nullable ReportingConfiguration reportingConfiguration) {
        throwUnsupportedOperationException("reportingConfiguration");
    }
}
/**
 * Builder for AdWordsSession.
 *
 * <p>
 * Implementation is not thread-safe.
 * </p>
 */
public static class Builder implements
        com.google.api.ads.common.lib.utils.Builder<AdWordsSession> {
    private String endpoint;
    private String userAgent;
    private String developerToken;
    private String clientCustomerId;
    private Boolean isPartialFailure;
    private Boolean isValidateOnly;
    private Credential oAuth2Credential;
    private ReportingConfiguration reportingConfiguration;
    // Reads builder settings from properties files/URLs; see the fromFile() overloads.
    private final ConfigurationHelper configHelper;

    /**
     * Constructs an empty builder. To construct a builder initialized to the settings of
     * an existing {@link AdWordsSession}, use {@link AdWordsSession#newBuilder()} instead.
     */
    public Builder() {
        this.configHelper = new ConfigurationHelper();
    }

    /** Copy constructor backing {@link AdWordsSession#newBuilder()}. */
    private Builder(AdWordsSession sessionToClone) {
        this();
        this.endpoint = sessionToClone.getEndpoint();
        this.userAgent = sessionToClone.getUserAgent();
        this.developerToken = sessionToClone.getDeveloperToken();
        this.clientCustomerId = sessionToClone.getClientCustomerId();
        this.isPartialFailure = sessionToClone.isPartialFailure();
        this.isValidateOnly = sessionToClone.isValidateOnly();
        this.oAuth2Credential = sessionToClone.getOAuth2Credential();
        this.reportingConfiguration = sessionToClone.getReportingConfiguration();
    }

    @Override
    public Builder fromFile() throws ConfigurationLoadException {
        // Defaults to the conventional properties file name declared on the Builder interface.
        return fromFile(Builder.DEFAULT_CONFIGURATION_FILENAME);
    }

    @Override
    public Builder fromFile(String path) throws ConfigurationLoadException {
        return from(configHelper.fromFile(path));
    }

    @Override
    public Builder fromFile(File path) throws ConfigurationLoadException {
        return from(configHelper.fromFile(path));
    }

    @Override
    public Builder fromFile(URL path) throws ConfigurationLoadException {
        return from(configHelper.fromFile(path));
    }

    /**
     * Reads properties from the provided {@link Configuration} object.<br><br>
     * Known properties:
     * <ul>
     * <li>api.adwords.clientCustomerId</li>
     * <li>api.adwords.userAgent</li>
     * <li>api.adwords.developerToken</li>
     * <li>api.adwords.isPartialFailure</li>
     * <li>api.adwords.endpoint</li>
     * <li>api.adwords.reporting.skipHeader</li>
     * <li>api.adwords.reporting.skipColumnHeader</li>
     * <li>api.adwords.reporting.skipSummary</li>
     * </ul>
     *
     * <p>Missing properties leave the corresponding attribute {@code null}. Note that the
     * OAuth2 credential and the validate-only flag are never read from configuration.</p>
     *
     * @param config
     * @return Builder populated from the Configuration
     */
    @Override
    public Builder from(Configuration config) {
        this.clientCustomerId = config.getString("api.adwords.clientCustomerId", null);
        this.userAgent = config.getString("api.adwords.userAgent", null);
        this.developerToken = config.getString("api.adwords.developerToken", null);
        this.isPartialFailure = config.getBoolean("api.adwords.isPartialFailure", null);
        this.endpoint = config.getString("api.adwords.endpoint", null);
        // Only create a ReportingConfiguration for this object if at least one reporting
        // configuration config value is present.
        Boolean isSkipReportHeader = config.getBoolean("api.adwords.reporting.skipHeader", null);
        Boolean isSkipColumnHeader =
                config.getBoolean("api.adwords.reporting.skipColumnHeader", null);
        Boolean isSkipReportSummary = config.getBoolean("api.adwords.reporting.skipSummary", null);
        if (isSkipReportHeader != null || isSkipColumnHeader != null || isSkipReportSummary != null) {
            this.reportingConfiguration = new ReportingConfiguration.Builder()
                    .skipReportHeader(isSkipReportHeader)
                    .skipColumnHeader(isSkipColumnHeader)
                    .skipReportSummary(isSkipReportSummary)
                    .build();
        }
        return this;
    }

    /**
     * Includes OAuth2 credential to be used for OAuth2 authentication. Replaces any
     * previously configured credentials.
     */
    public Builder withOAuth2Credential(Credential oAuth2Credential) {
        clearAuthentication();
        this.oAuth2Credential = oAuth2Credential;
        return this;
    }

    /** Includes a reporting configuration. Optional. */
    public Builder withReportingConfiguration(ReportingConfiguration reportingConfiguration) {
        this.reportingConfiguration = reportingConfiguration;
        return this;
    }

    /**
     * Includes a developer token. Required.
     */
    public Builder withDeveloperToken(String developerToken) {
        this.developerToken = developerToken;
        return this;
    }

    /**
     * Includes user agent. Required; see {@link #validate()} for the constraints enforced.
     */
    public Builder withUserAgent(String userAgent) {
        this.userAgent = userAgent;
        return this;
    }

    /**
     * Override the endpoint server. Optional and defaults to
     * https://adwords.google.com.
     */
    public Builder withEndpoint(String endpoint) {
        this.endpoint = endpoint;
        return this;
    }

    /**
     * Includes a clientCustomerId.
     */
    public Builder withClientCustomerId(String clientCustomerId) {
        this.clientCustomerId = clientCustomerId;
        return this;
    }

    /**
     * Enables partial failure. Default is disabled.
     */
    public Builder enablePartialFailure() {
        this.isPartialFailure = true;
        return this;
    }

    /**
     * Enables validate only. Default is disabled.
     */
    public Builder enableValidateOnly() {
        this.isValidateOnly = true;
        return this;
    }

    /**
     * Clears all the authentication credentials from this session.
     */
    private void clearAuthentication() {
        oAuth2Credential = null;
    }

    /**
     * Builds the {@code AdWordsSession}.
     *
     * @return the built {@code AdWordsSession}
     * @throws ValidationException if the attributes of this builder fail validation
     */
    @Override
    public AdWordsSession build() throws ValidationException {
        defaultOptionals();
        validate();
        return new AdWordsSession(this);
    }

    /**
     * Builds a thread-safe {@link ImmutableAdWordsSession}.
     * @return the built {@code ImmutableAdWordsSession}
     * @throws ValidationException if the attributes of this builder fail validation
     */
    public ImmutableAdWordsSession buildImmutable() throws ValidationException {
        defaultOptionals();
        validate();
        return new ImmutableAdWordsSession(this);
    }

    /**
     * Fills in defaults if {@code null}. Currently only the endpoint has a default.
     */
    private void defaultOptionals() {
        if (this.endpoint == null) {
            this.endpoint = DEFAULT_ENDPOINT;
        }
    }

    /**
     * Validates the properties for the AdWords session: an OAuth2 credential and a developer
     * token must be present, the user agent must be set and customized, and the endpoint
     * must parse as a URL.
     *
     * @throws ValidationException naming the first offending attribute
     */
    private void validate() throws ValidationException {
        // Check for at least one authentication mechanism.
        if (this.oAuth2Credential == null) {
            throw new ValidationException("OAuth2 authentication must be used.", "");
        }
        // Check that the developer token is set.
        if (this.developerToken == null) {
            throw new ValidationException("A developer token must be set.", "developerToken");
        }
        // Check that user agent is not empty or the default.
        if (Strings.isNullOrEmpty(userAgent)
                || userAgent.contains(DEFAULT_USER_AGENT)) {
            throw new ValidationException(String.format(
                    "User agent must be set and not be the default [%s]", DEFAULT_USER_AGENT),
                    "userAgent");
        }
        // Make sure they specify an endpoint.
        try {
            new URL(this.endpoint);
        } catch (MalformedURLException e) {
            throw new ValidationException(String.format("Endpoint [%s] not recognized as a valid URL.",
                    this.endpoint), "endpoint", e);
        }
    }
}
}
| |
package org.apache.rave.portal.util.data;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.rave.model.*;
import org.apache.rave.portal.repository.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
 * Implementation of the DataImporter for the {@link ModelWrapper}.
 *
 * <p>Persists an imported data set — page layouts, authorities, users, categories, widgets,
 * pages, portal preferences, page templates and activities — through the autowired
 * repositories. Because the repositories assign new ids on save, entities that reference each
 * other by id are re-linked via maps from the id found in the import file to the newly saved
 * entity.</p>
 */
@Transactional
public class ModelWrapperDataExecutor implements DataImporter.Executor<ModelWrapper> {
    //TODO GROUP REPOSITORY
    @Autowired
    private PageLayoutRepository pageLayoutRepository;
    @Autowired
    private UserRepository userRepository;
    @Autowired
    private WidgetRepository widgetRepository;
    @Autowired
    private PageRepository pageRepository;
    @Autowired
    private AuthorityRepository authorityRepository;
    @Autowired
    private PortalPreferenceRepository portalPreferenceRepository;
    @Autowired
    private CategoryRepository categoryRepository;
    @Autowired
    private PageTemplateRepository pageTemplateRepository;
    @Autowired
    private ActivityStreamsRepository activityStreamsRepository;

    /** Seed data is only loaded into an empty portal (no widgets persisted yet). */
    public boolean needsLoading() {
        return widgetRepository.getCountAll() == 0;
    }

    /**
     * Imports the entire wrapped data set. The order is significant: users are saved first so
     * that categories, widgets and pages can translate their user references to the new ids,
     * and widgets before pages/templates for the same reason.
     */
    @Transactional
    public void loadData(ModelWrapper wrapper) {
        savePageLayouts(wrapper);
        saveAuthorities(wrapper);
        Map<String, User> usersByOriginalId = saveUsers(wrapper);
        Map<String, Category> categoryByOldId = saveCategories(wrapper, usersByOriginalId);
        Map<String, Widget> widgetsById = saveWidgets(wrapper, usersByOriginalId, categoryByOldId);
        savePages(wrapper, usersByOriginalId, widgetsById);
        savePreferences(wrapper);
        saveTemplates(wrapper, widgetsById);
        saveActivities(wrapper);
    }

    /** Saves every imported activity stream entry as-is. */
    private void saveActivities(ModelWrapper wrapper) {
        if (wrapper.getActivities() != null) {
            for (ActivityStreamsEntry activity : wrapper.getActivities()) {
                activityStreamsRepository.save(activity);
            }
        }
    }

    /**
     * Saves page templates and their sub-templates, re-linking widget references first.
     * NOTE(review): unlike the other save methods, this does not null-check
     * wrapper.getPageTemplates(); an import without templates would NPE here — confirm
     * whether templates are guaranteed present.
     */
    private void saveTemplates(ModelWrapper wrapper, Map<String, Widget> widgetsById) {
        for (PageTemplate template : wrapper.getPageTemplates()) {
            updateTemplate(widgetsById, template);
            for (PageTemplate sub : template.getSubPageTemplates()) {
                sub.setParentPageTemplate(template);
                updateTemplate(widgetsById, sub);
            }
            pageTemplateRepository.save(template);
        }
    }

    /**
     * Clears imported ids on a template tree and swaps its widget references from the ids in
     * the import file to the ids assigned when the widgets were saved.
     */
    private void updateTemplate(Map<String, Widget> widgetsById, PageTemplate template) {
        setIdViaReflection(template, null);
        for (PageTemplateRegion region : template.getPageTemplateRegions()) {
            setIdViaReflection(region, null);
            region.setPageTemplate(template);
            for (PageTemplateWidget widget : region.getPageTemplateWidgets()) {
                Widget fromDb = widgetsById.get(widget.getWidgetId());
                if (fromDb != null) {
                    widget.setWidgetId(fromDb.getId());
                }
                setIdViaReflection(widget, null);
                widget.setPageTemplateRegion(region);
            }
        }
    }

    /**
     * Saves categories, translating creator/modifier user ids to the newly saved users.
     *
     * @return map from the category's id in the import file (falling back to its text when no
     *         id was supplied) to the saved entity
     */
    private Map<String, Category> saveCategories(ModelWrapper wrapper, Map<String, User> usersByOriginalId) {
        Map<String, Category> categoryByOldId = Maps.newHashMap();
        if (wrapper.getCategories() != null) {
            for (Category category : wrapper.getCategories()) {
                String id = category.getId() == null ? category.getText() : category.getId();
                category.setId(null);
                User lastModified = usersByOriginalId.get(category.getLastModifiedUserId());
                if (lastModified != null) {
                    category.setLastModifiedUserId(lastModified.getId());
                }
                User created = usersByOriginalId.get(category.getCreatedUserId());
                if (created != null) {
                    category.setCreatedUserId(created.getId());
                }
                categoryByOldId.put(id, categoryRepository.save(category));
            }
        }
        return categoryByOldId;
    }

    /** Saves portal preferences as-is. */
    private void savePreferences(ModelWrapper wrapper) {
        if (wrapper.getPortalPreferences() != null) {
            for (PortalPreference preference : wrapper.getPortalPreferences()) {
                portalPreferenceRepository.save(preference);
            }
        }
    }

    /**
     * Saves users with repository-assigned ids.
     *
     * @return map from each user's id in the import file to the saved entity
     */
    private Map<String, User> saveUsers(ModelWrapper wrapper) {
        Map<String, User> usersByOriginalId = Maps.newHashMap();
        if (wrapper.getUsers() != null) {
            for (User user : wrapper.getUsers()) {
                String id = user.getId();
                user.setId(null);
                usersByOriginalId.put(id, userRepository.save(user));
            }
        }
        return usersByOriginalId;
    }

    /** Saves pages and their sub-pages after re-linking owners, members and widgets. */
    private void savePages(ModelWrapper wrapper, Map<String, User> usersByOriginalId, Map<String, Widget> widgetsById) {
        if (wrapper.getPages() != null) {
            for (Page page : wrapper.getPages()) {
                updatePage(usersByOriginalId, widgetsById, page);
                for (Page sub : page.getSubPages()) {
                    sub.setParentPage(page);
                    updatePage(usersByOriginalId, widgetsById, sub);
                }
                pageRepository.save(page);
            }
        }
    }

    /**
     * Re-links a page's regions, region widgets, members and owner to the freshly saved
     * entities, clearing imported ids so the repository assigns new ones.
     */
    private void updatePage(Map<String, User> usersByOriginalId, Map<String, Widget> widgetsById, Page page) {
        for (Region region : page.getRegions()) {
            region.setPage(page);
            setIdViaReflection(region, null);
            for (RegionWidget widget : region.getRegionWidgets()) {
                widget.setRegion(region);
                // NOTE(review): widgetsById is keyed by the imported Widget's old id, but this
                // lookup uses the RegionWidget's own id rather than widget.getWidgetId() (the
                // key used in updateTemplate()) — confirm this is intentional.
                Widget fromDb = widgetsById.get(widget.getId());
                if (fromDb != null) {
                    widget.setWidgetId(fromDb.getId());
                }
                setIdViaReflection(widget, null);
            }
        }
        for (PageUser user : page.getMembers()) {
            user.setPage(page);
            setIdViaReflection(user, null);
            User fromDb = usersByOriginalId.get(user.getUserId());
            if (fromDb != null) {
                user.setUserId(fromDb.getId());
            }
        }
        // NOTE(review): unlike the lookups above, the owner is not null-checked; a page whose
        // owner is absent from the import throws a NullPointerException here.
        User user = usersByOriginalId.get(page.getOwnerId());
        page.setOwnerId(user.getId());
    }

    /**
     * Saves widgets, translating owner and category references to their newly assigned ids.
     *
     * @return map from the widget's id in the import file (falling back to its URL when no id
     *         was supplied) to the saved entity
     */
    private Map<String, Widget> saveWidgets(ModelWrapper wrapper, Map<String, User> usersById, Map<String, Category> categoryMap) {
        Map<String, Widget> widgetsByOldId = Maps.newHashMap();
        if (wrapper.getWidgets() != null) {
            for (Widget widget : wrapper.getWidgets()) {
                String id = widget.getId() == null ? widget.getUrl() : widget.getId();
                setIdViaReflection(widget, null);
                User user = usersById.get(widget.getOwnerId());
                if (user != null) {
                    widget.setOwnerId(user.getId());
                }
                List<Category> categories = Lists.newArrayList();
                if (widget.getCategories() != null) {
                    for (Category c : widget.getCategories()) {
                        // NOTE(review): categoryMap is keyed by the category's old id (or its
                        // text when the id was null); a lookup by c.getId() may return null in
                        // the text-keyed case, adding a null entry — confirm expected.
                        categories.add(categoryMap.get(c.getId()));
                    }
                    widget.setCategories(categories);
                }
                widgetsByOldId.put(id, widgetRepository.save(widget));
            }
        }
        return widgetsByOldId;
    }

    /** Sets an entity's id, preferring a setter but falling back to direct field access. */
    private void setIdViaReflection(Object target, Object id) {
        Class clazz = target.getClass();
        Method setter = getMethod(clazz, "setId");
        if (setter != null) {
            callMethod(target, setter, id);
        } else {
            setField(target, id, clazz);
        }
    }

    /** Writes {@code id} to the first id-like field found; fails loudly if none matches. */
    private void setField(Object target, Object id, Class clazz) {
        List<String> potentialFields = Arrays.asList("id", "_id", "id_");
        for (String name : potentialFields) {
            if (findAndSetField(clazz, target, id, name)) return;
        }
        throw new IllegalStateException("Could not match field");
    }

    /** Returns true when a field named {@code name} exists and was written. */
    private boolean findAndSetField(Class clazz, Object target, Object value, String name) {
        Field idField = getField(clazz, name);
        if (idField != null) {
            setField(target, idField, value);
            return true;
        } else {
            return false;
        }
    }

    /** Looks up a declared field; absence is signalled by a null return (hence the swallow). */
    private Field getField(Class clazz, String match) {
        Field field = null;
        try {
            field = clazz.getDeclaredField(match);
        } catch (NoSuchFieldException e) {
            // Intentionally ignored: null return means "not found".
        }
        return field;
    }

    /**
     * Looks up a public method; absence is signalled by a null return.
     * NOTE(review): getMethod(match, Void.class) only matches a method taking a single Void
     * parameter — an ordinary setId(String)/setId(Long) setter never matches, so the field
     * fallback in setIdViaReflection() is effectively always taken. Confirm intended.
     */
    private Method getMethod(Class clazz, String match) {
        Method method = null;
        try {
            method = clazz.getMethod(match, Void.class);
        } catch (NoSuchMethodException e) {
            // Intentionally ignored: null return means "not found".
        }
        return method;
    }

    /** Invokes the method, wrapping reflection failures in an unchecked exception. */
    private void callMethod(Object o, Method m, Object... param) {
        try {
            m.invoke(o, param);
        } catch (IllegalAccessException e) {
            throw new IllegalStateException("Unable to call method", e);
        } catch (InvocationTargetException e) {
            throw new IllegalStateException("Unable to call method", e);
        }
    }

    /**
     * Writes the value directly to the field.
     * NOTE(review): the field is never made accessible via setAccessible(true), so a private
     * id field always hits the IllegalAccessException path — confirm the model classes expose
     * accessible id fields.
     */
    private void setField(Object o, Field f, Object val) {
        try {
            f.set(o, val);
        } catch (IllegalAccessException e) {
            throw new IllegalStateException("Unable to set field", e);
        }
    }

    /** Saves authorities as-is. */
    private void saveAuthorities(ModelWrapper wrapper) {
        if (wrapper.getAuthorities() != null) {
            for (Authority authority : wrapper.getAuthorities()) {
                authorityRepository.save(authority);
            }
        }
    }

    /** Saves page layouts as-is. */
    private void savePageLayouts(ModelWrapper wrapper) {
        if (wrapper.getPageLayouts() != null) {
            for (PageLayout layout : wrapper.getPageLayouts()) {
                pageLayoutRepository.save(layout);
            }
        }
    }

    // Plain setters below allow the repositories to be wired manually, as an alternative to
    // the @Autowired field injection above.
    public void setPageLayoutRepository(PageLayoutRepository pageLayoutRepository) {
        this.pageLayoutRepository = pageLayoutRepository;
    }

    public void setUserRepository(UserRepository userRepository) {
        this.userRepository = userRepository;
    }

    public void setWidgetRepository(WidgetRepository widgetRepository) {
        this.widgetRepository = widgetRepository;
    }

    public void setPageRepository(PageRepository pageRepository) {
        this.pageRepository = pageRepository;
    }

    public void setAuthorityRepository(AuthorityRepository authorityRepository) {
        this.authorityRepository = authorityRepository;
    }

    public void setPortalPreferenceRepository(PortalPreferenceRepository portalPreferenceRepository) {
        this.portalPreferenceRepository = portalPreferenceRepository;
    }

    public void setCategoryRepository(CategoryRepository categoryRepository) {
        this.categoryRepository = categoryRepository;
    }

    public void setPageTemplateRepository(PageTemplateRepository pageTemplateRepository) {
        this.pageTemplateRepository = pageTemplateRepository;
    }

    public void setActivityStreamsRepository(ActivityStreamsRepository activityStreamsRepository) {
        this.activityStreamsRepository = activityStreamsRepository;
    }
}
| |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.tabmodel.document;
import android.content.Intent;
import android.net.Uri;
import android.test.suitebuilder.annotation.SmallTest;
import org.chromium.base.CommandLine;
import org.chromium.base.ThreadUtils;
import org.chromium.base.test.util.AdvancedMockContext;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.browser.tabmodel.TabModel;
import org.chromium.chrome.browser.tabmodel.TabModelUtils;
import org.chromium.chrome.test.util.browser.tabmodel.document.MockActivityDelegate;
import org.chromium.chrome.test.util.browser.tabmodel.document.MockDocumentTabCreatorManager;
import org.chromium.chrome.test.util.browser.tabmodel.document.MockStorageDelegate;
import org.chromium.chrome.test.util.browser.tabmodel.document.TestInitializationObserver;
import org.chromium.content.browser.test.NativeLibraryTestBase;
import java.util.HashMap;
import java.util.Map;
/**
* Tests the functionality of the DocumentTabModel.
*/
public class DocumentTabModelImplTest extends NativeLibraryTestBase {
private static final String MODEL_STATE_WITH_1010_1011 = "CgUgACjyBwoFIAEo8wc=";
private static final String TAB_STATE_1010_ERFWORLD_RETARGETABLE =
"AAABSVhnsswAAAFkYAEAAAAAAAABAAAAAAAAAFABAABMAQAAAAAAACcAAABodHRwOi8vd3d3LmVyZndvcmxkLm"
+ "NvbS9lcmZfc3RyZWFtL3ZpZXcAAAAAAMQAAADAAAAAFQAAAAAAAABOAAAAaAB0AHQAcAA6AC8ALwB3AHcAdw"
+ "AuAGUAcgBmAHcAbwByAGwAZAAuAGMAbwBtAC8AZQByAGYAXwBzAHQAcgBlAGEAbQAvAHYAaQBlAHcAAAD///"
+ "//AAAAAAAAAAD/////AAAAAAgAAAAAAAAAAAAAAM2oGVWBBgUAzqgZVYEGBQDPqBlVgQYFAAEAAAAIAAAAAA"
+ "AAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAP////8AAAAAAAAACAAAAAAAAAAAAQAAACcAAABodHRwOi8vd3"
+ "d3LmVyZndvcmxkLmNvbS9lcmZfc3RyZWFtL3ZpZXcAAAAAAOrxn50XZS4AAAAAAMgAAAD/////AAAAAAACAA"
+ "AAAAAAAAAA";
private static final String TAB_STATE_1011_REDDIT =
"AAABSVhw+HkAAAJkYAIAAAAAAAACAAAAAQAAACABAAAcAQAAAAAAABcAAABjaHJvbWUtbmF0aXZlOi8vbmV3"
+ "dGFiLwAHAAAATgBlAHcAIAB0AGEAYgAAAKQAAACgAAAAFQAAAAAAAAAuAAAAYwBoAHIAbwBtAGUALQBuAGEA"
+ "dABpAHYAZQA6AC8ALwBuAGUAdwB0AGEAYgAvAAAA/////wAAAAAAAAAA/////wAAAAAIAAAAAAAAAAAA8D9M"
+ "Bk15gQYFAE0GTXmBBgUATgZNeYEGBQABAAAACAAAAAAAAAAAAPC/CAAAAAAAAAAAAPC/AAAAAAAAAAD/////"
+ "AAAAAAYAAAAAAAAAAAAAAAEAAAAXAAAAY2hyb21lLW5hdGl2ZTovL25ld3RhYi8AAAAAAN5f08EXZS4AAAAA"
+ "AAAAAAAsAQAAKAEAAAEAAAAfAAAAaHR0cDovL3d3dy5yZWRkaXQuY29tL3IvYW5kcm9pZAAAAAAAtAAAALAA"
+ "AAAVAAAAAAAAAD4AAABoAHQAdABwADoALwAvAHcAdwB3AC4AcgBlAGQAZABpAHQALgBjAG8AbQAvAHIALwBh"
+ "AG4AZAByAG8AaQBkAAAA/////wAAAAAAAAAA/////wAAAAAIAAAAAAAAAAAAAABPBk15gQYFAFAGTXmBBgUA"
+ "TgZNeYEGBQABAAAACAAAAAAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAD/////AAAAAAEAAAIAAAAAAAAA"
+ "AAEAAAAbAAAAaHR0cDovL3JlZGRpdC5jb20vci9hbmRyb2lkAAAAAAAB65zCF2UuAAAAAADIAAAA/////wAA"
+ "AAAAAgAAAAAAAAAAAA==";
/**
 * Helper that closes a tab on the UI thread and reports whether the close succeeded;
 * TabModel mutations in these tests are consistently performed via runOnUiThreadBlocking.
 */
private static class CloseRunnable implements Runnable {
    final DocumentTabModel mTabModel;
    final int mIndex;
    // Result of the closeTabAt() call, written by run().
    boolean mSucceeded;

    public CloseRunnable(DocumentTabModel model, int index) {
        mTabModel = model;
        mIndex = index;
    }

    @Override
    public void run() {
        mSucceeded = mTabModel.closeTabAt(mIndex);
    }

    /** Synchronously closes the tab at {@code index} on the UI thread. */
    static boolean closeTabAt(DocumentTabModel model, int index) throws Exception {
        CloseRunnable runnable = new CloseRunnable(model, index);
        ThreadUtils.runOnUiThreadBlocking(runnable);
        return runnable.mSucceeded;
    }
}
// Mock delegates backing the DocumentTabModel under test.
private MockActivityDelegate mActivityDelegate;
private MockStorageDelegate mStorageDelegate;
private MockDocumentTabCreatorManager mTabCreatorManager;
// Model under test; created per-test by setupDocumentTabModel().
private DocumentTabModel mTabModel;
private AdvancedMockContext mContext;

@Override
protected void setUp() throws Exception {
    super.setUp();
    CommandLine.init(null);
    loadNativeLibraryAndInitBrowserProcess();
    mActivityDelegate = new MockActivityDelegate();
    mTabCreatorManager = new MockDocumentTabCreatorManager();
    mContext = new AdvancedMockContext(getInstrumentation().getTargetContext());
    // Keep mock storage under the cache dir so tearDown() can destroy it cleanly.
    mStorageDelegate = new MockStorageDelegate(mContext.getCacheDir());
}
@Override
protected void tearDown() throws Exception {
    mStorageDelegate.ensureDirectoryDestroyed();
    ThreadUtils.runOnUiThreadBlocking(new Runnable() {
        @Override
        public void run() {
            // Only destroy the model if the test actually initialized the native side.
            if (mTabModel.isNativeInitialized()) mTabModel.destroy();
        }
    });
    super.tearDown();
}

/** Creates the DocumentTabModel on the UI thread and starts its asynchronous TabState load. */
private void setupDocumentTabModel() {
    ThreadUtils.runOnUiThreadBlocking(new Runnable() {
        @Override
        public void run() {
            mTabModel = new DocumentTabModelImpl(mActivityDelegate, mStorageDelegate,
                    mTabCreatorManager, false, 1010, mContext);
            mTabModel.startTabStateLoad();
        }
    });
}

/**
 * Initializes the native side of the tab model on the UI thread and blocks until the
 * persisted tab states have been deserialized.
 */
private void initializeNativeTabModel() throws Exception {
    ThreadUtils.runOnUiThreadBlocking(new Runnable() {
        @Override
        public void run() {
            mTabModel.initializeNative();
        }
    });
    TestInitializationObserver.waitUntilState(
            mTabModel, DocumentTabModelImpl.STATE_DESERIALIZE_END);
}
/**
 * Restores two tabs from the task file and checks metadata, TabState availability and,
 * after native initialization, the deserialized current URLs.
 */
@SmallTest
public void testBasic() throws Exception {
    mActivityDelegate.addTask(false, 1010, "http://erfworld.com");
    mActivityDelegate.addTask(false, 1011, "http://reddit.com/r/android");
    mStorageDelegate.setTaskFileBytesFromEncodedString(MODEL_STATE_WITH_1010_1011);
    mStorageDelegate.addEncodedTabState(1010, false, TAB_STATE_1010_ERFWORLD_RETARGETABLE);
    mStorageDelegate.addEncodedTabState(1011, false, TAB_STATE_1011_REDDIT);
    setupDocumentTabModel();
    // Confirm the data from the task file is restored correctly.
    assertEquals(2, mTabModel.getCount());
    assertEquals(1010, mTabModel.getTabAt(0).getId());
    assertEquals(1011, mTabModel.getTabAt(1).getId());
    assertEquals("http://erfworld.com", mTabModel.getInitialUrlForDocument(1010));
    assertEquals("http://reddit.com/r/android", mTabModel.getInitialUrlForDocument(1011));
    // Due to using AsyncTask to fetch metadata, at this point both should be non-retargetable
    // by default until that AsyncTask completes.
    assertEquals(false, mTabModel.isRetargetable(1010));
    assertEquals(false, mTabModel.isRetargetable(1011));
    // State of the tabs: current URLs are unavailable until native-side deserialization.
    assertTrue(mTabModel.isTabStateReady(1010));
    assertNotNull(mTabModel.getTabStateForDocument(1010));
    assertNull(mTabModel.getCurrentUrlForDocument(1010));
    assertNull(mTabModel.getCurrentUrlForDocument(1011));
    // Wait until the tab states are loaded.
    TestInitializationObserver.waitUntilState(
            mTabModel, DocumentTabModelImpl.STATE_LOAD_TAB_STATE_BG_END);
    assertNotNull(mTabModel.getTabStateForDocument(1011));
    // Startup AsyncTasks must be complete by now since they are both on serial executor.
    assertEquals(true, mTabModel.isRetargetable(1010));
    assertEquals(false, mTabModel.isRetargetable(1011));
    // Load the native library, wait until the states are deserialized, then check their values.
    initializeNativeTabModel();
    assertEquals("http://www.erfworld.com/erf_stream/view",
            mTabModel.getCurrentUrlForDocument(1010));
    assertEquals("http://www.reddit.com/r/android", mTabModel.getCurrentUrlForDocument(1011));
}
/**
 * An incognito task present in Android's Recents must not appear in the regular
 * DocumentTabModel; otherwise this mirrors {@link #testBasic()}.
 */
@SmallTest
public void testIncognitoIgnored() throws Exception {
    mActivityDelegate.addTask(false, 1010, "http://erfworld.com");
    mActivityDelegate.addTask(false, 1011, "http://reddit.com/r/android");
    // Incognito task 1012 should be ignored by the regular model.
    mActivityDelegate.addTask(true, 1012, "http://incognito.com/ignored");
    mStorageDelegate.setTaskFileBytesFromEncodedString(MODEL_STATE_WITH_1010_1011);
    mStorageDelegate.addEncodedTabState(1010, false, TAB_STATE_1010_ERFWORLD_RETARGETABLE);
    mStorageDelegate.addEncodedTabState(1011, false, TAB_STATE_1011_REDDIT);
    setupDocumentTabModel();
    // Confirm the data from the task file is restored correctly (1012 is absent).
    assertEquals(2, mTabModel.getCount());
    assertEquals(1010, mTabModel.getTabAt(0).getId());
    assertEquals(1011, mTabModel.getTabAt(1).getId());
    assertEquals("http://erfworld.com", mTabModel.getInitialUrlForDocument(1010));
    assertEquals("http://reddit.com/r/android", mTabModel.getInitialUrlForDocument(1011));
    // Same as in testBasic.
    assertEquals(false, mTabModel.isRetargetable(1010));
    assertEquals(false, mTabModel.isRetargetable(1011));
    // State of the tabs.
    assertTrue(mTabModel.isTabStateReady(1010));
    assertNotNull(mTabModel.getTabStateForDocument(1010));
    assertNull(mTabModel.getCurrentUrlForDocument(1010));
    assertNull(mTabModel.getCurrentUrlForDocument(1011));
    // Wait until the tab states are loaded.
    TestInitializationObserver.waitUntilState(
            mTabModel, DocumentTabModelImpl.STATE_LOAD_TAB_STATE_BG_END);
    assertNotNull(mTabModel.getTabStateForDocument(1011));
    // Same as in testBasic.
    assertEquals(true, mTabModel.isRetargetable(1010));
    assertEquals(false, mTabModel.isRetargetable(1011));
    // Load the native library, wait until the states are deserialized, then check their values.
    initializeNativeTabModel();
    assertEquals("http://www.erfworld.com/erf_stream/view",
            mTabModel.getCurrentUrlForDocument(1010));
    assertEquals("http://www.reddit.com/r/android", mTabModel.getCurrentUrlForDocument(1011));
}
/**
 * Tasks found in Android's Recents and not in the DocumentTabModel's task file should be
 * added to the DocumentTabModel.
 */
@SmallTest
public void testMissingTaskAddedAndUnretargetable() throws Exception {
    // Task 1012 is in Recents but absent from the persisted task file.
    mActivityDelegate.addTask(false, 1012, "http://digg.com");
    mActivityDelegate.addTask(false, 1010, "http://erfworld.com");
    mActivityDelegate.addTask(false, 1011, "http://reddit.com/r/android");
    mStorageDelegate.setTaskFileBytesFromEncodedString(MODEL_STATE_WITH_1010_1011);
    setupDocumentTabModel();
    assertEquals(3, mTabModel.getCount());
    assertEquals(1012, mTabModel.getTabAt(0).getId());
    assertEquals(1010, mTabModel.getTabAt(1).getId());
    assertEquals(1011, mTabModel.getTabAt(2).getId());
    assertEquals("http://erfworld.com", mTabModel.getInitialUrlForDocument(1010));
    assertEquals("http://reddit.com/r/android", mTabModel.getInitialUrlForDocument(1011));
    assertEquals("http://digg.com", mTabModel.getInitialUrlForDocument(1012));
    // Wait until the tab states are loaded, by then the AsyncTask to load metadata is done.
    TestInitializationObserver.waitUntilState(
            mTabModel, DocumentTabModelImpl.STATE_LOAD_TAB_STATE_BG_END);
    assertEquals(true, mTabModel.isRetargetable(1010));
    assertEquals(false, mTabModel.isRetargetable(1011));
    // The recovered task was added, but it is not retargetable.
    assertEquals(false, mTabModel.isRetargetable(1012));
}
/**
 * If a TabState file is missing, we won't be able to get a current URL for it but should still
 * get notification that the TabState was loaded.
 */
@SmallTest
public void testMissingTabState() throws Exception {
    mActivityDelegate.addTask(false, 1010, "http://erfworld.com");
    mActivityDelegate.addTask(false, 1011, "http://reddit.com/r/android");
    mStorageDelegate.setTaskFileBytesFromEncodedString(MODEL_STATE_WITH_1010_1011);
    // Deliberately omit the TabState for tab 1010 to simulate a missing file.
    mStorageDelegate.addEncodedTabState(1011, false, TAB_STATE_1011_REDDIT);
    setupDocumentTabModel();
    assertEquals(2, mTabModel.getCount());
    assertTrue(mTabModel.isTabStateReady(1010));
    assertNull(mTabModel.getTabStateForDocument(1010));
    assertNull(mTabModel.getCurrentUrlForDocument(1010));
    assertNull(mTabModel.getCurrentUrlForDocument(1011));
    // After the DocumentTabModel has progressed far enough, confirm that the other available
    // TabState has been loaded.
    TestInitializationObserver.waitUntilState(
            mTabModel, DocumentTabModelImpl.STATE_LOAD_TAB_STATE_BG_END);
    assertTrue(mTabModel.isTabStateReady(1010));
    assertTrue(mTabModel.isTabStateReady(1011));
    assertNull(mTabModel.getTabStateForDocument(1010));
    assertNotNull(mTabModel.getTabStateForDocument(1011));
    // Load the native library, wait until the states are deserialized, then check their values.
    initializeNativeTabModel();
    // Fixed: previously assertNull(null, ...), which accidentally used the
    // (String message, Object) overload with a null message — a typo mirroring the
    // assertEquals call below. Behavior is the same; intent is now explicit.
    assertNull(mTabModel.getCurrentUrlForDocument(1010));
    assertEquals("http://www.reddit.com/r/android", mTabModel.getCurrentUrlForDocument(1011));
}
/**
 * Tasks recorded in the task file but no longer present in Android's Recents when the
 * DocumentTabModel is created should not be restored.
 */
@SmallTest
public void testTasksSwipedAwayBeforeTabModelCreation() throws Exception {
    // The task file knows about 1010 and 1011, but only 1010 is still in Recents.
    mActivityDelegate.addTask(false, 1010, "http://erfworld.com");
    mStorageDelegate.setTaskFileBytesFromEncodedString(MODEL_STATE_WITH_1010_1011);
    setupDocumentTabModel();
    assertEquals(1, mTabModel.getCount());
    assertEquals(1010, mTabModel.getTabAt(0).getId());
    assertEquals("http://erfworld.com", mTabModel.getInitialUrlForDocument(1010));
}

/**
 * Tasks swiped away in Android's Recents should be reflected as closed tabs in the
 * DocumentTabModel.
 */
@SmallTest
public void testTasksSwipedAwayAfterTabModelCreation() throws Exception {
    mActivityDelegate.addTask(false, 1010, "http://erfworld.com");
    mActivityDelegate.addTask(false, 1011, "http://reddit.com/r/android");
    mStorageDelegate.setTaskFileBytesFromEncodedString(MODEL_STATE_WITH_1010_1011);
    setupDocumentTabModel();
    assertEquals(2, mTabModel.getCount());
    assertEquals(1010, mTabModel.getTabAt(0).getId());
    assertEquals(1011, mTabModel.getTabAt(1).getId());
    // Simulate the user swiping away task 1010, then let the model reconcile.
    mActivityDelegate.removeTask(false, 1010);
    ThreadUtils.runOnUiThreadBlocking(new Runnable() {
        @Override
        public void run() {
            mTabModel.updateRecentlyClosed();
        }
    });
    assertEquals(1, mTabModel.getCount());
    assertEquals(1011, mTabModel.getTabAt(0).getId());
}

/**
 * DocumentTabModel#closeAllTabs() should remove all the tabs from the TabModel.
 */
@SmallTest
public void testCloseAllTabs() throws Exception {
    mActivityDelegate.addTask(false, 1010, "http://erfworld.com");
    mActivityDelegate.addTask(false, 1011, "http://reddit.com/r/android");
    mStorageDelegate.setTaskFileBytesFromEncodedString(MODEL_STATE_WITH_1010_1011);
    setupDocumentTabModel();
    assertEquals(2, mTabModel.getCount());
    assertEquals(1010, mTabModel.getTabAt(0).getId());
    assertEquals(1011, mTabModel.getTabAt(1).getId());
    ThreadUtils.runOnUiThreadBlocking(new Runnable() {
        @Override
        public void run() {
            mTabModel.closeAllTabs();
        }
    });
    assertEquals(0, mTabModel.getCount());
}
/**
 * DocumentTabModel#closeTabAt() should close a tab and slide the other ones in to fill the gap.
 * This test also relies on the DocumentTabModel adding tasks it finds in Android's Recents to
 * pad out the test.
 */
@SmallTest
public void testCloseTabAt() throws Exception {
    mActivityDelegate.addTask(false, 1010, "http://erfworld.com");
    mActivityDelegate.addTask(false, 1011, "http://reddit.com/r/android");
    mActivityDelegate.addTask(false, 1012, "http://digg.com");
    mActivityDelegate.addTask(false, 1013, "http://slashdot.org");
    setupDocumentTabModel();
    assertEquals(4, mTabModel.getCount());
    assertEquals(1010, mTabModel.getTabAt(0).getId());
    assertEquals(1011, mTabModel.getTabAt(1).getId());
    assertEquals(1012, mTabModel.getTabAt(2).getId());
    assertEquals(1013, mTabModel.getTabAt(3).getId());
    // Close 1011; remaining order should be [1010, 1012, 1013].
    assertTrue(CloseRunnable.closeTabAt(mTabModel, 1));
    assertEquals(3, mTabModel.getCount());
    assertEquals(1010, mTabModel.getTabAt(0).getId());
    assertEquals(1012, mTabModel.getTabAt(1).getId());
    assertEquals(1013, mTabModel.getTabAt(2).getId());
    // Close 1013; remaining order should be [1010, 1012].
    assertTrue(CloseRunnable.closeTabAt(mTabModel, 2));
    assertEquals(2, mTabModel.getCount());
    assertEquals(1010, mTabModel.getTabAt(0).getId());
    assertEquals(1012, mTabModel.getTabAt(1).getId());
    // Close 1010; only 1012 remains.
    assertTrue(CloseRunnable.closeTabAt(mTabModel, 0));
    assertEquals(1, mTabModel.getCount());
    assertEquals(1012, mTabModel.getTabAt(0).getId());
    assertTrue(CloseRunnable.closeTabAt(mTabModel, 0));
    assertEquals(0, mTabModel.getCount());
    // Closing an out-of-range index on an empty model must fail.
    assertFalse(CloseRunnable.closeTabAt(mTabModel, 0));
}
/**
 * Test that the DocumentTabModel.index() function works as expected as Tabs are selected and
 * closed.
 */
@SmallTest
public void testIndex() throws Exception {
    mActivityDelegate.addTask(false, 1010, "http://erfworld.com");
    mActivityDelegate.addTask(false, 1011, "http://reddit.com/r/android");
    mActivityDelegate.addTask(false, 1012, "http://digg.com");
    mActivityDelegate.addTask(false, 1013, "http://slashdot.org");
    // Seed SharedPreferences with a last-shown tab id so the model starts at that tab.
    Map<String, Object> data = new HashMap<String, Object>();
    data.put(DocumentTabModelImpl.PREF_LAST_SHOWN_TAB_ID_REGULAR, 1011);
    mContext.addSharedPreferences(DocumentTabModelImpl.PREF_PACKAGE, data);
    // The ID stored in the SharedPreferences points at index 1.
    setupDocumentTabModel();
    assertEquals(1, mTabModel.index());
    // Pick a different Tab.
    ThreadUtils.runOnUiThreadBlocking(new Runnable() {
        @Override
        public void run() {
            TabModelUtils.setIndex(mTabModel, 3);
        }
    });
    assertEquals(3, mTabModel.index());
    assertEquals(1013, mTabModel.getTabAt(3).getId());
    assertEquals(1013, data.get(DocumentTabModelImpl.PREF_LAST_SHOWN_TAB_ID_REGULAR));
    // Select the MRU tab since the last known Tab was closed. The last shown ID isn't updated
    // when the new Tab is selected; it's the job of the DocumentActivity to alert the
    // DocumentTabModel about shown Tab changes.
    assertTrue(CloseRunnable.closeTabAt(mTabModel, 3));
    assertEquals(3, mTabModel.getCount());
    assertEquals(0, mTabModel.index());
    assertEquals(1010, mTabModel.getTabAt(0).getId());
    // Close everything; index should be invalid.
    ThreadUtils.runOnUiThreadBlocking(new Runnable() {
        @Override
        public void run() {
            mTabModel.closeAllTabs();
        }
    });
    assertEquals(0, mTabModel.getCount());
    assertEquals(TabModel.INVALID_TAB_INDEX, mTabModel.index());
}
/**
 * Test that we don't add information about a Tab that's not valid for a DocumentActivity.
 */
@SmallTest
public void testAddTab() throws Exception {
    setupDocumentTabModel();
    assertEquals(0, mTabModel.getCount());

    // An Intent whose data URI is not a document:// URI must be rejected.
    Intent invalidIntent = new Intent();
    invalidIntent.setData(Uri.parse("http://toteslegit.com"));
    mTabModel.addTab(invalidIntent, new Tab(false, null, null));
    assertEquals(0, mTabModel.getCount());

    // A well-formed document Intent is accepted and tracked by the model.
    Intent validIntent = new Intent();
    validIntent.setData(Uri.parse("document://11684?http://erfworld.com"));
    mTabModel.addTab(validIntent, new Tab(11684, false, null, null));
    assertEquals(1, mTabModel.getCount());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.util.http.fileupload;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
/**
 * <p> Low level API for processing file uploads.
 *
 * <p> This class can be used to process data streams conforming to MIME
 * 'multipart' format as defined in
 * <a href="http://www.ietf.org/rfc/rfc1867.txt">RFC 1867</a>. Arbitrarily
 * large amounts of data in the stream can be processed under constant
 * memory usage.
 *
 * <p> The format of the stream is defined in the following way:<br>
 *
 * <code>
 *   multipart-body := preamble 1*encapsulation close-delimiter epilogue<br>
 *   encapsulation := delimiter body CRLF<br>
 *   delimiter := "--" boundary CRLF<br>
 *   close-delimiter := "--" boundary "--"<br>
 *   preamble := &lt;ignore&gt;<br>
 *   epilogue := &lt;ignore&gt;<br>
 *   body := header-part CRLF body-part<br>
 *   header-part := 1*header CRLF<br>
 *   header := header-name ":" header-value<br>
 *   header-name := &lt;printable ascii characters except ":"&gt;<br>
 *   header-value := &lt;any ascii characters except CR &amp; LF&gt;<br>
 *   body-data := &lt;arbitrary data&gt;<br>
 * </code>
 *
 * <p>Note that body-data can contain another multipart entity. There
 * is limited support for single pass processing of such nested
 * streams. The nested stream is <strong>required</strong> to have a
 * boundary token of the same length as the parent stream (see {@link
 * #setBoundary(byte[])}).
 *
 * <p>Here is an example of usage of this class.<br>
 *
 * <pre>
 *    try {
 *        MultipartStream multipartStream = new MultipartStream(input,
 *                                                              boundary);
 *        boolean nextPart = multipartStream.skipPreamble();
 *        OutputStream output;
 *        while(nextPart) {
 *            header = multipartStream.readHeaders();
 *            // process headers
 *            // create some output stream
 *            multipartStream.readBodyData(output);
 *            nextPart = multipartStream.readBoundary();
 *        }
 *    } catch(MultipartStream.MalformedStreamException e) {
 *        // the stream failed to follow required syntax
 *    } catch(IOException e) {
 *        // a read or write error occurred
 *    }
 * </pre>
 *
 * @author <a href="mailto:Rafal.Krzewski@e-point.pl">Rafal Krzewski</a>
 * @author <a href="mailto:martinc@apache.org">Martin Cooper</a>
 * @author Sean C. Sullivan
 */
public class MultipartStream
{
    // ----------------------------------------------------- Manifest constants

    /**
     * The maximum length of <code>header-part</code> that will be
     * processed (10 kilobytes = 10240 bytes.).
     */
    public static final int HEADER_PART_SIZE_MAX = 10240;

    /**
     * The default length of the buffer used for processing a request.
     */
    protected static final int DEFAULT_BUFSIZE = 4096;

    /**
     * A byte sequence that marks the end of <code>header-part</code>
     * (<code>CRLFCRLF</code>).
     */
    protected static final byte[] HEADER_SEPARATOR = {0x0D, 0x0A, 0x0D, 0x0A};

    /**
     * A byte sequence that follows a delimiter that will be
     * followed by an encapsulation (<code>CRLF</code>).
     */
    protected static final byte[] FIELD_SEPARATOR = { 0x0D, 0x0A };

    /**
     * A byte sequence that follows a delimiter of the last
     * encapsulation in the stream (<code>--</code>).
     */
    protected static final byte[] STREAM_TERMINATOR = { 0x2D, 0x2D };

    // ----------------------------------------------------------- Data members

    /**
     * The input stream from which data is read.
     */
    private InputStream input;

    /**
     * The length of the boundary token plus the leading <code>CRLF--</code>.
     */
    private int boundaryLength;

    /**
     * The amount of data, in bytes, that must be kept in the buffer in order
     * to detect delimiters reliably.
     */
    private int keepRegion;

    /**
     * The byte sequence that partitions the stream (<code>CRLF--boundary</code>).
     */
    private byte[] boundary;

    /**
     * The length of the buffer used for processing the request.
     */
    private int bufSize;

    /**
     * The buffer used for processing the request.
     */
    private byte[] buffer;

    /**
     * The index of first valid character in the buffer.
     * <br>
     * 0 &lt;= head &lt; bufSize
     */
    private int head;

    /**
     * The index of last valid character in the buffer + 1.
     * <br>
     * 0 &lt;= tail &lt;= bufSize
     */
    private int tail;

    /**
     * The content encoding to use when reading headers.
     */
    private String headerEncoding;

    // ----------------------------------------------------------- Constructors

    /**
     * Default constructor.
     *
     * @see #MultipartStream(InputStream, byte[], int)
     * @see #MultipartStream(InputStream, byte[])
     */
    public MultipartStream()
    {
    }

    /**
     * <p> Constructs a <code>MultipartStream</code> with a custom size buffer.
     *
     * <p> Note that the buffer must be at least big enough to contain the
     * boundary string, plus 4 characters for CR/LF and double dash, plus at
     * least one byte of data.  Too small a buffer size setting will degrade
     * performance.
     *
     * @param input    The <code>InputStream</code> to serve as a data source.
     * @param boundary The token used for dividing the stream into
     *                 <code>encapsulations</code>.
     * @param bufSize  The size of the buffer to be used, in bytes.
     *
     * @see #MultipartStream()
     * @see #MultipartStream(InputStream, byte[])
     */
    public MultipartStream(InputStream input,
                           byte[] boundary,
                           int bufSize)
    {
        this.input = input;
        this.bufSize = bufSize;
        this.buffer = new byte[bufSize];

        // We prepend CR/LF to the boundary to chop trailing CR/LF from
        // body-data tokens.
        this.boundary = new byte[boundary.length + 4];
        this.boundaryLength = boundary.length + 4;
        this.keepRegion = boundary.length + 3;
        this.boundary[0] = 0x0D;
        this.boundary[1] = 0x0A;
        this.boundary[2] = 0x2D;
        this.boundary[3] = 0x2D;
        System.arraycopy(boundary, 0, this.boundary, 4, boundary.length);

        head = 0;
        tail = 0;
    }

    /**
     * <p> Constructs a <code>MultipartStream</code> with a default size buffer.
     *
     * @param input    The <code>InputStream</code> to serve as a data source.
     * @param boundary The token used for dividing the stream into
     *                 <code>encapsulations</code>.
     *
     * @exception IOException when an error occurs.
     *
     * @see #MultipartStream()
     * @see #MultipartStream(InputStream, byte[], int)
     */
    public MultipartStream(InputStream input,
                           byte[] boundary)
        throws IOException
    {
        this(input, boundary, DEFAULT_BUFSIZE);
    }

    // --------------------------------------------------------- Public methods

    /**
     * Retrieves the character encoding used when reading the headers of an
     * individual part. When not specified, or <code>null</code>, the platform
     * default encoding is used.
     *
     * @return The encoding used to read part headers.
     */
    public String getHeaderEncoding()
    {
        return headerEncoding;
    }

    /**
     * Specifies the character encoding to be used when reading the headers of
     * individual parts. When not specified, or <code>null</code>, the platform
     * default encoding is used.
     *
     * @param encoding The encoding used to read part headers.
     */
    public void setHeaderEncoding(String encoding)
    {
        headerEncoding = encoding;
    }

    /**
     * Reads a byte from the <code>buffer</code>, and refills it as
     * necessary.
     *
     * @return The next byte from the input stream.
     *
     * @exception IOException if there is no more data available.
     */
    public byte readByte()
        throws IOException
    {
        // Buffer depleted?
        if (head == tail)
        {
            head = 0;
            // Refill from the beginning of the buffer.
            tail = input.read(buffer, head, bufSize);
            if (tail == -1)
            {
                // No more data available.
                throw new IOException("No more data is available");
            }
        }
        return buffer[head++];
    }

    /**
     * Skips a <code>boundary</code> token, and checks whether more
     * <code>encapsulations</code> are contained in the stream.
     *
     * @return <code>true</code> if there are more encapsulations in
     *         this stream; <code>false</code> otherwise.
     *
     * @exception MalformedStreamException if the stream ends unexpectedly or
     *                                     fails to follow required syntax.
     */
    public boolean readBoundary()
        throws MalformedStreamException
    {
        byte[] marker = new byte[2];
        boolean nextChunk = false;

        // head currently points at the boundary (positioned there by
        // readBodyData()/discardBodyData()); skip past it.
        head += boundaryLength;
        try
        {
            // The two bytes after the boundary distinguish "more parts follow"
            // (CRLF) from "this was the last part" (--).
            marker[0] = readByte();
            marker[1] = readByte();
            if (arrayequals(marker, STREAM_TERMINATOR, 2))
            {
                nextChunk = false;
            }
            else if (arrayequals(marker, FIELD_SEPARATOR, 2))
            {
                nextChunk = true;
            }
            else
            {
                throw new MalformedStreamException(
                        "Unexpected characters follow a boundary");
            }
        }
        catch (IOException e)
        {
            throw new MalformedStreamException("Stream ended unexpectedly");
        }
        return nextChunk;
    }

    /**
     * <p>Changes the boundary token used for partitioning the stream.
     *
     * <p>This method allows single pass processing of nested multipart
     * streams.
     *
     * <p>The boundary token of the nested stream is <code>required</code>
     * to be of the same length as the boundary token in parent stream.
     *
     * <p>Restoring the parent stream boundary token after processing of a
     * nested stream is left to the application.
     *
     * @param boundary The boundary to be used for parsing of the nested
     *                 stream.
     *
     * @exception IllegalBoundaryException if the <code>boundary</code>
     *                                     has a different length than the one
     *                                     being currently parsed.
     */
    public void setBoundary(byte[] boundary)
        throws IllegalBoundaryException
    {
        if (boundary.length != boundaryLength - 4)
        {
            throw new IllegalBoundaryException(
                    "The length of a boundary token can not be changed");
        }
        // Keep the leading CRLF-- prefix; only the token itself is replaced.
        System.arraycopy(boundary, 0, this.boundary, 4, boundary.length);
    }

    /**
     * <p>Reads the <code>header-part</code> of the current
     * <code>encapsulation</code>.
     *
     * <p>Headers are returned verbatim to the input stream, including the
     * trailing <code>CRLF</code> marker. Parsing is left to the
     * application.
     *
     * <p><strong>TODO</strong> allow limiting maximum header size to
     * protect against abuse.
     *
     * @return The <code>header-part</code> of the current encapsulation.
     *
     * @exception MalformedStreamException if the stream ends unexpectedly.
     */
    public String readHeaders()
        throws MalformedStreamException
    {
        int i = 0;
        byte[] b = new byte[1];
        // Accumulate raw bytes so multi-byte characters survive until decoding.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        int sizeMax = HEADER_PART_SIZE_MAX;
        int size = 0;
        // Consume bytes until the CRLFCRLF header separator has been matched.
        // NOTE(review): after a partial HEADER_SEPARATOR match fails, matching
        // restarts at the NEXT byte, so a mismatching CR is never reconsidered
        // as the start of a new separator; confirm inputs cannot contain
        // sequences like CRLFCR CR LF CRLF before relying on this.
        while (i < 4)
        {
            try
            {
                b[0] = readByte();
            }
            catch (IOException e)
            {
                throw new MalformedStreamException("Stream ended unexpectedly");
            }
            size++;
            if (b[0] == HEADER_SEPARATOR[i])
            {
                i++;
            }
            else
            {
                i = 0;
            }
            // Bytes beyond sizeMax are consumed but silently dropped.
            if (size <= sizeMax)
            {
                baos.write(b[0]);
            }
        }

        String headers = null;
        if (headerEncoding != null)
        {
            try
            {
                headers = baos.toString(headerEncoding);
            }
            catch (UnsupportedEncodingException e)
            {
                // Fall back to platform default if specified encoding is not
                // supported.
                headers = baos.toString();
            }
        }
        else
        {
            headers = baos.toString();
        }
        return headers;
    }

    /**
     * <p>Reads <code>body-data</code> from the current
     * <code>encapsulation</code> and writes its contents into the
     * output <code>Stream</code>.
     *
     * <p>Arbitrary large amounts of data can be processed by this
     * method using a constant size buffer. (see {@link
     * #MultipartStream(InputStream,byte[],int) constructor}).
     *
     * @param output The <code>Stream</code> to write data into.
     *
     * @return the amount of data written.
     *
     * @exception MalformedStreamException if the stream ends unexpectedly.
     * @exception IOException              if an i/o error occurs.
     */
    public int readBodyData(OutputStream output)
        throws MalformedStreamException,
               IOException
    {
        boolean done = false;
        int pad;
        int pos;
        int bytesRead;
        int total = 0;
        while (!done)
        {
            // Is the boundary token present somewhere in the buffer?
            pos = findSeparator();
            if (pos != -1)
            {
                // Write the rest of the data before the boundary.
                output.write(buffer, head, pos - head);
                total += pos - head;
                head = pos;
                done = true;
            }
            else
            {
                // Determine how much data should be kept in the buffer:
                // the trailing keepRegion bytes might be the start of a
                // boundary split across buffer refills.
                if (tail - head > keepRegion)
                {
                    pad = keepRegion;
                }
                else
                {
                    pad = tail - head;
                }
                // Write out the data belonging to the body-data.
                output.write(buffer, head, tail - head - pad);

                // Move the retained bytes to the beginning of the buffer.
                total += tail - head - pad;
                System.arraycopy(buffer, tail - pad, buffer, 0, pad);

                // Refill buffer with new data.
                head = 0;
                bytesRead = input.read(buffer, pad, bufSize - pad);
                if (bytesRead != -1)
                {
                    tail = pad + bytesRead;
                }
                else
                {
                    // The last pad amount is left in the buffer.
                    // Boundary can't be in there so write out the
                    // data you have and signal an error condition.
                    output.write(buffer, 0, pad);
                    output.flush();
                    total += pad;
                    throw new MalformedStreamException(
                            "Stream ended unexpectedly");
                }
            }
        }
        output.flush();
        return total;
    }

    /**
     * <p> Reads <code>body-data</code> from the current
     * <code>encapsulation</code> and discards it.
     *
     * <p>Use this method to skip encapsulations you don't need or don't
     * understand.
     *
     * @return The amount of data discarded.
     *
     * @exception MalformedStreamException if the stream ends unexpectedly.
     * @exception IOException              if an i/o error occurs.
     */
    public int discardBodyData()
        throws MalformedStreamException,
               IOException
    {
        boolean done = false;
        int pad;
        int pos;
        int bytesRead;
        int total = 0;
        while (!done)
        {
            // Is the boundary token present somewhere in the buffer?
            pos = findSeparator();
            if (pos != -1)
            {
                // Skip the rest of the data before the boundary.
                total += pos - head;
                head = pos;
                done = true;
            }
            else
            {
                // Determine how much data should be kept in the buffer
                // (see readBodyData() for the rationale).
                if (tail - head > keepRegion)
                {
                    pad = keepRegion;
                }
                else
                {
                    pad = tail - head;
                }
                total += tail - head - pad;

                // Move the retained bytes to the beginning of the buffer.
                System.arraycopy(buffer, tail - pad, buffer, 0, pad);

                // Refill buffer with new data.
                head = 0;
                bytesRead = input.read(buffer, pad, bufSize - pad);
                if (bytesRead != -1)
                {
                    tail = pad + bytesRead;
                }
                else
                {
                    // The last pad amount is left in the buffer.
                    // Boundary can't be in there so signal an error
                    // condition.
                    total += pad;
                    throw new MalformedStreamException(
                            "Stream ended unexpectedly");
                }
            }
        }
        return total;
    }

    /**
     * Finds the beginning of the first <code>encapsulation</code>.
     *
     * @return <code>true</code> if an <code>encapsulation</code> was found in
     *         the stream.
     *
     * @exception IOException if an i/o error occurs.
     */
    public boolean skipPreamble()
        throws IOException
    {
        // First delimiter may not be preceded by a CRLF, so temporarily
        // search for "--boundary" instead of "CRLF--boundary".
        System.arraycopy(boundary, 2, boundary, 0, boundary.length - 2);
        boundaryLength = boundary.length - 2;
        try
        {
            // Discard all data up to the delimiter.
            discardBodyData();

            // Read boundary - if succeeded, the stream contains an
            // encapsulation.
            return readBoundary();
        }
        catch (MalformedStreamException e)
        {
            return false;
        }
        finally
        {
            // Restore the full CRLF-prefixed delimiter.
            System.arraycopy(boundary, 0, boundary, 2, boundary.length - 2);
            boundaryLength = boundary.length;
            boundary[0] = 0x0D;
            boundary[1] = 0x0A;
        }
    }

    /**
     * Compares <code>count</code> first bytes in the arrays
     * <code>a</code> and <code>b</code>.
     *
     * @param a     The first array to compare.
     * @param b     The second array to compare.
     * @param count How many bytes should be compared.
     *
     * @return <code>true</code> if <code>count</code> first bytes in arrays
     *         <code>a</code> and <code>b</code> are equal.
     */
    public static boolean arrayequals(byte[] a,
                                      byte[] b,
                                      int count)
    {
        for (int i = 0; i < count; i++)
        {
            if (a[i] != b[i])
            {
                return false;
            }
        }
        return true;
    }

    /**
     * Searches for a byte of specified value in the <code>buffer</code>,
     * starting at the specified <code>position</code>.
     *
     * @param value The value to find.
     * @param pos   The starting position for searching.
     *
     * @return The position of byte found, counting from beginning of the
     *         <code>buffer</code>, or <code>-1</code> if not found.
     */
    protected int findByte(byte value,
                           int pos)
    {
        for (int i = pos; i < tail; i++)
        {
            if (buffer[i] == value)
            {
                return i;
            }
        }
        return -1;
    }

    /**
     * Searches for the <code>boundary</code> in the <code>buffer</code>
     * region delimited by <code>head</code> and <code>tail</code>.
     *
     * @return The position of the boundary found, counting from the
     *         beginning of the <code>buffer</code>, or <code>-1</code> if
     *         not found.
     */
    protected int findSeparator()
    {
        int first;
        int match = 0;
        int maxpos = tail - boundaryLength;
        for (first = head;
             (first <= maxpos) && (match != boundaryLength);
             first++)
        {
            first = findByte(boundary[0], first);
            if (first == -1 || (first > maxpos))
            {
                return -1;
            }
            for (match = 1; match < boundaryLength; match++)
            {
                if (buffer[first + match] != boundary[match])
                {
                    break;
                }
            }
        }
        if (match == boundaryLength)
        {
            // The for-loop increment ran once after the full match.
            return first - 1;
        }
        return -1;
    }

    /**
     * Returns a string representation of this object.
     *
     * @return The string representation of this object.
     */
    @Override
    public String toString()
    {
        StringBuilder sbTemp = new StringBuilder();
        sbTemp.append("boundary='");
        // BUGFIX: String.valueOf(byte[]) renders the array reference
        // ("[B@1234abcd"), not its contents; decode the bytes instead so the
        // boundary token is actually visible.
        sbTemp.append(new String(boundary));
        sbTemp.append("'\nbufSize=");
        sbTemp.append(bufSize);
        return sbTemp.toString();
    }

    /**
     * Thrown to indicate that the input stream fails to follow the
     * required syntax.
     */
    // NOTE(review): kept as a non-static inner class for strict source/binary
    // compatibility with existing callers, although it uses no enclosing state.
    public class MalformedStreamException
        extends IOException
    {
        private static final long serialVersionUID = 1L;

        /**
         * Constructs a <code>MalformedStreamException</code> with no
         * detail message.
         */
        public MalformedStreamException()
        {
            super();
        }

        /**
         * Constructs an <code>MalformedStreamException</code> with
         * the specified detail message.
         *
         * @param message The detail message.
         */
        public MalformedStreamException(String message)
        {
            super(message);
        }
    }

    /**
     * Thrown upon attempt of setting an invalid boundary token.
     */
    public class IllegalBoundaryException
        extends IOException
    {
        private static final long serialVersionUID = 1L;

        /**
         * Constructs an <code>IllegalBoundaryException</code> with no
         * detail message.
         */
        public IllegalBoundaryException()
        {
            super();
        }

        /**
         * Constructs an <code>IllegalBoundaryException</code> with
         * the specified detail message.
         *
         * @param message The detail message.
         */
        public IllegalBoundaryException(String message)
        {
            super(message);
        }
    }
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.util;
import junit.framework.*;
import java.io.*;
/**
 * Test LongField code
 *
 * @author Marc Johnson (mjohnson at apache dot org)
 */
public final class TestLongField extends TestCase {

    // Edge-case values: both extremes, -1, 0, and 1.  The assertions below show
    // LongField serializes these little-endian (least significant byte first).
    static private final long[] _test_array =
    {
        Long.MIN_VALUE, -1L, 0L, 1L, Long.MAX_VALUE
    };

    public void testConstructors()
    {
        // A negative offset must be rejected by every constructor variant.
        try
        {
            new LongField(-1);
            fail("Should have caught ArrayIndexOutOfBoundsException");
        }
        catch (ArrayIndexOutOfBoundsException ignored_e)
        {
            // as expected
        }
        LongField field = new LongField(2);

        // A field constructed without a value reads back as zero.
        assertEquals(0L, field.get());
        try
        {
            new LongField(-1, 1L);
            fail("Should have caught ArrayIndexOutOfBoundsException");
        }
        catch (ArrayIndexOutOfBoundsException ignored_e)
        {
            // as expected
        }
        field = new LongField(2, 0x123456789ABCDEF0L);
        assertEquals(0x123456789ABCDEF0L, field.get());
        byte[] array = new byte[ 10 ];

        try
        {
            new LongField(-1, 1L, array);
            fail("Should have caught ArrayIndexOutOfBoundsException");
        }
        catch (ArrayIndexOutOfBoundsException ignored_e)
        {
            // as expected
        }
        field = new LongField(2, 0x123456789ABCDEF0L, array);
        assertEquals(0x123456789ABCDEF0L, field.get());
        // The value is written little-endian into the array starting at offset 2.
        assertEquals(( byte ) 0xF0, array[ 2 ]);
        assertEquals(( byte ) 0xDE, array[ 3 ]);
        assertEquals(( byte ) 0xBC, array[ 4 ]);
        assertEquals(( byte ) 0x9A, array[ 5 ]);
        assertEquals(( byte ) 0x78, array[ 6 ]);
        assertEquals(( byte ) 0x56, array[ 7 ]);
        assertEquals(( byte ) 0x34, array[ 8 ]);
        assertEquals(( byte ) 0x12, array[ 9 ]);
        array = new byte[ 9 ];
        try
        {
            // Offset 2 plus 8 bytes of data does not fit in a 9-byte array.
            new LongField(2, 5L, array);
            fail("should have gotten ArrayIndexOutOfBoundsException");
        }
        catch (ArrayIndexOutOfBoundsException ignored_e)
        {
            // as expected
        }
        // Round-trip every test value through a minimal 8-byte array.
        for (long element : _test_array) {
            array = new byte[ 8 ];
            new LongField(0, element, array);
            assertEquals(element, new LongField(0, array).get());
        }
    }

    public void testSet()
    {
        LongField field = new LongField(0);
        byte[] array = new byte[ 8 ];

        for (int j = 0; j < _test_array.length; j++)
        {
            // set(long) only updates the in-memory value ...
            field.set(_test_array[ j ]);
            assertEquals("testing _1 " + j, _test_array[ j ], field.get());
            field = new LongField(0);
            // ... while set(long, byte[]) also serializes it into the array.
            // NOTE(review): the "_2" message omits the index j, unlike _1/_3.
            field.set(_test_array[ j ], array);
            assertEquals("testing _2 ", _test_array[ j ], field.get());
            // Verify little-endian layout byte by byte; the byte cast keeps
            // only the low 8 bits of each shifted value.
            assertEquals("testing _3.0 " + _test_array[ j ],
                         ( byte ) (_test_array[ j ] % 256), array[ 0 ]);
            assertEquals("testing _3.1 " + _test_array[ j ],
                         ( byte ) ((_test_array[ j ] >> 8) % 256),
                         array[ 1 ]);
            assertEquals("testing _3.2 " + _test_array[ j ],
                         ( byte ) ((_test_array[ j ] >> 16) % 256),
                         array[ 2 ]);
            assertEquals("testing _3.3 " + _test_array[ j ],
                         ( byte ) ((_test_array[ j ] >> 24) % 256),
                         array[ 3 ]);
            assertEquals("testing _3.4 " + _test_array[ j ],
                         ( byte ) ((_test_array[ j ] >> 32) % 256),
                         array[ 4 ]);
            assertEquals("testing _3.5 " + _test_array[ j ],
                         ( byte ) ((_test_array[ j ] >> 40) % 256),
                         array[ 5 ]);
            assertEquals("testing _3.6 " + _test_array[ j ],
                         ( byte ) ((_test_array[ j ] >> 48) % 256),
                         array[ 6 ]);
            assertEquals("testing _3.7 " + _test_array[ j ],
                         ( byte ) ((_test_array[ j ] >> 56) % 256),
                         array[ 7 ]);
        }
    }

    public void testReadFromBytes()
    {
        // Offset 1 with an 8-byte array leaves only 7 readable bytes: overflow.
        LongField field = new LongField(1);
        byte[] array = new byte[ 8 ];

        try
        {
            field.readFromBytes(array);
            fail("should have caught ArrayIndexOutOfBoundsException");
        }
        catch (ArrayIndexOutOfBoundsException ignored_e)
        {
            // as expected
        }
        field = new LongField(0);
        for (int j = 0; j < _test_array.length; j++)
        {
            // Hand-build the little-endian representation, then read it back.
            array[ 0 ] = ( byte ) (_test_array[ j ] % 256);
            array[ 1 ] = ( byte ) ((_test_array[ j ] >> 8) % 256);
            array[ 2 ] = ( byte ) ((_test_array[ j ] >> 16) % 256);
            array[ 3 ] = ( byte ) ((_test_array[ j ] >> 24) % 256);
            array[ 4 ] = ( byte ) ((_test_array[ j ] >> 32) % 256);
            array[ 5 ] = ( byte ) ((_test_array[ j ] >> 40) % 256);
            array[ 6 ] = ( byte ) ((_test_array[ j ] >> 48) % 256);
            array[ 7 ] = ( byte ) ((_test_array[ j ] >> 56) % 256);
            field.readFromBytes(array);
            assertEquals("testing " + j, _test_array[ j ], field.get());
        }
    }

    public void testReadFromStream()
        throws IOException
    {
        LongField field = new LongField(0);
        // Pack every test value, little-endian, into one contiguous buffer.
        byte[] buffer = new byte[ _test_array.length * 8 ];

        for (int j = 0; j < _test_array.length; j++)
        {
            buffer[ (j * 8) + 0 ] = ( byte ) ((_test_array[ j ] >> 0) % 256);
            buffer[ (j * 8) + 1 ] = ( byte ) ((_test_array[ j ] >> 8) % 256);
            buffer[ (j * 8) + 2 ] = ( byte ) ((_test_array[ j ] >> 16) % 256);
            buffer[ (j * 8) + 3 ] = ( byte ) ((_test_array[ j ] >> 24) % 256);
            buffer[ (j * 8) + 4 ] = ( byte ) ((_test_array[ j ] >> 32) % 256);
            buffer[ (j * 8) + 5 ] = ( byte ) ((_test_array[ j ] >> 40) % 256);
            buffer[ (j * 8) + 6 ] = ( byte ) ((_test_array[ j ] >> 48) % 256);
            buffer[ (j * 8) + 7 ] = ( byte ) ((_test_array[ j ] >> 56) % 256);
        }
        ByteArrayInputStream stream = new ByteArrayInputStream(buffer);

        // Successive reads must consume exactly 8 bytes each.
        for (int j = 0; j < buffer.length / 8; j++)
        {
            field.readFromStream(stream);
            assertEquals("Testing " + j, _test_array[ j ], field.get());
        }
    }

    public void testWriteToBytes()
    {
        LongField field = new LongField(0);
        byte[] array = new byte[ 8 ];

        for (long element : _test_array) {
            field.set(element);
            field.writeToBytes(array);
            // Reassemble the long from the little-endian bytes, masking each
            // shifted byte to undo sign extension.
            long val = (( long ) array[ 7 ]) << 56;

            val &= 0xFF00000000000000L;
            val += ((( long ) array[ 6 ]) << 48) & 0x00FF000000000000L;
            val += ((( long ) array[ 5 ]) << 40) & 0x0000FF0000000000L;
            val += ((( long ) array[ 4 ]) << 32) & 0x000000FF00000000L;
            val += ((( long ) array[ 3 ]) << 24) & 0x00000000FF000000L;
            val += ((( long ) array[ 2 ]) << 16) & 0x0000000000FF0000L;
            val += ((( long ) array[ 1 ]) << 8) & 0x000000000000FF00L;
            val += (array[ 0 ] & 0x00000000000000FFL);
            assertEquals("testing ", element, val);
        }
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.application.impl;
import com.intellij.BundleBase;
import com.intellij.CommonBundle;
import com.intellij.concurrency.JobScheduler;
import com.intellij.diagnostic.PerformanceWatcher;
import com.intellij.diagnostic.ThreadDumper;
import com.intellij.execution.process.ProcessIOExecutorService;
import com.intellij.featureStatistics.fusCollectors.AppLifecycleUsageTriggerCollector;
import com.intellij.ide.*;
import com.intellij.ide.plugins.PluginManagerCore;
import com.intellij.idea.IdeaApplication;
import com.intellij.idea.Main;
import com.intellij.idea.StartupUtil;
import com.intellij.internal.statistic.eventLog.FeatureUsageLogger;
import com.intellij.internal.statistic.service.fus.collectors.FUSApplicationUsageTrigger;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.ex.ActionUtil;
import com.intellij.openapi.application.*;
import com.intellij.openapi.application.ex.ApplicationEx;
import com.intellij.openapi.application.ex.ApplicationUtil;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.components.ServiceKt;
import com.intellij.openapi.components.impl.PlatformComponentManagerImpl;
import com.intellij.openapi.components.impl.ServiceManagerImpl;
import com.intellij.openapi.components.impl.stores.StoreUtil;
import com.intellij.openapi.diagnostic.Attachment;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.diagnostic.RuntimeExceptionWithAttachments;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.progress.*;
import com.intellij.openapi.progress.impl.CoreProgressManager;
import com.intellij.openapi.progress.util.PotemkinProgress;
import com.intellij.openapi.progress.util.ProgressWindow;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ex.ProjectManagerEx;
import com.intellij.openapi.project.impl.ProjectManagerImpl;
import com.intellij.openapi.ui.DialogEarthquakeShaker;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.MessageDialogBuilder;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.ui.AppIcon;
import com.intellij.ui.Splash;
import com.intellij.util.*;
import com.intellij.util.concurrency.AppExecutorUtil;
import com.intellij.util.concurrency.AppScheduledExecutorService;
import com.intellij.util.concurrency.Semaphore;
import com.intellij.util.containers.Stack;
import com.intellij.util.io.storage.HeavyProcessLatch;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import org.jetbrains.ide.PooledThreadExecutor;
import org.picocontainer.MutablePicoContainer;
import sun.awt.AWTAccessor;
import sun.awt.AWTAutoShutdown;
import javax.swing.*;
import java.awt.*;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
public class ApplicationImpl extends PlatformComponentManagerImpl implements ApplicationEx {
  private static final Logger LOG = Logger.getInstance("#com.intellij.application.impl.ApplicationImpl");

  // Application-wide read/write lock; initialized in the constructor once the EDT thread is known.
  final ReadMostlyRWLock myLock;

  private final ModalityInvokator myInvokator = new ModalityInvokatorImpl();

  private final EventDispatcher<ApplicationListener> myDispatcher = EventDispatcher.create(ApplicationListener.class);

  // Launch-mode flags, fixed at construction time.
  private final boolean myTestModeFlag;
  private final boolean myHeadlessMode;
  private final boolean myCommandLineMode;

  private final boolean myIsInternal;
  private final String myName;

  private final Stack<Class> myWriteActionsStack = new Stack<>(); // contents modified in write action, read in read action
  private final TransactionGuardImpl myTransactionGuard = new TransactionGuardImpl();
  // Stack depth at which the current write lock was taken; compared against in endWrite() to decide when to unlock.
  private int myWriteStackBase;
  // Thread currently executing a background write action, or null; see runWriteActionWithProgressInBackgroundThread().
  private volatile Thread myWriteActionThread;

  private final long myStartTime;
  @Nullable
  private Splash mySplash; // nulled out after load() finishes so the splash can be garbage-collected
  private boolean mySaveAllowed;
  // Re-entrancy guards for the exit/dispose sequences.
  private volatile boolean myExitInProgress;
  private volatile boolean myDisposeInProgress;

  private final Disposable myLastDisposable = Disposer.newDisposable(); // will be disposed last

  private final AtomicBoolean mySaveSettingsIsInProgress = new AtomicBoolean(false);

  @SuppressWarnings("UseOfArchaicSystemPropertyAccessors")
  private static final int ourDumpThreadsOnLongWriteActionWaiting = Integer.getInteger("dump.threads.on.long.write.action.waiting", 0);

  private final ExecutorService ourThreadExecutorsService = PooledThreadExecutor.INSTANCE;

  private boolean myLoaded;

  // Client-property key used by assertIsDispatchThread(JComponent) to remember a component was realized.
  private static final String WAS_EVER_SHOWN = "was.ever.shown";
  /**
   * Builds the application instance, registers it with {@link ApplicationManager} and wires
   * mode-dependent behavior (unit test / headless / command line / internal).
   * Side effects are significant and order-sensitive: static flags are assigned, external
   * command-line listeners are installed, and the read-write lock is bound to the EDT.
   */
  public ApplicationImpl(boolean isInternal,
                         boolean isUnitTestMode,
                         boolean isHeadless,
                         boolean isCommandLine,
                         @NotNull String appName,
                         @Nullable Splash splash) {
    super(null);

    ApplicationManager.setApplication(this, myLastDisposable); // reset back to null only when all components already disposed

    getPicoContainer().registerComponentInstance(Application.class, this);
    getPicoContainer().registerComponentInstance(TransactionGuard.class.getName(), myTransactionGuard);

    //noinspection AssignmentToStaticFieldFromInstanceMethod
    BundleBase.assertKeyIsFound = IconLoader.STRICT = isUnitTestMode || isInternal;

    AWTExceptionHandler.register(); // do not crash AWT on exceptions

    Disposer.setDebugMode(isInternal || isUnitTestMode || Disposer.isDebugDisposerOn());

    myStartTime = System.currentTimeMillis();
    mySplash = splash;
    myName = appName;

    myIsInternal = isInternal;
    myTestModeFlag = isUnitTestMode;
    myHeadlessMode = isHeadless;
    myCommandLineMode = isCommandLine;

    // Saving is suppressed in tests and headless runs to avoid touching user configuration.
    mySaveAllowed = !(isUnitTestMode || isHeadless);

    if (!isUnitTestMode && !isHeadless) {
      Disposer.register(this, Disposer.newDisposable(), "ui");

      // When another IDE instance forwards its command line here, open the project and focus its frame.
      StartupUtil.addExternalInstanceListener(args -> invokeLater(() -> {
        LOG.info("ApplicationImpl.externalInstanceListener invocation");
        String currentDirectory = args.isEmpty() ? null : args.get(0);
        List<String> realArgs = args.isEmpty() ? args : args.subList(1, args.size());
        final Project project = CommandLineProcessor.processExternalCommandLine(realArgs, currentDirectory);
        JFrame frame = project == null ? WindowManager.getInstance().findVisibleFrame() :
                       (JFrame)WindowManager.getInstance().getIdeFrame(project);
        if (frame != null) {
          if (frame instanceof IdeFrame) {
            AppIcon.getInstance().requestFocus((IdeFrame)frame);
          } else {
            frame.toFront();
            DialogEarthquakeShaker.shake(frame);
          }
        }
      }));

      //noinspection AssignmentToStaticFieldFromInstanceMethod
      WindowsCommandLineProcessor.LISTENER = (currentDirectory, args) -> {
        List<String> argsList = Arrays.asList(args);
        LOG.info("Received external Windows command line: current directory " + currentDirectory + ", command line " + argsList);
        invokeLater(() -> {
          CommandLineProcessor.processExternalCommandLine(argsList, currentDirectory);
        });
      };
    }

    if (isUnitTestMode && IdeaApplication.getInstance() == null) {
      String[] args = {"inspect", "", "", ""};
      Main.setFlags(args); // set both isHeadless and isCommandLine to true
      System.setProperty(IdeaApplication.IDEA_IS_UNIT_TEST, Boolean.TRUE.toString());
      assert Main.isHeadless();
      assert Main.isCommandLine();
      //noinspection ResultOfObjectAllocationIgnored
      new IdeaApplication(args);
    }

    gatherStatistics = LOG.isDebugEnabled() || isUnitTestMode() || isInternal();

    Thread edt = UIUtil.invokeAndWaitIfNeeded(() -> {
      // instantiate AppDelayQueue which starts "Periodic task thread" which we'll mark busy to prevent this EDT to die
      // that thread was chosen because we know for sure it's running
      AppScheduledExecutorService service = (AppScheduledExecutorService)AppExecutorUtil.getAppScheduledExecutorService();
      Thread thread = service.getPeriodicTasksThread();
      AWTAutoShutdown.getInstance().notifyThreadBusy(thread); // needed for EDT not to exit suddenly
      Disposer.register(this, () -> {
        AWTAutoShutdown.getInstance().notifyThreadFree(thread); // allow for EDT to exit - needed for Upsource
      });
      return Thread.currentThread();
    });
    myLock = new ReadMostlyRWLock(edt);

    NoSwingUnderWriteAction.watchForEvents(this);
  }
/**
* Executes a {@code runnable} in an "impatient" mode.
* In this mode any attempt to call {@link #runReadAction(Runnable)}
* would fail (i.e. throw {@link ApplicationUtil.CannotRunReadActionException})
* if there is a pending write action.
*/
@Override
public void executeByImpatientReader(@NotNull Runnable runnable) throws ApplicationUtil.CannotRunReadActionException {
if (isDispatchThread()) {
runnable.run();
}
else {
myLock.executeByImpatientReader(runnable);
}
}
@Override
public boolean isInImpatientReader() {
return myLock.isInImpatientReader();
}
  /**
   * Saves settings, closes all open projects (optionally letting them veto the close) and
   * disposes this application inside a write action.
   *
   * @param checkCanCloseProject if true, projects may refuse to close and abort the shutdown
   * @return false if a project vetoed closing; true when everything was disposed
   */
  private boolean disposeSelf(final boolean checkCanCloseProject) {
    final ProjectManagerImpl manager = (ProjectManagerImpl)ProjectManagerEx.getInstanceEx();
    if (manager == null) {
      // No project manager (very early shutdown): just persist settings.
      saveSettings(true);
    }
    else {
      final boolean[] canClose = {true};
      try {
        CommandProcessor.getInstance().executeCommand(null, () -> {
          saveSettings(true);

          if (!manager.closeAndDisposeAllProjects(checkCanCloseProject)) {
            canClose[0] = false;
          }
        }, ApplicationBundle.message("command.exit"), null);
      }
      catch (Throwable e) {
        LOG.error(e);
      }
      if (!canClose[0]) {
        return false;
      }
    }
    // Disposal runs inside a write action to keep read actions out during teardown.
    runWriteAction(() -> Disposer.dispose(this));

    Disposer.assertIsEmpty();
    return true;
  }
  @Override
  @NotNull
  public String getName() {
    return myName;
  }

  /** @return true if the current thread holds the read lock directly (not via write access). */
  @Override
  public boolean holdsReadLock() {
    return myLock.isReadLockedByThisThread();
  }

  @NotNull
  @Override
  protected MutablePicoContainer createPicoContainer() {
    // The application-level container is the root extension area's container.
    return Extensions.getRootArea().getPicoContainer();
  }

  @Override
  public boolean isInternal() {
    return myIsInternal;
  }

  @Override
  public boolean isEAP() {
    return ApplicationInfoImpl.getShadowInstance().isEAP();
  }

  @Override
  public boolean isUnitTestMode() {
    return myTestModeFlag;
  }

  @Override
  public boolean isHeadlessEnvironment() {
    return myHeadlessMode;
  }

  @Override
  public boolean isCommandLine() {
    return myCommandLineMode;
  }
@NotNull
@Override
public Future<?> executeOnPooledThread(@NotNull final Runnable action) {
ReadMostlyRWLock.SuspensionId suspensionId = myLock.currentReadPrivilege();
return ourThreadExecutorsService.submit(new Runnable() {
@Override
public String toString() {
return action.toString();
}
@Override
public void run() {
try (AccessToken ignored = myLock.applyReadPrivilege(suspensionId)) {
action.run();
}
catch (ProcessCanceledException e) {
// ignore
}
catch (Throwable t) {
LOG.error(t);
}
finally {
Thread.interrupted(); // reset interrupted status
}
}
});
}
@NotNull
@Override
public <T> Future<T> executeOnPooledThread(@NotNull final Callable<T> action) {
ReadMostlyRWLock.SuspensionId suspensionId = myLock.currentReadPrivilege();
return ourThreadExecutorsService.submit(new Callable<T>() {
@Override
public T call() {
try (AccessToken ignored = myLock.applyReadPrivilege(suspensionId)) {
return action.call();
}
catch (ProcessCanceledException e) {
// ignore
}
catch (Throwable t) {
LOG.error(t);
}
finally {
Thread.interrupted(); // reset interrupted status
}
return null;
}
@Override
public String toString() {
return action.toString();
}
});
}
  /** @return true if the current thread is the thread owning the write lock (normally the EDT). */
  @Override
  public boolean isDispatchThread() {
    return myLock.isWriteThread();
  }

  @Override
  @NotNull
  public ModalityInvokator getInvokator() {
    return myInvokator;
  }

  @Override
  public void invokeLater(@NotNull Runnable runnable) {
    invokeLater(runnable, getDisposed());
  }

  @Override
  public void invokeLater(@NotNull Runnable runnable, @NotNull Condition expired) {
    invokeLater(runnable, ModalityState.defaultModalityState(), expired);
  }

  @Override
  public void invokeLater(@NotNull Runnable runnable, @NotNull ModalityState state) {
    invokeLater(runnable, state, getDisposed());
  }

  // All invokeLater overloads funnel into this one; the runnable is wrapped by the transaction
  // guard so it executes in a proper transaction context.
  @Override
  public void invokeLater(@NotNull Runnable runnable, @NotNull ModalityState state, @NotNull Condition expired) {
    LaterInvocator.invokeLaterWithCallback(myTransactionGuard.wrapLaterInvocation(runnable, state), state, expired, null);
  }

  @Override
  public void load() {
    load(null);
  }
  /**
   * Loads all application-level components, notifying {@link ApplicationLoadListener}s before
   * component creation and reporting progress to the splash screen when one is present.
   *
   * @param configPath explicit configuration directory, or null to use {@link PathManager#getConfigPath()}
   */
  @Override
  public void load(@Nullable final String configPath) {
    // Mark the whole load as a "heavy process" so other subsystems can yield while it runs.
    AccessToken token = HeavyProcessLatch.INSTANCE.processStarted("Loading application components");
    try {
      long start = System.currentTimeMillis();
      // When a splash is shown, forward component-loading progress to it.
      ProgressIndicator indicator = mySplash == null ? null : new EmptyProgressIndicator() {
        @Override
        public void setFraction(double fraction) {
          mySplash.showProgress("", (float)fraction);
        }
      };
      init(indicator, () -> {
        // create ServiceManagerImpl at first to force extension classes registration
        getPicoContainer().getComponentInstance(ServiceManagerImpl.class);

        String effectiveConfigPath = FileUtilRt.toSystemIndependentName(configPath == null ? PathManager.getConfigPath() : configPath);
        ApplicationLoadListener[] applicationLoadListeners = ApplicationLoadListener.EP_NAME.getExtensions();
        for (ApplicationLoadListener listener : applicationLoadListeners) {
          try {
            listener.beforeApplicationLoaded(this, effectiveConfigPath);
          }
          catch (Throwable e) {
            LOG.error(e);
          }
        }

        // we set it after beforeApplicationLoaded call, because app store can depends on stream provider state
        ServiceKt.getStateStore(this).setPath(effectiveConfigPath);

        for (ApplicationLoadListener listener : applicationLoadListeners) {
          try {
            listener.beforeComponentsCreated();
          }
          catch (Throwable e) {
            LOG.error(e);
          }
        }
      });
      LOG.info(getComponentConfigCount() + " application components initialized in " + (System.currentTimeMillis() - start) + "ms");
    }
    finally {
      token.finish();
    }
    myLoaded = true;
    mySplash = null; // drop the splash reference so it can be garbage-collected

    createLocatorFile();
  }
@Override
protected void createComponents(@Nullable ProgressIndicator indicator) {
// we cannot wrap "init()" call because ProgressManager instance could be created only after component registration (our "componentsRegistered" callback)
Runnable task = () -> super.createComponents(indicator);
if (indicator == null) {
// no splash, no need to to use progress manager
task.run();
}
else {
ProgressManager.getInstance().runProcess(task, indicator);
}
}
@Override
@Nullable
protected ProgressIndicator getProgressIndicator() {
// could be called before full initialization
ProgressManager progressManager = (ProgressManager)getPicoContainer().getComponentInstance(ProgressManager.class.getName());
return progressManager == null ? null : progressManager.getProgressIndicator();
}
@Override
protected void setProgressDuringInit(@NotNull ProgressIndicator indicator) {
float start = PluginManagerCore.PLUGINS_PROGRESS_PART + PluginManagerCore.LOADERS_PROGRESS_PART;
indicator.setFraction(start + getPercentageOfComponentsLoaded() * (1 - start));
}
private static void createLocatorFile() {
File locatorFile = new File(PathManager.getSystemPath() + "/" + ApplicationEx.LOCATOR_FILE_NAME);
try {
byte[] data = PathManager.getHomePath().getBytes(CharsetToolkit.UTF8_CHARSET);
FileUtil.writeToFile(locatorFile, data);
}
catch (IOException e) {
LOG.warn("can't store a location in '" + locatorFile + "'", e);
}
}
  @Override
  public boolean isLoaded() {
    return myLoaded;
  }

  /**
   * Tears the application down: fires exit events, disposes components, shuts down the shared
   * scheduled executor and finally disposes {@link #myLastDisposable}. The order of the steps
   * below is significant and must not be changed casually.
   */
  @Override
  public void dispose() {
    HeavyProcessLatch.INSTANCE.stopThreadPrioritizing();
    fireApplicationExiting();

    ShutDownTracker.getInstance().ensureStopperThreadsFinished();

    disposeComponents();

    AppScheduledExecutorService service = (AppScheduledExecutorService)AppExecutorUtil.getAppScheduledExecutorService();
    service.shutdownAppScheduledExecutorService();

    super.dispose();
    Disposer.dispose(myLastDisposable); // dispose it last

    if (gatherStatistics) {
      //noinspection TestOnlyProblems
      LOG.info(writeActionStatistics());
      LOG.info(ActionUtil.ActionPauses.STAT.statistics());
      //noinspection TestOnlyProblems
      LOG.info(((AppScheduledExecutorService)AppExecutorUtil.getAppScheduledExecutorService()).statistics()
               + "; ProcessIOExecutorService threads: "+((ProcessIOExecutorService)ProcessIOExecutorService.INSTANCE).getThreadCounter()
      );
    }
  }

  /** @return human-readable write-action pause statistics, collected only when {@link #gatherStatistics} is on. */
  @TestOnly
  @NotNull
  public String writeActionStatistics() {
    return ActionPauses.WRITE.statistics();
  }
  /** Convenience overload of the six-argument variant: no parent component, default cancel text. */
  @Override
  public boolean runProcessWithProgressSynchronously(@NotNull final Runnable process,
                                                     @NotNull String progressTitle,
                                                     boolean canBeCanceled,
                                                     Project project) {
    return runProcessWithProgressSynchronously(process, progressTitle, canBeCanceled, project, null);
  }

  /** Convenience overload of the six-argument variant with default cancel-button text. */
  @Override
  public boolean runProcessWithProgressSynchronously(@NotNull final Runnable process,
                                                     @NotNull final String progressTitle,
                                                     final boolean canBeCanceled,
                                                     @Nullable final Project project,
                                                     final JComponent parentComponent) {
    return runProcessWithProgressSynchronously(process, progressTitle, canBeCanceled, project, parentComponent, null);
  }
  /**
   * Runs {@code process} on a pooled thread under a modal progress dialog, blocking the EDT in
   * a nested event loop until the process finishes. Under a write action, or in a headless
   * (non-test) environment, the process instead runs synchronously with an empty indicator.
   *
   * @return true if the process completed; false if it was canceled
   */
  @Override
  public boolean runProcessWithProgressSynchronously(@NotNull final Runnable process,
                                                     @NotNull final String progressTitle,
                                                     final boolean canBeCanceled,
                                                     @Nullable final Project project,
                                                     final JComponent parentComponent,
                                                     final String cancelText) {
    assertIsDispatchThread();
    boolean writeAccessAllowed = isWriteAccessAllowed();
    if (writeAccessAllowed // Disallow running process in separate thread from under write action.
                           // The thread will deadlock trying to get read action otherwise.
        || isHeadlessEnvironment() && !isUnitTestMode()
      ) {
      if (writeAccessAllowed) {
        LOG.debug("Starting process with progress from within write action makes no sense");
      }
      try {
        ProgressManager.getInstance().runProcess(process, new EmptyProgressIndicator());
      }
      catch (ProcessCanceledException e) {
        // ok to ignore.
        return false;
      }
      return true;
    }

    final ProgressWindow progress = new ProgressWindow(canBeCanceled, false, project, parentComponent, cancelText);
    // in case of abrupt application exit when 'ProgressManager.getInstance().runProcess(process, progress)' below
    // does not have a chance to run, and as a result the progress won't be disposed
    Disposer.register(this, progress);

    progress.setTitle(progressTitle);

    final AtomicBoolean threadStarted = new AtomicBoolean();
    //noinspection SSBasedInspection
    SwingUtilities.invokeLater(() -> {
      executeOnPooledThread(() -> {
        try {
          ProgressManager.getInstance().runProcess(process, progress);
        }
        catch (ProcessCanceledException e) {
          progress.cancel();
          // ok to ignore.
        }
        catch (RuntimeException e) {
          progress.cancel();
          throw e;
        }
      });
      threadStarted.set(true);
    });

    // Nested event loop: returns only when the background process has completed or been canceled.
    progress.startBlocking();
    LOG.assertTrue(threadStarted.get());
    LOG.assertTrue(!progress.isRunning());

    return !progress.isCanceled();
  }
  /**
   * Like {@link #runProcessWithProgressSynchronously}, but the background process runs inside a
   * read action. Two semaphores sequence the handoff: the read lock is acquired first, then the
   * dialog enters modality, and only then does the process start.
   *
   * @throws IncorrectOperationException when called under a write action (would deadlock)
   */
  @Override
  public boolean runProcessWithProgressSynchronouslyInReadAction(@Nullable final Project project,
                                                                 @NotNull final String progressTitle,
                                                                 final boolean canBeCanceled,
                                                                 final String cancelText,
                                                                 final JComponent parentComponent,
                                                                 @NotNull final Runnable process) {
    assertIsDispatchThread();
    boolean writeAccessAllowed = isWriteAccessAllowed();
    if (writeAccessAllowed // Disallow running process in separate thread from under write action.
                           // The thread will deadlock trying to get read action otherwise.
      ) {
      throw new IncorrectOperationException("Starting process with progress from within write action makes no sense");
    }

    final ProgressWindow progress = new ProgressWindow(canBeCanceled, false, project, parentComponent, cancelText);
    // in case of abrupt application exit when 'ProgressManager.getInstance().runProcess(process, progress)' below
    // does not have a chance to run, and as a result the progress won't be disposed
    Disposer.register(this, progress);

    progress.setTitle(progressTitle);

    final Semaphore readActionAcquired = new Semaphore();
    readActionAcquired.down();
    final Semaphore modalityEntered = new Semaphore();
    modalityEntered.down();
    executeOnPooledThread(() -> {
      try {
        ApplicationManager.getApplication().runReadAction(() -> {
          readActionAcquired.up();
          modalityEntered.waitFor();
          ProgressManager.getInstance().runProcess(process, progress);
        });
      }
      catch (ProcessCanceledException e) {
        progress.cancel();
        // ok to ignore.
      }
      catch (RuntimeException e) {
        progress.cancel();
        throw e;
      }
    });

    readActionAcquired.waitFor();
    progress.startBlocking(modalityEntered::up);

    LOG.assertTrue(!progress.isRunning());

    return !progress.isCanceled();
  }
@Override
public void invokeAndWait(@NotNull Runnable runnable, @NotNull ModalityState modalityState) {
if (isDispatchThread()) {
runnable.run();
return;
}
if (holdsReadLock()) {
throw new IllegalStateException("Calling invokeAndWait from read-action leads to possible deadlock.");
}
LaterInvocator.invokeAndWait(myTransactionGuard.wrapLaterInvocation(runnable, modalityState), modalityState);
}
@Override
public void invokeAndWait(@NotNull Runnable runnable) throws ProcessCanceledException {
invokeAndWait(runnable, ModalityState.defaultModalityState());
}
@Override
@NotNull
public ModalityState getCurrentModalityState() {
if (Thread.currentThread() == myWriteActionThread) {
return getDefaultModalityState();
}
return LaterInvocator.getCurrentModalityState();
}
@Override
@NotNull
public ModalityState getModalityStateForComponent(@NotNull Component c) {
if (!isDispatchThread()) LOG.debug("please, use application dispatch thread to get a modality state");
Window window = UIUtil.getWindow(c);
if (window == null) return getNoneModalityState(); //?
return LaterInvocator.modalityStateForWindow(window);
}
@Override
@NotNull
public ModalityState getAnyModalityState() {
return AnyModalityState.ANY;
}
@Override
@NotNull
public ModalityState getDefaultModalityState() {
return isDispatchThread() ? getCurrentModalityState() : CoreProgressManager.getCurrentThreadProgressModality();
}
@Override
@NotNull
public ModalityState getNoneModalityState() {
return ModalityState.NON_MODAL;
}
  @Override
  public long getStartTime() {
    return myStartTime;
  }

  @Override
  public long getIdleTime() {
    return IdeEventQueue.getInstance().getIdleTime();
  }

  @Override
  public void exit() {
    exit(false, false);
  }

  // NOTE(review): the 'force' argument is not forwarded — exit(false, ...) is always called, so
  // this public overload can never force the exit. This may be intentional (external callers
  // should not bypass confirmation/veto), but confirm before relying on 'force' here.
  @Override
  public void exit(boolean force, final boolean exitConfirmed) {
    exit(false, exitConfirmed, false);
  }

  @Override
  public void restart() {
    restart(false);
  }

  @Override
  public void restart(boolean exitConfirmed) {
    exit(false, exitConfirmed, true);
  }
  /**
   * Restarts the IDE with optional process elevation (on Windows).
   *
   * @param exitConfirmed if true, the IDE does not ask for exit confirmation.
   * @param elevate       if true and the IDE is running on Windows, the IDE is restarted in elevated mode (with admin privileges)
   */
  public void restart(boolean exitConfirmed, boolean elevate) {
    exit(false, exitConfirmed, true, elevate, ArrayUtil.EMPTY_STRING_ARRAY);
  }

  /**
   * There are two ways we can get an exit notification.
   *  1. From user input i.e. ExitAction
   *  2. From the native system.
   *  We should not process any quit notifications if we are handling another one
   *
   *  Note: there are possible scenarios when we get a quit notification at a moment when another
   *  quit message is shown. In that case, showing multiple messages sounds contra-intuitive as well
   */
  public void exit(boolean force, boolean exitConfirmed, boolean restart) {
    exit(force, exitConfirmed, restart, ArrayUtil.EMPTY_STRING_ARRAY);
  }

  /** Same as above, additionally passing commands to run before a restart. */
  public void exit(boolean force, boolean exitConfirmed, boolean restart, @NotNull String[] beforeRestart) {
    exit(force, exitConfirmed, restart, false, beforeRestart);
  }
private void exit(boolean force, boolean exitConfirmed, boolean restart, boolean elevate, @NotNull String[] beforeRestart) {
if (!force) {
if (myExitInProgress) return;
if (!exitConfirmed && getDefaultModalityState() != ModalityState.NON_MODAL) return;
}
myExitInProgress = true;
if (isDispatchThread()) {
doExit(force, exitConfirmed, restart, elevate, beforeRestart);
}
else {
invokeLater(() -> doExit(force, exitConfirmed, restart, elevate, beforeRestart), ModalityState.NON_MODAL);
}
}
  /**
   * Performs the actual exit sequence on the dispatch thread: confirmation, listener
   * notification, project/application disposal, optional restart scheduling, then
   * {@link System#exit}.
   */
  private void doExit(boolean force, boolean exitConfirmed, boolean restart, boolean elevate, String[] beforeRestart) {
    try {
      if (!force && !confirmExitIfNeeded(exitConfirmed)) {
        return;
      }

      AppLifecycleListener lifecycleListener = getMessageBus().syncPublisher(AppLifecycleListener.TOPIC);
      lifecycleListener.appClosing();

      myDisposeInProgress = true;

      if (!force && !canExit()) {
        return;
      }

      lifecycleListener.appWillBeClosed(restart);

      // Usage statistics are reported before disposal starts.
      FUSApplicationUsageTrigger.getInstance().trigger(AppLifecycleUsageTriggerCollector.class, "ide.close");
      if (restart) {
        FUSApplicationUsageTrigger.getInstance().trigger(AppLifecycleUsageTriggerCollector.class, "ide.close.restart");
      }
      FeatureUsageLogger.INSTANCE.log("lifecycle", "app.closed", Collections.singletonMap("restart", restart));

      boolean success = disposeSelf(!force);
      if (!success || isUnitTestMode() || Boolean.getBoolean("idea.test.guimode")) {
        if (Boolean.getBoolean("idea.test.guimode")) {
          IdeaApplication.getInstance().shutdown();
        }
        return;
      }

      int exitCode = 0;
      if (restart && Restarter.isSupported()) {
        try {
          Restarter.scheduleRestart(elevate, beforeRestart);
        }
        catch (Throwable t) {
          LOG.error("Restart failed", t);
          Main.showMessage("Restart failed", t);
          exitCode = Main.RESTART_FAILED;
        }
      }
      System.exit(exitCode);
    }
    finally {
      // Reset the guards so a later exit attempt can proceed if we returned early.
      myDisposeInProgress = false;
      myExitInProgress = false;
    }
  }
  /**
   * Asks the user to confirm exiting when required (by settings or because unsafe background
   * tasks are still running). While unsafe tasks run, a scheduled poller auto-closes the dialog
   * once they finish; in that case the question may be re-asked without the task warning.
   *
   * @return true when the exit may proceed
   */
  private static boolean confirmExitIfNeeded(boolean exitConfirmed) {
    boolean hasUnsafeBgTasks = ProgressManager.getInstance().hasUnsafeProgressIndicator();
    if (exitConfirmed && !hasUnsafeBgTasks) {
      return true;
    }

    DialogWrapper.DoNotAskOption option = new DialogWrapper.DoNotAskOption() {
      @Override
      public boolean isToBeShown() {
        return GeneralSettings.getInstance().isConfirmExit() && ProjectManager.getInstance().getOpenProjects().length > 0;
      }

      @Override
      public void setToBeShown(boolean value, int exitCode) {
        GeneralSettings.getInstance().setConfirmExit(value);
      }

      @Override
      public boolean canBeHidden() {
        // "Do not ask again" is offered only when no unsafe background tasks are involved.
        return !hasUnsafeBgTasks;
      }

      @Override
      public boolean shouldSaveOptionsOnCancel() {
        return false;
      }

      @NotNull
      @Override
      public String getDoNotShowMessage() {
        return "Do not ask me again";
      }
    };

    if (hasUnsafeBgTasks || option.isToBeShown()) {
      AtomicBoolean alreadyGone = new AtomicBoolean(false);
      if (hasUnsafeBgTasks) {
        // Poll every second; when the unsafe tasks are gone, close the dialog automatically.
        Runnable dialogRemover = Messages.createMessageDialogRemover(null);
        Runnable task = new Runnable() {
          @Override
          public void run() {
            if (alreadyGone.get()) return;
            if (!ProgressManager.getInstance().hasUnsafeProgressIndicator()) {
              alreadyGone.set(true);
              dialogRemover.run();
            }
            else {
              JobScheduler.getScheduler().schedule(this, 1, TimeUnit.SECONDS);
            }
          }
        };
        JobScheduler.getScheduler().schedule(task, 1, TimeUnit.SECONDS);
      }

      String name = ApplicationNamesInfo.getInstance().getFullProductName();
      String message = ApplicationBundle.message(hasUnsafeBgTasks ? "exit.confirm.prompt.tasks" : "exit.confirm.prompt", name);

      int result = MessageDialogBuilder.yesNo(ApplicationBundle.message("exit.confirm.title"), message)
        .yesText(ApplicationBundle.message("command.exit"))
        .noText(CommonBundle.message("button.cancel"))
        .doNotAsk(option).show();
      if (alreadyGone.getAndSet(true)) {
        // The dialog was auto-closed because tasks completed; re-ask with the plain prompt if still required.
        if (!option.isToBeShown()) {
          return true;
        }
        result = MessageDialogBuilder.yesNo(ApplicationBundle.message("exit.confirm.title"),
                                            ApplicationBundle.message("exit.confirm.prompt", name))
          .yesText(ApplicationBundle.message("command.exit"))
          .noText(CommonBundle.message("button.cancel"))
          .doNotAsk(option).show();
      }
      if (result != Messages.YES) {
        return false;
      }
    }
    return true;
  }
private boolean canExit() {
for (ApplicationListener applicationListener : myDispatcher.getListeners()) {
if (!applicationListener.canExitApplication()) {
return false;
}
}
ProjectManagerEx projectManager = (ProjectManagerEx)ProjectManager.getInstance();
Project[] projects = projectManager.getOpenProjects();
for (Project project : projects) {
if (!projectManager.canClose(project)) {
return false;
}
}
return true;
}
@Override
public void runReadAction(@NotNull final Runnable action) {
if (isReadAccessAllowed()) {
action.run();
}
else {
startRead();
try {
action.run();
}
finally {
endRead();
}
}
}
@Override
public <T> T runReadAction(@NotNull final Computable<T> computation) {
if (isReadAccessAllowed()) {
return computation.compute();
}
startRead();
try {
return computation.compute();
}
finally {
endRead();
}
}
@Override
public <T, E extends Throwable> T runReadAction(@NotNull ThrowableComputable<T, E> computation) throws E {
if (isReadAccessAllowed()) {
return computation.compute();
}
startRead();
try {
return computation.compute();
}
finally {
endRead();
}
}
  private void startRead() {
    myLock.readLock();
  }

  private void endRead() {
    myLock.readUnlock();
  }

  /** Runs a write action under a modal, non-cancellable progress dialog pumped on the EDT. */
  @ApiStatus.Experimental
  public boolean runWriteActionWithNonCancellableProgressInDispatchThread(@NotNull String title,
                                                                          @Nullable Project project,
                                                                          @Nullable JComponent parentComponent,
                                                                          @NotNull Consumer<ProgressIndicator> action) {
    return runEdtProgressWriteAction(title, project, parentComponent, null, action);
  }

  /** Runs a write action under a modal progress dialog on the EDT that the user may cancel. */
  @ApiStatus.Experimental
  public boolean runWriteActionWithCancellableProgressInDispatchThread(@NotNull String title,
                                                                       @Nullable Project project,
                                                                       @Nullable JComponent parentComponent,
                                                                       @NotNull Consumer<ProgressIndicator> action) {
    return runEdtProgressWriteAction(title, project, parentComponent, IdeBundle.message("action.stop"), action);
  }
private boolean runEdtProgressWriteAction(@NotNull String title,
@Nullable Project project,
@Nullable JComponent parentComponent,
@Nullable String cancelText,
@NotNull Consumer<ProgressIndicator> action) {
Class<?> clazz = action.getClass();
startWrite(clazz);
try {
PotemkinProgress indicator = new PotemkinProgress(title, project, parentComponent, cancelText);
indicator.runInSwingThread(() -> action.consume(indicator));
return !indicator.isCanceled();
}
finally {
endWrite(clazz);
}
}
  /**
   * Runs a write action on a background thread while a (possibly cancellable) progress dialog
   * is shown. The write lock itself is taken on the calling thread; {@link #myWriteActionThread}
   * marks the background thread so read-access checks treat it as the write-action owner for
   * the duration of {@code action}.
   */
  @ApiStatus.Experimental
  public boolean runWriteActionWithProgressInBackgroundThread(@NotNull String title,
                                                              @Nullable Project project,
                                                              @Nullable JComponent parentComponent,
                                                              @Nullable String cancelText,
                                                              @NotNull Consumer<ProgressIndicator> action) {
    Class<?> clazz = action.getClass();
    startWrite(clazz);
    try {
      PotemkinProgress indicator = new PotemkinProgress(title, project, parentComponent, cancelText);
      indicator.runInBackground(() -> {
        // Only one background write action may run at a time.
        assert myWriteActionThread == null;
        myWriteActionThread = Thread.currentThread();
        try {
          action.consume(indicator);
        } finally {
          myWriteActionThread = null;
        }
      });
      return !indicator.isCanceled();
    }
    finally {
      endWrite(clazz);
    }
  }
@Override
public void runWriteAction(@NotNull final Runnable action) {
Class<? extends Runnable> clazz = action.getClass();
startWrite(clazz);
try {
action.run();
}
finally {
endWrite(clazz);
}
}
@Override
public <T> T runWriteAction(@NotNull final Computable<T> computation) {
Class<? extends Computable> clazz = computation.getClass();
startWrite(clazz);
try {
return computation.compute();
}
finally {
endWrite(clazz);
}
}
@Override
public <T, E extends Throwable> T runWriteAction(@NotNull ThrowableComputable<T, E> computation) throws E {
Class<? extends ThrowableComputable> clazz = computation.getClass();
startWrite(clazz);
try {
return computation.compute();
}
finally {
endWrite(clazz);
}
}
@Override
public boolean hasWriteAction(@NotNull Class<?> actionClass) {
assertReadAccessAllowed();
for (int i = myWriteActionsStack.size() - 1; i >= 0; i--) {
Class action = myWriteActionsStack.get(i);
if (actionClass == action || ReflectionUtil.isAssignable(actionClass, action)) return true;
}
return false;
}
  @Override
  public void assertReadAccessAllowed() {
    if (!isReadAccessAllowed()) {
      LOG.error(
        "Read access is allowed from event dispatch thread or inside read-action only" +
        " (see com.intellij.openapi.application.Application.runReadAction())",
        "Current thread: " + describe(Thread.currentThread()), "; dispatch thread: " + EventQueue.isDispatchThread() +"; isDispatchThread(): "+isDispatchThread(),
        "SystemEventQueueThread: " + describe(getEventQueueThread()));
    }
  }

  // Human-readable thread identity (toString + identity hash) for diagnostics.
  private static String describe(Thread o) {
    return o == null ? "null" : o + " " + System.identityHashCode(o);
  }

  // The actual AWT event dispatch thread, fetched via sun.awt accessor internals.
  private static Thread getEventQueueThread() {
    EventQueue eventQueue = Toolkit.getDefaultToolkit().getSystemEventQueue();
    return AWTAccessor.getEventQueueAccessor().getDispatchThread(eventQueue);
  }

  @Override
  public boolean isReadAccessAllowed() {
    if (isDispatchThread()) {
      return myWriteActionThread == null; // no reading from EDT during background write action
    }
    return myLock.isReadLockedByThisThread() || myWriteActionThread == Thread.currentThread();
  }
  @Override
  public void assertIsDispatchThread() {
    if (isDispatchThread()) return;
    if (ShutDownTracker.isShutdownHookRunning()) return; // checks are relaxed during JVM shutdown
    assertIsDispatchThread("Access is allowed from event dispatch thread only.");
  }

  // Throws with a full thread dump attached to ease diagnosing wrong-thread access.
  private void assertIsDispatchThread(String message) {
    if (isDispatchThread()) return;
    throw new RuntimeExceptionWithAttachments(
      message,
      "EventQueue.isDispatchThread()=" + EventQueue.isDispatchThread() +
      " Toolkit.getEventQueue()=" + Toolkit.getDefaultToolkit().getSystemEventQueue() +
      "\nCurrent thread: " + describe(Thread.currentThread()) +
      "\nSystemEventQueueThread: " + describe(getEventQueueThread()),
      new Attachment("threadDump.txt", ThreadDumper.dumpThreadsToString()));
  }

  /**
   * Like {@link #assertIsDispatchThread()}, but enforced only once the component has actually
   * been realized (tracked via the {@link #WAS_EVER_SHOWN} client property): components that
   * were never shown may legitimately be touched off the EDT.
   */
  @Override
  public void assertIsDispatchThread(@Nullable final JComponent component) {
    if (component == null) return;

    if (isDispatchThread()) {
      return;
    }

    if (Boolean.TRUE.equals(component.getClientProperty(WAS_EVER_SHOWN))) {
      assertIsDispatchThread();
    }
    else {
      final JRootPane root = component.getRootPane();
      if (root != null) {
        component.putClientProperty(WAS_EVER_SHOWN, Boolean.TRUE);
        assertIsDispatchThread();
      }
    }
  }

  @Override
  public void assertTimeConsuming() {
    if (myTestModeFlag || myHeadlessMode || ShutDownTracker.isShutdownHookRunning()) return;
    LOG.assertTrue(!isDispatchThread(), "This operation is time consuming and must not be called on EDT");
  }
@Override
public boolean tryRunReadAction(@NotNull Runnable action) {
//if we are inside read action, do not try to acquire read lock again since it will deadlock if there is a pending writeAction
if (isReadAccessAllowed()) {
action.run();
}
else {
if (!myLock.tryReadLock()) return false;
try {
action.run();
}
finally {
endRead();
}
}
return true;
}
  /** @return whether the IDE currently has an active (focused) window; headless apps count as always active. */
  @Override
  public boolean isActive() {
    if (isHeadlessEnvironment()) return true;

    // Keyboard focus is the source of truth: if we think we are inactive but a window has focus, resync.
    Window activeWindow = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow();
    if (ApplicationActivationStateManager.getState().isInactive()
        && activeWindow != null) {
      ApplicationActivationStateManager.updateState(activeWindow);
    }

    return ApplicationActivationStateManager.getState().isActive();
  }

  @NotNull
  @Override
  public AccessToken acquireReadActionLock() {
    // if we are inside read action, do not try to acquire read lock again since it will deadlock if there is a pending writeAction
    return isReadAccessAllowed() ? AccessToken.EMPTY_ACCESS_TOKEN : new ReadAccessToken();
  }

  // True between fireBeforeWriteActionStart() and the moment the write lock is actually acquired.
  private volatile boolean myWriteActionPending;

  @Override
  public boolean isWriteActionPending() {
    return myWriteActionPending;
  }

  // Whether to collect write-action pause statistics (debug, unit-test or internal mode); set in the constructor.
  private final boolean gatherStatistics;

  // Holder for the write-action pause statistics accumulator.
  private static class ActionPauses {
    private static final PausesStat WRITE = new PausesStat("Write action");
  }
// Acquires the global write lock on behalf of a write action of the given class:
// fires "before" listeners, tracks the pending flag while waiting for the lock,
// and optionally dumps threads if acquiring takes too long.
private void startWrite(@NotNull Class clazz) {
if (!isWriteAccessAllowed()) {
assertIsDispatchThread("Write access is allowed from event dispatch thread only");
}
HeavyProcessLatch.INSTANCE.stopThreadPrioritizing(); // let non-cancellable read actions complete faster, if present
// Remember the previous pending state so nested write actions restore it correctly.
boolean writeActionPending = myWriteActionPending;
if (gatherStatistics && myWriteActionsStack.isEmpty() && !writeActionPending) {
ActionPauses.WRITE.started("write action ("+clazz+")");
}
myWriteActionPending = true;
try {
ActivityTracker.getInstance().inc();
fireBeforeWriteActionStart(clazz);
// Acquire the lock only if we do not already hold it; tryWriteLock avoids blocking when it is free.
if (!myLock.isWriteLocked() && !myLock.tryWriteLock()) {
// While blocked on the lock, periodically dump threads for diagnostics (if configured).
Future<?> reportSlowWrite = ourDumpThreadsOnLongWriteActionWaiting <= 0 ? null :
JobScheduler.getScheduler()
.scheduleWithFixedDelay(() -> PerformanceWatcher.getInstance().dumpThreads("waiting", true),
ourDumpThreadsOnLongWriteActionWaiting,
ourDumpThreadsOnLongWriteActionWaiting, TimeUnit.MILLISECONDS);
long t = LOG.isDebugEnabled() ? System.currentTimeMillis() : 0;
myLock.writeLock();
if (LOG.isDebugEnabled()) {
long elapsed = System.currentTimeMillis() - t;
if (elapsed != 0) {
LOG.debug("Write action wait time: " + elapsed);
}
}
if (reportSlowWrite != null) {
reportSlowWrite.cancel(false);
}
}
}
finally {
// Restore the flag captured above, whether or not locking succeeded.
myWriteActionPending = writeActionPending;
}
myWriteActionsStack.push(clazz);
fireWriteActionStarted(clazz);
}
// Finishes a write action: fires listeners, pops the action stack, and releases
// the write lock once the stack returns to its base level (supports suspension).
private void endWrite(@NotNull Class clazz) {
try {
fireWriteActionFinished(clazz);
// fire listeners before popping stack because if somebody starts write action in a listener,
// there is a danger of unlocking the write lock before other listeners have been run (since write lock became non-reentrant).
}
finally {
myWriteActionsStack.pop();
if (gatherStatistics && myWriteActionsStack.isEmpty() && !myWriteActionPending) {
ActionPauses.WRITE.finished("write action ("+clazz+")");
}
// Unlock only at the stack base; myWriteStackBase is raised while a write action is suspended.
if (myWriteActionsStack.size() == myWriteStackBase) {
myLock.writeUnlock();
}
if (myWriteActionsStack.isEmpty()) {
fireAfterWriteActionFinished(clazz);
}
}
}
// Returns a token that holds the write lock on behalf of the given action class;
// finishing the token ends the write action and releases the lock.
@NotNull
@Override
public AccessToken acquireWriteActionLock(@NotNull Class clazz) {
return new WriteAccessToken(clazz);
}
/**
 * Access token that holds the global write lock for its lifetime. While the lock
 * is held, the current thread's name is tagged with a short token id so write
 * actions are easy to spot in thread dumps; the tag is removed on finish().
 */
private class WriteAccessToken extends AccessToken {
  @NotNull private final Class clazz;

  public WriteAccessToken(@NotNull Class clazz) {
    this.clazz = clazz;
    startWrite(clazz);
    markThreadNameInStackTrace();
  }

  @Override
  public void finish() {
    try {
      endWrite(clazz);
    }
    finally {
      unmarkThreadNameInStackTrace();
    }
  }

  // Appends the token id to the current thread's name (no-op when id() is null).
  private void markThreadNameInStackTrace() {
    String suffix = id();
    if (suffix == null) return;
    Thread current = Thread.currentThread();
    current.setName(current.getName() + suffix);
  }

  // Removes the token id from the current thread's name (no-op when id() is null).
  private void unmarkThreadNameInStackTrace() {
    String suffix = id();
    if (suffix == null) return;
    Thread current = Thread.currentThread();
    current.setName(StringUtil.replace(current.getName(), suffix, ""));
  }

  // Builds a marker like " [WriteAccessToken]" from the most specific class name,
  // or null when the simple name is just the generic "AccessToken".
  private String id() {
    Class aClass = getClass();
    String name = aClass.getName();
    while (name == null) {
      aClass = aClass.getSuperclass();
      name = aClass.getName();
    }
    name = name.substring(name.lastIndexOf('.') + 1);
    name = name.substring(name.lastIndexOf('$') + 1);
    return name.equals("AccessToken") ? null : " [" + name + "]";
  }
}
// Access token that holds the read lock for its lifetime; finish() releases it.
private class ReadAccessToken extends AccessToken {
private ReadAccessToken() {
startRead();
}
@Override
public void finish() {
endRead();
}
}
@Override
public void assertWriteAccessAllowed() {
LOG.assertTrue(isWriteAccessAllowed(),
"Write access is allowed inside write-action only (see com.intellij.openapi.application.Application.runWriteAction())");
}
// Write access is granted to the EDT while the write lock is held, or to the
// thread explicitly designated as the write-action thread.
@Override
public boolean isWriteAccessAllowed() {
return isDispatchThread() && myLock.isWriteLocked() || myWriteActionThread == Thread.currentThread();
}
@Override
public boolean isWriteActionInProgress() {
return myLock.isWriteLocked();
}
// Runs the runnable under a modal progress dialog, temporarily suspending the
// current write action (if any) so background read actions can proceed.
// Must be called on the EDT.
public void executeSuspendingWriteAction(@Nullable Project project, @NotNull String title, @NotNull Runnable runnable) {
assertIsDispatchThread();
if (!myLock.isWriteLocked()) {
// No write action to suspend; just show the modal progress.
runModalProgress(project, title, runnable);
return;
}
myTransactionGuard.submitTransactionAndWait(() -> {
// Raise the stack base so endWrite() will not unlock the suspended write action.
int prevBase = myWriteStackBase;
myWriteStackBase = myWriteActionsStack.size();
try (AccessToken ignored = myLock.writeSuspend()) {
runModalProgress(project, title, () -> {
// The runnable keeps read privileges while the write lock is suspended.
try (AccessToken ignored1 = myLock.grantReadPrivilege()) {
runnable.run();
}
});
} finally {
myWriteStackBase = prevBase;
}
});
}
// Runs the runnable inside a non-cancellable modal progress task with the given title.
private static void runModalProgress(@Nullable Project project, @NotNull String title, @NotNull Runnable runnable) {
ProgressManager.getInstance().run(new Task.Modal(project, title, false) {
@Override
public void run(@NotNull ProgressIndicator indicator) {
runnable.run();
}
});
}
// Listener management: registration plumbing plus the fire* helpers that
// broadcast lifecycle events through the dispatcher's multicaster.
@Override
public void addApplicationListener(@NotNull ApplicationListener l) {
myDispatcher.addListener(l);
}
// Variant whose registration is automatically removed when the parent is disposed.
@Override
public void addApplicationListener(@NotNull ApplicationListener l, @NotNull Disposable parent) {
myDispatcher.addListener(l, parent);
}
@Override
public void removeApplicationListener(@NotNull ApplicationListener l) {
myDispatcher.removeListener(l);
}
private void fireApplicationExiting() {
myDispatcher.getMulticaster().applicationExiting();
}
private void fireBeforeWriteActionStart(@NotNull Class action) {
myDispatcher.getMulticaster().beforeWriteActionStart(action);
}
private void fireWriteActionStarted(@NotNull Class action) {
myDispatcher.getMulticaster().writeActionStarted(action);
}
private void fireWriteActionFinished(@NotNull Class action) {
myDispatcher.getMulticaster().writeActionFinished(action);
}
private void fireAfterWriteActionFinished(@NotNull Class action) {
myDispatcher.getMulticaster().afterWriteActionFinished(action);
}
@Override
public void saveSettings() {
saveSettings(false);
}
// Saves application-level settings unless saving is disabled or another save is
// already in flight (the compareAndSet guards against reentrant/concurrent saves).
@Override
public void saveSettings(boolean isForce) {
if (!mySaveAllowed || !mySaveSettingsIsInProgress.compareAndSet(false, true)) {
return;
}
HeavyProcessLatch.INSTANCE.prioritizeUiActivity();
try {
StoreUtil.save(ServiceKt.getStateStore(this), null, isForce);
}
finally {
mySaveSettingsIsInProgress.set(false);
}
}
// Saves all documents, open projects, and the application configuration.
@Override
public void saveAll() {
StoreUtil.saveDocumentsAndProjectsAndApp(false);
}
@Override
public void setSaveAllowed(boolean value) {
mySaveAllowed = value;
}
@Override
public boolean isSaveAllowed() {
return mySaveAllowed;
}
// Looks up extensions registered on the root (application-level) extension area.
@NotNull
@Override
public <T> T[] getExtensions(@NotNull final ExtensionPointName<T> extensionPointName) {
return Extensions.getRootArea().getExtensionPoint(extensionPointName).getExtensions();
}
// Disposal is also considered "in progress" while the JVM shutdown hook runs.
@Override
public boolean isDisposeInProgress() {
return myDisposeInProgress || ShutDownTracker.isShutdownHookRunning();
}
@Override
public boolean isRestartCapable() {
return Restarter.isSupported();
}
// EAP builds always log slow components, in addition to whatever the base class decides.
@Override
protected boolean logSlowComponents() {
return super.logSlowComponents() || ApplicationInfoImpl.getShadowInstance().isEAP();
}
// Test hook: forces the dispose-in-progress flag without actually disposing.
@TestOnly
public void setDisposeInProgress(boolean disposeInProgress) {
myDisposeInProgress = disposeInProgress;
}
/** Human-readable description of this application and any notable modes it runs in. */
@Override
public String toString() {
  StringBuilder description = new StringBuilder("Application");
  if (isDisposed()) description.append(" (Disposed)");
  if (isUnitTestMode()) description.append(" (Unit test)");
  if (isInternal()) description.append(" (Internal)");
  if (isHeadlessEnvironment()) description.append(" (Headless)");
  if (isCommandLine()) description.append(" (Command line)");
  return description.toString();
}
// Test hook: temporarily removes all application listeners; they are restored
// when the given disposable is disposed.
@TestOnly
void disableEventsUntil(@NotNull Disposable disposable) {
final List<ApplicationListener> listeners = new ArrayList<>(myDispatcher.getListeners());
myDispatcher.getListeners().removeAll(listeners);
Disposer.register(disposable, () -> myDispatcher.getListeners().addAll(listeners));
}
}
| |
/**
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.bitcoin.core;
import com.google.bitcoin.core.Wallet.BalanceType;
import com.google.bitcoin.params.MainNetParams;
import com.google.bitcoin.params.TestNet2Params;
import com.google.bitcoin.params.UnitTestParams;
import com.google.bitcoin.store.BlockStore;
import com.google.bitcoin.store.MemoryBlockStore;
import com.google.bitcoin.utils.BriefLogFormatter;
import com.google.bitcoin.utils.TestUtils;
import com.google.common.util.concurrent.ListenableFuture;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.math.BigInteger;
import java.text.SimpleDateFormat;
import java.util.Date;
import static com.google.bitcoin.utils.TestUtils.createFakeBlock;
import static com.google.bitcoin.utils.TestUtils.createFakeTx;
import static org.junit.Assert.*;
// Handling of chain splits/reorgs are in ChainSplitTests.
public class BlockChainTest {
// Chain backed by (tweakable) testnet params, used by tests that replay real testnet blocks.
private BlockChain testNetChain;
private Wallet wallet;
private BlockChain chain;
private BlockStore blockStore;
private Address coinbaseTo;
private NetworkParameters unitTestParams;
// Last block in which the wallet saw a relevant transaction (captured by the wallet subclass in setUp).
private final StoredBlock[] block = new StoredBlock[1];
// Last coinbase transaction the wallet received (captured by the wallet subclass in setUp).
private Transaction coinbaseTransaction;
// TestNet2 params with a settable proof-of-work limit so difficulty checks can be relaxed per-test.
private static class TweakableTestNet2Params extends TestNet2Params {
public void setProofOfWorkLimit(BigInteger limit) {
proofOfWorkLimit = limit;
}
}
private static final TweakableTestNet2Params testNet = new TweakableTestNet2Params();
// Replaces the block store with a fresh, empty in-memory store.
private void resetBlockStore() {
blockStore = new MemoryBlockStore(unitTestParams);
}
@Before
public void setUp() throws Exception {
BriefLogFormatter.initVerbose();
testNetChain = new BlockChain(testNet, new Wallet(testNet), new MemoryBlockStore(testNet));
// Zero fees keep the balance arithmetic in these tests simple; restored in tearDown().
Wallet.SendRequest.DEFAULT_FEE_PER_KB = BigInteger.ZERO;
unitTestParams = UnitTestParams.get();
// Wallet subclass records which block / coinbase tx it received, for later assertions.
wallet = new Wallet(unitTestParams) {
@Override
public void receiveFromBlock(Transaction tx, StoredBlock block, BlockChain.NewBlockType blockType,
int relativityOffset) throws VerificationException {
super.receiveFromBlock(tx, block, blockType, relativityOffset);
BlockChainTest.this.block[0] = block;
if (tx.isCoinBase()) {
BlockChainTest.this.coinbaseTransaction = tx;
}
}
};
wallet.addKey(new ECKey());
resetBlockStore();
chain = new BlockChain(unitTestParams, wallet, blockStore);
coinbaseTo = wallet.getKeys().get(0).toAddress(unitTestParams);
}
@After
public void tearDown() {
Wallet.SendRequest.DEFAULT_FEE_PER_KB = Transaction.REFERENCE_DEFAULT_MIN_TX_FEE;
}
@Test
public void testBasicChaining() throws Exception {
// Check that we can plug a few blocks together and the futures work.
ListenableFuture<StoredBlock> future = testNetChain.getHeightFuture(2);
// Block 1 from the testnet.
Block b1 = getBlock1();
assertTrue(testNetChain.add(b1));
assertFalse(future.isDone());
// Block 2 from the testnet.
Block b2 = getBlock2();
// Let's try adding an invalid block.
long n = b2.getNonce();
try {
b2.setNonce(12345);
testNetChain.add(b2);
fail();
} catch (VerificationException e) {
b2.setNonce(n);
}
// Now it works because we reset the nonce.
assertTrue(testNetChain.add(b2));
assertTrue(future.isDone());
assertEquals(2, future.get().getHeight());
}
@Test
public void receiveCoins() throws Exception {
// Quick check that we can actually receive coins.
Transaction tx1 = createFakeTx(unitTestParams,
Utils.toNanoCoins(1, 0),
wallet.getKeys().get(0).toAddress(unitTestParams));
Block b1 = createFakeBlock(blockStore, tx1).block;
chain.add(b1);
assertTrue(wallet.getBalance().compareTo(BigInteger.ZERO) > 0);
}
@Test
public void merkleRoots() throws Exception {
// Test that merkle root verification takes place when a relevant transaction is present and doesn't when
// there isn't any such tx present (as an optimization).
Transaction tx1 = createFakeTx(unitTestParams,
Utils.toNanoCoins(1, 0),
wallet.getKeys().get(0).toAddress(unitTestParams));
Block b1 = createFakeBlock(blockStore, tx1).block;
chain.add(b1);
resetBlockStore();
Sha256Hash hash = b1.getMerkleRoot();
b1.setMerkleRoot(Sha256Hash.ZERO_HASH);
try {
chain.add(b1);
fail();
} catch (VerificationException e) {
// Expected.
b1.setMerkleRoot(hash);
}
// Now add a second block with no relevant transactions and then break it.
Transaction tx2 = createFakeTx(unitTestParams, Utils.toNanoCoins(1, 0),
new ECKey().toAddress(unitTestParams));
Block b2 = createFakeBlock(blockStore, tx2).block;
b2.getMerkleRoot();
b2.setMerkleRoot(Sha256Hash.ZERO_HASH);
b2.solve();
chain.add(b2); // Broken block is accepted because its contents don't matter to us.
}
@Test
public void unconnectedBlocks() throws Exception {
Block b1 = unitTestParams.getGenesisBlock().createNextBlock(coinbaseTo);
Block b2 = b1.createNextBlock(coinbaseTo);
Block b3 = b2.createNextBlock(coinbaseTo);
// Connected.
assertTrue(chain.add(b1));
// Unconnected but stored. The head of the chain is still b1.
assertFalse(chain.add(b3));
assertEquals(chain.getChainHead().getHeader(), b1.cloneAsHeader());
// Add in the middle block.
assertTrue(chain.add(b2));
// b3 connects automatically once its parent arrives, so the head jumps to b3.
assertEquals(chain.getChainHead().getHeader(), b3.cloneAsHeader());
}
@Test
public void difficultyTransitions() throws Exception {
// Add a bunch of blocks in a loop until we reach a difficulty transition point. The unit test params have an
// artificially shortened period.
Block prev = unitTestParams.getGenesisBlock();
Utils.setMockClock(System.currentTimeMillis()/1000);
for (int i = 0; i < unitTestParams.getInterval() - 1; i++) {
Block newBlock = prev.createNextBlock(coinbaseTo, Utils.currentTimeMillis()/1000);
assertTrue(chain.add(newBlock));
prev = newBlock;
// The fake chain should seem to be "fast" for the purposes of difficulty calculations.
Utils.rollMockClock(2);
}
// Now add another block that has no difficulty adjustment, it should be rejected.
try {
chain.add(prev.createNextBlock(coinbaseTo, Utils.currentTimeMillis()/1000));
fail();
} catch (VerificationException e) {
}
// Create a new block with the right difficulty target given our blistering speed relative to the huge amount
// of time it's supposed to take (set in the unit test network parameters).
Block b = prev.createNextBlock(coinbaseTo, Utils.currentTimeMillis()/1000);
b.setDifficultyTarget(0x201fFFFFL);
b.solve();
assertTrue(chain.add(b));
// Successfully traversed a difficulty transition period.
}
@Test
public void badDifficulty() throws Exception {
assertTrue(testNetChain.add(getBlock1()));
Block b2 = getBlock2();
assertTrue(testNetChain.add(b2));
Block bad = new Block(testNet);
// Merkle root can be anything here, doesn't matter.
bad.setMerkleRoot(new Sha256Hash("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"));
// Nonce was just some number that made the hash < difficulty limit set below, it can be anything.
bad.setNonce(140548933);
bad.setTime(1279242649);
bad.setPrevBlockHash(b2.getHash());
// We're going to make this block so easy 50% of solutions will pass, and check it gets rejected for having a
// bad difficulty target. Unfortunately the encoding mechanism means we cannot make one that accepts all
// solutions.
bad.setDifficultyTarget(Block.EASIEST_DIFFICULTY_TARGET);
try {
testNetChain.add(bad);
// The difficulty target above should be rejected on the grounds of being easier than the networks
// allowable difficulty.
fail();
} catch (VerificationException e) {
assertTrue(e.getMessage(), e.getCause().getMessage().contains("Difficulty target is bad"));
}
// Accept any level of difficulty now.
BigInteger oldVal = testNet.getProofOfWorkLimit();
testNet.setProofOfWorkLimit(new BigInteger
("00ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", 16));
try {
testNetChain.add(bad);
// We should not get here as the difficulty target should not be changing at this point.
fail();
} catch (VerificationException e) {
assertTrue(e.getMessage(), e.getCause().getMessage().contains("Unexpected change in difficulty"));
}
testNet.setProofOfWorkLimit(oldVal);
// TODO: Test difficulty change is not out of range when a transition period becomes valid.
}
@Test
public void duplicates() throws Exception {
// Adding a block twice should not have any effect, in particular it should not send the block to the wallet.
Block b1 = unitTestParams.getGenesisBlock().createNextBlock(coinbaseTo);
Block b2 = b1.createNextBlock(coinbaseTo);
Block b3 = b2.createNextBlock(coinbaseTo);
assertTrue(chain.add(b1));
assertEquals(b1, block[0].getHeader());
assertTrue(chain.add(b2));
assertEquals(b2, block[0].getHeader());
assertTrue(chain.add(b3));
assertEquals(b3, block[0].getHeader());
assertEquals(b3, chain.getChainHead().getHeader());
assertTrue(chain.add(b2));
assertEquals(b3, chain.getChainHead().getHeader());
// Wallet was NOT called with the new block because the duplicate add was spotted.
assertEquals(b3, block[0].getHeader());
}
@Test
public void intraBlockDependencies() throws Exception {
// Covers issue 166 in which transactions that depend on each other inside a block were not always being
// considered relevant.
Address somebodyElse = new ECKey().toAddress(unitTestParams);
Block b1 = unitTestParams.getGenesisBlock().createNextBlock(somebodyElse);
ECKey key = new ECKey();
wallet.addKey(key);
Address addr = key.toAddress(unitTestParams);
// Create a tx that gives us some coins, and another that spends it to someone else in the same block.
Transaction t1 = TestUtils.createFakeTx(unitTestParams, Utils.toNanoCoins(1, 0), addr);
Transaction t2 = new Transaction(unitTestParams);
t2.addInput(t1.getOutputs().get(0));
t2.addOutput(Utils.toNanoCoins(2, 0), somebodyElse);
b1.addTransaction(t1);
b1.addTransaction(t2);
b1.solve();
chain.add(b1);
// The received coins were spent in the same block, so the net balance is zero.
assertEquals(BigInteger.ZERO, wallet.getBalance());
}
@Test
public void coinbaseTransactionAvailability() throws Exception {
// Check that a coinbase transaction is only available to spend after NetworkParameters.getSpendableCoinbaseDepth() blocks.
// Create a second wallet to receive the coinbase spend.
Wallet wallet2 = new Wallet(unitTestParams);
ECKey receiveKey = new ECKey();
wallet2.addKey(receiveKey);
chain.addWallet(wallet2);
Address addressToSendTo = receiveKey.toAddress(unitTestParams);
// Create a block, sending the coinbase to the coinbaseTo address (which is in the wallet).
Block b1 = unitTestParams.getGenesisBlock().createNextBlockWithCoinbase(wallet.getKeys().get(0).getPubKey());
chain.add(b1);
// Check a transaction has been received.
assertNotNull(coinbaseTransaction);
// The coinbase tx is not yet available to spend.
assertEquals(BigInteger.ZERO, wallet.getBalance());
assertEquals(wallet.getBalance(BalanceType.ESTIMATED), Utils.toNanoCoins(50, 0));
assertTrue(!coinbaseTransaction.isMature());
// Attempt to spend the coinbase - this should fail as the coinbase is not mature yet.
try {
wallet.createSend(addressToSendTo, Utils.toNanoCoins(49, 0));
fail();
} catch (InsufficientMoneyException e) {
}
// Check that the coinbase is unavailable to spend for the next spendableCoinbaseDepth - 2 blocks.
for (int i = 0; i < unitTestParams.getSpendableCoinbaseDepth() - 2; i++) {
// Non relevant tx - just for fake block creation.
Transaction tx2 = createFakeTx(unitTestParams, Utils.toNanoCoins(1, 0),
new ECKey().toAddress(unitTestParams));
Block b2 = createFakeBlock(blockStore, tx2).block;
chain.add(b2);
// Wallet still does not have the coinbase transaction available for spend.
assertEquals(BigInteger.ZERO, wallet.getBalance());
assertEquals(wallet.getBalance(BalanceType.ESTIMATED), Utils.toNanoCoins(50, 0));
// The coinbase transaction is still not mature.
assertTrue(!coinbaseTransaction.isMature());
// Attempt to spend the coinbase - this should fail.
try {
wallet.createSend(addressToSendTo, Utils.toNanoCoins(49, 0));
fail();
} catch (InsufficientMoneyException e) {
}
}
// Give it one more block - should now be able to spend coinbase transaction. Non relevant tx.
Transaction tx3 = createFakeTx(unitTestParams, Utils.toNanoCoins(1, 0), new ECKey().toAddress(unitTestParams));
Block b3 = createFakeBlock(blockStore, tx3).block;
chain.add(b3);
// Wallet now has the coinbase transaction available for spend.
assertEquals(wallet.getBalance(), Utils.toNanoCoins(50, 0));
assertEquals(wallet.getBalance(BalanceType.ESTIMATED), Utils.toNanoCoins(50, 0));
assertTrue(coinbaseTransaction.isMature());
// Create a spend with the coinbase BTC to the address in the second wallet - this should now succeed.
Transaction coinbaseSend2 = wallet.createSend(addressToSendTo, Utils.toNanoCoins(49, 0));
assertNotNull(coinbaseSend2);
// Commit the coinbaseSpend to the first wallet and check the balances decrement.
wallet.commitTx(coinbaseSend2);
assertEquals(wallet.getBalance(BalanceType.ESTIMATED), Utils.toNanoCoins(1, 0));
// Available balance is zero as change has not been received from a block yet.
assertEquals(wallet.getBalance(BalanceType.AVAILABLE), Utils.toNanoCoins(0, 0));
// Give it one more block - change from coinbaseSpend should now be available in the first wallet.
Block b4 = createFakeBlock(blockStore, coinbaseSend2).block;
chain.add(b4);
assertEquals(wallet.getBalance(BalanceType.AVAILABLE), Utils.toNanoCoins(1, 0));
// Check the balances in the second wallet.
assertEquals(wallet2.getBalance(BalanceType.ESTIMATED), Utils.toNanoCoins(49, 0));
assertEquals(wallet2.getBalance(BalanceType.AVAILABLE), Utils.toNanoCoins(49, 0));
}
// Some blocks from the test net.
// Real testnet block at height 2; header fields and expected hash are hard-coded.
private static Block getBlock2() throws Exception {
Block b2 = new Block(testNet);
b2.setMerkleRoot(new Sha256Hash("addc858a17e21e68350f968ccd384d6439b64aafa6c193c8b9dd66320470838b"));
b2.setNonce(2642058077L);
b2.setTime(1296734343L);
b2.setPrevBlockHash(new Sha256Hash("000000033cc282bc1fa9dcae7a533263fd7fe66490f550d80076433340831604"));
assertEquals("000000037b21cac5d30fc6fda2581cf7b2612908aed2abbcc429c45b0557a15f", b2.getHashAsString());
b2.verifyHeader();
return b2;
}
// Real testnet block at height 1; header fields and expected hash are hard-coded.
private static Block getBlock1() throws Exception {
Block b1 = new Block(testNet);
b1.setMerkleRoot(new Sha256Hash("0e8e58ecdacaa7b3c6304a35ae4ffff964816d2b80b62b58558866ce4e648c10"));
b1.setNonce(236038445);
b1.setTime(1296734340);
b1.setPrevBlockHash(new Sha256Hash("00000007199508e34a9ff81e6ec0c477a4cccff2a4767a8eee39c11db367b008"));
assertEquals("000000033cc282bc1fa9dcae7a533263fd7fe66490f550d80076433340831604", b1.getHashAsString());
b1.verifyHeader();
return b1;
}
@Test
public void estimatedBlockTime() throws Exception {
NetworkParameters params = MainNetParams.get();
BlockChain prod = new BlockChain(params, new MemoryBlockStore(params));
Date d = prod.estimateBlockTime(200000);
// The actual date of block 200,000 was 2012-09-22 10:47:00
assertEquals(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ").parse("2012-10-23T08:35:05.000-0700"), d);
}
@Test
public void falsePositives() throws Exception {
double decay = AbstractBlockChain.FP_ESTIMATOR_ALPHA;
assertTrue(0 == chain.getFalsePositiveRate()); // Exactly
chain.trackFalsePositives(55);
assertEquals(decay * 55, chain.getFalsePositiveRate(), 1e-4);
chain.trackFilteredTransactions(550);
double rate1 = chain.getFalsePositiveRate();
// Run this scenario a few more time for the filter to converge
for (int i = 1 ; i < 10 ; i++) {
chain.trackFalsePositives(55);
chain.trackFilteredTransactions(550);
}
// Ensure we are within 10%
assertEquals(0.1, chain.getFalsePositiveRate(), 0.01);
// Check that we get repeatable results after a reset
chain.resetFalsePositiveEstimate();
assertTrue(0 == chain.getFalsePositiveRate()); // Exactly
chain.trackFalsePositives(55);
assertEquals(decay * 55, chain.getFalsePositiveRate(), 1e-4);
chain.trackFilteredTransactions(550);
assertEquals(rate1, chain.getFalsePositiveRate(), 1e-4);
}
}
| |
package moe.crowdhopper.imgurapi.Endpoints;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import moe.crowdhopper.imgurapi.Exceptions.AuthorizationException;
import moe.crowdhopper.imgurapi.Exceptions.HTTPRequestException;
import moe.crowdhopper.imgurapi.Exceptions.RateLimitException;
import moe.crowdhopper.imgurapi.ImgurApi;
import moe.crowdhopper.imgurapi.Models.Album;
import moe.crowdhopper.imgurapi.Models.Image;
import org.json.JSONObject;
import com.crowdhopper.imgurapi.Exceptions.*;
import com.crowdhopper.imgurapi.Models.*;
import com.mashape.unirest.http.Unirest;
import com.mashape.unirest.http.exceptions.UnirestException;
import com.mashape.unirest.http.HttpResponse;
import com.mashape.unirest.http.JsonNode;
/**
 * Static wrappers around the Imgur "album" endpoints. All methods require
 * {@link #initialize(ImgurApi)} to have been called first and consult the
 * shared {@link ImgurApi} instance for credit tracking and authorization.
 */
public class Albums extends Endpoint {
    private static ImgurApi api = null;

    // Installs the shared ImgurApi instance used by every endpoint call.
    public static void initialize(ImgurApi new_api) {
        api = new_api;
    }

    // Converts a transport-level failure into an unchecked exception that keeps the
    // cause. Previously the exception was only printed, after which execution fell
    // through to a NullPointerException on the null response - this is strictly
    // more informative and preserves the stack trace.
    private static IllegalStateException requestFailed(UnirestException e) {
        return new IllegalStateException("HTTP request to the Imgur API failed", e);
    }

    //Returns the album specified by the given id.
    public static Album getAlbum(String id) throws RateLimitException, HTTPRequestException {
        api.checkCredits();
        HttpResponse<JsonNode> response;
        try {
            response = Unirest.get(ImgurApi.API_URL + "album/{id}")
                    .header("Authorization", api.getHeader())
                    .routeParam("id", id)
                    .asJson();
        } catch (UnirestException e) {
            throw requestFailed(e);
        }
        JSONObject raw = response.getBody().getObject();
        api.checkStatus(raw);
        return new Album(raw.optJSONObject("data"));
    }

    //Returns a list of the images in the album specified by the id.
    public static List<Image> getAlbumImages(String id) throws RateLimitException, HTTPRequestException {
        Album base = getAlbum(id);
        return base.getImages();
    }

    /*Creates an album given parameters and returns the new album's id.
     * Valid settings are:
     * ids: a comma delimited string containing the image ids to be included in the album
     * title: the title of the album
     * description: the description of the album
     * privacy: must be public, hidden, or secret. Defaults to the user default if logged in.
     * layout: must be blog, grid, horizontal, or vertical
     * cover: the id of an image to use as the album cover
     */
    public static String createAlbum(Map<String, String> settings) throws RateLimitException, HTTPRequestException {
        api.checkCredits();
        api.checkPosts();
        Map<String, Object> fields = new LinkedHashMap<String, Object>(settings);
        HttpResponse<JsonNode> response;
        try {
            response = Unirest.post(ImgurApi.API_URL + "album")
                    .header("Authorization", api.getHeader())
                    .fields(fields)
                    .asJson();
        } catch (UnirestException e) {
            throw requestFailed(e);
        }
        api.refreshPost(response.getHeaders());
        JSONObject raw = response.getBody().getObject();
        api.checkStatus(raw);
        return raw.optString("data");
    }

    //Convenience overload: updates a single setting on the given album.
    public static String updateAlbum(String key, String value, String album) throws RateLimitException, HTTPRequestException {
        Map<String, String> settings = new LinkedHashMap<String, String>();
        settings.put(key, value);
        return updateAlbum(settings, album);
    }

    /*Updates an album given parameters. Parameters are the same as createAlbum
     * If the album was uploaded anonymously, album should be the deletehash returned on creation, otherwise it should be the album ID.
     */
    public static String updateAlbum(Map<String, String> settings, String album) throws RateLimitException, HTTPRequestException {
        api.checkCredits();
        api.checkPosts();
        Map<String, Object> fields = new LinkedHashMap<String, Object>(settings);
        HttpResponse<JsonNode> response;
        try {
            response = Unirest.post(ImgurApi.API_URL + "album/{album}")
                    .header("Authorization", api.getHeader())
                    .routeParam("album", album)
                    .fields(fields)
                    .asJson();
        } catch (UnirestException e) {
            throw requestFailed(e);
        }
        api.refreshPost(response.getHeaders());
        JSONObject raw = response.getBody().getObject();
        api.checkStatus(raw);
        return raw.optString("data");
    }

    //Deletes the given album. If it's an anonymous album, the album's deletehash should be used in place of an ID.
    public static String deleteAlbum(String album) throws RateLimitException, HTTPRequestException {
        api.checkCredits();
        HttpResponse<JsonNode> response;
        try {
            response = Unirest.delete(ImgurApi.API_URL + "album/{album}")
                    .header("Authorization", api.getHeader())
                    .routeParam("album", album)
                    .asJson();
        } catch (UnirestException e) {
            throw requestFailed(e);
        }
        JSONObject raw = response.getBody().getObject();
        api.checkStatus(raw);
        return raw.optString("data");
    }

    //Favorites the album if logged in.
    public static void favoriteAlbum(String id) throws RateLimitException, HTTPRequestException, AuthorizationException {
        api.checkCredits();
        api.checkPosts();
        api.checkAuthorization();
        HttpResponse<JsonNode> response;
        try {
            // The Imgur API favorites an album via POST /3/album/{id}/favorite, not GET.
            response = Unirest.post(ImgurApi.API_URL + "album/{id}/favorite")
                    .header("Authorization", api.getHeader())
                    .routeParam("id", id)
                    .asJson();
        } catch (UnirestException e) {
            throw requestFailed(e);
        }
        api.refreshPost(response.getHeaders());
        JSONObject raw = response.getBody().getObject();
        api.checkStatus(raw);
    }

    //Sets the photos of the given album to be the given images (comma delimited ids).
    public static String setPhotos(String album, String ids) throws RateLimitException, HTTPRequestException {
        api.checkCredits();
        api.checkPosts();
        HttpResponse<JsonNode> response;
        try {
            response = Unirest.post(ImgurApi.API_URL + "album/{album}")
                    .header("Authorization", api.getHeader())
                    .routeParam("album", album)
                    .field("ids", ids)
                    .asJson();
        } catch (UnirestException e) {
            throw requestFailed(e);
        }
        // Track the POST credit consumed by this write, consistent with createAlbum/updateAlbum.
        api.refreshPost(response.getHeaders());
        JSONObject raw = response.getBody().getObject();
        api.checkStatus(raw);
        return raw.optString("data");
    }

    //Adds photos to the given album (comma delimited ids).
    public static String addPhotos(String album, String ids) throws RateLimitException, HTTPRequestException {
        api.checkCredits();
        api.checkPosts();
        HttpResponse<JsonNode> response;
        try {
            response = Unirest.put(ImgurApi.API_URL + "album/{album}/add")
                    .header("Authorization", api.getHeader())
                    .routeParam("album", album)
                    .field("ids", ids)
                    .asJson();
        } catch (UnirestException e) {
            throw requestFailed(e);
        }
        // Track the POST credit consumed by this write, consistent with createAlbum/updateAlbum.
        api.refreshPost(response.getHeaders());
        JSONObject raw = response.getBody().getObject();
        api.checkStatus(raw);
        return raw.optString("data");
    }

    //Deletes photos from the given album (comma delimited ids).
    public static String deletePhotos(String album, String ids) throws RateLimitException, HTTPRequestException {
        api.checkCredits();
        HttpResponse<JsonNode> response;
        try {
            response = Unirest.delete(ImgurApi.API_URL + "album/{album}/remove_images")
                    .header("Authorization", api.getHeader())
                    .routeParam("album", album)
                    .field("ids", ids)
                    .asJson();
        } catch (UnirestException e) {
            throw requestFailed(e);
        }
        JSONObject raw = response.getBody().getObject();
        api.checkStatus(raw);
        return raw.optString("data");
    }
}
| |
/*
* Copyright 2013-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.netflix.zuul.filters;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicReference;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.extern.apachecommons.CommonsLog;
import org.springframework.cloud.client.discovery.DiscoveryClient;
import org.springframework.cloud.netflix.zuul.filters.ZuulProperties.ZuulRoute;
import org.springframework.util.AntPathMatcher;
import org.springframework.util.PathMatcher;
import org.springframework.util.PatternMatchUtils;
import org.springframework.util.StringUtils;
/**
* @author Spencer Gibb
*/
@CommonsLog
public class ProxyRouteLocator implements RouteLocator {
public static final String DEFAULT_ROUTE = "/**";
private DiscoveryClient discovery;
private ZuulProperties properties;
private PathMatcher pathMatcher = new AntPathMatcher();
private AtomicReference<Map<String, ZuulRoute>> routes = new AtomicReference<>();
private Map<String, ZuulRoute> staticRoutes = new LinkedHashMap<>();
private String servletPath;
// Builds the route locator. An explicitly supplied servletPath wins; otherwise
// the servlet path configured on ZuulProperties is used, defaulting to "".
public ProxyRouteLocator(String servletPath, DiscoveryClient discovery,
ZuulProperties properties) {
if (StringUtils.hasText(servletPath)) { // a servletPath is passed explicitly
this.servletPath = servletPath;
} else {
//set Zuul servlet path
this.servletPath = properties.getServletPath() != null? properties.getServletPath() : "";
}
this.discovery = discovery;
this.properties = properties;
}
public void addRoute(String path, String location) {
this.staticRoutes.put(path, new ZuulRoute(path, location));
resetRoutes();
}
public void addRoute(ZuulRoute route) {
this.staticRoutes.put(route.getPath(), route);
resetRoutes();
}
@Override
public Collection<String> getRoutePaths() {
return getRoutes().keySet();
}
public Map<String, String> getRoutes() {
if (this.routes.get() == null) {
this.routes.set(locateRoutes());
}
Map<String, String> values = new LinkedHashMap<>();
for (String key : this.routes.get().keySet()) {
String url = key;
values.put(url, this.routes.get().get(key).getLocation());
}
return values;
}
public ProxyRouteSpec getMatchingRoute(String path) {
log.info("Finding route for path: " + path);
String location = null;
String targetPath = null;
String id = null;
String prefix = this.properties.getPrefix();
log.debug("servletPath=" + this.servletPath);
if (StringUtils.hasText(this.servletPath) && !this.servletPath.equals("/")
&& path.startsWith(this.servletPath)) {
path = path.substring(this.servletPath.length());
}
log.debug("path=" + path);
Boolean retryable = this.properties.getRetryable();
if (!matchesIgnoredPatterns(path)) {
for (Entry<String, ZuulRoute> entry : this.routes.get().entrySet()) {
String pattern = entry.getKey();
log.debug("Matching pattern:" + pattern);
if (this.pathMatcher.match(pattern, path)) {
ZuulRoute route = entry.getValue();
id = route.getId();
location = route.getLocation();
targetPath = path;
if (path.startsWith(prefix) && this.properties.isStripPrefix()) {
targetPath = path.substring(prefix.length());
}
if (route.isStripPrefix()) {
int index = route.getPath().indexOf("*") - 1;
if (index > 0) {
String routePrefix = route.getPath().substring(0, index);
targetPath = targetPath.replaceFirst(routePrefix, "");
prefix = prefix + routePrefix;
}
}
if (route.getRetryable() != null) {
retryable = route.getRetryable();
}
break;
}
}
}
return (location == null ? null : new ProxyRouteSpec(id, targetPath, location,
prefix, retryable));
}
protected boolean matchesIgnoredPatterns(String path) {
for (String pattern : this.properties.getIgnoredPatterns()) {
log.debug("Matching ignored pattern:" + pattern);
if (this.pathMatcher.match(pattern, path)) {
log.debug("Path " + path + " matches ignored pattern " + pattern);
return true;
}
}
return false;
}
public void resetRoutes() {
this.routes.set(locateRoutes());
}
protected LinkedHashMap<String, ZuulRoute> locateRoutes() {
LinkedHashMap<String, ZuulRoute> routesMap = new LinkedHashMap<String, ZuulRoute>();
addConfiguredRoutes(routesMap);
routesMap.putAll(this.staticRoutes);
if (this.discovery != null) {
Map<String, ZuulRoute> staticServices = new LinkedHashMap<String, ZuulRoute>();
for (ZuulRoute route : routesMap.values()) {
String serviceId = route.getServiceId();
if (serviceId == null) {
serviceId = route.getId();
}
if (serviceId != null) {
staticServices.put(serviceId, route);
}
}
// Add routes for discovery services by default
List<String> services = this.discovery.getServices();
String[] ignored = this.properties.getIgnoredServices()
.toArray(new String[0]);
for (String serviceId : services) {
// Ignore specifically ignored services and those that were manually
// configured
String key = "/" + serviceId + "/**";
if (staticServices.containsKey(serviceId)
&& staticServices.get(serviceId).getUrl() == null) {
// Explicitly configured with no URL, cannot be ignored
// all static routes are already in routesMap
// Update location using serviceId if location is null
ZuulRoute staticRoute = staticServices.get(serviceId);
if (!StringUtils.hasText(staticRoute.getLocation())) {
staticRoute.setLocation(serviceId);
}
}
if (!PatternMatchUtils.simpleMatch(ignored, serviceId)
&& !routesMap.containsKey(key)) {
// Not ignored
routesMap.put(key, new ZuulRoute(key, serviceId));
}
}
}
if (routesMap.get(DEFAULT_ROUTE) != null) {
ZuulRoute defaultRoute = routesMap.get(DEFAULT_ROUTE);
// Move the defaultServiceId to the end
routesMap.remove(DEFAULT_ROUTE);
routesMap.put(DEFAULT_ROUTE, defaultRoute);
}
LinkedHashMap<String, ZuulRoute> values = new LinkedHashMap<>();
for (Entry<String, ZuulRoute> entry : routesMap.entrySet()) {
String path = entry.getKey();
// Prepend with slash if not already present.
if (!path.startsWith("/")) {
path = "/" + path;
}
if (StringUtils.hasText(this.properties.getPrefix())) {
path = this.properties.getPrefix() + path;
if (!path.startsWith("/")) {
path = "/" + path;
}
}
values.put(path, entry.getValue());
}
return values;
}
protected void addConfiguredRoutes(Map<String, ZuulRoute> routes) {
Map<String, ZuulRoute> routeEntries = this.properties.getRoutes();
for (ZuulRoute entry : routeEntries.values()) {
String route = entry.getPath();
if (routes.containsKey(route)) {
log.warn("Overwriting route " + route + ": already defined by "
+ routes.get(route));
}
routes.put(route, entry);
}
}
public String getTargetPath(String matchingRoute, String requestURI) {
String path = getRoutes().get(matchingRoute);
return (path != null ? path : requestURI);
}
@Data
@AllArgsConstructor
public static class ProxyRouteSpec {
private String id;
private String path;
private String location;
private String prefix;
private Boolean retryable;
}
}
| |
/*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.expression.spel;
import java.text.MessageFormat;
/**
 * Contains all the messages that can be produced by the Spring Expression Language.
 * Each message has a kind (info, warn, error) and a code number. Tests can be written to
 * expect particular code numbers rather than particular text, enabling the message text
 * to more easily be modified and the tests to run successfully in different locales.
 *
 * <p>When a message is formatted, it will have this kind of form
 *
 * <pre class="code">
 * EL1004E: (pos 34): Type cannot be found 'String'
 * </pre>
 *
 * The prefix captures the code and the error kind, whilst the position is included
 * if it is known.
 *
 * @author Andy Clement
 * @since 3.0
 */
public enum SpelMessage {
	TYPE_CONVERSION_ERROR(Kind.ERROR, 1001,
			"Type conversion problem, cannot convert from {0} to {1}"),
	CONSTRUCTOR_NOT_FOUND(Kind.ERROR, 1002,
			"Constructor call: No suitable constructor found on type {0} for arguments {1}"),
	CONSTRUCTOR_INVOCATION_PROBLEM(Kind.ERROR, 1003,
			"A problem occurred whilst attempting to construct an object of type ''{0}'' using arguments ''{1}''"),
	METHOD_NOT_FOUND(Kind.ERROR, 1004,
			"Method call: Method {0} cannot be found on {1} type"),
	TYPE_NOT_FOUND(Kind.ERROR, 1005,
			"Type cannot be found ''{0}''"),
	FUNCTION_NOT_DEFINED(Kind.ERROR, 1006,
			"The function ''{0}'' could not be found"),
	PROPERTY_OR_FIELD_NOT_READABLE_ON_NULL(Kind.ERROR, 1007,
			"Property or field ''{0}'' cannot be found on null"),
	PROPERTY_OR_FIELD_NOT_READABLE(Kind.ERROR, 1008,
			"Property or field ''{0}'' cannot be found on object of type ''{1}'' - maybe not public?"),
	PROPERTY_OR_FIELD_NOT_WRITABLE_ON_NULL(Kind.ERROR, 1009,
			"Property or field ''{0}'' cannot be set on null"),
	PROPERTY_OR_FIELD_NOT_WRITABLE(Kind.ERROR, 1010,
			"Property or field ''{0}'' cannot be set on object of type ''{1}'' - maybe not public?"),
	METHOD_CALL_ON_NULL_OBJECT_NOT_ALLOWED(Kind.ERROR, 1011,
			"Method call: Attempted to call method {0} on null context object"),
	CANNOT_INDEX_INTO_NULL_VALUE(Kind.ERROR, 1012,
			"Cannot index into a null value"),
	NOT_COMPARABLE(Kind.ERROR, 1013,
			"Cannot compare instances of {0} and {1}"),
	INCORRECT_NUMBER_OF_ARGUMENTS_TO_FUNCTION(Kind.ERROR, 1014,
			"Incorrect number of arguments for function, {0} supplied but function takes {1}"),
	INVALID_TYPE_FOR_SELECTION(Kind.ERROR, 1015,
			"Cannot perform selection on input data of type ''{0}''"),
	RESULT_OF_SELECTION_CRITERIA_IS_NOT_BOOLEAN(Kind.ERROR, 1016,
			"Result of selection criteria is not boolean"),
	BETWEEN_RIGHT_OPERAND_MUST_BE_TWO_ELEMENT_LIST(Kind.ERROR, 1017,
			"Right operand for the 'between' operator has to be a two-element list"),
	INVALID_PATTERN(Kind.ERROR, 1018,
			"Pattern is not valid ''{0}''"),
	PROJECTION_NOT_SUPPORTED_ON_TYPE(Kind.ERROR, 1019,
			"Projection is not supported on the type ''{0}''"),
	ARGLIST_SHOULD_NOT_BE_EVALUATED(Kind.ERROR, 1020,
			"The argument list of a lambda expression should never have getValue() called upon it"),
	EXCEPTION_DURING_PROPERTY_READ(Kind.ERROR, 1021,
			"A problem occurred whilst attempting to access the property ''{0}'': ''{1}''"),
	FUNCTION_REFERENCE_CANNOT_BE_INVOKED(Kind.ERROR, 1022,
			"The function ''{0}'' mapped to an object of type ''{1}'' which cannot be invoked"),
	EXCEPTION_DURING_FUNCTION_CALL(Kind.ERROR, 1023,
			"A problem occurred whilst attempting to invoke the function ''{0}'': ''{1}''"),
	ARRAY_INDEX_OUT_OF_BOUNDS(Kind.ERROR, 1024,
			"The array has ''{0}'' elements, index ''{1}'' is invalid"),
	COLLECTION_INDEX_OUT_OF_BOUNDS(Kind.ERROR, 1025,
			"The collection has ''{0}'' elements, index ''{1}'' is invalid"),
	STRING_INDEX_OUT_OF_BOUNDS(Kind.ERROR, 1026,
			"The string has ''{0}'' characters, index ''{1}'' is invalid"),
	INDEXING_NOT_SUPPORTED_FOR_TYPE(Kind.ERROR, 1027,
			"Indexing into type ''{0}'' is not supported"),
	INSTANCEOF_OPERATOR_NEEDS_CLASS_OPERAND(Kind.ERROR, 1028,
			"The operator 'instanceof' needs the right operand to be a class, not a ''{0}''"),
	EXCEPTION_DURING_METHOD_INVOCATION(Kind.ERROR, 1029,
			"A problem occurred when trying to execute method ''{0}'' on object of type ''{1}'': ''{2}''"),
	OPERATOR_NOT_SUPPORTED_BETWEEN_TYPES(Kind.ERROR, 1030,
			"The operator ''{0}'' is not supported between objects of type ''{1}'' and ''{2}''"),
	PROBLEM_LOCATING_METHOD(Kind.ERROR, 1031,
			"Problem locating method {0} on type {1}"),
	SETVALUE_NOT_SUPPORTED( Kind.ERROR, 1032,
			"setValue(ExpressionState, Object) not supported for ''{0}''"),
	MULTIPLE_POSSIBLE_METHODS(Kind.ERROR, 1033,
			"Method call of ''{0}'' is ambiguous, supported type conversions allow multiple variants to match"),
	EXCEPTION_DURING_PROPERTY_WRITE(Kind.ERROR, 1034,
			"A problem occurred whilst attempting to set the property ''{0}'': {1}"),
	NOT_AN_INTEGER(Kind.ERROR, 1035,
			"The value ''{0}'' cannot be parsed as an int"),
	NOT_A_LONG(Kind.ERROR, 1036,
			"The value ''{0}'' cannot be parsed as a long"),
	INVALID_FIRST_OPERAND_FOR_MATCHES_OPERATOR(Kind.ERROR, 1037,
			"First operand to matches operator must be a string. ''{0}'' is not"),
	INVALID_SECOND_OPERAND_FOR_MATCHES_OPERATOR(Kind.ERROR, 1038,
			"Second operand to matches operator must be a string. ''{0}'' is not"),
	FUNCTION_MUST_BE_STATIC(Kind.ERROR, 1039,
			"Only static methods can be called via function references. " +
			"The method ''{0}'' referred to by name ''{1}'' is not static."),
	NOT_A_REAL(Kind.ERROR, 1040,
			"The value ''{0}'' cannot be parsed as a double"),
	MORE_INPUT(Kind.ERROR,1041,
			"After parsing a valid expression, there is still more data in the expression: ''{0}''"),
	RIGHT_OPERAND_PROBLEM(Kind.ERROR, 1042,
			"Problem parsing right operand"),
	NOT_EXPECTED_TOKEN(Kind.ERROR, 1043,
			"Unexpected token. Expected ''{0}'' but was ''{1}''"),
	OOD(Kind.ERROR, 1044,
			"Unexpectedly ran out of input"),
	NON_TERMINATING_DOUBLE_QUOTED_STRING(Kind.ERROR, 1045,
			"Cannot find terminating \" for string"),
	NON_TERMINATING_QUOTED_STRING(Kind.ERROR, 1046,
			"Cannot find terminating ' for string"),
	MISSING_LEADING_ZERO_FOR_NUMBER(Kind.ERROR, 1047,
			"A real number must be prefixed by zero, it cannot start with just ''.''"),
	REAL_CANNOT_BE_LONG(Kind.ERROR, 1048,
			"Real number cannot be suffixed with a long (L or l) suffix"),
	UNEXPECTED_DATA_AFTER_DOT(Kind.ERROR, 1049,
			"Unexpected data after ''.'': ''{0}''"),
	MISSING_CONSTRUCTOR_ARGS(Kind.ERROR, 1050,
			"The arguments '(...)' for the constructor call are missing"),
	RUN_OUT_OF_ARGUMENTS(Kind.ERROR, 1051,
			"Unexpectedly ran out of arguments"),
	UNABLE_TO_GROW_COLLECTION(Kind.ERROR, 1052,
			"Unable to grow collection"),
	UNABLE_TO_GROW_COLLECTION_UNKNOWN_ELEMENT_TYPE(Kind.ERROR, 1053,
			"Unable to grow collection: unable to determine list element type"),
	UNABLE_TO_CREATE_LIST_FOR_INDEXING(Kind.ERROR, 1054,
			"Unable to dynamically create a List to replace a null value"),
	UNABLE_TO_CREATE_MAP_FOR_INDEXING(Kind.ERROR, 1055,
			"Unable to dynamically create a Map to replace a null value"),
	UNABLE_TO_DYNAMICALLY_CREATE_OBJECT(Kind.ERROR, 1056,
			"Unable to dynamically create instance of ''{0}'' to replace a null value"),
	NO_BEAN_RESOLVER_REGISTERED(Kind.ERROR, 1057,
			"No bean resolver registered in the context to resolve access to bean ''{0}''"),
	EXCEPTION_DURING_BEAN_RESOLUTION(Kind.ERROR, 1058,
			"A problem occurred when trying to resolve bean ''{0}'':''{1}''"),
	INVALID_BEAN_REFERENCE(Kind.ERROR, 1059,
			"@ or & can only be followed by an identifier or a quoted name"),
	TYPE_NAME_EXPECTED_FOR_ARRAY_CONSTRUCTION(Kind.ERROR, 1060,
			"Expected the type of the new array to be specified as a String but found ''{0}''"),
	INCORRECT_ELEMENT_TYPE_FOR_ARRAY(Kind.ERROR, 1061,
			"The array of type ''{0}'' cannot have an element of type ''{1}'' inserted"),
	MULTIDIM_ARRAY_INITIALIZER_NOT_SUPPORTED(Kind.ERROR, 1062,
			"Using an initializer to build a multi-dimensional array is not currently supported"),
	MISSING_ARRAY_DIMENSION(Kind.ERROR, 1063,
			"A required array dimension has not been specified"),
	INITIALIZER_LENGTH_INCORRECT(Kind.ERROR, 1064,
			"array initializer size does not match array dimensions"),
	UNEXPECTED_ESCAPE_CHAR(Kind.ERROR, 1065, "unexpected escape character."),
	OPERAND_NOT_INCREMENTABLE(Kind.ERROR, 1066,
			"the expression component ''{0}'' does not support increment"),
	OPERAND_NOT_DECREMENTABLE(Kind.ERROR, 1067,
			"the expression component ''{0}'' does not support decrement"),
	NOT_ASSIGNABLE(Kind.ERROR, 1068,
			"the expression component ''{0}'' is not assignable"),
	MISSING_CHARACTER(Kind.ERROR, 1069,
			"missing expected character ''{0}''"),
	LEFT_OPERAND_PROBLEM(Kind.ERROR, 1070,
			"Problem parsing left operand"),
	MISSING_SELECTION_EXPRESSION(Kind.ERROR, 1071,
			"A required selection expression has not been specified"),
	EXCEPTION_RUNNING_COMPILED_EXPRESSION(Kind.ERROR,1072,
			"An exception occurred whilst evaluating a compiled expression");

	/** The severity of the message (only ERROR adds a letter suffix to the code). */
	private final Kind kind;

	/** The unique message code (stable across locales and text changes). */
	private final int code;

	/** The {@link MessageFormat} pattern used to render the message text. */
	private final String message;

	private SpelMessage(Kind kind, int code, String message) {
		this.kind = kind;
		this.code = code;
		this.message = message;
	}

	/**
	 * Produce a complete message including the prefix, the position (if known)
	 * and with the inserts applied to the message.
	 * @param pos the position (ignored and not included in the message if less than 0)
	 * @param inserts the inserts to put into the formatted message
	 * @return a formatted message
	 */
	public String formatMessage(int pos, Object... inserts) {
		StringBuilder formattedMessage = new StringBuilder();
		formattedMessage.append("EL").append(this.code);
		switch (this.kind) {
			case ERROR:
				formattedMessage.append("E");
				break;
		}
		formattedMessage.append(":");
		// Honor the documented contract: any negative position is "unknown" and is
		// omitted (the previous check of pos != -1 wrongly rendered e.g. "(pos -2):").
		if (pos >= 0) {
			formattedMessage.append("(pos ").append(pos).append("): ");
		}
		formattedMessage.append(MessageFormat.format(this.message, inserts));
		return formattedMessage.toString();
	}

	/** The severity levels a message can carry. */
	public static enum Kind { INFO, WARNING, ERROR }
}
| |
package org.ndexbio.common.persistence;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
import java.util.UUID;
import org.ndexbio.cxio.core.AspectIterator;
import org.ndexbio.cxio.core.writers.NiceCXNetworkWriter;
import org.ndexbio.cxio.aspects.datamodels.CyVisualPropertiesElement;
import org.ndexbio.cxio.aspects.datamodels.EdgeAttributesElement;
import org.ndexbio.cxio.aspects.datamodels.EdgesElement;
import org.ndexbio.cxio.aspects.datamodels.NetworkAttributesElement;
import org.ndexbio.cxio.aspects.datamodels.NodeAttributesElement;
import org.ndexbio.cxio.aspects.datamodels.NodesElement;
import org.ndexbio.cxio.aspects.datamodels.SubNetworkElement;
import org.ndexbio.cxio.metadata.MetaDataCollection;
import org.ndexbio.cxio.metadata.MetaDataElement;
import org.ndexbio.model.cx.CitationElement;
import org.ndexbio.model.cx.EdgeCitationLinksElement;
import org.ndexbio.model.cx.EdgeSupportLinksElement;
import org.ndexbio.model.cx.FunctionTermElement;
import org.ndexbio.model.cx.NamespacesElement;
import org.ndexbio.model.cx.NiceCXNetwork;
import org.ndexbio.model.cx.NodeCitationLinksElement;
import org.ndexbio.model.cx.NodeSupportLinksElement;
import org.ndexbio.model.cx.SupportElement;
import org.ndexbio.model.exceptions.NdexException;
import org.ndexbio.model.internal.CXNetwork;
import org.ndexbio.rest.Configuration;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * Generates a small "sample" CX network (capped at {@code sampleSize} edges, plus the
 * nodes, attributes, visual properties, citations and supports those edges reference)
 * by streaming the per-aspect files of a stored network and writing the result to
 * {@code <ndexRoot>/data/<networkId>/sample.cx}.
 */
public class CXNetworkSampleGenerator {

    // private InputStream inputStream;

    // Id of the source network; its aspect files live under <ndexRoot>/data/<networkId>/aspects/.
    private UUID networkId;
    // When non-null, the sample is restricted to edges listed in this subnetwork.
    private Long subNetworkId;
    // Metadata of the full source network; used as a template for the sample's metadata entries.
    private MetaDataCollection srcMetaDataCollection;
    // private Long currentTime;

    // size of sample is number of edges.
    public final int sampleSize;

    /**
     * Prepare a generator.
     *
     * @param networkUUID id of the source network
     * @param subNetworkID subnetwork to restrict the sample to, or null for the whole network
     * @param srcMetaData metadata collection of the source network
     * @param sampleSize maximum number of edges to include in the sample
     */
    public CXNetworkSampleGenerator(UUID networkUUID, Long subNetworkID, MetaDataCollection srcMetaData, int sampleSize) {
        this.networkId = networkUUID;
        this.subNetworkId = subNetworkID;
        this.srcMetaDataCollection = srcMetaData;
        // this.currentTime = Long.valueOf(Calendar.getInstance().getTimeInMillis());
        this.sampleSize = sampleSize;
    }

    // Build a metadata entry for the sample, copying consistency group and version from the
    // source network's entry for the same aspect; defaults to group 1 / version "1.0" when
    // the source entry is missing. ("Templete" is a pre-existing typo kept for stability.)
    private MetaDataElement getMetaDataElementTempleteFromSrc (String aspectName) {
        MetaDataElement old = this.srcMetaDataCollection.getMetaDataElement(aspectName);
        // if ( old == null)
        // throw new NdexException("MetaData " + aspectName + " is missing in network " + this.networkId.toString());
        MetaDataElement result = new MetaDataElement();
        result.setConsistencyGroup(old !=null? old.getConsistencyGroup() : Long.valueOf(1l));
        result.setName(aspectName);
        result.setVersion(old!=null? old.getVersion() : "1.0");
        // result.setLastUpdate(currentTime);
        return result;
    }

    /**
     * Stream the source network's aspect files, collect at most {@code sampleSize} edges
     * (optionally restricted to {@code subNetworkId}) and everything they reference, and
     * write the resulting sample network to disk as sample.cx.
     *
     * @throws IOException if an aspect file cannot be read or the sample cannot be written
     * @throws NdexException propagated from aspect iteration
     */
    public void createSampleNetwork() throws IOException, NdexException {
        //CXNetwork result = new CXNetwork();
        NiceCXNetwork result = new NiceCXNetwork();
        MetaDataCollection metadata = new MetaDataCollection();
        result.setMetadata(metadata);
        String iteratorPathPrefix = Configuration.getInstance().getNdexRoot() + "/data/" ;
        String pathPrefix = iteratorPathPrefix + networkId + "/aspects/";
        // if sample is for a subNetwork, get ids of 500 edges from the subNetwork aspect
        Set<Long> edgeIds = new HashSet<>(sampleSize);
        if ( subNetworkId != null) {
            try (AspectIterator<SubNetworkElement> subNetIterator = new AspectIterator<>(networkId.toString(),SubNetworkElement.ASPECT_NAME , SubNetworkElement.class,iteratorPathPrefix ) ) {
                while ( subNetIterator.hasNext()) {
                    SubNetworkElement subNetwork = subNetIterator.next();
                    if (subNetworkId.equals(subNetwork.getId()) ) {
                        int i = 0;
                        for (Long edgeId : subNetwork.getEdges() ) {
                            edgeIds.add(edgeId);
                            i++;
                            if ( i >= sampleSize) break;
                        }
                    }
                }
            }
            if (edgeIds.isEmpty()) { // try the subNetworks aspect to be compatible with the old cyCX spec.
                try (AspectIterator<SubNetworkElement> subNetIterator = new AspectIterator<>(networkId.toString(),"subNetworks" , SubNetworkElement.class ,iteratorPathPrefix) ) {
                    while ( subNetIterator.hasNext()) {
                        SubNetworkElement subNetwork = subNetIterator.next();
                        if (subNetworkId.equals(subNetwork.getId()) ) {
                            int i = 0;
                            for (Long edgeId : subNetwork.getEdges() ) {
                                edgeIds.add(edgeId);
                                i++;
                                if ( i >= sampleSize) break;
                            }
                        }
                    }
                }
            }
            /* // add metadata entry
            MetaDataElement mdElmt = getMetaDataElementTempleteFromSrc(SubNetworkElement.ASPECT_NAME);
            mdElmt.setElementCount(1L);
            metadata.add(mdElmt); */
        }
        // first round. Get 500 edges and the node Ids they reference.
        int i = 0;
        Set<Long> nodeIds = new TreeSet<>();
        //go through Edge aspect
        // Track the highest edge id seen, to report as the sample's idCounter.
        Long edgeIdCounter = null;
        try (AspectIterator<EdgesElement> it = new AspectIterator<>(networkId.toString(),EdgesElement.ASPECT_NAME , EdgesElement.class ,iteratorPathPrefix)) {
            while (it.hasNext()) {
                EdgesElement edge = it.next();
                // Whole-network mode takes the first sampleSize edges; subnetwork mode
                // takes only edges whose ids were collected above.
                if ( subNetworkId == null || edgeIds.contains(edge.getId() ) ) {
                    result.addEdge(edge);
                    nodeIds.add(edge.getSource());
                    nodeIds.add(edge.getTarget());
                    if ( edgeIdCounter == null ||edge.getId() > edgeIdCounter.longValue() )
                        edgeIdCounter = Long.valueOf(edge.getId());
                    edgeIds.add(edge.getId());
                    i++;
                }
                if (i == sampleSize)
                    break;
            }
        }
        MetaDataElement edgemd = this.getMetaDataElementTempleteFromSrc(EdgesElement.ASPECT_NAME);
        edgemd.setElementCount(Long.valueOf(result.getEdges().size()));
        edgemd.setIdCounter(edgeIdCounter);
        metadata.add(edgemd);
        //go through node aspect
        Long nodeIdCounter = null;
        // NOTE(review): nodes are read directly (no Files.exists guard, unlike the
        // attribute aspects below) — presumably the nodes aspect file always exists; verify.
        try (FileInputStream inputStream = new FileInputStream(pathPrefix + NodesElement.ASPECT_NAME)) {
            Iterator<NodesElement> it = new ObjectMapper().readerFor(NodesElement.class).readValues(inputStream);
            while (it.hasNext()) {
                NodesElement node = it.next();
                if (nodeIds.contains(node.getId())) {
                    result.addNode(node);
                    if ( nodeIdCounter == null ||node.getId() > nodeIdCounter.longValue() )
                        nodeIdCounter = Long.valueOf(node.getId());
                }
            }
        }
        MetaDataElement nodemd = this.getMetaDataElementTempleteFromSrc(NodesElement.ASPECT_NAME);
        // NOTE(review): element count uses the number of node ids referenced by sampled
        // edges, which assumes every referenced node exists in the nodes aspect — confirm.
        nodemd.setElementCount(Long.valueOf(nodeIds.size()));
        nodemd.setIdCounter(nodeIdCounter);
        metadata.add(nodemd);
        //process node attribute aspect
        // Keep only attributes of sampled nodes whose subnetwork scope matches the request
        // (both null, or both equal).
        long nodeAttrCounter = 0;
        java.nio.file.Path nodeAspectFile = Paths.get(pathPrefix + NodeAttributesElement.ASPECT_NAME);
        if ( Files.exists(nodeAspectFile)) {
            try (FileInputStream inputStream = new FileInputStream(pathPrefix + NodeAttributesElement.ASPECT_NAME)) {
                Iterator<NodeAttributesElement> it = new ObjectMapper().readerFor(NodeAttributesElement.class).readValues(inputStream);
                while (it.hasNext()) {
                    NodeAttributesElement na = it.next();
                    Long id = na.getPropertyOf();
                    if ( nodeIds.contains(id) &&
                            ((subNetworkId == null && na.getSubnetwork() == null) ||
                             (subNetworkId != null && na.getSubnetwork() !=null && subNetworkId.equals(na.getSubnetwork()) )) ) {
                        result.addNodeAttribute(id, na);
                        nodeAttrCounter ++;
                    }
                }
            }
        }
        if ( nodeAttrCounter >0) {
            MetaDataElement nodeAttrmd = this.getMetaDataElementTempleteFromSrc(NodeAttributesElement.ASPECT_NAME);
            nodeAttrmd.setElementCount(Long.valueOf(nodeAttrCounter));
            metadata.add(nodeAttrmd);
        }
        //process edge attribute aspect
        // Same filtering as node attributes, keyed on the sampled edges.
        long edgeAttrCounter = 0;
        java.nio.file.Path edgeAspectFile = Paths.get(pathPrefix + EdgeAttributesElement.ASPECT_NAME);
        if ( Files.exists(edgeAspectFile)) {
            try (FileInputStream inputStream = new FileInputStream(pathPrefix + EdgeAttributesElement.ASPECT_NAME)) {
                Iterator<EdgeAttributesElement> it = new ObjectMapper().readerFor(EdgeAttributesElement.class).readValues(inputStream);
                while (it.hasNext()) {
                    EdgeAttributesElement ea = it.next();
                    Long id = ea.getPropertyOf();
                    if ( result.getEdges().containsKey(id) &&
                            ((subNetworkId == null && ea.getSubnetwork() == null) ||
                             (subNetworkId != null && ea.getSubnetwork() !=null && subNetworkId.equals(ea.getSubnetwork()) ))) {
                        result.addEdgeAttribute(id, ea);
                        edgeAttrCounter ++;
                    }
                }
            }
        }
        if ( edgeAttrCounter >0) {
            MetaDataElement edgeAttrmd = this.getMetaDataElementTempleteFromSrc(EdgeAttributesElement.ASPECT_NAME);
            edgeAttrmd.setElementCount(Long.valueOf(edgeAttrCounter));
            metadata.add(edgeAttrmd);
        }
        //process network attribute aspect
        // Network-level attributes are filtered only by subnetwork scope.
        long networkAttrCounter = 0;
        java.nio.file.Path networkAttrAspectFile = Paths.get(pathPrefix + NetworkAttributesElement.ASPECT_NAME);
        if ( Files.exists(networkAttrAspectFile)) {
            try (FileInputStream inputStream = new FileInputStream(pathPrefix + NetworkAttributesElement.ASPECT_NAME)) {
                Iterator<NetworkAttributesElement> it = new ObjectMapper().readerFor(NetworkAttributesElement.class).readValues(inputStream);
                while (it.hasNext()) {
                    NetworkAttributesElement nAtt = it.next();
                    if ((subNetworkId == null && nAtt.getSubnetwork() == null) ||
                            (subNetworkId != null && nAtt.getSubnetwork() !=null && subNetworkId.equals(nAtt.getSubnetwork()) )) {
                        result.addNetworkAttribute(nAtt);
                        networkAttrCounter ++;
                    }
                }
            }
        }
        if ( networkAttrCounter >0) {
            MetaDataElement netAttrmd = this.getMetaDataElementTempleteFromSrc(NetworkAttributesElement.ASPECT_NAME);
            netAttrmd.setElementCount(Long.valueOf(networkAttrCounter));
            metadata.add(netAttrmd);
        }
        //process namespace aspect
        // The namespaces aspect is copied wholesale (last element wins if there are several).
        java.nio.file.Path nsAspectFile = Paths.get(pathPrefix + NamespacesElement.ASPECT_NAME);
        if ( Files.exists(nsAspectFile)) {
            try (FileInputStream inputStream = new FileInputStream(pathPrefix + NamespacesElement.ASPECT_NAME)) {
                Iterator<NamespacesElement> it = new ObjectMapper().readerFor(NamespacesElement.class).readValues(inputStream);
                while (it.hasNext()) {
                    NamespacesElement ns = it.next();
                    result.setNamespaces(ns);
                }
            }
            MetaDataElement nsmd = this.getMetaDataElementTempleteFromSrc(NamespacesElement.ASPECT_NAME);
            nsmd.setElementCount(1L);
            metadata.add(nsmd);
        }
        //process cyVisualProperty aspect
        // Keep visual properties that apply to sampled nodes/edges; anything else
        // (e.g. network-level defaults) is kept unconditionally.
        java.nio.file.Path cyVisPropAspectFile = Paths.get(pathPrefix + CyVisualPropertiesElement.ASPECT_NAME);
        if ( Files.exists(cyVisPropAspectFile)) {
            long vpropCount = 0;
            try (FileInputStream inputStream = new FileInputStream(pathPrefix + CyVisualPropertiesElement.ASPECT_NAME)) {
                Iterator<CyVisualPropertiesElement> it = new ObjectMapper().readerFor(CyVisualPropertiesElement.class).readValues(inputStream);
                while (it.hasNext()) {
                    CyVisualPropertiesElement elmt = it.next();
                    if ( elmt.getProperties_of().equals("nodes")) {
                        if ( nodeIds.contains(elmt.getApplies_to())) {
                            result.addOpaqueAspect(elmt);
                            vpropCount++;
                        }
                    } else if (elmt.getProperties_of().equals("edges")) {
                        if ( edgeIds.contains(elmt.getApplies_to())) {
                            result.addOpaqueAspect(elmt);
                            vpropCount++;
                        }
                    } else {
                        result.addOpaqueAspect(elmt);
                        vpropCount++;
                    }
                }
            }
            if ( vpropCount > 0) {
                MetaDataElement nsmd = this.getMetaDataElementTempleteFromSrc(CyVisualPropertiesElement.ASPECT_NAME);
                nsmd.setElementCount(vpropCount);
                metadata.add(nsmd);
            }
        }
        //special case to support old visualProperty aspect
        // Legacy "visualProperties" files are copied without filtering.
        // NOTE(review): this branch also records its count under the current
        // CyVisualPropertiesElement aspect name, so if both the old and new files exist
        // two metadata entries for the same aspect are added — confirm this is intended.
        java.nio.file.Path cyVisPropAspectFileOld = Paths.get(pathPrefix + "visualProperties");
        if ( Files.exists(cyVisPropAspectFileOld)) {
            long vpropCount = 0;
            try (FileInputStream inputStream = new FileInputStream(pathPrefix + "visualProperties")) {
                Iterator<CyVisualPropertiesElement> it = new ObjectMapper().readerFor(CyVisualPropertiesElement.class).readValues(inputStream);
                while (it.hasNext()) {
                    CyVisualPropertiesElement elmt = it.next();
                    result.addOpaqueAspect(elmt);
                    vpropCount++;
                }
            }
            if ( vpropCount > 0) {
                MetaDataElement nsmd = this.getMetaDataElementTempleteFromSrc(CyVisualPropertiesElement.ASPECT_NAME);
                nsmd.setElementCount(vpropCount);
                metadata.add(nsmd);
            }
        }
        // process function terms
        // Keep function terms attached to sampled nodes.
        long aspElmtCount = 0;
        try (AspectIterator<FunctionTermElement> it = new AspectIterator<>(networkId.toString(), FunctionTermElement.ASPECT_NAME, FunctionTermElement.class,iteratorPathPrefix)) {
            while (it.hasNext()) {
                FunctionTermElement fun = it.next();
                if ( nodeIds.contains(fun.getNodeID())) {
                    result.addNodeAssociatedAspectElement(fun.getNodeID(), fun);
                    aspElmtCount ++;
                }
            }
        }
        if ( aspElmtCount > 0) {
            MetaDataElement nsmd = this.getMetaDataElementTempleteFromSrc(FunctionTermElement.ASPECT_NAME);
            nsmd.setElementCount(aspElmtCount);
            metadata.add(nsmd);
        }
        // Citation ids referenced by sampled nodes/edges; used to filter the citations aspect.
        Set<Long> citationIds = new TreeSet<> ();
        //process citation links aspects
        aspElmtCount = 0;
        try (AspectIterator<NodeCitationLinksElement> it = new AspectIterator<>(networkId.toString(), NodeCitationLinksElement.ASPECT_NAME, NodeCitationLinksElement.class,iteratorPathPrefix)) {
            while (it.hasNext()) {
                NodeCitationLinksElement cl = it.next();
                for ( Long rNodeId : cl.getSourceIds()) {
                    if ( nodeIds.contains(rNodeId)) {
                        // Re-wrap multi-source links as a single-node link so each sampled
                        // node carries only its own citation list.
                        result.addNodeAssociatedAspectElement(rNodeId,
                                ( cl.getCitationIds().size() ==1? cl : new NodeCitationLinksElement(rNodeId, cl.getCitationIds())) );
                        aspElmtCount ++;
                        citationIds.addAll(cl.getCitationIds());
                    }
                }
            }
        }
        if ( aspElmtCount > 0) {
            MetaDataElement nsmd = this.getMetaDataElementTempleteFromSrc(NodeCitationLinksElement.ASPECT_NAME);
            nsmd.setElementCount(aspElmtCount);
            metadata.add(nsmd);
        }
        aspElmtCount = 0;
        try (AspectIterator<EdgeCitationLinksElement> it = new AspectIterator<>(networkId.toString(), EdgeCitationLinksElement.ASPECT_NAME, EdgeCitationLinksElement.class, iteratorPathPrefix)) {
            while (it.hasNext()) {
                EdgeCitationLinksElement cl = it.next();
                for ( Long rEdgeId : cl.getSourceIds()) {
                    if ( result.getEdges().containsKey(rEdgeId)) {
                        result.addEdgeAssociatedAspectElement(rEdgeId,
                                ( cl.getCitationIds().size() ==1? cl : new EdgeCitationLinksElement(rEdgeId, cl.getCitationIds())) );
                        aspElmtCount++;
                        citationIds.addAll(cl.getCitationIds());
                    }
                }
            }
        }
        if ( aspElmtCount > 0) {
            MetaDataElement nsmd = this.getMetaDataElementTempleteFromSrc(EdgeCitationLinksElement.ASPECT_NAME);
            nsmd.setElementCount(aspElmtCount);
            metadata.add(nsmd);
        }
        // Copy only the citation elements actually referenced by the sampled links.
        if( !citationIds.isEmpty()) {
            try (AspectIterator<CitationElement> it = new AspectIterator<>(networkId.toString(), CitationElement.ASPECT_NAME, CitationElement.class, iteratorPathPrefix)) {
                while (it.hasNext()) {
                    CitationElement c = it.next();
                    if ( citationIds.contains(c.getId()))
                        result.addCitation(c);
                }
            }
            MetaDataElement nsmd = this.getMetaDataElementTempleteFromSrc(CitationElement.ASPECT_NAME);
            nsmd.setElementCount(Long.valueOf(citationIds.size()));
            // citationIds is non-empty here, so Collections.max is safe.
            nsmd.setIdCounter(Collections.max(citationIds));
            metadata.add(nsmd);
        }
        // support and related aspects
        // Mirrors the citation handling above, but for supports.
        Set<Long> supportIds = new TreeSet<> ();
        //process support links aspects
        aspElmtCount = 0;
        try (AspectIterator<NodeSupportLinksElement> it = new AspectIterator<>(networkId.toString(), NodeSupportLinksElement.ASPECT_NAME, NodeSupportLinksElement.class, iteratorPathPrefix)) {
            while (it.hasNext()) {
                NodeSupportLinksElement cl = it.next();
                for ( Long rNodeId : cl.getSourceIds()) {
                    if ( nodeIds.contains(rNodeId)) {
                        result.addNodeAssociatedAspectElement(rNodeId,
                                ( cl.getSupportIds().size() ==1? cl : new NodeSupportLinksElement(rNodeId, cl.getSupportIds())) );
                        aspElmtCount ++;
                        supportIds.addAll(cl.getSupportIds());
                    }
                }
            }
        }
        if ( aspElmtCount > 0) {
            MetaDataElement nsmd = this.getMetaDataElementTempleteFromSrc(NodeSupportLinksElement.ASPECT_NAME);
            nsmd.setElementCount(aspElmtCount);
            metadata.add(nsmd);
        }
        aspElmtCount = 0;
        try (AspectIterator<EdgeSupportLinksElement> it = new AspectIterator<>(networkId.toString(), EdgeSupportLinksElement.ASPECT_NAME, EdgeSupportLinksElement.class, iteratorPathPrefix)) {
            while (it.hasNext()) {
                EdgeSupportLinksElement cl = it.next();
                for ( Long rEdgeId : cl.getSourceIds()) {
                    if ( result.getEdges().containsKey(rEdgeId)) {
                        result.addEdgeAssociatedAspectElement(rEdgeId,
                                ( cl.getSupportIds().size() ==1? cl : new EdgeSupportLinksElement(rEdgeId, cl.getSupportIds())) );
                        aspElmtCount++;
                        supportIds.addAll(cl.getSupportIds());
                    }
                }
            }
        }
        if ( aspElmtCount > 0) {
            MetaDataElement nsmd = this.getMetaDataElementTempleteFromSrc(EdgeSupportLinksElement.ASPECT_NAME);
            nsmd.setElementCount(aspElmtCount);
            metadata.add(nsmd);
        }
        if( !supportIds.isEmpty()) {
            try (AspectIterator<SupportElement> it = new AspectIterator<>(networkId.toString(), SupportElement.ASPECT_NAME, SupportElement.class, iteratorPathPrefix)) {
                while (it.hasNext()) {
                    SupportElement e = it.next();
                    if ( supportIds.contains(e.getId()))
                        result.addSupport(e);
                }
            }
            MetaDataElement nsmd = this.getMetaDataElementTempleteFromSrc(SupportElement.ASPECT_NAME);
            nsmd.setElementCount(Long.valueOf(supportIds.size()));
            nsmd.setIdCounter(Collections.max(supportIds));
            metadata.add(nsmd);
        }
        //write the sample network out to disk and update the db.
        try (FileOutputStream out = new FileOutputStream(Configuration.getInstance().getNdexRoot() + "/data/" + networkId + "/sample.cx")) {
            NiceCXNetworkWriter writer = new NiceCXNetworkWriter(out, true);
            writer.writeNiceCXNetwork(result);
        }
    }
}
| |
package ameba.mvc.template.internal;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;
import org.glassfish.jersey.internal.Errors;
import org.glassfish.jersey.internal.util.Producer;
import org.glassfish.jersey.internal.util.ReflectionHelper;
import org.glassfish.jersey.message.internal.MediaTypes;
import org.glassfish.jersey.process.Inflector;
import org.glassfish.jersey.server.ExtendedUriInfo;
import org.glassfish.jersey.server.internal.inject.ConfiguredValidator;
import org.glassfish.jersey.server.model.*;
import org.glassfish.jersey.server.model.internal.ModelHelper;
import org.glassfish.jersey.server.model.internal.ModelProcessorUtil;
import org.glassfish.jersey.server.mvc.Template;
import org.glassfish.jersey.server.mvc.Viewable;
import org.glassfish.jersey.server.mvc.internal.LocalizationMessages;
import org.glassfish.jersey.server.mvc.internal.TemplateInflector;
import javax.inject.Inject;
import javax.inject.Provider;
import javax.ws.rs.Consumes;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.Produces;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ResourceContext;
import javax.ws.rs.core.Configuration;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.security.AccessController;
import java.util.List;
/**
* {@link ModelProcessor Model processor} enhancing (sub-)resources with {@value HttpMethod#GET} methods responsible of producing
* implicit {@link org.glassfish.jersey.server.mvc.Viewable viewables}.
* <p/>
* Note: Resource classes has to be annotated with {@link Template} annotation in order to be enhanced by this model processor.
*
* @author Michal Gajdos (michal.gajdos at oracle.com)
* @author icode
* @see Template
*/
class TemplateModelProcessor implements ModelProcessor {
    /**
     * Path parameter representing implicit template name.
     */
    private static final String IMPLICIT_VIEW_PATH_PARAMETER = "implicit-view-path-parameter";
    // Path template ("{implicit-view-path-parameter}") appended to enhanced resources so that
    // GET /resource/<name> resolves <name> as an explicit template name (see getTemplateNames).
    private static final String IMPLICIT_VIEW_PATH_PARAMETER_TEMPLATE = "{" + IMPLICIT_VIEW_PATH_PARAMETER + "}";
    // Used to obtain a managed instance of the resource class when no instance is supplied
    // (see TemplateInflectorImpl#getModel).
    private final ResourceContext resourceContext;
    private final Provider<ExtendedUriInfo> extendedUriInfoProvider;
    // May provide null when no validator is configured; apply() checks before validating.
    private final Provider<ConfiguredValidator> validatorProvider;
    /**
     * Create a {@code TemplateModelProcessor} instance.
     *
     * @param resourceContext (injected) resource context.
     * @param validatorProvider (injected) configured validator provider.
     * @param extendedUriInfoProvider (injected) extended uri info provider.
     */
    @Inject
    TemplateModelProcessor(final ResourceContext resourceContext,
                           final Provider<ConfiguredValidator> validatorProvider,
                           final Provider<ExtendedUriInfo> extendedUriInfoProvider) {
        this.resourceContext = resourceContext;
        this.validatorProvider = validatorProvider;
        this.extendedUriInfoProvider = extendedUriInfoProvider;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public ResourceModel processResourceModel(final ResourceModel resourceModel, final Configuration configuration) {
        return processModel(resourceModel, false);
    }
    /** {@inheritDoc} */
    @Override
    public ResourceModel processSubResource(final ResourceModel subResourceModel, final Configuration configuration) {
        return processModel(subResourceModel, true);
    }
    /**
     * Enhance {@link org.glassfish.jersey.server.model.RuntimeResource runtime resources} of given
     * {@link org.glassfish.jersey.server.model.ResourceModel resource model} with methods obtained with
     * {@link #getEnhancingMethods(org.glassfish.jersey.server.model.RuntimeResource)}.
     *
     * @param resourceModel resource model to enhance runtime resources of.
     * @param subResourceModel determines whether the resource model represents sub-resource.
     * @return enhanced resource model.
     */
    private ResourceModel processModel(final ResourceModel resourceModel, final boolean subResourceModel) {
        // First mark @Template-annotated invocables, then add implicit GET methods per runtime resource.
        ResourceModel.Builder newModelBuilder = processTemplateAnnotatedInvocables(resourceModel, subResourceModel);
        for (RuntimeResource resource : resourceModel.getRuntimeResourceModel().getRuntimeResources()) {
            // extended=false: the added methods are regular (non-extended) resource methods.
            ModelProcessorUtil.enhanceResource(resource, newModelBuilder, getEnhancingMethods(resource), false);
        }
        return newModelBuilder.build();
    }
    /**
     * Process all {@link Invocable invocables} and defines
     * {@link org.glassfish.jersey.server.model.Invocable#getRoutingResponseType() routing response types}
     * as {@link Viewable} for all methods annotated with {@link Template}.
     *
     * @param resourceModel resource model to process.
     * @param subResourceModel determines whether the resource model represents sub-resource.
     * @return Modified resource model.
     */
    private ResourceModel.Builder processTemplateAnnotatedInvocables(ResourceModel resourceModel,
                                                                     final boolean subResourceModel) {
        ResourceModel.Builder modelBuilder = new ResourceModel.Builder(subResourceModel);
        for (Resource resource : resourceModel.getResources()) {
            Resource newResource = processResource(resource);
            modelBuilder.addResource(newResource);
        }
        return modelBuilder;
    }
    /**
     * Rebuilds the given resource, setting the routing response type to {@link Viewable} for every
     * resource method whose handling method is annotated with {@link Template}. The resource locator
     * is copied unchanged and child resources are processed recursively.
     *
     * @param resource resource to process.
     * @return rebuilt resource.
     */
    private Resource processResource(final Resource resource) {
        Resource.Builder resourceBuilder = Resource.builder(resource.getPath());
        for (final ResourceMethod resourceMethod : resource.getResourceMethods()) {
            ResourceMethod.Builder builder = resourceBuilder.addMethod(resourceMethod);
            if (resourceMethod.getInvocable().getHandlingMethod().isAnnotationPresent(Template.class)) {
                // Routing on Viewable lets the MVC layer resolve the template for the response.
                builder.routingResponseType(Viewable.class);
            }
        }
        if (resource.getResourceLocator() != null) {
            resourceBuilder.addMethod(resource.getResourceLocator());
        }
        for (Resource child : resource.getChildResources()) {
            resourceBuilder.addChildResource(processResource(child));
        }
        return resourceBuilder.build();
    }
    /**
     * Returns a list of enhancing methods for a given {@link org.glassfish.jersey.server.model.RuntimeResource runtime resource}.
     *
     * @param runtimeResource runtime resource to create enhancing methods for.
     * @return list of enhancing methods.
     */
    private List<ModelProcessorUtil.Method> getEnhancingMethods(final RuntimeResource runtimeResource) {
        final List<ModelProcessorUtil.Method> newMethods = Lists.newArrayList();
        for (final Resource resource : runtimeResource.getResources()) {
            // Handler classes.
            for (final Class<?> handlerClass : resource.getHandlerClasses()) {
                createEnhancingMethods(handlerClass, null, newMethods);
            }
            // Names - if there are no handler classes / instances.
            if (resource.getHandlerClasses().isEmpty() && resource.getHandlerInstances().isEmpty()) {
                for (String resourceName : resource.getNames()) {
                    // Resolve the class by name; null when the class cannot be loaded.
                    final Class<Object> resourceClass = AccessController
                            .doPrivileged(ReflectionHelper.classForNamePA(resourceName));
                    if (resourceClass != null) {
                        createEnhancingMethods(resourceClass, null, newMethods);
                    }
                }
            }
            // Handler instances. Errors.process collects warnings in Jersey's error context.
            Errors.process((Producer<Void>) () -> {
                for (final Object handlerInstance : resource.getHandlerInstances()) {
                    final Class<?> handlerInstanceClass = handlerInstance.getClass();
                    // Skip instances whose class was already enhanced via getHandlerClasses() above.
                    if (!resource.getHandlerClasses().contains(handlerInstanceClass)) {
                        createEnhancingMethods(handlerInstanceClass, handlerInstance, newMethods);
                    } else {
                        Errors.warning(resource,
                                LocalizationMessages.TEMPLATE_HANDLER_ALREADY_ENHANCED(handlerInstanceClass));
                    }
                }
                return null;
            });
        }
        return newMethods;
    }
    /**
     * Creates enhancing methods for given resource.
     *
     * @param resourceClass resource class for which enhancing methods should be created.
     * @param resourceInstance resource instance for which enhancing methods should be created. May be {@code null}.
     * @param newMethods list to store new methods into.
     */
    private void createEnhancingMethods(final Class<?> resourceClass, final Object resourceInstance,
                                        final List<ModelProcessorUtil.Method> newMethods) {
        final Template template = resourceClass.getAnnotation(Template.class);
        if (template != null) {
            // Produces/Consumes are read from the annotated (possibly super) resource class.
            final Class<?> annotatedResourceClass = ModelHelper.getAnnotatedResourceClass(resourceClass);
            final List<MediaType> produces = MediaTypes
                    .createQualitySourceMediaTypes(annotatedResourceClass.getAnnotation(Produces.class));
            final List<MediaType> consumes = MediaTypes.createFrom(annotatedResourceClass.getAnnotation(Consumes.class));
            final TemplateInflectorImpl inflector = new TemplateInflectorImpl(template.name(),
                    resourceClass, resourceInstance);
            // Two GET endpoints: one on the resource path itself, one taking an explicit
            // template name as the {implicit-view-path-parameter} path segment.
            newMethods.add(new ModelProcessorUtil.Method(HttpMethod.GET, consumes, produces, inflector));
            newMethods.add(new ModelProcessorUtil.Method(IMPLICIT_VIEW_PATH_PARAMETER_TEMPLATE, HttpMethod.GET,
                    consumes, produces, inflector));
        }
    }
    /**
     * Inflector producing response with {@link org.glassfish.jersey.server.mvc.spi.ResolvedViewable resolved viewable} where
     * model is the resource class annotated with {@link Template} or 404 as its status code.
     */
    private class TemplateInflectorImpl implements TemplateInflector, Inflector<ContainerRequestContext, Response> {
        // Candidate template names from the @Template annotation (may contain blanks, filtered later).
        private final String[] templateName;
        private final Class<?> resourceClass;
        // May be null; then the model is looked up from matched resources or the resource context.
        private final Object resourceInstance;
        // Lazily captured class of the first model returned (see setModelClass).
        private Class<?> modelClass;
        /**
         * Create enhancing template {@link Inflector inflector} method.
         *
         * @param templateName template name for the produced {@link org.glassfish.jersey.server.mvc.Viewable viewable}.
         * @param resourceClass model class for the produced {@link org.glassfish.jersey.server.mvc.Viewable viewable}.
         *                      Should not be {@code null}.
         * @param resourceInstance model for the produced {@link org.glassfish.jersey.server.mvc.Viewable viewable}. May be
         *                         {@code null}.
         */
        private TemplateInflectorImpl(final String templateName, final Class<?> resourceClass,
                                      final Object resourceInstance) {
            this.templateName = new String[]{templateName};
            this.resourceClass = resourceClass;
            this.resourceInstance = resourceInstance;
        }
        private TemplateInflectorImpl(final String[] templateName, final Class<?> resourceClass,
                                      final Object resourceInstance) {
            this.templateName = templateName;
            this.resourceClass = resourceClass;
            this.resourceInstance = resourceInstance;
        }
        @Override
        public Response apply(ContainerRequestContext requestContext) {
            final List<String> templateNames = getTemplateNames(requestContext);
            final Object model = getModel(extendedUriInfoProvider.get());
            // Validate resource class.
            final ConfiguredValidator validator = validatorProvider.get();
            if (validator != null) {
                validator.validateResourceAndInputParams(model, null, null);
            }
            return Response.ok().entity(new ImplicitViewable(templateNames, model, resourceClass)).build();
        }
        @Override
        public Class<?> getModelClass() {
            return modelClass;
        }
        private Object setModelClass(final Object model) {
            // Remember the concrete model class the first time a model is produced.
            if (modelClass == null) {
                modelClass = model.getClass();
            }
            return model;
        }
        /**
         * Obtains a model object for a viewable.
         *
         * @param extendedUriInfo uri info to obtain last matched resource from.
         * @return a model object.
         */
        private Object getModel(final ExtendedUriInfo extendedUriInfo) {
            // Matched resources are in reverse matching order (most recently matched first),
            // per the JAX-RS UriInfo contract.
            final List<Object> matchedResources = extendedUriInfo.getMatchedResources();
            if (resourceInstance != null) {
                return setModelClass(resourceInstance);
            } else if (matchedResources.size() > 1) {
                // NOTE(review): index 1 (not 0) — appears to skip the enhancing sub-resource and use
                // its parent as the model; stock Jersey uses index 0. Confirm this is intended.
                return setModelClass(matchedResources.get(1));
            } else {
                return setModelClass(resourceContext.getResource(resourceClass));
            }
        }
        /**
         * Returns a list of template names to be considered as candidates for resolving
         * {@link org.glassfish.jersey.server.mvc.Viewable viewable} into
         * {@link org.glassfish.jersey.server.mvc.spi.ResolvedViewable resolved viewable}.
         * <p/>
         * Order of template names to be resolved is as follows:
         * <ul>
         * <li>{{@value #IMPLICIT_VIEW_PATH_PARAMETER}} value</li>
         * <li>{@link org.glassfish.jersey.server.mvc.Template#name()}</li>
         * <li>last sub-resource manager path</li>
         * <li>index</li>
         * </ul>
         *
         * @param requestContext request context to obtain {@link #IMPLICIT_VIEW_PATH_PARAMETER} value from.
         * @return a non-empty list of template names.
         */
        private List<String> getTemplateNames(final ContainerRequestContext requestContext) {
            final List<String> templateNames = Lists.newArrayList();
            // Template name extracted from path param.
            final String pathTemplate = requestContext.getUriInfo().getPathParameters().getFirst(IMPLICIT_VIEW_PATH_PARAMETER);
            if (pathTemplate != null) {
                templateNames.add(pathTemplate);
            }
            // Annotation. Blank entries are skipped.
            if (this.templateName != null && this.templateName.length > 0) {
                for (String name : this.templateName) {
                    if (StringUtils.isNotBlank(name)) {
                        templateNames.add(name);
                    }
                }
            }
            // Sub-resource path.
            final ExtendedUriInfo uriInfo = extendedUriInfoProvider.get();
            final List<RuntimeResource> matchedRuntimeResources = uriInfo.getMatchedRuntimeResources();
            if (matchedRuntimeResources.size() > 1) {
                // > 1 to check that we matched sub-resource
                // Index 0 is the most recently matched resource (reverse matching order),
                // i.e. the last sub-resource.
                final RuntimeResource lastMatchedRuntimeResource = matchedRuntimeResources.get(0);
                final Resource lastMatchedResource = lastMatchedRuntimeResource.getResources().get(0);
                String path = lastMatchedResource.getPath();
                // Skip the synthetic {implicit-view-path-parameter} path added by this processor.
                if (path != null && !IMPLICIT_VIEW_PATH_PARAMETER_TEMPLATE.equals(path)) {
                    // Strip a single leading slash so the name is relative.
                    path = path.charAt(0) == '/' ? path.substring(1, path.length()) : path;
                    templateNames.add(path);
                }
            }
            // Index. Guarantees the returned list is non-empty.
            if (templateNames.isEmpty()) {
                templateNames.add("index");
            }
            return templateNames;
        }
    }
}
| |
/*
* Copyright 2013 Cloudera.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kitesdk.data.spi.hive;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import org.apache.avro.Schema;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.SkewedInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.kitesdk.data.DatasetDescriptor;
import org.kitesdk.data.DatasetException;
import org.kitesdk.data.DatasetIOException;
import org.kitesdk.data.DatasetOperationException;
import org.kitesdk.data.Format;
import org.kitesdk.data.Formats;
import org.kitesdk.data.PartitionStrategy;
import org.kitesdk.data.UnknownFormatException;
import org.kitesdk.data.impl.Accessor;
import org.kitesdk.data.spi.FieldPartitioner;
import org.kitesdk.data.spi.SchemaUtil;
/**
 * Utilities for translating between Kite {@link DatasetDescriptor}s and Hive
 * metastore {@link Table}s: format/SerDe mapping, schema storage (URL or
 * literal), partition-strategy round-tripping, and custom property handling.
 */
class HiveUtils {
  static final String HDFS_SCHEME = "hdfs";

  // Hive table-parameter keys managed by Kite. The "cdk." variants are legacy
  // names; they are still honored when reading existing tables but never written.
  private static final String CUSTOM_PROPERTIES_PROPERTY_NAME = "kite.custom.property.names";
  private static final String PARTITION_EXPRESSION_PROPERTY_NAME = "kite.partition.expression";
  private static final String COMPRESSION_TYPE_PROPERTY_NAME = "kite.compression.type";
  private static final String OLD_CUSTOM_PROPERTIES_PROPERTY_NAME = "cdk.custom.property.names";
  private static final String OLD_PARTITION_EXPRESSION_PROPERTY_NAME = "cdk.partition.expression";
  // Standard Avro SerDe schema properties (by reference or inline, respectively).
  private static final String AVRO_SCHEMA_URL_PROPERTY_NAME = "avro.schema.url";
  private static final String AVRO_SCHEMA_LITERAL_PROPERTY_NAME = "avro.schema.literal";

  private static final Splitter NAME_SPLITTER = Splitter.on(',');
  private static final Joiner NAME_JOINER = Joiner.on(',');

  // Format -> Hive SerDe class. Parquet class names are resolved at class-load
  // time because different Hive releases ship them under different packages
  // (see getHiveParquetSerde and friends below).
  private static final Map<Format, String> FORMAT_TO_SERDE = ImmutableMap
      .<Format, String>builder()
      .put(Formats.AVRO, "org.apache.hadoop.hive.serde2.avro.AvroSerDe")
      .put(Formats.PARQUET, getHiveParquetSerde())
      .build();
  // SerDe class -> Format. Lists both the hive-bundled and the standalone
  // parquet SerDe names so tables written by either are recognized.
  private static final Map<String, Format> SERDE_TO_FORMAT = ImmutableMap
      .<String, Format>builder()
      .put("org.apache.hadoop.hive.serde2.avro.AvroSerDe", Formats.AVRO)
      .put("org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe",
          Formats.PARQUET)
      .put("parquet.hive.serde.ParquetHiveSerDe", Formats.PARQUET)
      .build();
  private static final Map<Format, String> FORMAT_TO_INPUT_FORMAT = ImmutableMap
      .<Format, String>builder()
      .put(Formats.AVRO,
          "org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat")
      .put(Formats.PARQUET, getHiveParquetInputFormat())
      .build();
  private static final Map<Format, String> FORMAT_TO_OUTPUT_FORMAT = ImmutableMap
      .<Format, String>builder()
      .put(Formats.AVRO,
          "org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat")
      .put(Formats.PARQUET, getHiveParquetOutputFormat())
      .build();

  /**
   * Builds a {@link DatasetDescriptor} from an existing Hive {@link Table}.
   *
   * @param conf Hadoop configuration used to resolve the table's FileSystem
   * @param table the Hive metastore table to read
   * @return a descriptor capturing the table's format, location, schema,
   *     partition strategy, compression, and custom properties
   * @throws UnknownFormatException if the table's SerDe is not one Kite reads
   * @throws DatasetException if no schema can be determined for the table
   */
  static DatasetDescriptor descriptorForTable(Configuration conf, Table table) {
    final DatasetDescriptor.Builder builder = new DatasetDescriptor.Builder();

    Format format;
    final String serializationLib = table.getSd().getSerdeInfo().getSerializationLib();
    if (SERDE_TO_FORMAT.containsKey(serializationLib)) {
      format = SERDE_TO_FORMAT.get(serializationLib);
      builder.format(format);
    } else {
      // TODO: should this use an "unknown" format? others fail in open()
      throw new UnknownFormatException(
          "Unknown format for serde:" + serializationLib);
    }

    final Path dataLocation = new Path(table.getSd().getLocation());
    final FileSystem fs = fsForPath(conf, dataLocation);
    builder.location(fs.makeQualified(dataLocation));

    // custom properties: the names property lists which table parameters were
    // set by the user (as opposed to Kite- or Hive-managed parameters)
    Map<String, String> properties = table.getParameters();
    String namesProperty = coalesce(
        properties.get(CUSTOM_PROPERTIES_PROPERTY_NAME),
        properties.get(OLD_CUSTOM_PROPERTIES_PROPERTY_NAME));
    if (namesProperty != null) {
      for (String property : NAME_SPLITTER.split(namesProperty)) {
        builder.property(property, properties.get(property));
      }
    }

    PartitionStrategy partitionStrategy = null;
    if (isPartitioned(table)) {
      // prefer the stored Kite partition expression; fall back to rebuilding a
      // strategy from the Hive partition columns
      String partitionProperty = coalesce(
          properties.get(PARTITION_EXPRESSION_PROPERTY_NAME),
          properties.get(OLD_PARTITION_EXPRESSION_PROPERTY_NAME));
      if (partitionProperty != null) {
        partitionStrategy = Accessor.getDefault()
            .fromExpression(partitionProperty);
      } else {
        // build a partition strategy for the table from the Hive strategy
        partitionStrategy = fromPartitionColumns(getPartCols(table));
      }
      builder.partitionStrategy(partitionStrategy);
    }

    // schema resolution order: schema URL, then schema literal, then convert
    // the Hive column definitions to an Avro schema
    String schemaUrlString = properties.get(AVRO_SCHEMA_URL_PROPERTY_NAME);
    if (schemaUrlString != null) {
      try {
        // URI.create is safe because this library wrote the URI
        builder.schemaUri(URI.create(schemaUrlString));
      } catch (IOException e) {
        throw new DatasetIOException("Could not read schema", e);
      }
    } else {
      String schemaLiteral = properties.get(AVRO_SCHEMA_LITERAL_PROPERTY_NAME);
      if (schemaLiteral != null) {
        builder.schemaLiteral(schemaLiteral);
      } else {
        builder.schema(HiveSchemaConverter.convertTable(
            table.getTableName(), table.getSd().getCols(),
            partitionStrategy));
      }
    }

    String compressionType = properties.get(COMPRESSION_TYPE_PROPERTY_NAME);
    if (compressionType != null) {
      builder.compressionType(compressionType);
    }

    try {
      return builder.build();
    } catch (IllegalStateException ex) {
      // keep the builder's exception as the cause so callers can see what
      // metadata was actually missing
      throw new DatasetException("Cannot find schema: missing metadata", ex);
    }
  }

  /** Returns whether the table has at least one Hive partition column. */
  private static boolean isPartitioned(Table table) {
    return !getPartCols(table).isEmpty();
  }

  /**
   * Returns the table's partition keys, initializing the list on the table if
   * it is null so callers never see a null list.
   */
  private static List<FieldSchema> getPartCols(Table table) {
    List<FieldSchema> partKeys = table.getPartitionKeys();
    if (partKeys == null) {
      partKeys = new ArrayList<FieldSchema>();
      table.setPartitionKeys(partKeys);
    }
    return partKeys;
  }

  /**
   * Returns the first non-null value from the sequence or null if there is no
   * non-null value.
   */
  @SafeVarargs // only reads from the varargs array, never stores into it
  private static <T> T coalesce(T... values) {
    for (T value : values) {
      if (value != null) {
        return value;
      }
    }
    return null;
  }

  /**
   * Builds a Hive {@link Table} (including the schema) for a descriptor.
   *
   * @see #tableForDescriptor(String, String, DatasetDescriptor, boolean, boolean)
   */
  static Table tableForDescriptor(String namespace, String name,
                                  DatasetDescriptor descriptor,
                                  boolean external) {
    return tableForDescriptor(namespace, name, descriptor, external, true);
  }

  /**
   * Builds a Hive {@link Table} for the given descriptor.
   *
   * @param namespace Hive database name
   * @param name Hive table name
   * @param descriptor dataset descriptor providing format, schema, location,
   *     compression, partitioning, and custom properties
   * @param external whether to create an EXTERNAL table at the descriptor's
   *     location (otherwise a MANAGED table)
   * @param includeSchema whether to store the Avro schema (by URL when the
   *     descriptor has an HDFS schema URL, otherwise as a literal)
   * @return a fully-populated metastore Table, not yet created in Hive
   * @throws UnknownFormatException if the descriptor's format has no SerDe
   */
  static Table tableForDescriptor(String namespace, String name,
                                  DatasetDescriptor descriptor,
                                  boolean external,
                                  boolean includeSchema) {
    final Table table = createEmptyTable(namespace, name);

    if (external) {
      // you'd think this would do it...
      table.setTableType(TableType.EXTERNAL_TABLE.toString());
      // but it doesn't work without some additional magic:
      table.getParameters().put("EXTERNAL", "TRUE");
      table.getSd().setLocation(descriptor.getLocation().toString());
    } else {
      table.setTableType(TableType.MANAGED_TABLE.toString());
    }

    addPropertiesForDescriptor(table, descriptor);

    // translate from Format to SerDe
    final Format format = descriptor.getFormat();
    if (FORMAT_TO_SERDE.containsKey(format)) {
      table.getSd().getSerdeInfo().setSerializationLib(FORMAT_TO_SERDE.get(format));
      table.getSd().setInputFormat(FORMAT_TO_INPUT_FORMAT.get(format));
      table.getSd().setOutputFormat(FORMAT_TO_OUTPUT_FORMAT.get(format));
    } else {
      throw new UnknownFormatException(
          "No known serde for format:" + format.getName());
    }

    if (includeSchema) {
      URL schemaURL = descriptor.getSchemaUrl();
      if (useSchemaURL(schemaURL)) {
        table.getParameters().put(
            AVRO_SCHEMA_URL_PROPERTY_NAME,
            descriptor.getSchemaUrl().toExternalForm());
      } else {
        table.getParameters().put(
            AVRO_SCHEMA_LITERAL_PROPERTY_NAME,
            descriptor.getSchema().toString());
      }
    }

    table.getParameters().put(COMPRESSION_TYPE_PROPERTY_NAME,
        descriptor.getCompressionType().getName());

    // convert the schema to Hive columns
    table.getSd().setCols(HiveSchemaConverter.convertSchema(descriptor.getSchema()));

    // copy partitioning info
    if (descriptor.isPartitioned()) {
      PartitionStrategy ps = descriptor.getPartitionStrategy();
      table.getParameters().put(PARTITION_EXPRESSION_PROPERTY_NAME,
          Accessor.getDefault().toExpression(ps));
      table.setPartitionKeys(partitionColumns(ps, descriptor.getSchema()));
    }

    return table;
  }

  /**
   * Returns whether the schema should be stored by URL: only non-null URLs
   * with the {@value #HDFS_SCHEME} scheme qualify (others are inlined).
   */
  private static boolean useSchemaURL(@Nullable URL schemaURL) {
    try {
      return ((schemaURL != null) &&
          HDFS_SCHEME.equals(schemaURL.toURI().getScheme()));
    } catch (URISyntaxException ex) {
      // an unparsable URL cannot be stored by reference
      return false;
    }
  }

  /**
   * Creates a bare metastore {@link Table} with all collection-valued fields
   * initialized to empty, so later population code never hits nulls.
   */
  static Table createEmptyTable(String namespace, String name) {
    Table table = new Table();
    table.setDbName(namespace);
    table.setTableName(name);
    table.setPartitionKeys(new ArrayList<FieldSchema>());
    table.setParameters(new HashMap<String, String>());

    StorageDescriptor sd = new StorageDescriptor();
    sd.setSerdeInfo(new SerDeInfo());
    sd.setNumBuckets(-1); // -1 means "not bucketed"
    sd.setBucketCols(new ArrayList<String>());
    sd.setCols(new ArrayList<FieldSchema>());
    sd.setParameters(new HashMap<String, String>());
    sd.setSortCols(new ArrayList<Order>());
    sd.getSerdeInfo().setParameters(new HashMap<String, String>());

    SkewedInfo skewInfo = new SkewedInfo();
    skewInfo.setSkewedColNames(new ArrayList<String>());
    skewInfo.setSkewedColValues(new ArrayList<List<String>>());
    skewInfo.setSkewedColValueLocationMaps(new HashMap<List<String>, String>());
    sd.setSkewedInfo(skewInfo);

    table.setSd(sd);
    return table;
  }

  /**
   * Updates the table's stored schema (URL or literal) from the descriptor,
   * preserving whichever storage style the table already uses, and refreshes
   * the custom properties.
   *
   * @throws DatasetOperationException if the table stores its schema by URL
   *     but the descriptor has no schema URL
   * @throws DatasetException if the descriptor has neither a usable schema
   *     URL nor a schema
   */
  public static void updateTableSchema(Table table, DatasetDescriptor descriptor) {
    URL schemaURL = descriptor.getSchemaUrl();
    if (table.getParameters().get(AVRO_SCHEMA_LITERAL_PROPERTY_NAME) != null) {
      if (useSchemaURL(schemaURL)) {
        // upgrade from a literal to a by-reference schema
        table.getParameters().remove(AVRO_SCHEMA_LITERAL_PROPERTY_NAME);
        table.getParameters().put(AVRO_SCHEMA_URL_PROPERTY_NAME,
            schemaURL.toExternalForm());
      } else {
        table.getParameters().put(
            AVRO_SCHEMA_LITERAL_PROPERTY_NAME,
            descriptor.getSchema().toString());
      }
    } else if (table.getParameters().get(AVRO_SCHEMA_URL_PROPERTY_NAME) != null) {
      if (schemaURL == null) {
        throw new DatasetOperationException(
            "Cannot update " + AVRO_SCHEMA_URL_PROPERTY_NAME +
            " since descriptor schema URL is not set.");
      }
      table.getParameters().put(
          AVRO_SCHEMA_URL_PROPERTY_NAME,
          schemaURL.toExternalForm());
    } else {
      // neither the literal nor the URL is set, so add the URL if specified
      // and the schema literal if not.
      if (useSchemaURL(schemaURL)) {
        table.getParameters().put(
            AVRO_SCHEMA_URL_PROPERTY_NAME,
            schemaURL.toExternalForm());
      } else if (descriptor.getSchema() != null) {
        table.getParameters().put(
            AVRO_SCHEMA_LITERAL_PROPERTY_NAME,
            descriptor.getSchema().toString());
      } else {
        throw new DatasetException("Table schema cannot be updated since it is" +
            " not set on the descriptor.");
      }
    }
    // keep the custom properties up-to-date
    addPropertiesForDescriptor(table, descriptor);
  }

  /**
   * Returns the {@link FileSystem} for a path, wrapping IO failures in a
   * {@link DatasetIOException}.
   */
  static FileSystem fsForPath(Configuration conf, Path path) {
    try {
      return path.getFileSystem(conf);
    } catch (IOException ex) {
      throw new DatasetIOException("Cannot access FileSystem for uri:" + path, ex);
    }
  }

  /**
   * Copies the descriptor's custom properties onto the table and records
   * their names in {@value #CUSTOM_PROPERTIES_PROPERTY_NAME} so reads can
   * distinguish them from managed parameters.
   */
  private static void addPropertiesForDescriptor(Table table,
                                                 DatasetDescriptor descriptor) {
    // copy custom properties to the table
    if (!descriptor.listProperties().isEmpty()) {
      for (String property : descriptor.listProperties()) {
        // no need to check the reserved list, those are not set on descriptors
        table.getParameters().put(property, descriptor.getProperty(property));
      }
      // set which properties are custom and should be set on descriptors
      table.getParameters().put(CUSTOM_PROPERTIES_PROPERTY_NAME,
          NAME_JOINER.join(descriptor.listProperties()));
    }
  }

  /**
   * Returns the correct dataset path for the given name and root directory.
   *
   * @param root A Path
   * @param namespace A String namespace, or logical group
   * @param name A String dataset name
   * @return the correct dataset Path
   */
  static Path pathForDataset(Path root, String namespace, String name) {
    Preconditions.checkNotNull(namespace, "Namespace cannot be null");
    Preconditions.checkNotNull(name, "Dataset name cannot be null");
    // dots in the dataset name become path separators, nesting "a.b" under
    // "a/b" — NOTE(review): looks like a namespacing convention; confirm.
    return new Path(root, new Path(namespace, name.replace('.', Path.SEPARATOR_CHAR)));
  }

  /**
   * Builds the Hive partition-column definitions for a partition strategy,
   * typing each column from the partitioner's output type.
   */
  @SuppressWarnings("deprecation")
  static List<FieldSchema> partitionColumns(PartitionStrategy strategy, Schema schema) {
    List<FieldSchema> columns = Lists.newArrayList();
    for (FieldPartitioner<?, ?> fp : Accessor.getDefault().getFieldPartitioners(strategy)) {
      columns.add(new FieldSchema(fp.getName(),
          getHiveType(SchemaUtil.getPartitionType(fp, schema)),
          "Partition column derived from '" + fp.getSourceName() + "' column, " +
              "generated by Kite."));
    }
    return columns;
  }

  // Hive partition-column type name -> Kite provided-partitioner value type.
  private static final Map<String, String> PROVIDED_TYPES = ImmutableMap
      .<String, String>builder()
      .put("tinyint", "int")
      .put("smallint", "int")
      .put("int", "int")
      .put("bigint", "long")
      .build();

  /**
   * Builds a {@link PartitionStrategy} from a list of Hive partition fields.
   *
   * @param fields a List of FieldSchemas
   * @return a PartitionStrategy for the Hive partitions
   */
  @VisibleForTesting
  static PartitionStrategy fromPartitionColumns(List<FieldSchema> fields) {
    PartitionStrategy.Builder builder = new PartitionStrategy.Builder();
    for (FieldSchema hiveSchema : fields) {
      TypeInfo type = HiveSchemaConverter.parseTypeInfo(hiveSchema.getType());
      // any types not in the map will be treated as Strings (get returns null)
      builder.provided(hiveSchema.getName(),
          PROVIDED_TYPES.get(type.getTypeName()));
    }
    return builder.build();
  }

  /** Maps a Java primitive/wrapper class to its Hive type name. */
  private static String getHiveType(Class<?> type) {
    String typeName = PrimitiveObjectInspectorUtils.getTypeNameFromPrimitiveJava(type);
    if (typeName == null) {
      throw new DatasetException("Unsupported FieldPartitioner type: " + type);
    }
    return typeName;
  }

  /** Resolves the parquet input format class, preferring the hive-bundled name. */
  private static String getHiveParquetInputFormat() {
    String newClass = "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat";
    String oldClass = "parquet.hive.DeprecatedParquetInputFormat";
    try {
      Class.forName(newClass);
      return newClass;
    } catch (ClassNotFoundException ex) {
      return oldClass;
    }
  }

  /** Resolves the parquet output format class, preferring the hive-bundled name. */
  private static String getHiveParquetOutputFormat() {
    String newClass = "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat";
    String oldClass = "parquet.hive.DeprecatedParquetOutputFormat";
    try {
      Class.forName(newClass);
      return newClass;
    } catch (ClassNotFoundException ex) {
      return oldClass;
    }
  }

  /** Resolves the parquet SerDe class, preferring the hive-bundled name. */
  private static String getHiveParquetSerde() {
    String newClass = "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe";
    String oldClass = "parquet.hive.serde.ParquetHiveSerDe";
    try {
      Class.forName(newClass);
      return newClass;
    } catch (ClassNotFoundException ex) {
      return oldClass;
    }
  }
}
| |
package se.emilsjolander.stickylistheaders;
import java.util.LinkedList;
import java.util.List;
import android.content.Context;
import android.database.DataSetObserver;
import android.graphics.drawable.Drawable;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Checkable;
import android.widget.ListAdapter;
/**
* A {@link ListAdapter} which wraps a {@link StickyListHeadersAdapter} and
* automatically handles wrapping the result of
* {@link StickyListHeadersAdapter#getView(int, android.view.View, android.view.ViewGroup)}
* and
* {@link StickyListHeadersAdapter#getHeaderView(int, android.view.View, android.view.ViewGroup)}
* appropriately.
*
* @author Jake Wharton (jakewharton@gmail.com)
*/
class AdapterWrapper extends BaseAdapter implements StickyListHeadersAdapter {
	/**
	 * Callback invoked when the user taps a header view.
	 */
	public interface OnHeaderClickListener{
		/**
		 * @param header the header view that was clicked
		 * @param itemPosition the list position whose header was clicked
		 * @param headerId the delegate-supplied id of that header
		 */
		public void onHeaderClick(View header, int itemPosition, long headerId);
	}
	// The adapter whose items and headers this wrapper exposes.
	final StickyListHeadersAdapter mDelegate;
	// Detached header views kept for reuse (see recycleHeaderIfExists/popHeader).
	private final List<View> mHeaderCache = new LinkedList<View>();
	private final Context mContext;
	// Divider drawn between items that share a header.
	private Drawable mDivider;
	private int mDividerHeight;
	private OnHeaderClickListener mOnHeaderClickListener;
	// Forwards delegate data-set events to this wrapper's own observers.
	// The super.notify* calls are deliberate: the overridden notify* methods
	// below forward to the delegate, which would re-trigger this observer and
	// recurse; calling super notifies only this wrapper's observers.
	private DataSetObserver mDataSetObserver = new DataSetObserver() {
		@Override
		public void onInvalidated() {
			// Cached headers came from the old data set; drop them.
			mHeaderCache.clear();
			AdapterWrapper.super.notifyDataSetInvalidated();
		}
		@Override
		public void onChanged() {
			AdapterWrapper.super.notifyDataSetChanged();
		}
	};
AdapterWrapper(Context context,
StickyListHeadersAdapter delegate) {
this.mContext = context;
this.mDelegate = delegate;
delegate.registerDataSetObserver(mDataSetObserver);
}
void setDivider(Drawable divider) {
this.mDivider = divider;
}
void setDividerHeight(int dividerHeight) {
this.mDividerHeight = dividerHeight;
}
	// --- Plain pass-through of the ListAdapter contract to the wrapped adapter. ---
	@Override
	public boolean areAllItemsEnabled() {
		return mDelegate.areAllItemsEnabled();
	}
	@Override
	public boolean isEnabled(int position) {
		return mDelegate.isEnabled(position);
	}
	@Override
	public int getCount() {
		return mDelegate.getCount();
	}
	@Override
	public Object getItem(int position) {
		return mDelegate.getItem(position);
	}
	@Override
	public long getItemId(int position) {
		return mDelegate.getItemId(position);
	}
	@Override
	public boolean hasStableIds() {
		return mDelegate.hasStableIds();
	}
	@Override
	public int getItemViewType(int position) {
		return mDelegate.getItemViewType(position);
	}
	@Override
	public int getViewTypeCount() {
		return mDelegate.getViewTypeCount();
	}
	@Override
	public boolean isEmpty() {
		return mDelegate.isEmpty();
	}
/**
* Will recycle header from {@link WrapperView} if it exists
*/
private void recycleHeaderIfExists(WrapperView wv) {
View header = wv.mHeader;
if (header != null) {
mHeaderCache.add(header);
}
}
/**
 * Builds (or rebinds) the header view for {@code position}, reusing the
 * wrapper's current header or a cached one when available.
 *
 * @throws NullPointerException if the delegate returns a null header view
 */
private View configureHeader(WrapperView wv, final int position) {
    // Prefer the header already attached to this wrapper; fall back to the cache.
    View recycled = (wv.mHeader != null) ? wv.mHeader : popHeader();
    final View header = mDelegate.getHeaderView(position, recycled, wv);
    if (header == null) {
        throw new NullPointerException("Header view must not be null.");
    }
    // A non-clickable header would let the list selector draw on top of it.
    header.setClickable(true);
    header.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View v) {
            if (mOnHeaderClickListener == null) {
                return;
            }
            mOnHeaderClickListener.onHeaderClick(v, position, mDelegate.getHeaderId(position));
        }
    });
    return header;
}
// Takes the oldest recycled header out of the cache, or null if none is available.
private View popHeader() {
    return mHeaderCache.isEmpty() ? null : mHeaderCache.remove(0);
}
/** Returns {@code true} if the previous position has the same header ID. */
private boolean previousPositionHasSameHeader(int position) {
    // The first row can never share a header with a predecessor.
    if (position == 0) {
        return false;
    }
    return mDelegate.getHeaderId(position) == mDelegate.getHeaderId(position - 1);
}
@Override
public WrapperView getView(int position, View convertView, ViewGroup parent) {
    // Reuse the recycled wrapper when possible, otherwise create a fresh one.
    WrapperView wv = (convertView == null) ? new WrapperView(mContext) : (WrapperView) convertView;
    View item = mDelegate.getView(position, wv.mItem, wv);
    View header = null;
    if (previousPositionHasSameHeader(position)) {
        // Row continues the previous section: no header here; cache any old one.
        recycleHeaderIfExists(wv);
    } else {
        // First row of a new section: build/rebind the header view.
        header = configureHeader(wv, position);
    }
    if((item instanceof Checkable) && !(wv instanceof CheckableWrapperView)) {
        // Need to create Checkable subclass of WrapperView for ListView to work correctly
        wv = new CheckableWrapperView(mContext);
    } else if(!(item instanceof Checkable) && (wv instanceof CheckableWrapperView)) {
        wv = new WrapperView(mContext);
    }
    // NOTE(review): when the wrapper is swapped above, 'item' was obtained with the
    // old wrapper as parent — appears intentional; confirm against WrapperView.update().
    wv.update(item, header, mDivider, mDividerHeight);
    return wv;
}
// Listener invoked by the click handler installed in configureHeader(..); may be null to disable.
public void setOnHeaderClickListener(OnHeaderClickListener onHeaderClickListener){
    this.mOnHeaderClickListener = onHeaderClickListener;
}
@Override
public boolean equals(Object o) {
    // Delegates equality to the wrapped adapter, making the wrapper compare equal
    // to its delegate. NOTE(review): this is asymmetric (delegate.equals(wrapper)
    // will generally be false) — kept as-is since callers may rely on it.
    return mDelegate.equals(o);
}
@Override
public View getDropDownView(int position, View convertView, ViewGroup parent) {
    // Assumes the delegate is a BaseAdapter; throws ClassCastException otherwise.
    return ((BaseAdapter) mDelegate).getDropDownView(position, convertView, parent);
}
@Override
public int hashCode() {
    // Paired with equals(..) above: both delegate to the wrapped adapter.
    return mDelegate.hashCode();
}
@Override
public void notifyDataSetChanged() {
    // Assumes the delegate is a BaseAdapter; throws ClassCastException otherwise.
    ((BaseAdapter) mDelegate).notifyDataSetChanged();
}
@Override
public void notifyDataSetInvalidated() {
    // Assumes the delegate is a BaseAdapter; throws ClassCastException otherwise.
    ((BaseAdapter) mDelegate).notifyDataSetInvalidated();
}
@Override
public String toString() {
    // String form of the wrapped adapter.
    return mDelegate.toString();
}
@Override
public View getHeaderView(int position, View convertView, ViewGroup parent) {
    // Header views are produced by the wrapped StickyListHeadersAdapter.
    return mDelegate.getHeaderView(position, convertView, parent);
}
@Override
public long getHeaderId(int position) {
    // Header grouping IDs come from the wrapped adapter.
    return mDelegate.getHeaderId(position);
}
}
| |
package org.semanticweb.elk.reasoner.indexing.classes;
/*
* #%L
* ELK Reasoner
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2011 - 2015 Department of Computer Science, University of Oxford
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.semanticweb.elk.owl.interfaces.ElkClassAssertionAxiom;
import org.semanticweb.elk.owl.interfaces.ElkDeclarationAxiom;
import org.semanticweb.elk.owl.interfaces.ElkDifferentIndividualsAxiom;
import org.semanticweb.elk.owl.interfaces.ElkDisjointClassesAxiom;
import org.semanticweb.elk.owl.interfaces.ElkDisjointUnionAxiom;
import org.semanticweb.elk.owl.interfaces.ElkEquivalentClassesAxiom;
import org.semanticweb.elk.owl.interfaces.ElkEquivalentObjectPropertiesAxiom;
import org.semanticweb.elk.owl.interfaces.ElkObjectPropertyAssertionAxiom;
import org.semanticweb.elk.owl.interfaces.ElkObjectPropertyDomainAxiom;
import org.semanticweb.elk.owl.interfaces.ElkObjectPropertyRangeAxiom;
import org.semanticweb.elk.owl.interfaces.ElkReflexiveObjectPropertyAxiom;
import org.semanticweb.elk.owl.interfaces.ElkSameIndividualAxiom;
import org.semanticweb.elk.owl.interfaces.ElkSubClassOfAxiom;
import org.semanticweb.elk.owl.interfaces.ElkSubObjectPropertyOfAxiom;
import org.semanticweb.elk.owl.interfaces.ElkTransitiveObjectPropertyAxiom;
import org.semanticweb.elk.reasoner.indexing.model.CachedIndexedObject;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkClassAssertionAxiomConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDeclarationAxiomConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDifferentIndividualsAxiomBinaryConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDifferentIndividualsAxiomNaryConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDisjointClassesAxiomBinaryConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDisjointClassesAxiomNaryConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDisjointUnionAxiomBinaryConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDisjointUnionAxiomEquivalenceConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDisjointUnionAxiomNaryConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDisjointUnionAxiomOwlNothingConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDisjointUnionAxiomSubClassConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkEquivalentClassesAxiomEquivalenceConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkEquivalentClassesAxiomSubClassConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkEquivalentObjectPropertiesAxiomConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkObjectPropertyAssertionAxiomConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkObjectPropertyDomainAxiomConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkObjectPropertyRangeAxiomConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkReflexiveObjectPropertyAxiomConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkSameIndividualAxiomConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkSubClassOfAxiomConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkSubObjectPropertyOfAxiomConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkTransitiveObjectPropertyAxiomConversion;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedClass;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedClassExpression;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedClassExpressionList;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedEntity;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedIndividual;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedObject;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedObjectIntersectionOf;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedObjectProperty;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedPropertyChain;
import org.semanticweb.elk.reasoner.indexing.model.ModifiableOntologyIndex;
import org.semanticweb.elk.reasoner.indexing.model.OccurrenceIncrement;
/**
* A {@link ModifiableIndexedObject.Factory} that constructs objects using
* another {@link ModifiableIndexedObject.Factory} and updates the occurrence
* counts for the constructed objects using the provided
* {@link OccurrenceIncrement}.
*
* @author "Yevgeny Kazakov"
*
*/
public class UpdatingModifiableIndexedObjectFactory
        extends
            UpdatingCachedIndexedObjectFactory
        implements
            ModifiableIndexedObject.Factory {

    // The factory that actually constructs the objects; every method below
    // passes its result through update(..) (inherited — presumably from
    // UpdatingCachedIndexedObjectFactory, which applies the occurrence
    // increment; confirm in the superclass).
    private final ModifiableIndexedObject.Factory baseFactory_;

    /**
     * @param baseFactory
     *            the factory used to construct the objects; must implement
     *            both the cached and the modifiable factory interfaces
     * @param index
     *            the {@link ModifiableOntologyIndex} receiving the updates
     * @param increment
     *            the {@link OccurrenceIncrement} applied to constructed objects
     */
    public <F extends CachedIndexedObject.Factory & ModifiableIndexedObject.Factory> UpdatingModifiableIndexedObjectFactory(
            F baseFactory, ModifiableOntologyIndex index,
            OccurrenceIncrement increment) {
        super(baseFactory, index, increment);
        this.baseFactory_ = baseFactory;
    }

    // Every method below follows the same pattern: delegate construction to
    // baseFactory_ and pass the result through update(..).

    @Override
    public ModifiableElkClassAssertionAxiomConversion getElkClassAssertionAxiomConversion(
            ElkClassAssertionAxiom originalAxiom,
            ModifiableIndexedIndividual instance,
            ModifiableIndexedClassExpression type) {
        return update(baseFactory_.getElkClassAssertionAxiomConversion(
                originalAxiom, instance, type));
    }

    @Override
    public ModifiableElkDeclarationAxiomConversion getElkDeclarationAxiomConversion(
            ElkDeclarationAxiom originalAxiom, ModifiableIndexedEntity entity) {
        return update(baseFactory_
                .getElkDeclarationAxiomConversion(originalAxiom, entity));
    }

    @Override
    public ModifiableElkDifferentIndividualsAxiomBinaryConversion getElkDifferentIndividualsAxiomBinaryConversion(
            ElkDifferentIndividualsAxiom originalAxiom,
            int firstIndividualPosition, int secondIndividualPosition,
            ModifiableIndexedObjectIntersectionOf conjunction,
            ModifiableIndexedClass bottom) {
        return update(
                baseFactory_.getElkDifferentIndividualsAxiomBinaryConversion(
                        originalAxiom, firstIndividualPosition,
                        secondIndividualPosition, conjunction, bottom));
    }

    @Override
    public ModifiableElkDifferentIndividualsAxiomNaryConversion getElkDifferentIndividualsAxiomNaryConversion(
            ElkDifferentIndividualsAxiom originalAxiom,
            ModifiableIndexedClassExpressionList differentIndividuals) {
        return update(
                baseFactory_.getElkDifferentIndividualsAxiomNaryConversion(
                        originalAxiom, differentIndividuals));
    }

    @Override
    public ModifiableElkDisjointClassesAxiomBinaryConversion getElkDisjointClassesAxiomBinaryConversion(
            ElkDisjointClassesAxiom originalAxiom, int firstClassPosition,
            int secondClassPosition,
            ModifiableIndexedObjectIntersectionOf conjunction,
            ModifiableIndexedClass bottom) {
        return update(baseFactory_.getElkDisjointClassesAxiomBinaryConversion(
                originalAxiom, firstClassPosition, secondClassPosition,
                conjunction, bottom));
    }

    @Override
    public ModifiableElkDisjointClassesAxiomNaryConversion getElkDisjointClassesAxiomNaryConversion(
            ElkDisjointClassesAxiom originalAxiom,
            ModifiableIndexedClassExpressionList disjointClasses) {
        return update(baseFactory_.getElkDisjointClassesAxiomNaryConversion(
                originalAxiom, disjointClasses));
    }

    @Override
    public ModifiableElkDisjointUnionAxiomBinaryConversion getElkDisjointUnionAxiomBinaryConversion(
            ElkDisjointUnionAxiom originalAxiom, int firstDisjunctPosition,
            int secondDisjunctPosition,
            ModifiableIndexedObjectIntersectionOf conjunction,
            ModifiableIndexedClass bottom) {
        return update(baseFactory_.getElkDisjointUnionAxiomBinaryConversion(
                originalAxiom, firstDisjunctPosition, secondDisjunctPosition,
                conjunction, bottom));
    }

    @Override
    public ModifiableElkDisjointUnionAxiomEquivalenceConversion getElkDisjointUnionAxiomEquivalenceConversion(
            ElkDisjointUnionAxiom originalAxiom,
            ModifiableIndexedClass definedClass,
            ModifiableIndexedClassExpression definition) {
        return update(baseFactory_.getElkDisjointUnionAxiomEquivalenceConversion(
                originalAxiom, definedClass, definition));
    }

    @Override
    public ModifiableElkDisjointUnionAxiomNaryConversion getElkDisjointUnionAxiomNaryConversion(
            ElkDisjointUnionAxiom originalAxiom,
            ModifiableIndexedClassExpressionList disjointClasses) {
        return update(baseFactory_.getElkDisjointUnionAxiomNaryConversion(
                originalAxiom, disjointClasses));
    }

    @Override
    public ModifiableElkDisjointUnionAxiomOwlNothingConversion getElkDisjointUnionAxiomOwlNothingConversion(
            ElkDisjointUnionAxiom originalAxiom,
            ModifiableIndexedClass definedClass,
            ModifiableIndexedClass bottom) {
        return update(baseFactory_.getElkDisjointUnionAxiomOwlNothingConversion(
                originalAxiom, definedClass, bottom));
    }

    @Override
    public ModifiableElkDisjointUnionAxiomSubClassConversion getElkDisjointUnionAxiomSubClassConversion(
            ElkDisjointUnionAxiom originalAxiom, int disjunctPosition,
            ModifiableIndexedClassExpression disjunct,
            ModifiableIndexedClass definedClass) {
        return update(baseFactory_.getElkDisjointUnionAxiomSubClassConversion(
                originalAxiom, disjunctPosition, disjunct, definedClass));
    }

    @Override
    public ModifiableElkEquivalentClassesAxiomEquivalenceConversion getElkEquivalentClassesAxiomEquivalenceConversion(
            ElkEquivalentClassesAxiom originalAxiom, int definedClassPosition,
            int secondMemberPosition, ModifiableIndexedClassExpression firstMember,
            ModifiableIndexedClassExpression secondMember) {
        return update(
                baseFactory_.getElkEquivalentClassesAxiomEquivalenceConversion(
                        originalAxiom, definedClassPosition, secondMemberPosition,
                        firstMember, secondMember));
    }

    @Override
    public ModifiableElkEquivalentClassesAxiomSubClassConversion getElkEquivalentClassesAxiomSubClassConversion(
            ElkEquivalentClassesAxiom originalAxiom, int subClassPosition,
            int superClassPosition, ModifiableIndexedClassExpression subClass,
            ModifiableIndexedClassExpression superClass) {
        return update(
                baseFactory_.getElkEquivalentClassesAxiomSubClassConversion(
                        originalAxiom, subClassPosition, superClassPosition,
                        subClass, superClass));
    }

    @Override
    public ModifiableElkEquivalentObjectPropertiesAxiomConversion getElkEquivalentObjectPropertiesAxiomConversion(
            ElkEquivalentObjectPropertiesAxiom originalAxiom,
            int subPropertyPosition, int superPropertyPosition,
            ModifiableIndexedObjectProperty subProperty,
            ModifiableIndexedObjectProperty superProperty) {
        return update(
                baseFactory_.getElkEquivalentObjectPropertiesAxiomConversion(
                        originalAxiom, subPropertyPosition,
                        superPropertyPosition, subProperty, superProperty));
    }

    @Override
    public ModifiableElkObjectPropertyAssertionAxiomConversion getElkObjectPropertyAssertionAxiomConversion(
            ElkObjectPropertyAssertionAxiom originalAxiom,
            ModifiableIndexedClassExpression subClass,
            ModifiableIndexedClassExpression superClass) {
        return update(baseFactory_.getElkObjectPropertyAssertionAxiomConversion(
                originalAxiom, subClass, superClass));
    }

    @Override
    public ModifiableElkObjectPropertyDomainAxiomConversion getElkObjectPropertyDomainAxiomConversion(
            ElkObjectPropertyDomainAxiom originalAxiom,
            ModifiableIndexedClassExpression subClass,
            ModifiableIndexedClassExpression superClass) {
        return update(baseFactory_.getElkObjectPropertyDomainAxiomConversion(
                originalAxiom, subClass, superClass));
    }

    @Override
    public ModifiableElkObjectPropertyRangeAxiomConversion getElkObjectPropertyRangeAxiomConversion(
            ElkObjectPropertyRangeAxiom originalAxiom,
            ModifiableIndexedObjectProperty property,
            ModifiableIndexedClassExpression range) {
        return update(baseFactory_.getElkObjectPropertyRangeAxiomConversion(
                originalAxiom, property, range));
    }

    @Override
    public ModifiableElkReflexiveObjectPropertyAxiomConversion getElkReflexiveObjectPropertyAxiomConversion(
            ElkReflexiveObjectPropertyAxiom originalAxiom,
            ModifiableIndexedClassExpression subClass,
            ModifiableIndexedClassExpression superClass) {
        return update(baseFactory_.getElkReflexiveObjectPropertyAxiomConversion(
                originalAxiom, subClass, superClass));
    }

    @Override
    public ModifiableElkSameIndividualAxiomConversion getElkSameIndividualAxiomConversion(
            ElkSameIndividualAxiom originalAxiom, int subIndividualPosition,
            int superIndividualPosition,
            ModifiableIndexedIndividual subIndividual,
            ModifiableIndexedIndividual superIndividual) {
        return update(baseFactory_.getElkSameIndividualAxiomConversion(
                originalAxiom, subIndividualPosition, superIndividualPosition,
                subIndividual, superIndividual));
    }

    @Override
    public ModifiableElkSubClassOfAxiomConversion getElkSubClassOfAxiomConversion(
            ElkSubClassOfAxiom originalAxiom,
            ModifiableIndexedClassExpression subClass,
            ModifiableIndexedClassExpression superClass) {
        return update(baseFactory_.getElkSubClassOfAxiomConversion(
                originalAxiom, subClass, superClass));
    }

    @Override
    public ModifiableElkSubObjectPropertyOfAxiomConversion getElkSubObjectPropertyOfAxiomConversion(
            ElkSubObjectPropertyOfAxiom originalAxiom,
            ModifiableIndexedPropertyChain subPropertyChain,
            ModifiableIndexedObjectProperty superProperty) {
        return update(baseFactory_.getElkSubObjectPropertyOfAxiomConversion(
                originalAxiom, subPropertyChain, superProperty));
    }

    @Override
    public ModifiableElkTransitiveObjectPropertyAxiomConversion getElkTransitiveObjectPropertyAxiomConversion(
            ElkTransitiveObjectPropertyAxiom originalAxiom,
            ModifiableIndexedPropertyChain subPropertyChain,
            ModifiableIndexedObjectProperty superProperty) {
        return update(
                baseFactory_.getElkTransitiveObjectPropertyAxiomConversion(
                        originalAxiom, subPropertyChain, superProperty));
    }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.connections.infinispan;
import java.time.Instant;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import org.infinispan.Cache;
import org.infinispan.client.hotrod.ProtocolVersion;
import org.infinispan.client.hotrod.RemoteCache;
import org.infinispan.commons.api.BasicCache;
import org.infinispan.commons.dataconversion.MediaType;
import org.infinispan.commons.time.TimeService;
import org.infinispan.commons.util.FileLookup;
import org.infinispan.commons.util.FileLookupFactory;
import org.infinispan.configuration.cache.ConfigurationBuilder;
import org.infinispan.configuration.global.GlobalConfigurationBuilder;
import org.infinispan.configuration.global.TransportConfigurationBuilder;
import org.infinispan.eviction.EvictionStrategy;
import org.infinispan.eviction.EvictionType;
import org.infinispan.factories.GlobalComponentRegistry;
import org.infinispan.factories.impl.BasicComponentRegistry;
import org.infinispan.factories.impl.ComponentRef;
import org.infinispan.manager.EmbeddedCacheManager;
import org.infinispan.persistence.manager.PersistenceManager;
import org.infinispan.persistence.remote.RemoteStore;
import org.infinispan.remoting.transport.Transport;
import org.infinispan.remoting.transport.jgroups.JGroupsTransport;
import org.infinispan.util.EmbeddedTimeService;
import org.jboss.logging.Logger;
import org.jgroups.JChannel;
import org.keycloak.common.util.Time;
import org.keycloak.models.KeycloakSession;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public class InfinispanUtil {

    protected static final Logger logger = Logger.getLogger(InfinispanUtil.class);

    // Maximum retry count for optimistic replace operations (used by callers).
    public static final int MAXIMUM_REPLACE_RETRIES = 25;

    // See if we have RemoteStore (external JDG) configured for cross-Data-Center scenario
    public static Set<RemoteStore> getRemoteStores(Cache ispnCache) {
        return ispnCache.getAdvancedCache().getComponentRegistry().getComponent(PersistenceManager.class).getStores(RemoteStore.class);
    }

    /**
     * Returns the remote cache backing the first configured {@link RemoteStore}
     * of the given cache, or {@code null} if no remote store is configured.
     */
    public static RemoteCache getRemoteCache(Cache ispnCache) {
        Set<RemoteStore> remoteStores = getRemoteStores(ispnCache);
        if (remoteStores.isEmpty()) {
            return null;
        } else {
            return remoteStores.iterator().next().getRemoteCache();
        }
    }

    // Topology info is owned by the Infinispan connection provider of the session.
    public static TopologyInfo getTopologyInfo(KeycloakSession session) {
        return session.getProvider(InfinispanConnectionProvider.class).getTopologyInfo();
    }

    /**
     *
     * @param cache
     * @return true if cluster coordinator OR if it's local cache
     */
    public static boolean isCoordinator(Cache cache) {
        // A local (non-clustered) cache manager has no transport at all.
        Transport transport = cache.getCacheManager().getTransport();
        return transport == null || transport.isCoordinator();
    }

    /**
     * Convert the given value to the proper value, which can be used when calling operations for the infinispan remoteCache.
     *
     * Infinispan HotRod protocol of versions older than 3.0 uses the "lifespan" or "maxIdle" as the normal expiration time when the value is 30 days or less.
     * However for the bigger values, it assumes that the value is unix timestamp.
     *
     * @param ispnCache
     * @param lifespanOrigMs
     * @return
     */
    public static long toHotrodTimeMs(BasicCache ispnCache, long lifespanOrigMs) {
        // 2592000000L ms == 30 days, the threshold described in the javadoc above.
        if (ispnCache instanceof RemoteCache && lifespanOrigMs > 2592000000L) {
            RemoteCache remoteCache = (RemoteCache) ispnCache;
            ProtocolVersion protocolVersion = remoteCache.getRemoteCacheManager().getConfiguration().version();
            if (ProtocolVersion.PROTOCOL_VERSION_30.compareTo(protocolVersion) > 0) {
                // Pre-3.0 protocol: encode large lifespans as an absolute unix timestamp.
                return Time.currentTimeMillis() + lifespanOrigMs;
            }
        }
        return lifespanOrigMs;
    }

    // Guards the temporary swap of the JGroups mcast-address system property below.
    private static final Object CHANNEL_INIT_SYNCHRONIZER = new Object();

    /**
     * Configures the JGroups transport on the given global configuration builder.
     * With a {@code nodeName}, a dedicated {@link JChannel} is built from the
     * given JGroups config file; otherwise the Infinispan default transport is used.
     */
    public static void configureTransport(GlobalConfigurationBuilder gcb, String nodeName, String siteName, String jgroupsUdpMcastAddr,
                                          String jgroupsConfigPath) {
        if (nodeName == null) {
            gcb.transport().defaultTransport();
        } else {
            FileLookup fileLookup = FileLookupFactory.newInstance();

            synchronized (CHANNEL_INIT_SYNCHRONIZER) {
                // Temporarily swap the mcast-address system property so the JGroups
                // config file picks up the requested address; restored in 'finally'.
                String originalMcastAddr = System.getProperty(InfinispanConnectionProvider.JGROUPS_UDP_MCAST_ADDR);
                if (jgroupsUdpMcastAddr == null) {
                    System.getProperties().remove(InfinispanConnectionProvider.JGROUPS_UDP_MCAST_ADDR);
                } else {
                    System.setProperty(InfinispanConnectionProvider.JGROUPS_UDP_MCAST_ADDR, jgroupsUdpMcastAddr);
                }
                try {
                    // NOTE(review): the channel is created but not connected here —
                    // presumably the cache manager connects the transport later; confirm.
                    JChannel channel = new JChannel(fileLookup.lookupFileLocation(jgroupsConfigPath, InfinispanUtil.class.getClassLoader()).openStream());
                    channel.setName(nodeName);
                    JGroupsTransport transport = new JGroupsTransport(channel);

                    TransportConfigurationBuilder transportBuilder = gcb.transport()
                            .nodeName(nodeName)
                            .siteId(siteName)
                            .transport(transport);

                    // Use the cluster corresponding to current site. This is needed as the nodes in different DCs should not share same cluster
                    if (siteName != null) {
                        transportBuilder.clusterName(siteName);
                    }

                    transportBuilder.jmx()
                            .domain(InfinispanConnectionProvider.JMX_DOMAIN + "-" + nodeName)
                            .enable();

                    logger.infof("Configured jgroups transport with the channel name: %s", nodeName);
                } catch (Exception e) {
                    throw new RuntimeException(e);
                } finally {
                    // Restore the previous value of the system property in all cases.
                    if (originalMcastAddr == null) {
                        System.getProperties().remove(InfinispanConnectionProvider.JGROUPS_UDP_MCAST_ADDR);
                    } else {
                        System.setProperty(InfinispanConnectionProvider.JGROUPS_UDP_MCAST_ADDR, originalMcastAddr);
                    }
                }
            }
        }
    }

    public static ConfigurationBuilder createCacheConfigurationBuilder() {
        ConfigurationBuilder builder = new ConfigurationBuilder();

        // need to force the encoding to application/x-java-object to avoid unnecessary conversion of keys/values. See WFLY-14356.
        builder.encoding().mediaType(MediaType.APPLICATION_OBJECT_TYPE);

        return builder;
    }

    /** Default cache configuration for the action-token cache (count-bounded, max-idle expiration). */
    public static ConfigurationBuilder getActionTokenCacheConfig() {
        ConfigurationBuilder cb = createCacheConfigurationBuilder();

        cb.memory()
                .evictionStrategy(EvictionStrategy.NONE)
                .evictionType(EvictionType.COUNT)
                .size(InfinispanConnectionProvider.ACTION_TOKEN_CACHE_DEFAULT_MAX);
        cb.expiration()
                .maxIdle(InfinispanConnectionProvider.ACTION_TOKEN_MAX_IDLE_SECONDS, TimeUnit.SECONDS)
                .wakeUpInterval(InfinispanConnectionProvider.ACTION_TOKEN_WAKE_UP_INTERVAL_SECONDS, TimeUnit.SECONDS);

        return cb;
    }

    /**
     * Replaces the {@link TimeService} in infinispan with the one that respects Keycloak {@link Time}.
     * @param cacheManager
     * @return Runnable to revert replacement of the infinispan time service
     */
    public static Runnable setTimeServiceToKeycloakTime(EmbeddedCacheManager cacheManager) {
        TimeService previousTimeService = replaceComponent(cacheManager, TimeService.class, KEYCLOAK_TIME_SERVICE, true);
        // AtomicReference guards against the revert Runnable being invoked twice.
        AtomicReference<TimeService> ref = new AtomicReference<>(previousTimeService);
        return () -> {
            if (ref.get() == null) {
                logger.warn("Calling revert of the TimeService when testing TimeService was already reverted");
                return;
            }

            logger.info("Revert set KeycloakIspnTimeService to the infinispan cacheManager");

            replaceComponent(cacheManager, TimeService.class, ref.getAndSet(null), true);
        };
    }

    /**
     * Forked from org.infinispan.test.TestingUtil class
     *
     * Replaces a component in a running cache manager (global component registry).
     *
     * @param cacheMgr cache in which to replace component
     * @param componentType component type of which to replace
     * @param replacementComponent new instance
     * @param rewire if true, ComponentRegistry.rewire() is called after replacing.
     *
     * @return the original component that was replaced
     */
    private static <T> T replaceComponent(EmbeddedCacheManager cacheMgr, Class<T> componentType, T replacementComponent, boolean rewire) {
        GlobalComponentRegistry cr = cacheMgr.getGlobalComponentRegistry();
        BasicComponentRegistry bcr = cr.getComponent(BasicComponentRegistry.class);
        ComponentRef<T> old = bcr.getComponent(componentType);
        bcr.replaceComponent(componentType.getName(), replacementComponent, true);
        if (rewire) {
            cr.rewire();
            cr.rewireNamedRegistries();
        }
        return old != null ? old.wired() : null;
    }

    // TimeService that defers to Keycloak's Time abstraction so that time
    // shifts made in tests via Time are visible to Infinispan expiration.
    public static final TimeService KEYCLOAK_TIME_SERVICE = new EmbeddedTimeService() {

        private long getCurrentTimeMillis() {
            return Time.currentTimeMillis();
        }

        @Override
        public long wallClockTime() {
            return getCurrentTimeMillis();
        }

        @Override
        public long time() {
            return TimeUnit.MILLISECONDS.toNanos(getCurrentTimeMillis());
        }

        @Override
        public Instant instant() {
            return Instant.ofEpochMilli(getCurrentTimeMillis());
        }
    };
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticbeanstalk.model;
import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;
/**
* <p>
* A specification identifying an individual configuration option along with its current value. For a list of possible
* option values, go to <a href="http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/command-options.html">Option
* Values</a> in the <i>AWS Elastic Beanstalk Developer Guide</i>.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticbeanstalk-2010-12-01/ConfigurationOptionSetting"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ConfigurationOptionSetting implements Serializable, Cloneable {
/**
* <p>
* A unique resource name for a time-based scaling configuration option.
* </p>
*/
private String resourceName;
/**
* <p>
* A unique namespace identifying the option's associated AWS resource.
* </p>
*/
private String namespace;
/**
* <p>
* The name of the configuration option.
* </p>
*/
private String optionName;
/**
* <p>
* The current value for the configuration option.
* </p>
*/
private String value;
/**
* Default constructor for ConfigurationOptionSetting object. Callers should use the setter or fluent setter
* (with...) methods to initialize the object after creating it.
*/
public ConfigurationOptionSetting() {
}
/**
* Constructs a new ConfigurationOptionSetting object. Callers should use the setter or fluent setter (with...)
* methods to initialize any additional object members.
*
* @param namespace
* A unique namespace identifying the option's associated AWS resource.
* @param optionName
* The name of the configuration option.
* @param value
* The current value for the configuration option.
*/
public ConfigurationOptionSetting(String namespace, String optionName, String value) {
setNamespace(namespace);
setOptionName(optionName);
setValue(value);
}
/**
* <p>
* A unique resource name for a time-based scaling configuration option.
* </p>
*
* @param resourceName
* A unique resource name for a time-based scaling configuration option.
*/
public void setResourceName(String resourceName) {
this.resourceName = resourceName;
}
/**
* <p>
* A unique resource name for a time-based scaling configuration option.
* </p>
*
* @return A unique resource name for a time-based scaling configuration option.
*/
public String getResourceName() {
return this.resourceName;
}
/**
* <p>
* A unique resource name for a time-based scaling configuration option.
* </p>
*
* @param resourceName
* A unique resource name for a time-based scaling configuration option.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ConfigurationOptionSetting withResourceName(String resourceName) {
setResourceName(resourceName);
return this;
}
/**
* <p>
* A unique namespace identifying the option's associated AWS resource.
* </p>
*
* @param namespace
* A unique namespace identifying the option's associated AWS resource.
*/
public void setNamespace(String namespace) {
this.namespace = namespace;
}
/**
* <p>
* A unique namespace identifying the option's associated AWS resource.
* </p>
*
* @return A unique namespace identifying the option's associated AWS resource.
*/
public String getNamespace() {
return this.namespace;
}
/**
* <p>
* A unique namespace identifying the option's associated AWS resource.
* </p>
*
* @param namespace
* A unique namespace identifying the option's associated AWS resource.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ConfigurationOptionSetting withNamespace(String namespace) {
setNamespace(namespace);
return this;
}
/**
* <p>
* The name of the configuration option.
* </p>
*
* @param optionName
* The name of the configuration option.
*/
public void setOptionName(String optionName) {
this.optionName = optionName;
}
/**
* <p>
* The name of the configuration option.
* </p>
*
* @return The name of the configuration option.
*/
public String getOptionName() {
return this.optionName;
}
/**
* <p>
* The name of the configuration option.
* </p>
*
* @param optionName
* The name of the configuration option.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ConfigurationOptionSetting withOptionName(String optionName) {
setOptionName(optionName);
return this;
}
/**
* <p>
* The current value for the configuration option.
* </p>
*
* @param value
* The current value for the configuration option.
*/
public void setValue(String value) {
this.value = value;
}
/**
* <p>
* The current value for the configuration option.
* </p>
*
* @return The current value for the configuration option.
*/
public String getValue() {
return this.value;
}
/**
* <p>
* The current value for the configuration option.
* </p>
*
* @param value
* The current value for the configuration option.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ConfigurationOptionSetting withValue(String value) {
setValue(value);
return this;
}
/**
* Returns a string representation of this object; useful for testing and debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getResourceName() != null)
sb.append("ResourceName: ").append(getResourceName()).append(",");
if (getNamespace() != null)
sb.append("Namespace: ").append(getNamespace()).append(",");
if (getOptionName() != null)
sb.append("OptionName: ").append(getOptionName()).append(",");
if (getValue() != null)
sb.append("Value: ").append(getValue());
sb.append("}");
return sb.toString();
}
/**
 * Two settings are equal when all four fields (resource name, namespace,
 * option name, value) are pairwise equal, treating null as equal to null.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, so this also rejects null arguments
    if (!(obj instanceof ConfigurationOptionSetting)) {
        return false;
    }
    ConfigurationOptionSetting other = (ConfigurationOptionSetting) obj;
    return fieldEquals(getResourceName(), other.getResourceName())
            && fieldEquals(getNamespace(), other.getNamespace())
            && fieldEquals(getOptionName(), other.getOptionName())
            && fieldEquals(getValue(), other.getValue());
}

/** Null-safe equality check for a single field pair. */
private static boolean fieldEquals(String mine, String theirs) {
    return (mine == null) ? (theirs == null) : mine.equals(theirs);
}
/**
 * Hash code consistent with {@link #equals(Object)}: folds the hash of each
 * nullable field (null contributing 0) with the usual prime-31 accumulator.
 */
@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + (getResourceName() == null ? 0 : getResourceName().hashCode());
    result = prime * result + (getNamespace() == null ? 0 : getNamespace().hashCode());
    result = prime * result + (getOptionName() == null ? 0 : getOptionName().hashCode());
    result = prime * result + (getValue() == null ? 0 : getValue().hashCode());
    return result;
}
/**
 * Creates a shallow copy of this setting via {@link Object#clone()}.
 */
@Override
public ConfigurationOptionSetting clone() {
    try {
        return (ConfigurationOptionSetting) super.clone();
    }
    catch (CloneNotSupportedException e) {
        // Unreachable in practice: the class is expected to implement Cloneable.
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
    }
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator;
import com.facebook.airlift.stats.CounterStat;
import com.facebook.airlift.stats.GcMonitor;
import com.facebook.presto.Session;
import com.facebook.presto.execution.FragmentResultCacheContext;
import com.facebook.presto.execution.Lifespan;
import com.facebook.presto.execution.TaskId;
import com.facebook.presto.execution.TaskMetadataContext;
import com.facebook.presto.execution.TaskState;
import com.facebook.presto.execution.TaskStateMachine;
import com.facebook.presto.execution.buffer.LazyOutputBuffer;
import com.facebook.presto.memory.QueryContext;
import com.facebook.presto.memory.QueryContextVisitor;
import com.facebook.presto.memory.context.LocalMemoryContext;
import com.facebook.presto.memory.context.MemoryTrackingContext;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.AtomicDouble;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import org.joda.time.DateTime;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static com.google.common.collect.Iterables.transform;
import static com.google.common.collect.Sets.newConcurrentHashSet;
import static io.airlift.units.DataSize.Unit.BYTE;
import static java.lang.Math.max;
import static java.lang.Math.toIntExact;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.stream.Collectors.toList;
/**
 * Execution-time context for a single task. Tracks lifecycle timestamps
 * (created/started/ended), full-GC activity observed while the task ran,
 * task-level memory reservations and the per-pipeline contexts, and
 * aggregates all of this into {@link TaskStats} snapshots via
 * {@link #getTaskStats()}.
 */
@ThreadSafe
public class TaskContext
{
    private final QueryContext queryContext;
    private final TaskStateMachine taskStateMachine;
    private final GcMonitor gcMonitor;
    private final Executor notificationExecutor;
    private final ScheduledExecutorService yieldExecutor;
    private final Session session;

    // Wall-clock anchor captured at construction; queued time is measured from here.
    private final long createNanos = System.nanoTime();

    // Start/end markers are written at most once, via compareAndSet from their
    // sentinel values (0 for the nano timestamps, -1 for the GC counters).
    private final AtomicLong startNanos = new AtomicLong();
    private final AtomicLong startFullGcCount = new AtomicLong(-1);
    private final AtomicLong startFullGcTimeNanos = new AtomicLong(-1);
    private final AtomicLong endNanos = new AtomicLong();
    private final AtomicLong endFullGcCount = new AtomicLong(-1);
    private final AtomicLong endFullGcTimeNanos = new AtomicLong(-1);

    private final AtomicReference<DateTime> executionStartTime = new AtomicReference<>();
    private final AtomicReference<DateTime> lastExecutionStartTime = new AtomicReference<>();
    private final AtomicReference<DateTime> executionEndTime = new AtomicReference<>();

    // Driver groups that have fully completed; never contains the task-wide
    // lifespan (enforced by addCompletedDriverGroup).
    private final Set<Lifespan> completedDriverGroups = newConcurrentHashSet();

    private final List<PipelineContext> pipelineContexts = new CopyOnWriteArrayList<>();

    private final boolean perOperatorCpuTimerEnabled;
    private final boolean cpuTimerEnabled;
    private final boolean perOperatorAllocationTrackingEnabled;
    private final boolean allocationTrackingEnabled;
    private final boolean legacyLifespanCompletionCondition;
    private final Optional<FragmentResultCacheContext> fragmentResultCacheContext;

    // Guards the running time-weighted average of user memory (see getTaskStats).
    private final Object cumulativeMemoryLock = new Object();
    private final AtomicDouble cumulativeUserMemory = new AtomicDouble(0.0);

    private final AtomicLong peakTotalMemoryInBytes = new AtomicLong(0);
    private final AtomicLong peakUserMemoryInBytes = new AtomicLong(0);

    @GuardedBy("cumulativeMemoryLock")
    private long lastUserMemoryReservation;

    @GuardedBy("cumulativeMemoryLock")
    private long lastTaskStatCallNanos;

    private final MemoryTrackingContext taskMemoryContext;
    private final TaskMetadataContext taskMetadataContext;

    /**
     * Creates and initializes a task context. Use this factory rather than the
     * constructor: the state-change listener is registered after construction
     * completes, avoiding unsafe publication of a half-built instance.
     */
    public static TaskContext createTaskContext(
            QueryContext queryContext,
            TaskStateMachine taskStateMachine,
            GcMonitor gcMonitor,
            Executor notificationExecutor,
            ScheduledExecutorService yieldExecutor,
            Session session,
            MemoryTrackingContext taskMemoryContext,
            boolean perOperatorCpuTimerEnabled,
            boolean cpuTimerEnabled,
            boolean perOperatorAllocationTrackingEnabled,
            boolean allocationTrackingEnabled,
            boolean legacyLifespanCompletionCondition,
            Optional<FragmentResultCacheContext> fragmentResultCacheContext)
    {
        TaskContext taskContext = new TaskContext(
                queryContext,
                taskStateMachine,
                gcMonitor,
                notificationExecutor,
                yieldExecutor,
                session,
                taskMemoryContext,
                perOperatorCpuTimerEnabled,
                cpuTimerEnabled,
                perOperatorAllocationTrackingEnabled,
                allocationTrackingEnabled,
                legacyLifespanCompletionCondition,
                fragmentResultCacheContext);
        taskContext.initialize();
        return taskContext;
    }

    private TaskContext(QueryContext queryContext,
            TaskStateMachine taskStateMachine,
            GcMonitor gcMonitor,
            Executor notificationExecutor,
            ScheduledExecutorService yieldExecutor,
            Session session,
            MemoryTrackingContext taskMemoryContext,
            boolean perOperatorCpuTimerEnabled,
            boolean cpuTimerEnabled,
            boolean perOperatorAllocationTrackingEnabled,
            boolean allocationTrackingEnabled,
            boolean legacyLifespanCompletionCondition,
            Optional<FragmentResultCacheContext> fragmentResultCacheContext)
    {
        this.taskStateMachine = requireNonNull(taskStateMachine, "taskStateMachine is null");
        this.gcMonitor = requireNonNull(gcMonitor, "gcMonitor is null");
        this.queryContext = requireNonNull(queryContext, "queryContext is null");
        this.notificationExecutor = requireNonNull(notificationExecutor, "notificationExecutor is null");
        this.yieldExecutor = requireNonNull(yieldExecutor, "yieldExecutor is null");
        this.session = session;
        this.taskMemoryContext = requireNonNull(taskMemoryContext, "taskMemoryContext is null");
        // Initialize the local memory contexts with the LazyOutputBuffer tag as LazyOutputBuffer will do the local memory allocations
        taskMemoryContext.initializeLocalMemoryContexts(LazyOutputBuffer.class.getSimpleName());
        this.perOperatorCpuTimerEnabled = perOperatorCpuTimerEnabled;
        this.cpuTimerEnabled = cpuTimerEnabled;
        this.perOperatorAllocationTrackingEnabled = perOperatorAllocationTrackingEnabled;
        this.allocationTrackingEnabled = allocationTrackingEnabled;
        this.legacyLifespanCompletionCondition = legacyLifespanCompletionCondition;
        this.fragmentResultCacheContext = requireNonNull(fragmentResultCacheContext, "fragmentResultCacheContext is null");
        this.taskMetadataContext = new TaskMetadataContext();
    }

    // the state change listener is added here in a separate initialize() method
    // instead of the constructor to prevent leaking the "this" reference to
    // another thread, which will cause unsafe publication of this instance.
    private void initialize()
    {
        taskStateMachine.addStateChangeListener(this::updateStatsIfDone);
    }

    public TaskId getTaskId()
    {
        return taskStateMachine.getTaskId();
    }

    /**
     * Creates a new pipeline context (with its own memory tracking context)
     * and registers it so it is included in subsequent stats snapshots.
     */
    public PipelineContext addPipelineContext(int pipelineId, boolean inputPipeline, boolean outputPipeline, boolean partitioned)
    {
        PipelineContext pipelineContext = new PipelineContext(
                pipelineId,
                this,
                notificationExecutor,
                yieldExecutor,
                taskMemoryContext.newMemoryTrackingContext(),
                inputPipeline,
                outputPipeline,
                partitioned);
        pipelineContexts.add(pipelineContext);
        return pipelineContext;
    }

    public Session getSession()
    {
        return session;
    }

    /**
     * Records that execution has (re)started. The first call fixes the
     * start timestamps and GC baselines; every call refreshes the
     * last-execution start time.
     */
    public void start()
    {
        DateTime now = DateTime.now();
        executionStartTime.compareAndSet(null, now);
        startNanos.compareAndSet(0, System.nanoTime());
        startFullGcCount.compareAndSet(-1, gcMonitor.getMajorGcCount());
        startFullGcTimeNanos.compareAndSet(-1, gcMonitor.getMajorGcTime().roundTo(NANOSECONDS));

        // always update last execution start time
        lastExecutionStartTime.set(now);
    }

    /**
     * State-change listener: once the task reaches a terminal state, freezes
     * the end timestamps and GC counters (and backfills the start markers in
     * case the task finished without ever starting).
     */
    private void updateStatsIfDone(TaskState newState)
    {
        if (newState.isDone()) {
            DateTime now = DateTime.now();
            long majorGcCount = gcMonitor.getMajorGcCount();
            long majorGcTime = gcMonitor.getMajorGcTime().roundTo(NANOSECONDS);

            // before setting the end times, make sure a start has been recorded
            executionStartTime.compareAndSet(null, now);
            startNanos.compareAndSet(0, System.nanoTime());
            startFullGcCount.compareAndSet(-1, majorGcCount);
            startFullGcTimeNanos.compareAndSet(-1, majorGcTime);

            // Only update the last start time if nothing was started
            lastExecutionStartTime.compareAndSet(null, now);

            // use compare and set from initial value to avoid overwriting if there
            // were a duplicate notification, which shouldn't happen
            executionEndTime.compareAndSet(null, now);
            endNanos.compareAndSet(0, System.nanoTime());
            endFullGcCount.compareAndSet(-1, majorGcCount);
            endFullGcTimeNanos.compareAndSet(-1, majorGcTime);
        }
    }

    public void failed(Throwable cause)
    {
        taskStateMachine.failed(cause);
    }

    public boolean isDone()
    {
        return taskStateMachine.getState().isDone();
    }

    public TaskState getState()
    {
        return taskStateMachine.getState();
    }

    public TaskMetadataContext getTaskMetadataContext()
    {
        return taskMetadataContext;
    }

    /** Current task-level user memory reservation. */
    public DataSize getMemoryReservation()
    {
        return new DataSize(taskMemoryContext.getUserMemory(), BYTE);
    }

    /** Current task-level system memory reservation. */
    public DataSize getSystemMemoryReservation()
    {
        return new DataSize(taskMemoryContext.getSystemMemory(), BYTE);
    }

    /**
     * Returns the completed driver groups (excluding taskWide).
     * A driver group is considered complete if all drivers associated with it
     * has completed, and no new drivers associated with it will be created.
     */
    public Set<Lifespan> getCompletedDriverGroups()
    {
        return completedDriverGroups;
    }

    public void addCompletedDriverGroup(Lifespan driverGroup)
    {
        checkArgument(!driverGroup.isTaskWide(), "driverGroup is task-wide, not a driver group.");
        completedDriverGroups.add(driverGroup);
    }

    public List<PipelineContext> getPipelineContexts()
    {
        return pipelineContexts;
    }

    /** Delegates a spill-space reservation to the query context. */
    public synchronized ListenableFuture<?> reserveSpill(long bytes)
    {
        checkArgument(bytes >= 0, "bytes is negative");
        return queryContext.reserveSpill(bytes);
    }

    public synchronized void freeSpill(long bytes)
    {
        checkArgument(bytes >= 0, "bytes is negative");
        queryContext.freeSpill(bytes);
    }

    public LocalMemoryContext localSystemMemoryContext()
    {
        return taskMemoryContext.localSystemMemoryContext();
    }

    /** Notifies every pipeline that memory pressure has eased. */
    public void moreMemoryAvailable()
    {
        pipelineContexts.forEach(PipelineContext::moreMemoryAvailable);
    }

    public boolean isPerOperatorAllocationTrackingEnabled()
    {
        return perOperatorAllocationTrackingEnabled;
    }

    public boolean isAllocationTrackingEnabled()
    {
        return allocationTrackingEnabled;
    }

    public boolean isPerOperatorCpuTimerEnabled()
    {
        return perOperatorCpuTimerEnabled;
    }

    public boolean isCpuTimerEnabled()
    {
        return cpuTimerEnabled;
    }

    public boolean isLegacyLifespanCompletionCondition()
    {
        return legacyLifespanCompletionCondition;
    }

    public Optional<FragmentResultCacheContext> getFragmentResultCacheContext()
    {
        return fragmentResultCacheContext;
    }

    /** Aggregated input data size over all input pipelines. */
    public CounterStat getInputDataSize()
    {
        CounterStat stat = new CounterStat();
        for (PipelineContext pipelineContext : pipelineContexts) {
            if (pipelineContext.isInputPipeline()) {
                stat.merge(pipelineContext.getInputDataSize());
            }
        }
        return stat;
    }

    /** Aggregated input row count over all input pipelines. */
    public CounterStat getInputPositions()
    {
        CounterStat stat = new CounterStat();
        for (PipelineContext pipelineContext : pipelineContexts) {
            if (pipelineContext.isInputPipeline()) {
                stat.merge(pipelineContext.getInputPositions());
            }
        }
        return stat;
    }

    /** Aggregated output data size over all output pipelines. */
    public CounterStat getOutputDataSize()
    {
        CounterStat stat = new CounterStat();
        for (PipelineContext pipelineContext : pipelineContexts) {
            if (pipelineContext.isOutputPipeline()) {
                stat.merge(pipelineContext.getOutputDataSize());
            }
        }
        return stat;
    }

    /** Aggregated output row count over all output pipelines. */
    public CounterStat getOutputPositions()
    {
        CounterStat stat = new CounterStat();
        for (PipelineContext pipelineContext : pipelineContexts) {
            if (pipelineContext.isOutputPipeline()) {
                stat.merge(pipelineContext.getOutputPositions());
            }
        }
        return stat;
    }

    /**
     * Full-GC time observed during the task's execution window. Returns zero
     * before the task starts; while running, the end value is sampled live
     * from the GC monitor.
     */
    public Duration getFullGcTime()
    {
        long startFullGcTimeNanos = this.startFullGcTimeNanos.get();
        if (startFullGcTimeNanos < 0) {
            return new Duration(0, MILLISECONDS);
        }

        long endFullGcTimeNanos = this.endFullGcTimeNanos.get();
        if (endFullGcTimeNanos < 0) {
            endFullGcTimeNanos = gcMonitor.getMajorGcTime().roundTo(NANOSECONDS);
        }
        return new Duration(max(0, endFullGcTimeNanos - startFullGcTimeNanos), NANOSECONDS);
    }

    /**
     * Number of full GCs observed during the task's execution window.
     * Zero before the task starts; while running, sampled live.
     * NOTE(review): uses {@code <= 0} where getFullGcTime uses {@code < 0} —
     * looks intentional (a 0 end count also falls back to a live sample), but
     * worth confirming.
     */
    public int getFullGcCount()
    {
        long startFullGcCount = this.startFullGcCount.get();
        if (startFullGcCount < 0) {
            return 0;
        }

        long endFullGcCount = this.endFullGcCount.get();
        if (endFullGcCount <= 0) {
            endFullGcCount = gcMonitor.getMajorGcCount();
        }
        return toIntExact(max(0, endFullGcCount - startFullGcCount));
    }

    /**
     * Builds a point-in-time snapshot of this task's statistics by aggregating
     * the per-pipeline stats, the memory trackers and the GC counters. Also
     * advances the time-weighted cumulative user-memory accumulator.
     */
    public TaskStats getTaskStats()
    {
        // check for end state to avoid callback ordering problems
        updateStatsIfDone(taskStateMachine.getState());

        List<PipelineStats> pipelineStats = ImmutableList.copyOf(transform(pipelineContexts, PipelineContext::getPipelineStats));

        // Sum driver, time, and data counters across all pipelines.
        long lastExecutionEndTime = 0;

        int totalDrivers = 0;
        int queuedDrivers = 0;
        int queuedPartitionedDrivers = 0;
        int runningDrivers = 0;
        int runningPartitionedDrivers = 0;
        int blockedDrivers = 0;
        int completedDrivers = 0;

        long totalScheduledTime = 0;
        long totalCpuTime = 0;
        long totalBlockedTime = 0;

        long totalAllocation = 0;

        long rawInputDataSize = 0;
        long rawInputPositions = 0;

        long processedInputDataSize = 0;
        long processedInputPositions = 0;

        long outputDataSize = 0;
        long outputPositions = 0;

        long physicalWrittenDataSize = 0;

        for (PipelineStats pipeline : pipelineStats) {
            if (pipeline.getLastEndTime() != null) {
                lastExecutionEndTime = max(pipeline.getLastEndTime().getMillis(), lastExecutionEndTime);
            }
            totalDrivers += pipeline.getTotalDrivers();
            queuedDrivers += pipeline.getQueuedDrivers();
            queuedPartitionedDrivers += pipeline.getQueuedPartitionedDrivers();
            runningDrivers += pipeline.getRunningDrivers();
            runningPartitionedDrivers += pipeline.getRunningPartitionedDrivers();
            blockedDrivers += pipeline.getBlockedDrivers();
            completedDrivers += pipeline.getCompletedDrivers();

            totalScheduledTime += pipeline.getTotalScheduledTimeInNanos();
            totalCpuTime += pipeline.getTotalCpuTimeInNanos();
            totalBlockedTime += pipeline.getTotalBlockedTimeInNanos();

            totalAllocation += pipeline.getTotalAllocationInBytes();

            // Raw/processed input is only counted on input pipelines, output only on output pipelines.
            if (pipeline.isInputPipeline()) {
                rawInputDataSize += pipeline.getRawInputDataSizeInBytes();
                rawInputPositions += pipeline.getRawInputPositions();

                processedInputDataSize += pipeline.getProcessedInputDataSizeInBytes();
                processedInputPositions += pipeline.getProcessedInputPositions();
            }

            if (pipeline.isOutputPipeline()) {
                outputDataSize += pipeline.getOutputDataSizeInBytes();
                outputPositions += pipeline.getOutputPositions();
            }

            physicalWrittenDataSize += pipeline.getPhysicalWrittenDataSizeInBytes();
        }

        // If the task hasn't started yet, treat "now" as the start so queued time keeps growing.
        long startNanos = this.startNanos.get();
        if (startNanos == 0) {
            startNanos = System.nanoTime();
        }
        long queuedTimeInNanos = startNanos - createNanos;

        long endNanos = this.endNanos.get();
        long elapsedTimeInNanos;
        // endNanos is 0 until the task completes, so elapsed time reports 0 for a running task.
        if (endNanos >= startNanos) {
            elapsedTimeInNanos = endNanos - createNanos;
        }
        else {
            elapsedTimeInNanos = 0;
        }

        int fullGcCount = getFullGcCount();
        Duration fullGcTime = getFullGcTime();

        long userMemory = taskMemoryContext.getUserMemory();
        long systemMemory = taskMemoryContext.getSystemMemory();

        updatePeakMemory();

        // Integrate user memory over time (trapezoidal: average of last and
        // current reservation times elapsed millis since the previous call).
        synchronized (cumulativeMemoryLock) {
            double sinceLastPeriodMillis = (System.nanoTime() - lastTaskStatCallNanos) / 1_000_000.0;
            long averageMemoryForLastPeriod = (userMemory + lastUserMemoryReservation) / 2;
            cumulativeUserMemory.addAndGet(averageMemoryForLastPeriod * sinceLastPeriodMillis);

            lastTaskStatCallNanos = System.nanoTime();
            lastUserMemoryReservation = userMemory;
        }

        // The task is fully blocked only if every pipeline with active drivers is fully blocked.
        Set<PipelineStats> runningPipelineStats = pipelineStats.stream()
                .filter(pipeline -> pipeline.getRunningDrivers() > 0 || pipeline.getRunningPartitionedDrivers() > 0 || pipeline.getBlockedDrivers() > 0)
                .collect(toImmutableSet());
        ImmutableSet<BlockedReason> blockedReasons = runningPipelineStats.stream()
                .flatMap(pipeline -> pipeline.getBlockedReasons().stream())
                .collect(toImmutableSet());

        boolean fullyBlocked = !runningPipelineStats.isEmpty() && runningPipelineStats.stream().allMatch(PipelineStats::isFullyBlocked);

        return new TaskStats(
                taskStateMachine.getCreatedTime(),
                executionStartTime.get(),
                lastExecutionStartTime.get(),
                lastExecutionEndTime == 0 ? null : new DateTime(lastExecutionEndTime),
                executionEndTime.get(),
                elapsedTimeInNanos,
                queuedTimeInNanos,
                totalDrivers,
                queuedDrivers,
                queuedPartitionedDrivers,
                runningDrivers,
                runningPartitionedDrivers,
                blockedDrivers,
                completedDrivers,
                cumulativeUserMemory.get(),
                userMemory,
                taskMemoryContext.getRevocableMemory(),
                systemMemory,
                peakTotalMemoryInBytes.get(),
                peakUserMemoryInBytes.get(),
                queryContext.getPeakNodeTotalMemory(),
                totalScheduledTime,
                totalCpuTime,
                totalBlockedTime,
                fullyBlocked && (runningDrivers > 0 || runningPartitionedDrivers > 0),
                blockedReasons,
                totalAllocation,
                rawInputDataSize,
                rawInputPositions,
                processedInputDataSize,
                processedInputPositions,
                outputDataSize,
                outputPositions,
                physicalWrittenDataSize,
                fullGcCount,
                fullGcTime.toMillis(),
                pipelineStats);
    }

    /** Records the current user and user+system reservations into the peak trackers. */
    public void updatePeakMemory()
    {
        long userMemory = taskMemoryContext.getUserMemory();
        long systemMemory = taskMemoryContext.getSystemMemory();
        peakTotalMemoryInBytes.accumulateAndGet(userMemory + systemMemory, Math::max);
        peakUserMemoryInBytes.accumulateAndGet(userMemory, Math::max);
    }

    public <C, R> R accept(QueryContextVisitor<C, R> visitor, C context)
    {
        return visitor.visitTaskContext(this, context);
    }

    /** Applies the visitor to each child pipeline context and collects the results. */
    public <C, R> List<R> acceptChildren(QueryContextVisitor<C, R> visitor, C context)
    {
        return pipelineContexts.stream()
                .map(pipelineContext -> pipelineContext.accept(visitor, context))
                .collect(toList());
    }

    @VisibleForTesting
    public synchronized MemoryTrackingContext getTaskMemoryContext()
    {
        return taskMemoryContext;
    }

    @VisibleForTesting
    public QueryContext getQueryContext()
    {
        return queryContext;
    }
}
| |
package com.mindoo.domino.jna.internal.structs;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Arrays;
import java.util.Formatter;
import java.util.List;
import com.mindoo.domino.jna.IAdaptable;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
/**
* The Universal Note ID (UNID) identifies all copies of the same note in different replicas of the same
* database universally (across all servers).<br>
* <br>
* If one note in one database has the same UNID as another note in a replica database, then the
* two notes are replicas of each other.<br>
* <br>
* The UNID is used to reference a specific note from another note. Specifically, the FIELD_LINK ($REF)
* field of a response note contains the UNID of its parent.<br>
* <br>
* Similarly, Doc Links (see NOTELINK) contains the UNID of the linked-to note plus the database ID
* where the linked-to note can be found. The important characteristic of the UNID is that it
* continues to reference a specific note even if the note being referenced is updated.<br>
* <br>
* The Domino replicator uses the Universal Note ID to match the notes in one database with
* their respective copies in replica databases. For example, if database A is a replica copy
* of database B, database A contains a note with a particular UNID, and database B contains
* a note with the same UNID, then the replicator concludes that these two notes are replica
* copies of one another. On the other hand, if database A contains a note with a particular
* UNID but database B does not, then the replicator will create a copy of that note and
* add it to database B.<br>
* <br>
* One database must never contain two notes with the same UNID. If the replicator finds two
* notes with the same UNID in the same database, it generates an error message in the log
* and does not replicate the document.<br>
* <br>
* The "File" member of the UNID contains a number derived in different ways depending on
* the release of Domino or Notes.<br>
* Pre- 2.1 versions of Notes set the "File" member to the creation timedate of the NSF file
* in which the note is created. Notes 2.1 sets the "File" member to a user-unique identifier,
* derived in part from information in the ID of the user creating the note, and in part
* from the database where the note is created. Notes 3.0 sets the "File" member to a
* random number generated at the time the note is created.<br>
* <br>
* The "Note" member of the UNID contains the date/time when the very first copy of the note
* was stored into the first NSF (Note: date/time from $CREATED item, if exists, takes precedence).
*/
public class NotesUniversalNoteIdStruct extends BaseStructure implements IAdaptable {
    /** "File" half of the UNID; C type : DBID */
    public NotesTimeDateStruct File;
    /** "Note" half of the UNID; C type : TIMEDATE */
    public NotesTimeDateStruct Note;

    /**
     * @deprecated only public to be used by JNA; use static newInstance method instead to run in AccessController.doPrivileged block
     */
    public NotesUniversalNoteIdStruct() {
        super();
    }

    /**
     * Creates a new empty UNID structure. The constructor runs inside a
     * doPrivileged block so JNA's reflective field access works even when
     * the caller lacks the required permissions.
     *
     * @return new instance
     */
    public static NotesUniversalNoteIdStruct newInstance() {
        return AccessController.doPrivileged(new PrivilegedAction<NotesUniversalNoteIdStruct>() {

            @Override
            public NotesUniversalNoteIdStruct run() {
                return new NotesUniversalNoteIdStruct();
            }
        });
    }

    // JNA field ordering: "File" is laid out before "Note" in native memory.
    @Override
    protected List<String> getFieldOrder() {
        return Arrays.asList("File", "Note");
    }

    /**
     * @param File C type : DBID
     * @param Note C type : TIMEDATE
     * @deprecated only public to be used by JNA; use static newInstance method instead to run in AccessController.doPrivileged block
     */
    public NotesUniversalNoteIdStruct(NotesTimeDateStruct File, NotesTimeDateStruct Note) {
        super();
        this.File = File;
        this.Note = Note;
    }

    /**
     * Creates a new UNID structure from its two timedate halves.
     *
     * @param File C type : DBID
     * @param Note C type : TIMEDATE
     * @return new instance
     */
    public static NotesUniversalNoteIdStruct newInstance(final NotesTimeDateStruct File, final NotesTimeDateStruct Note) {
        return AccessController.doPrivileged(new PrivilegedAction<NotesUniversalNoteIdStruct>() {

            @Override
            public NotesUniversalNoteIdStruct run() {
                return new NotesUniversalNoteIdStruct(File, Note);
            }
        });
    }

    /**
     * @deprecated only public to be used by JNA; use static newInstance method instead to run in AccessController.doPrivileged block
     *
     * @param peer pointer
     */
    public NotesUniversalNoteIdStruct(Pointer peer) {
        super(peer);
    }

    /**
     * Wraps an existing native memory block as a UNID structure.
     *
     * @param p pointer to native memory
     * @return new instance backed by that memory
     */
    public static NotesUniversalNoteIdStruct newInstance(final Pointer p) {
        return AccessController.doPrivileged(new PrivilegedAction<NotesUniversalNoteIdStruct>() {

            @Override
            public NotesUniversalNoteIdStruct run() {
                return new NotesUniversalNoteIdStruct(p);
            }
        });
    }

    /**
     * Creates a by-reference variant, for C APIs that take a UNID pointer.
     *
     * @return new by-reference instance
     */
    public static NotesUniversalNoteIdStruct.ByReference newInstanceByReference() {
        return AccessController.doPrivileged(new PrivilegedAction<NotesUniversalNoteIdStruct.ByReference>() {

            @Override
            public NotesUniversalNoteIdStruct.ByReference run() {
                return new NotesUniversalNoteIdStruct.ByReference();
            }
        });
    }

    /** By-reference variant, passed to native code as a pointer. */
    public static class ByReference extends NotesUniversalNoteIdStruct implements Structure.ByReference {

    };

    /** By-value variant, passed to native code inline on the stack. */
    public static class ByValue extends NotesUniversalNoteIdStruct implements Structure.ByValue {
        public static NotesUniversalNoteIdStruct.ByValue newInstance() {
            return AccessController.doPrivileged(new PrivilegedAction<NotesUniversalNoteIdStruct.ByValue>() {

                @Override
                public NotesUniversalNoteIdStruct.ByValue run() {
                    return new NotesUniversalNoteIdStruct.ByValue();
                }
            });
        }
    };

    /**
     * Computes the hex UNID from the OID data: flushes the Java fields to
     * native memory, then reads the 16 structure bytes as two little-endian
     * longs and formats them as 32 uppercase hex characters.
     *
     * @return UNID
     */
    @Override
    public String toString() {
        // push the current Java field values into the backing native memory
        write();
        Pointer oidPtr = getPointer();

        Formatter formatter = new Formatter();
        ByteBuffer data = oidPtr.getByteBuffer(0, 16).order(ByteOrder.LITTLE_ENDIAN);
        formatter.format("%016x", data.getLong());
        formatter.format("%016x", data.getLong());
        String unidStr = formatter.toString().toUpperCase();
        formatter.close();
        return unidStr;
    }

    /**
     * Changes the internal value to a UNID formatted as string
     *
     * @param unidStr UNID string; must be exactly 32 hex characters
     * @throws IllegalArgumentException if the string is not 32 characters long
     */
    public void setUnid(String unidStr) {
        if (unidStr.length() != 32) {
            throw new IllegalArgumentException("UNID is expected to have 32 characters");
        }

        // Each 16-char half encodes a TIMEDATE: the first 8 hex chars are
        // innards[1] and the next 8 are innards[0] (matching the little-endian
        // layout used by toString()).
        int fileInnards1 = (int) (Long.parseLong(unidStr.substring(0,8), 16) & 0xffffffff);
        int fileInnards0 = (int) (Long.parseLong(unidStr.substring(8,16), 16) & 0xffffffff);

        int noteInnards1 = (int) (Long.parseLong(unidStr.substring(16,24), 16) & 0xffffffff);
        int noteInnards0 = (int) (Long.parseLong(unidStr.substring(24,32), 16) & 0xffffffff);

        NotesTimeDateStruct file = NotesTimeDateStruct.newInstance(new int[] {fileInnards0, fileInnards1});
        NotesTimeDateStruct note = NotesTimeDateStruct.newInstance(new int[] {noteInnards0, noteInnards1});

        this.File = file;
        this.Note = note;
        // sync the new field values back to the native memory block
        write();
    }

    /**
     * Converts a hex encoded UNID to a {@link NotesUniversalNoteIdStruct} object
     *
     * @param unidStr UNID string
     * @return UNID object
     */
    public static NotesUniversalNoteIdStruct fromString(String unidStr) {
        NotesUniversalNoteIdStruct unid = NotesUniversalNoteIdStruct.newInstance();
        unid.setUnid(unidStr);
        return unid;
    }

    /**
     * Adapts this structure to the requested type: itself, or the backing
     * native {@link Pointer}; any other type yields {@code null}.
     */
    @Override
    public <T> T getAdapter(Class<T> clazz) {
        if (clazz == NotesUniversalNoteIdStruct.class) {
            return (T) this;
        }
        else if (clazz == Pointer.class) {
            return (T) getPointer();
        }
        return null;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.junit.Assert.fail;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import java.io.IOException;
import java.net.URI;
import java.util.Collection;
import org.apache.hadoop.hdfs.server.common.Storage.FormatConfirmable;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.Test;
/**
 * Tests that the NameNode can load pluggable {@link JournalManager}
 * implementations configured via the edits-plugin prefix, and that
 * misconfigurations (missing class, missing constructor) fail fast.
 */
public class TestGenericJournalConf {
    private static final String DUMMY_URI = "dummy://test";

    /**
     * Test that an exception is thrown if a journal class doesn't exist
     * in the configuration
     */
    @Test(expected=IllegalArgumentException.class)
    public void testNotConfigured() throws Exception {
        MiniDFSCluster cluster = null;
        Configuration conf = new Configuration();

        // "dummy" scheme is used without registering any plugin class for it
        conf.set(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
                 "dummy://test");
        try {
            cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
            cluster.waitActive();
        } finally {
            if (cluster != null) {
                cluster.shutdown();
            }
        }
    }

    /**
     * Test that an exception is thrown if a journal class doesn't
     * exist in the classloader.
     */
    @Test(expected=IllegalArgumentException.class)
    public void testClassDoesntExist() throws Exception {
        MiniDFSCluster cluster = null;
        Configuration conf = new Configuration();

        // plugin points at a class name that cannot be loaded
        conf.set(DFSConfigKeys.DFS_NAMENODE_EDITS_PLUGIN_PREFIX + ".dummy",
                 "org.apache.hadoop.nonexistent");
        conf.set(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
                 "dummy://test");
        try {
            cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
            cluster.waitActive();
        } finally {
            if (cluster != null) {
                cluster.shutdown();
            }
        }
    }

    /**
     * Test that an implementation of JournalManager without a
     * (Configuration,URI) constructor throws an exception
     */
    @Test
    public void testBadConstructor() throws Exception {
        MiniDFSCluster cluster = null;
        Configuration conf = new Configuration();

        conf.set(DFSConfigKeys.DFS_NAMENODE_EDITS_PLUGIN_PREFIX + ".dummy",
                 BadConstructorJournalManager.class.getName());
        conf.set(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
                 "dummy://test");
        try {
            cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
            cluster.waitActive();
            fail("Should have failed before this point");
        } catch (IllegalArgumentException iae) {
            // expected; verify it failed for the right reason
            if (!iae.getMessage().contains("Unable to construct journal")) {
                fail("Should have failed with unable to construct exception");
            }
        } finally {
            if (cluster != null) {
                cluster.shutdown();
            }
        }
    }

    /**
     * Test that a dummy implementation of JournalManager can
     * be initialized on startup
     */
    @Test
    public void testDummyJournalManager() throws Exception {
        MiniDFSCluster cluster = null;
        Configuration conf = new Configuration();

        conf.set(DFSConfigKeys.DFS_NAMENODE_EDITS_PLUGIN_PREFIX + ".dummy",
                 DummyJournalManager.class.getName());
        conf.set(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY, DUMMY_URI);
        conf.setInt(DFSConfigKeys.DFS_NAMENODE_CHECKED_VOLUMES_MINIMUM_KEY, 0);
        try {
            cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
            cluster.waitActive();

            // verify the plugin was instantiated and called with the expected arguments
            assertTrue(DummyJournalManager.shouldPromptCalled);
            assertTrue(DummyJournalManager.formatCalled);
            assertNotNull(DummyJournalManager.conf);
            assertEquals(new URI(DUMMY_URI), DummyJournalManager.uri);
            assertNotNull(DummyJournalManager.nsInfo);
            assertEquals(DummyJournalManager.nsInfo.getClusterID(),
                cluster.getNameNode().getNamesystem().getClusterId());
        } finally {
            if (cluster != null) {
                cluster.shutdown();
            }
        }
    }

    /**
     * Minimal JournalManager plugin that records the arguments it was
     * constructed/called with so the test above can assert on them.
     */
    public static class DummyJournalManager implements JournalManager {
        static Configuration conf = null;
        static URI uri = null;
        static NamespaceInfo nsInfo = null;
        static boolean formatCalled = false;
        static boolean shouldPromptCalled = false;

        public DummyJournalManager(Configuration conf, URI u,
            NamespaceInfo nsInfo) {
            // Set static vars so the test case can verify them.
            DummyJournalManager.conf = conf;
            DummyJournalManager.uri = u;
            DummyJournalManager.nsInfo = nsInfo;
        }

        @Override
        public void format(NamespaceInfo nsInfo) throws IOException {
            formatCalled = true;
        }

        @Override
        public EditLogOutputStream startLogSegment(long txId) throws IOException {
            return mock(EditLogOutputStream.class);
        }

        @Override
        public void finalizeLogSegment(long firstTxId, long lastTxId)
            throws IOException {
            // noop
        }

        @Override
        public void selectInputStreams(Collection<EditLogInputStream> streams,
            long fromTxnId, boolean inProgressOk) {
        }

        @Override
        public void setOutputBufferCapacity(int size) {}

        @Override
        public void purgeLogsOlderThan(long minTxIdToKeep)
            throws IOException {}

        @Override
        public void recoverUnfinalizedSegments() throws IOException {}

        @Override
        public void close() throws IOException {}

        @Override
        public boolean hasSomeData() throws IOException {
            shouldPromptCalled = true;
            return false;
        }
    }

    /**
     * Lacks the (Configuration, URI, NamespaceInfo) constructor on purpose,
     * to exercise the "Unable to construct journal" failure path.
     */
    public static class BadConstructorJournalManager extends DummyJournalManager {
        public BadConstructorJournalManager() {
            super(null, null, null);
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.