gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* This file is part of the DITA Open Toolkit project.
* See the accompanying license.txt file for applicable licenses.
*/
/*
* (c) Copyright IBM Corp. 2007 All Rights Reserved.
*/
package org.dita.dost.writer;
import org.dita.dost.exception.DITAOTException;
import org.dita.dost.exception.DITAOTXMLErrorHandler;
import org.dita.dost.log.MessageUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import javax.xml.parsers.DocumentBuilder;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.net.URI;
import java.util.Collection;
import static org.dita.dost.module.GenMapAndTopicListModule.ELEMENT_STUB;
import static org.dita.dost.reader.ChunkMapReader.*;
import static org.dita.dost.util.Constants.*;
import static org.dita.dost.util.URLUtils.*;
import static org.dita.dost.util.FileUtils.*;
import static org.dita.dost.util.XMLUtils.getDocumentBuilder;
import static org.dita.dost.util.XMLUtils.getXMLReader;
/**
* ChunkTopicParser class, writing chunking content into relative topic files
* and then update list. Not reusable and not thread-safe.
*
* <p>
* TODO: Refactor to be a SAX filter.
* </p>
*/
public final class SeparateChunkTopicParser extends AbstractChunkTopicParser {

    // SAX reader used to re-parse the chunk source topic; events are routed back to this handler.
    private final XMLReader reader;

    /**
     * Constructor. Configures the XML reader with this instance as content handler.
     *
     * @throws RuntimeException if the underlying XML parser cannot be initialized
     */
    public SeparateChunkTopicParser() {
        super(true);
        try {
            reader = getXMLReader();
            reader.setContentHandler(this);
            reader.setFeature(FEATURE_NAMESPACE_PREFIX, true);
        } catch (final Exception e) {
            throw new RuntimeException("Failed to initialize XML parser: " + e.getMessage(), e);
        }
    }

    /**
     * Split the chunk rooted at {@code rootTopicref} into a separate topic file,
     * rewrite the topicref's {@code @href} to the generated file, and record the
     * change/copy-to/conflict bookkeeping tables used by the chunk module.
     *
     * @param fileDir directory of the map being processed; relative hrefs are resolved against it
     * @throws DITAOTException declared by the interface; parse failures are logged, not thrown
     */
    @Override
    public void write(final File fileDir) throws DITAOTException {
        // pass map's directory path
        filePath = fileDir.toURI();
        final URI hrefValue = toURI(getValue(rootTopicref, ATTRIBUTE_NAME_HREF));
        final URI copytoValue = toURI(getValue(rootTopicref, ATTRIBUTE_NAME_COPY_TO));
        final String scopeValue = getCascadeValue(rootTopicref, ATTRIBUTE_NAME_SCOPE);
        // Chimera path, has fragment
        URI parseFilePath;
        final Collection<String> chunkValue = split(getValue(rootTopicref, ATTRIBUTE_NAME_CHUNK));
        final String processRoleValue = getCascadeValue(rootTopicref, ATTRIBUTE_NAME_PROCESSING_ROLE);
        // true when output goes to a temporary ".chunk" file that later replaces the source file
        boolean dotchunk = false;
        // @copy-to overrides @href as the parse target unless chunk="to-content" is requested
        if (copytoValue != null && !chunkValue.contains(CHUNK_TO_CONTENT)) {
            if (hrefValue != null && hrefValue.getFragment() != null) {
                parseFilePath = setFragment(copytoValue, hrefValue.getFragment());
            } else {
                parseFilePath = copytoValue;
            }
        } else {
            parseFilePath = hrefValue;
        }
        // if @copy-to is processed in chunk module, the list file needs to be
        // updated.
        // Because @copy-to should be included in fulltopiclist, and the source
        // of coyy-to should be excluded in fulltopiclist.
        if (copytoValue != null && chunkValue.contains(CHUNK_TO_CONTENT)) {
            copyto.add(copytoValue);
            if (hrefValue != null && hrefValue.getFragment() != null) {
                copytoSource.add(stripFragment(hrefValue));
                copytotarget2source.put(copytoValue, stripFragment(hrefValue));
            } else {
                copytoSource.add(hrefValue);
                copytotarget2source.put(copytoValue, hrefValue);
            }
        }
        try {
            if (parseFilePath != null && !ATTR_SCOPE_VALUE_EXTERNAL.equals(scopeValue)
                    && !ATTR_PROCESSING_ROLE_VALUE_RESOURCE_ONLY.equals(processRoleValue)) {
                // if the path to target file make sense
                currentParsingFile = filePath.resolve(parseFilePath);
                URI outputFileName;
                /*
                 * FIXME: we have code flaws here, references in ditamap need to
                 * be updated to new created file.
                 */
                String id = null;
                String firstTopicID = null;
                if (parseFilePath.getFragment() != null) {
                    // href points at a specific topic inside the file
                    id = parseFilePath.getFragment();
                    if (chunkValue.contains(CHUNK_SELECT_BRANCH)) {
                        outputFileName = filePath.resolve(id + FILE_EXTENSION_DITA);
                        targetTopicId = id;
                        startFromFirstTopic = false;
                        selectMethod = CHUNK_SELECT_BRANCH;
                    } else if (chunkValue.contains(CHUNK_SELECT_DOCUMENT)) {
                        firstTopicID = getFirstTopicId(filePath.resolve(parseFilePath).getPath());
                        topicDoc = getTopicDoc(filePath.resolve(parseFilePath));
                        if (firstTopicID != null) {
                            outputFileName = filePath.resolve(firstTopicID + FILE_EXTENSION_DITA);
                            targetTopicId = firstTopicID;
                        } else {
                            // no first topic id found: write to a ".chunk" temp file instead
                            outputFileName = setPath(currentParsingFile, currentParsingFile.getPath() + FILE_EXTENSION_CHUNK);
                            dotchunk = true;
                            targetTopicId = null;
                        }
                        selectMethod = CHUNK_SELECT_DOCUMENT;
                    } else {
                        outputFileName = filePath.resolve(id + FILE_EXTENSION_DITA);
                        targetTopicId = id;
                        startFromFirstTopic = false;
                        selectMethod = CHUNK_SELECT_TOPIC;
                    }
                } else {
                    // no fragment: chunk the whole document
                    firstTopicID = getFirstTopicId(filePath.resolve(parseFilePath).getPath());
                    topicDoc = getTopicDoc(filePath.resolve(parseFilePath));
                    if (firstTopicID != null) {
                        outputFileName = filePath.resolve(firstTopicID + FILE_EXTENSION_DITA);
                        targetTopicId = firstTopicID;
                    } else {
                        outputFileName = setPath(currentParsingFile, currentParsingFile.getPath() + FILE_EXTENSION_CHUNK);
                        dotchunk = true;
                        targetTopicId = null;
                    }
                    selectMethod = CHUNK_SELECT_DOCUMENT;
                }
                if (copytoValue != null) {
                    // use @copy-to value as the new file name
                    outputFileName = filePath.resolve(copytoValue);
                }
                if (new File(outputFileName).exists()) {
                    // name clash with an existing file: pick a generated name and record the conflict
                    final URI t = outputFileName;
                    outputFileName = filePath.resolve(generateFilename());
                    conflictTable.put(outputFileName, t);
                    dotchunk = false;
                }
                output = new OutputStreamWriter(new FileOutputStream(new File(outputFileName)), UTF8);
                outputFile = outputFileName;
                if (!dotchunk) {
                    changeTable.put(filePath.resolve(parseFilePath),
                            setFragment(outputFileName, id));
                    // new generated file
                    changeTable.put(outputFileName, outputFileName);
                }
                // change the href value
                if (firstTopicID == null) {
                    rootTopicref.setAttribute(ATTRIBUTE_NAME_HREF,
                            setFragment(getRelativePath(filePath.resolve(FILE_NAME_STUB_DITAMAP), outputFileName), id).toString());
                } else {
                    rootTopicref.setAttribute(ATTRIBUTE_NAME_HREF,
                            setFragment(getRelativePath(filePath.resolve(FILE_NAME_STUB_DITAMAP), outputFileName), firstTopicID).toString());
                }
                include = false;
                // just a mark?
                // Stub elements mark where generated topicrefs should be spliced into the map.
                stub = rootTopicref.getOwnerDocument().createElement(ELEMENT_STUB);
                siblingStub = rootTopicref.getOwnerDocument().createElement(ELEMENT_STUB);
                // <element>
                // <stub/>
                // ...
                // </element>
                // <siblingstub/>
                // ...
                // Place stub
                if (rootTopicref.hasChildNodes()) {
                    final NodeList list = rootTopicref.getElementsByTagName(MAP_TOPICMETA.localName);
                    if (list.getLength() > 0) {
                        final Node node = list.item(0);
                        final Node nextSibling = node.getNextSibling();
                        // no sibling so node is the last child
                        if (nextSibling == null) {
                            node.getParentNode().appendChild(stub);
                        } else {
                            // has sibling node
                            node.getParentNode().insertBefore(stub, nextSibling);
                        }
                    } else {
                        // no topicmeta tag.
                        rootTopicref.insertBefore(stub, rootTopicref.getFirstChild());
                    }
                    // element.insertBefore(stub,element.getFirstChild());
                } else {
                    rootTopicref.appendChild(stub);
                }
                // Place siblingStub
                if (rootTopicref.getNextSibling() != null) {
                    rootTopicref.getParentNode().insertBefore(siblingStub, rootTopicref.getNextSibling());
                } else {
                    rootTopicref.getParentNode().appendChild(siblingStub);
                }
                reader.setErrorHandler(new DITAOTXMLErrorHandler(currentParsingFile.getPath(), logger));
                logger.info("Processing " + currentParsingFile);
                reader.parse(currentParsingFile.toString());
                output.flush();
                // remove stub and siblingStub
                stub.getParentNode().removeChild(stub);
                siblingStub.getParentNode().removeChild(siblingStub);
            }
        } catch (final RuntimeException e) {
            throw e;
        } catch (final Exception e) {
            logger.error(e.getMessage(), e);
        } finally {
            try {
                if (output != null) {
                    output.close();
                    output = null;
                    // for ".chunk" output, replace the original source file with the generated one
                    if (dotchunk && !new File(currentParsingFile).delete()) {
                        logger.error(MessageUtils.getInstance()
                                .getMessage("DOTJ009E", currentParsingFile.getPath(), outputFile.getPath()).toString());
                    }
                    if (dotchunk && !new File(outputFile).renameTo(new File(currentParsingFile))) {
                        logger.error(MessageUtils.getInstance()
                                .getMessage("DOTJ009E", currentParsingFile.getPath(), outputFile.getPath()).toString());
                    }
                }
            } catch (final Exception ex) {
                logger.error(ex.getMessage(), ex);
            }
        }
    }

    /**
     * get the document node of a topic file.
     *
     * @param absolutePathToFile topic file
     * @return root element of the parsed document, or {@code null} if parsing fails
     *         (the failure is logged)
     */
    private Element getTopicDoc(final URI absolutePathToFile) {
        final DocumentBuilder builder = getDocumentBuilder();
        try {
            final Document doc = builder.parse(absolutePathToFile.toString());
            return doc.getDocumentElement();
        } catch (final SAXException | IOException e) {
            logger.error("Failed to parse " + absolutePathToFile + ": " + e.getMessage(), e);
        }
        return null;
    }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.cache.query.partitioned;
import java.io.File;
import java.util.Collection;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.CacheException;
import com.gemstone.gemfire.cache.query.Index;
import com.gemstone.gemfire.cache.query.data.Portfolio;
import com.gemstone.gemfire.cache.query.data.PortfolioData;
import com.gemstone.gemfire.cache.query.internal.index.IndexManager.TestHook;
import com.gemstone.gemfire.cache.query.internal.index.IndexUtils;
import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.cache.PartitionedRegionDUnitTestCase;
import dunit.AsyncInvocation;
import dunit.DistributedTestCase;
import dunit.Host;
import dunit.SerializableRunnable;
import dunit.VM;
/**
* @author rdubey
*
*/
public class PRBasicIndexCreationDeadlockDUnitTest extends
        PartitionedRegionDUnitTestCase
{
    /**
     * constructor
     *
     * @param name test name, passed to the DUnit framework
     */
    public PRBasicIndexCreationDeadlockDUnitTest(String name) {
        super(name);
    }

    // int totalNumBuckets = 131;
    int queryTestCycle = 10;
    PRQueryDUnitHelper PRQHelp = new PRQueryDUnitHelper("");
    final String name = "PartionedPortfolios";
    final String localName = "LocalPortfolios";
    final int cnt = 0, cntDest = 1003;
    final int redundancy = 0;
    // Cross-VM coordination flags flipped once each VM's index-creation hook has fired.
    public static volatile boolean hook_vm1, hook_vm2;

    /**
     * Regression test: restarting two persistent PR members concurrently must not
     * deadlock between index-creation messaging and disk recovery. An
     * {@link IndexUtilTestHook} pauses index creation so the restarts overlap at
     * the critical point.
     */
    public void testIndexCreationMessageDiskRecoveryDeadLock() {
        Host host = Host.getHost(0);
        VM vm0 = host.getVM(0);
        VM vm1 = host.getVM(1);
        Class valueConstraint = Portfolio.class;
        final String fileName1 = "PRPersistentIndexCreation_1.xml";
        final String fileName2 = "PRPersistentIndexCreation_2.xml";
        final File dir1 = new File("overflowData1");
        final File dir2 = new File("overflowData2");
        AsyncInvocation[] asyns = new AsyncInvocation[3];
        try {
            // Disk store directories must exist before the XML-defined regions are created.
            vm0.invoke(new CacheSerializableRunnable("Create disk store directories") {
                @Override
                public void run2() throws CacheException {
                    boolean success = (dir1).mkdir();
                    success = (dir2).mkdir();
                }
            });
            vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateThrougXML(name, fileName1));
            vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateThrougXML(name, fileName2));
            final Portfolio[] portfoliosAndPositions = PRQHelp.createPortfoliosAndPositions(100);
            // Putting the data into the PR's created
            vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPutsKeyValue(name, portfoliosAndPositions,
                    0, 100));
            vm0.invoke(new CacheSerializableRunnable("Close VM0 cache") {
                @Override
                public void run2() throws CacheException {
                    GemFireCacheImpl.getInstance().close();
                }
            });
            vm1.invoke(new CacheSerializableRunnable("Close VM1 cache") {
                @Override
                public void run2() throws CacheException {
                    GemFireCacheImpl.getInstance().close();
                }
            });
            // Restart the caches with testHook.
            asyns[0] = vm0.invokeAsync(new CacheSerializableRunnable("Restart VM0 cache") {
                @Override
                public void run2() throws CacheException {
                    GemFireCacheImpl.testCacheXml = PRQHelp.findFile(fileName1);
                    IndexUtils.testHook = new IndexUtilTestHook();
                    PRQHelp.getCache();
                }
            });
            //asyns[1] =
            // Synchronous step: wait for VM0's restart to hit the hook, then release it.
            vm0.invoke(new CacheSerializableRunnable("Checking hook in VM0 cache") {
                @Override
                public void run2() throws CacheException {
                    IndexUtilTestHook hook = (IndexUtilTestHook) IndexUtils.testHook;
                    // The hook is installed by the async restart; poll until it appears.
                    while (hook == null) {
                        hook = (IndexUtilTestHook) IndexUtils.testHook;
                        pause(20);
                    }
                    while (!hook.isHooked()) {
                        pause(30);
                    }
                    hook.setHooked(false);
                    hook_vm1 = true;
                    /*while (!hook_vm2) {
                    pause(40);
                    }
                    hook.setHooked(false);*/
                }
            });
            asyns[1] = vm1.invokeAsync(new CacheSerializableRunnable("Restart VM1 cache") {
                @Override
                public void run2() throws CacheException {
                    GemFireCacheImpl.testCacheXml = PRQHelp.findFile(fileName2);
                    PRQHelp.getCache();
                }
            });
            asyns[2] = vm0.invokeAsync(new CacheSerializableRunnable("Checking hook in VM0 cache again") {
                @Override
                public void run2() throws CacheException {
                    IndexUtilTestHook hook = (IndexUtilTestHook) IndexUtils.testHook;
                    // Wait until the hook fires again after VM1's restart has started.
                    while (hook == null || !hook_vm1) {
                        hook = (IndexUtilTestHook) IndexUtils.testHook;
                        pause(20);
                    }
                    while (!hook.isHooked()) {
                        pause(30);
                    }
                    if (hook.isHooked() && hook_vm1) {
                        hook.setHooked(false);
                        hook_vm2 = true;
                    }
                }
            });
            // Join with a timeout: a deadlock shows up here as an unfinished invocation.
            for (AsyncInvocation async : asyns) {
                DistributedTestCase.join(async, 10000, null);
            }
        } finally {
            vm0.invoke(new CacheSerializableRunnable("Close VM0 cache") {
                @Override
                public void run2() throws CacheException {
                    dir1.delete();
                    dir2.delete();
                    IndexUtils.testHook = null;
                }
            });
            for (AsyncInvocation async : asyns) {
                DistributedTestCase.join(async, 10000, null);
            }
            for (AsyncInvocation async : asyns) {
                if (async.exceptionOccurred()) {
                    fail("Exception occured: " + async.getException());
                }
            }
        }
    }

    /**
     * Test hook that blocks index creation at spot 0 until the test clears the
     * {@code hooked} flag, letting the test line up concurrent restarts.
     */
    public class IndexUtilTestHook implements TestHook {
        // Set while the hook is parked; cleared by the test to release it.
        private boolean hooked = false;

        public void setHooked(boolean hooked) {
            this.hooked = hooked;
        }

        public boolean isHooked() {
            return hooked;
        }

        @Override
        public synchronized void hook(int spot) throws RuntimeException {
            GemFireCacheImpl.getInstance().getLogger().fine("IndexUtilTestHook is set");
            switch (spot) {
            case 0:
                hooked = true;
                // Park until the test observes the hook and clears the flag.
                while (hooked) { pause(300); }
                pause(1000);
                break;
            default:
                break;
            }
        }
    }
}
| |
package org.apache.lucene.codecs.lucene42;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.IntsRefBuilder;
import org.apache.lucene.util.PagedBytes;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.fst.BytesRefFSTEnum;
import org.apache.lucene.util.fst.BytesRefFSTEnum.InputOutput;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.FST.Arc;
import org.apache.lucene.util.fst.FST.BytesReader;
import org.apache.lucene.util.fst.PositiveIntOutputs;
import org.apache.lucene.util.fst.Util;
import org.apache.lucene.util.packed.BlockPackedReader;
import org.apache.lucene.util.packed.MonotonicBlockPackedReader;
import org.apache.lucene.util.packed.PackedInts;
/**
* Reader for 4.2 docvalues
* @deprecated only for reading old 4.x segments
*/
@Deprecated
final class Lucene42DocValuesProducer extends DocValuesProducer {
    // metadata maps (just file pointers and minimal stuff)
    private final Map<String,NumericEntry> numerics;
    private final Map<String,BinaryEntry> binaries;
    private final Map<String,FSTEntry> fsts;
    // data file holding the actual values; cloned per merge instance
    private final IndexInput data;
    private final int version;
    private final int numEntries;
    // ram instances we have already loaded
    private final Map<String,NumericDocValues> numericInstances = new HashMap<>();
    private final Map<String,BinaryDocValues> binaryInstances = new HashMap<>();
    private final Map<String,FST<Long>> fstInstances = new HashMap<>();
    // per-field accounting reported via getChildResources()
    private final Map<String,Accountable> numericInfo = new HashMap<>();
    private final Map<String,Accountable> binaryInfo = new HashMap<>();
    private final Map<String,Accountable> addressInfo = new HashMap<>();
    private final int maxDoc;
    private final AtomicLong ramBytesUsed;
    // field-type tags as written in the metadata stream
    static final byte NUMBER = 0;
    static final byte BYTES = 1;
    static final byte FST = 2;
    static final int BLOCK_SIZE = 4096;
    // numeric encoding strategies (see loadNumeric)
    static final byte DELTA_COMPRESSED = 0;
    static final byte TABLE_COMPRESSED = 1;
    static final byte UNCOMPRESSED = 2;
    static final byte GCD_COMPRESSED = 3;
    // on-disk format versions
    static final int VERSION_START = 0;
    static final int VERSION_GCD_COMPRESSION = 1;
    static final int VERSION_CHECKSUM = 2;
    static final int VERSION_CURRENT = VERSION_CHECKSUM;
    // true for merge-time clones; such instances never populate the caches
    private final boolean merging;
    // clone for merge: when merging we don't do any instances.put()s
    Lucene42DocValuesProducer(Lucene42DocValuesProducer original) throws IOException {
        // caller must hold the original's monitor while cloning
        assert Thread.holdsLock(original);
        // share the (immutable) metadata maps; clone the IndexInput so the
        // merge instance has its own file position
        numerics = original.numerics;
        binaries = original.binaries;
        fsts = original.fsts;
        data = original.data.clone();
        version = original.version;
        numEntries = original.numEntries;
        // seed caches with whatever the original already loaded
        numericInstances.putAll(original.numericInstances);
        binaryInstances.putAll(original.binaryInstances);
        fstInstances.putAll(original.fstInstances);
        numericInfo.putAll(original.numericInfo);
        binaryInfo.putAll(original.binaryInfo);
        addressInfo.putAll(original.addressInfo);
        maxDoc = original.maxDoc;
        ramBytesUsed = new AtomicLong(original.ramBytesUsed.get());
        merging = true;
    }
    /**
     * Opens the metadata (.tip-style) and data files for the segment, validates
     * headers and (for new enough versions) checksum footers, and reads the
     * per-field entry table.
     */
    Lucene42DocValuesProducer(SegmentReadState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
        maxDoc = state.segmentInfo.getDocCount();
        merging = false;
        String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
        // read in the entries from the metadata file.
        ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context);
        boolean success = false;
        ramBytesUsed = new AtomicLong(RamUsageEstimator.shallowSizeOfInstance(getClass()));
        try {
            version = CodecUtil.checkHeader(in, metaCodec,
                    VERSION_START,
                    VERSION_CURRENT);
            numerics = new HashMap<>();
            binaries = new HashMap<>();
            fsts = new HashMap<>();
            numEntries = readFields(in, state.fieldInfos);
            if (version >= VERSION_CHECKSUM) {
                CodecUtil.checkFooter(in);
            } else {
                CodecUtil.checkEOF(in);
            }
            success = true;
        } finally {
            // close normally on success; suppress secondary exceptions on failure
            if (success) {
                IOUtils.close(in);
            } else {
                IOUtils.closeWhileHandlingException(in);
            }
        }
        String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
        this.data = state.directory.openInput(dataName, state.context);
        success = false;
        try {
            final int version2 = CodecUtil.checkHeader(data, dataCodec,
                    VERSION_START,
                    VERSION_CURRENT);
            // meta and data files must have been written by the same format version
            if (version != version2) {
                throw new CorruptIndexException("Format versions mismatch: meta=" + version + ", data=" + version2, data);
            }
            if (version >= VERSION_CHECKSUM) {
                // NOTE: data file is too costly to verify checksum against all the bytes on open,
                // but for now we at least verify proper structure of the checksum footer: which looks
                // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
                // such as file truncation.
                CodecUtil.retrieveChecksum(data);
            }
            success = true;
        } finally {
            if (!success) {
                IOUtils.closeWhileHandlingException(this.data);
            }
        }
    }
    /**
     * Reads per-field entries from the metadata stream until the -1 terminator.
     *
     * @param meta  metadata input positioned after the header
     * @param infos field infos used to validate field numbers
     * @return number of entries read
     * @throws CorruptIndexException on unknown field numbers, formats, or entry types
     */
    private int readFields(IndexInput meta, FieldInfos infos) throws IOException {
        int numEntries = 0;
        int fieldNumber = meta.readVInt();
        // -1 marks the end of the entry list
        while (fieldNumber != -1) {
            numEntries++;
            FieldInfo info = infos.fieldInfo(fieldNumber);
            if (info == null) {
                // trickier to validate more: because we re-use for norms, because we use multiple entries
                // for "composite" types like sortedset, etc.
                throw new CorruptIndexException("Invalid field number: " + fieldNumber, meta);
            }
            int fieldType = meta.readByte();
            if (fieldType == NUMBER) {
                NumericEntry entry = new NumericEntry();
                entry.offset = meta.readLong();
                entry.format = meta.readByte();
                switch(entry.format) {
                case DELTA_COMPRESSED:
                case TABLE_COMPRESSED:
                case GCD_COMPRESSED:
                case UNCOMPRESSED:
                    break;
                default:
                    throw new CorruptIndexException("Unknown format: " + entry.format, meta);
                }
                // UNCOMPRESSED stores raw bytes and carries no packed-ints version
                if (entry.format != UNCOMPRESSED) {
                    entry.packedIntsVersion = meta.readVInt();
                }
                numerics.put(info.name, entry);
            } else if (fieldType == BYTES) {
                BinaryEntry entry = new BinaryEntry();
                entry.offset = meta.readLong();
                entry.numBytes = meta.readLong();
                entry.minLength = meta.readVInt();
                entry.maxLength = meta.readVInt();
                // variable-length values need an address (offset) structure
                if (entry.minLength != entry.maxLength) {
                    entry.packedIntsVersion = meta.readVInt();
                    entry.blockSize = meta.readVInt();
                }
                binaries.put(info.name, entry);
            } else if (fieldType == FST) {
                FSTEntry entry = new FSTEntry();
                entry.offset = meta.readLong();
                entry.numOrds = meta.readVLong();
                fsts.put(info.name, entry);
            } else {
                throw new CorruptIndexException("invalid entry type: " + fieldType, meta);
            }
            fieldNumber = meta.readVInt();
        }
        return numEntries;
    }
@Override
public synchronized NumericDocValues getNumeric(FieldInfo field) throws IOException {
NumericDocValues instance = numericInstances.get(field.name);
if (instance == null) {
instance = loadNumeric(field);
if (!merging) {
numericInstances.put(field.name, instance);
}
}
return instance;
}
@Override
public long ramBytesUsed() {
return ramBytesUsed.get();
}
@Override
public synchronized Collection<Accountable> getChildResources() {
List<Accountable> resources = new ArrayList<>();
resources.addAll(Accountables.namedAccountables("numeric field", numericInfo));
resources.addAll(Accountables.namedAccountables("binary field", binaryInfo));
resources.addAll(Accountables.namedAccountables("addresses field", addressInfo));
resources.addAll(Accountables.namedAccountables("terms dict field", fstInstances));
return Collections.unmodifiableList(resources);
}
@Override
public void checkIntegrity() throws IOException {
if (version >= VERSION_CHECKSUM) {
CodecUtil.checksumEntireFile(data);
}
}
@Override
public String toString() {
return getClass().getSimpleName() + "(entries=" + numEntries + ")";
}
    /**
     * Decodes a numeric field from the data file according to the format tag
     * recorded in its {@link NumericEntry}. RAM usage is accounted only for
     * non-merge instances.
     */
    private NumericDocValues loadNumeric(FieldInfo field) throws IOException {
        NumericEntry entry = numerics.get(field.name);
        data.seek(entry.offset);
        switch (entry.format) {
        case TABLE_COMPRESSED:
            // a small dictionary of distinct values plus per-doc ords into it
            int size = data.readVInt();
            if (size > 256) {
                throw new CorruptIndexException("TABLE_COMPRESSED cannot have more than 256 distinct values, got=" + size, data);
            }
            final long decode[] = new long[size];
            for (int i = 0; i < decode.length; i++) {
                decode[i] = data.readLong();
            }
            final int formatID = data.readVInt();
            final int bitsPerValue = data.readVInt();
            final PackedInts.Reader ordsReader = PackedInts.getReaderNoHeader(data, PackedInts.Format.byId(formatID), entry.packedIntsVersion, maxDoc, bitsPerValue);
            if (!merging) {
                ramBytesUsed.addAndGet(RamUsageEstimator.sizeOf(decode) + ordsReader.ramBytesUsed());
                numericInfo.put(field.name, ordsReader);
            }
            return new NumericDocValues() {
                @Override
                public long get(int docID) {
                    return decode[(int)ordsReader.get(docID)];
                }
            };
        case DELTA_COMPRESSED:
            // block-packed deltas; the reader itself serves lookups
            final int blockSize = data.readVInt();
            final BlockPackedReader reader = new BlockPackedReader(data, entry.packedIntsVersion, blockSize, maxDoc, false);
            if (!merging) {
                ramBytesUsed.addAndGet(reader.ramBytesUsed());
                numericInfo.put(field.name, reader);
            }
            return reader;
        case UNCOMPRESSED:
            // one raw byte per document
            final byte bytes[] = new byte[maxDoc];
            data.readBytes(bytes, 0, bytes.length);
            if (!merging) {
                ramBytesUsed.addAndGet(RamUsageEstimator.sizeOf(bytes));
                numericInfo.put(field.name, Accountables.namedAccountable("byte array", maxDoc));
            }
            return new NumericDocValues() {
                @Override
                public long get(int docID) {
                    return bytes[docID];
                }
            };
        case GCD_COMPRESSED:
            // values reconstructed as min + mult * quotient
            final long min = data.readLong();
            final long mult = data.readLong();
            final int quotientBlockSize = data.readVInt();
            final BlockPackedReader quotientReader = new BlockPackedReader(data, entry.packedIntsVersion, quotientBlockSize, maxDoc, false);
            if (!merging) {
                ramBytesUsed.addAndGet(quotientReader.ramBytesUsed());
                numericInfo.put(field.name, quotientReader);
            }
            return new NumericDocValues() {
                @Override
                public long get(int docID) {
                    return min + mult * quotientReader.get(docID);
                }
            };
        default:
            // formats were validated in readFields, so this is unreachable
            throw new AssertionError();
        }
    }
@Override
public synchronized BinaryDocValues getBinary(FieldInfo field) throws IOException {
BinaryDocValues instance = binaryInstances.get(field.name);
if (instance == null) {
instance = loadBinary(field);
if (!merging) {
binaryInstances.put(field.name, instance);
}
}
return instance;
}
    /**
     * Loads a binary field into paged bytes. Fixed-length fields are addressed
     * by {@code docID * length}; variable-length fields use a monotonic
     * block-packed address table of end offsets.
     */
    private BinaryDocValues loadBinary(FieldInfo field) throws IOException {
        BinaryEntry entry = binaries.get(field.name);
        data.seek(entry.offset);
        PagedBytes bytes = new PagedBytes(16);
        bytes.copy(data, entry.numBytes);
        final PagedBytes.Reader bytesReader = bytes.freeze(true);
        if (!merging) {
            binaryInfo.put(field.name, bytesReader);
        }
        if (entry.minLength == entry.maxLength) {
            // fixed length: slice position is a simple multiple of the length
            final int fixedLength = entry.minLength;
            if (!merging) {
                ramBytesUsed.addAndGet(bytesReader.ramBytesUsed());
            }
            return new BinaryDocValues() {
                @Override
                public BytesRef get(int docID) {
                    final BytesRef term = new BytesRef();
                    bytesReader.fillSlice(term, fixedLength * (long)docID, fixedLength);
                    return term;
                }
            };
        } else {
            // variable length: addresses[i] is the end offset of doc i's value
            final MonotonicBlockPackedReader addresses = MonotonicBlockPackedReader.of(data, entry.packedIntsVersion, entry.blockSize, maxDoc, false);
            if (!merging) {
                addressInfo.put(field.name, addresses);
                ramBytesUsed.addAndGet(bytesReader.ramBytesUsed() + addresses.ramBytesUsed());
            }
            return new BinaryDocValues() {
                @Override
                public BytesRef get(int docID) {
                    long startAddress = docID == 0 ? 0 : addresses.get(docID-1);
                    long endAddress = addresses.get(docID);
                    final BytesRef term = new BytesRef();
                    bytesReader.fillSlice(term, startAddress, (int) (endAddress - startAddress));
                    return term;
                }
            };
        }
    }
    /**
     * Returns sorted doc values backed by an FST terms dictionary (term bytes
     * {@code ->} ord) and a numeric field mapping doc {@code ->} ord.
     */
    @Override
    public SortedDocValues getSorted(FieldInfo field) throws IOException {
        final FSTEntry entry = fsts.get(field.name);
        FST<Long> instance;
        // load (or reuse) the FST for this field under the producer's lock
        synchronized(this) {
            instance = fstInstances.get(field.name);
            if (instance == null) {
                data.seek(entry.offset);
                instance = new FST<>(data, PositiveIntOutputs.getSingleton());
                if (!merging) {
                    ramBytesUsed.addAndGet(instance.ramBytesUsed());
                    fstInstances.put(field.name, instance);
                }
            }
        }
        final NumericDocValues docToOrd = getNumeric(field);
        final FST<Long> fst = instance;
        // per-thread resources
        final BytesReader in = fst.getBytesReader();
        final Arc<Long> firstArc = new Arc<>();
        final Arc<Long> scratchArc = new Arc<>();
        final IntsRefBuilder scratchInts = new IntsRefBuilder();
        final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
        return new SortedDocValues() {
            final BytesRefBuilder term = new BytesRefBuilder();

            @Override
            public int getOrd(int docID) {
                return (int) docToOrd.get(docID);
            }

            @Override
            public BytesRef lookupOrd(int ord) {
                try {
                    // walk the FST by output (ord) to reconstruct the term bytes
                    in.setPosition(0);
                    fst.getFirstArc(firstArc);
                    IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts);
                    term.grow(output.length);
                    term.clear();
                    return Util.toBytesRef(output, term);
                } catch (IOException bogus) {
                    throw new RuntimeException(bogus);
                }
            }

            @Override
            public int lookupTerm(BytesRef key) {
                try {
                    InputOutput<Long> o = fstEnum.seekCeil(key);
                    if (o == null) {
                        return -getValueCount()-1;
                    } else if (o.input.equals(key)) {
                        return o.output.intValue();
                    } else {
                        // not found: encode the ceiling ord as -(ord)-1
                        return (int) -o.output-1;
                    }
                } catch (IOException bogus) {
                    throw new RuntimeException(bogus);
                }
            }

            @Override
            public int getValueCount() {
                return (int)entry.numOrds;
            }

            @Override
            public TermsEnum termsEnum() {
                return new FSTTermsEnum(fst);
            }
        };
    }
    /**
     * Returns sorted-set doc values: per-doc ords are stored as vLong deltas in
     * a binary field, and the FST maps term bytes {@code <->} ord.
     */
    @Override
    public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
        final FSTEntry entry = fsts.get(field.name);
        if (entry.numOrds == 0) {
            return DocValues.emptySortedSet(); // empty FST!
        }
        FST<Long> instance;
        // load (or reuse) the FST for this field under the producer's lock
        synchronized(this) {
            instance = fstInstances.get(field.name);
            if (instance == null) {
                data.seek(entry.offset);
                instance = new FST<>(data, PositiveIntOutputs.getSingleton());
                if (!merging) {
                    ramBytesUsed.addAndGet(instance.ramBytesUsed());
                    fstInstances.put(field.name, instance);
                }
            }
        }
        final BinaryDocValues docToOrds = getBinary(field);
        final FST<Long> fst = instance;
        // per-thread resources
        final BytesReader in = fst.getBytesReader();
        final Arc<Long> firstArc = new Arc<>();
        final Arc<Long> scratchArc = new Arc<>();
        final IntsRefBuilder scratchInts = new IntsRefBuilder();
        final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
        final ByteArrayDataInput input = new ByteArrayDataInput();
        return new SortedSetDocValues() {
            final BytesRefBuilder term = new BytesRefBuilder();
            // current doc's encoded ord list and running decoded ord
            BytesRef ordsRef;
            long currentOrd;

            @Override
            public long nextOrd() {
                if (input.eof()) {
                    return NO_MORE_ORDS;
                } else {
                    // ords are delta-encoded as vLongs
                    currentOrd += input.readVLong();
                    return currentOrd;
                }
            }

            @Override
            public void setDocument(int docID) {
                ordsRef = docToOrds.get(docID);
                input.reset(ordsRef.bytes, ordsRef.offset, ordsRef.length);
                currentOrd = 0;
            }

            @Override
            public BytesRef lookupOrd(long ord) {
                try {
                    // walk the FST by output (ord) to reconstruct the term bytes
                    in.setPosition(0);
                    fst.getFirstArc(firstArc);
                    IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts);
                    term.grow(output.length);
                    term.clear();
                    return Util.toBytesRef(output, term);
                } catch (IOException bogus) {
                    throw new RuntimeException(bogus);
                }
            }

            @Override
            public long lookupTerm(BytesRef key) {
                try {
                    InputOutput<Long> o = fstEnum.seekCeil(key);
                    if (o == null) {
                        return -getValueCount()-1;
                    } else if (o.input.equals(key)) {
                        return o.output.intValue();
                    } else {
                        // not found: encode the ceiling ord as -(ord)-1
                        return -o.output-1;
                    }
                } catch (IOException bogus) {
                    throw new RuntimeException(bogus);
                }
            }

            @Override
            public long getValueCount() {
                return entry.numOrds;
            }

            @Override
            public TermsEnum termsEnum() {
                return new FSTTermsEnum(fst);
            }
        };
    }
@Override
public Bits getDocsWithField(FieldInfo field) throws IOException {
if (field.getDocValuesType() == DocValuesType.SORTED_SET) {
return DocValues.docsWithValue(getSortedSet(field), maxDoc);
} else {
return new Bits.MatchAllBits(maxDoc);
}
}
    /**
     * Always throws: the 4.2 format predates sorted-numeric doc values, so no
     * segment of this version can contain such a field.
     */
    @Override
    public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
        throw new IllegalStateException("Lucene 4.2 does not support SortedNumeric: how did you pull this off?");
    }
@Override
public synchronized DocValuesProducer getMergeInstance() throws IOException {
    // Hand the merge its own producer instance so merge-time reads do not
    // share per-instance state with searcher-time reads.
    // NOTE(review): synchronized presumably guards the copy-constructor's view
    // of this producer's state — confirm against the constructor it calls.
    return new Lucene42DocValuesProducer(this);
}
@Override
public void close() throws IOException {
    // Releases the underlying data input; after this no getXxx() calls are valid.
    data.close();
}
/** Per-field metadata for a numeric doc-values field (read from the metadata stream). */
static class NumericEntry {
    long offset;            // start position of this field's data — presumably in the data file; confirm against the reader
    byte format;            // encoding variant used for the values
    int packedIntsVersion;  // packed-ints format version the values were written with
}
/** Per-field metadata for a binary doc-values field. */
static class BinaryEntry {
    long offset;            // start position of this field's byte data
    long numBytes;          // total length of the concatenated values
    int minLength;          // shortest value length; equal min/max implies fixed-width values
    int maxLength;          // longest value length
    int packedIntsVersion;  // packed-ints version for the address data (variable-width case)
    int blockSize;          // block size of the packed address structure
}
/** Per-field metadata for an FST-backed (sorted/sorted-set) doc-values field. */
static class FSTEntry {
    long offset;   // start position of the serialized FST
    long numOrds;  // number of distinct terms (ords) in the FST; see getValueCount()
}
// exposes FSTEnum directly as a TermsEnum: avoids binary-search next()
/**
 * Exposes the terms of an ord-valued {@code FST<Long>} as a {@link TermsEnum}.
 * Sequential iteration and seek-by-term delegate straight to a
 * {@link BytesRefFSTEnum}; seek-by-ord walks the FST by output via
 * {@code Util.getByOutput} and then re-seeks the enum by the recovered term.
 * Postings-related methods ({@code docFreq}, {@code docs}, ...) are unsupported.
 * Not thread-safe: all scratch state is per-instance.
 */
static class FSTTermsEnum extends TermsEnum {
    final BytesRefFSTEnum<Long> in;

    // this is all for the complicated seek(ord)...
    // maybe we should add a FSTEnum that supports this operation?
    final FST<Long> fst;
    final FST.BytesReader bytesReader;
    final Arc<Long> firstArc = new Arc<>();
    final Arc<Long> scratchArc = new Arc<>();
    final IntsRefBuilder scratchInts = new IntsRefBuilder();
    final BytesRefBuilder scratchBytes = new BytesRefBuilder();

    FSTTermsEnum(FST<Long> fst) {
        this.fst = fst;
        in = new BytesRefFSTEnum<>(fst);
        bytesReader = fst.getBytesReader();
    }

    @Override
    public BytesRef next() throws IOException {
        InputOutput<Long> io = in.next();
        return io == null ? null : io.input;
    }

    @Override
    public SeekStatus seekCeil(BytesRef text) throws IOException {
        if (in.seekCeil(text) == null) {
            return SeekStatus.END;
        } else if (term().equals(text)) {
            // TODO: add SeekStatus to FSTEnum like in https://issues.apache.org/jira/browse/LUCENE-3729
            // to remove this comparison?
            return SeekStatus.FOUND;
        } else {
            return SeekStatus.NOT_FOUND;
        }
    }

    @Override
    public boolean seekExact(BytesRef text) throws IOException {
        return in.seekExact(text) != null;
    }

    @Override
    public void seekExact(long ord) throws IOException {
        // TODO: would be better to make this simpler and faster.
        // but we dont want to introduce a bug that corrupts our enum state!
        bytesReader.setPosition(0);
        fst.getFirstArc(firstArc);
        // Walk the FST by accumulated output (the ord) to recover the term bytes.
        IntsRef output = Util.getByOutput(fst, ord, bytesReader, firstArc, scratchArc, scratchInts);
        // Reuse the scratchBytes field; the previous code shadowed it with a
        // fresh local BytesRefBuilder on every call, allocating needlessly.
        scratchBytes.clear();
        Util.toBytesRef(output, scratchBytes);
        // TODO: we could do this lazily, better to try to push into FSTEnum though?
        in.seekExact(scratchBytes.get());
    }

    @Override
    public BytesRef term() throws IOException {
        // Only valid after a successful positioning call (next/seek*).
        return in.current().input;
    }

    @Override
    public long ord() throws IOException {
        return in.current().output;
    }

    @Override
    public int docFreq() throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public long totalTermFreq() throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) throws IOException {
        throw new UnsupportedOperationException();
    }
}
}
| |
/**
* JBoss, Home of Professional Open Source.
* Copyright 2014-2020 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.demo.data;
import com.google.common.base.Preconditions;
import org.jboss.pnc.common.concurrent.Sequence;
import org.jboss.pnc.common.json.moduleconfig.DemoDataConfig;
import org.jboss.pnc.common.json.moduleconfig.SystemConfig;
import org.jboss.pnc.enums.ArtifactQuality;
import org.jboss.pnc.enums.BuildStatus;
import org.jboss.pnc.enums.BuildType;
import org.jboss.pnc.enums.MilestoneCloseStatus;
import org.jboss.pnc.enums.RepositoryType;
import org.jboss.pnc.enums.SupportLevel;
import org.jboss.pnc.enums.SystemImageType;
import org.jboss.pnc.model.Artifact;
import org.jboss.pnc.model.BuildConfigSetRecord;
import org.jboss.pnc.model.BuildConfiguration;
import org.jboss.pnc.model.BuildConfigurationAudited;
import org.jboss.pnc.model.BuildConfigurationSet;
import org.jboss.pnc.model.BuildEnvironment;
import org.jboss.pnc.model.BuildRecord;
import org.jboss.pnc.model.IdRev;
import org.jboss.pnc.model.Product;
import org.jboss.pnc.model.ProductMilestone;
import org.jboss.pnc.model.ProductMilestoneRelease;
import org.jboss.pnc.model.ProductRelease;
import org.jboss.pnc.model.ProductVersion;
import org.jboss.pnc.model.Project;
import org.jboss.pnc.model.RepositoryConfiguration;
import org.jboss.pnc.model.TargetRepository;
import org.jboss.pnc.model.User;
import org.jboss.pnc.spi.datastore.Datastore;
import org.jboss.pnc.spi.datastore.repositories.ArtifactAuditedRepository;
import org.jboss.pnc.spi.datastore.repositories.ArtifactRepository;
import org.jboss.pnc.spi.datastore.repositories.BuildConfigSetRecordRepository;
import org.jboss.pnc.spi.datastore.repositories.BuildConfigurationAuditedRepository;
import org.jboss.pnc.spi.datastore.repositories.BuildConfigurationRepository;
import org.jboss.pnc.spi.datastore.repositories.BuildConfigurationSetRepository;
import org.jboss.pnc.spi.datastore.repositories.BuildEnvironmentRepository;
import org.jboss.pnc.spi.datastore.repositories.BuildRecordRepository;
import org.jboss.pnc.spi.datastore.repositories.ProductMilestoneReleaseRepository;
import org.jboss.pnc.spi.datastore.repositories.ProductMilestoneRepository;
import org.jboss.pnc.spi.datastore.repositories.ProductReleaseRepository;
import org.jboss.pnc.spi.datastore.repositories.ProductRepository;
import org.jboss.pnc.spi.datastore.repositories.ProductVersionRepository;
import org.jboss.pnc.spi.datastore.repositories.ProjectRepository;
import org.jboss.pnc.spi.datastore.repositories.RepositoryConfigurationRepository;
import org.jboss.pnc.spi.datastore.repositories.SequenceHandlerRepository;
import org.jboss.pnc.spi.datastore.repositories.TargetRepositoryRepository;
import org.jboss.pnc.spi.datastore.repositories.UserRepository;
import javax.ejb.Singleton;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.inject.Inject;
import java.sql.Timestamp;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.temporal.ChronoUnit;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;
import static org.jboss.pnc.spi.datastore.predicates.ArtifactPredicates.withIdentifierAndSha256;
/**
* Data for the DEMO. Note: The database initialization requires two separate transactions in order for the build
* configuration audit record to be created and then linked to a build record.
*/
@Singleton
public class DatabaseDataInitializer {
public static final Logger log = Logger.getLogger(DatabaseDataInitializer.class.getName());

// Well-known names/versions used by the demo data; verifyData() checks against these.
public static final String PNC_PRODUCT_NAME = "Project Newcastle Demo Product";
public static final String PNC_PRODUCT_VERSION_1 = "1.0";
public static final String PNC_PRODUCT_VERSION_2 = "2.0";
public static final String PNC_PRODUCT_RELEASE = "1.0.0.GA";
public static final String PNC_PRODUCT_MILESTONE1 = "1.0.0.Build1";
public static final String PNC_PRODUCT_MILESTONE2 = "1.0.0.Build2";
public static final String PNC_PRODUCT_MILESTONE3 = "1.0.0.Build3";
public static final String PNC_PROJECT_1_NAME = "Project Newcastle Demo Project 1";
public static final String PNC_PROJECT_BUILD_CFG_ID = "pnc-1.0.0.DR1";

// Injected repositories used to persist the demo entities.
@Inject
private ArtifactRepository artifactRepository;
@Inject
private ArtifactAuditedRepository artifactAuditedRepository;
@Inject
private TargetRepositoryRepository targetRepositoryRepository;
@Inject
private ProjectRepository projectRepository;
@Inject
private ProductRepository productRepository;
@Inject
private RepositoryConfigurationRepository repositoryConfigurationRepository;
@Inject
private BuildConfigurationRepository buildConfigurationRepository;
@Inject
BuildConfigurationAuditedRepository buildConfigurationAuditedRepository;
@Inject
private ProductVersionRepository productVersionRepository;
@Inject
private ProductMilestoneRepository productMilestoneRepository;
@Inject
private ProductMilestoneReleaseRepository productMilestoneReleaseRepository;
@Inject
private ProductReleaseRepository productReleaseRepository;
@Inject
private BuildConfigurationSetRepository buildConfigurationSetRepository;
@Inject
private UserRepository userRepository;
@Inject
private BuildRecordRepository buildRecordRepository;
@Inject
private BuildConfigSetRecordRepository buildConfigSetRecordRepository;
@Inject
private BuildEnvironmentRepository environmentRepository;
@Inject
private SequenceHandlerRepository sequenceHandlerRepository;
@Inject
private Datastore datastore;
@Inject
DemoDataConfig demoDataConfig;
@Inject
SystemConfig systemConfig;

// Entities created in initiliazeProjectProductData() and reused by the
// build-record initialization that runs in a later transaction.
BuildConfiguration buildConfiguration1;
BuildConfiguration buildConfiguration2;
BuildConfigurationSet buildConfigurationSet1;
ProductMilestone demoProductMilestone1;
User demoUser;
User pncAdminUser;
/**
 * Sanity-checks that the demo data was persisted and wired together as
 * expected: non-empty repositories, and correct product/version links on the
 * first build configuration and build configuration set.
 */
public void verifyData() {
    // Every demo entity type must have at least one row.
    Preconditions.checkState(projectRepository.count() > 0, "Expecting number of Projects > 0");
    Preconditions.checkState(productRepository.count() > 0, "Expecting number of Products > 0");
    Preconditions.checkState(buildConfigurationRepository.count() > 0, "Expecting number of BuildConfigurations > 0");
    Preconditions.checkState(productVersionRepository.count() > 0, "Expecting number of ProductVersions > 0");
    Preconditions.checkState(buildConfigurationSetRepository.count() > 0, "Expecting number of BuildRepositorySets > 0");
    Preconditions.checkState(artifactRepository.count() > 0, "Expecting number of Artifacts > 0");

    // The first build configuration must belong to a set with a product version.
    BuildConfiguration firstBuildConfig = buildConfigurationRepository.queryAll().get(0);
    BuildConfigurationSet setOfFirstConfig = firstBuildConfig.getBuildConfigurationSets().iterator().next();
    Preconditions.checkState(
            setOfFirstConfig.getProductVersion() != null,
            "Product version of buildConfiguration must be not null");

    BuildConfigurationSet firstConfigSet = buildConfigurationSetRepository.queryAll().get(0);
    Preconditions.checkState(
            firstConfigSet.getProductVersion() != null,
            "Product version of buildConfigurationSet must be not null");

    // Product <-> BuildConfiguration mapping via the BuildConfigurationSet.
    Preconditions.checkState(
            firstConfigSet.getProductVersion().getProduct().getName().equals(PNC_PRODUCT_NAME),
            "Product mapped to Project must be " + PNC_PRODUCT_NAME);
    Preconditions.checkState(
            firstConfigSet.getProductVersion().getVersion().equals(PNC_PRODUCT_VERSION_1),
            "Product version mapped to Project must be " + PNC_PRODUCT_VERSION_1);
    Preconditions.checkState(
            setOfFirstConfig.getProductVersion().getVersion().equals(PNC_PRODUCT_VERSION_1),
            "Product version mapped to BuildConfiguration must be " + PNC_PRODUCT_VERSION_1);
    Preconditions.checkState(
            setOfFirstConfig.getProductVersion().getProduct().getName().equals(PNC_PRODUCT_NAME),
            "Product mapped to BuildConfiguration must be " + PNC_PRODUCT_NAME);

    // Project <-> BuildConfiguration mapping.
    Preconditions.checkState(
            firstBuildConfig.getProject().getName().equals(PNC_PROJECT_1_NAME),
            "Project mapped to BuildConfiguration must be " + PNC_PROJECT_1_NAME);
}
/**
 * Creates the project/product demo graph in its own transaction: two build
 * environments, one product with two versions, three milestones (one closed
 * with several milestone releases), one release, five projects with repository
 * and build configurations, two build groups, and two users. Runs with
 * REQUIRES_NEW so the build-configuration audit records are committed before
 * {@code initiliazeBuildRecordDemoData()} links build records to them.
 * Note: the method-name typo ("initiliaze") is part of the public API and is
 * kept for compatibility with callers.
 */
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public void initiliazeProjectProductData() {
    // Two environments; the second is deprecated to exercise that flag.
    BuildEnvironment environment1Unsaved = BuildEnvironment.Builder.newBuilder()
            .name("Demo Environment 1")
            .description("Basic Java and Maven Environment")
            .attribute("JDK", "1.7.0")
            .attribute("OS", "Linux")
            .systemImageId("12345678")
            .systemImageRepositoryUrl("my.registry/newcastle")
            .systemImageType(SystemImageType.DOCKER_IMAGE)
            .deprecated(false)
            .build();
    BuildEnvironment environment1 = environmentRepository.save(environment1Unsaved);
    BuildEnvironment environment2Unsaved = BuildEnvironment.Builder.newBuilder()
            .name("Demo Environment 2")
            .description("Basic Java and Maven Environment")
            .attribute("JDK", "1.7.0")
            .attribute("OS", "Linux")
            .systemImageId("12345679")
            .systemImageRepositoryUrl("my.registry/newcastle")
            .systemImageType(SystemImageType.DOCKER_IMAGE)
            .deprecated(true)
            .build();
    BuildEnvironment environment2 = environmentRepository.save(environment2Unsaved);
    /*
     * All the bi-directional mapping settings are managed inside the Builders
     */
    // Example product and product version
    Product product = Product.Builder.newBuilder()
            .name(PNC_PRODUCT_NAME)
            .abbreviation("PNC")
            .description("Example Product for Project Newcastle Demo")
            .build();
    product = productRepository.save(product);
    // Example product version, release, and milestone of the product
    ProductVersion productVersion1 = ProductVersion.Builder.newBuilder()
            .version(PNC_PRODUCT_VERSION_1)
            .product(product)
            .generateBrewTagPrefix(
                    product.getAbbreviation(),
                    PNC_PRODUCT_VERSION_1,
                    systemConfig.getBrewTagPattern())
            .build();
    productVersion1 = productVersionRepository.save(productVersion1);
    ProductVersion productVersion2 = ProductVersion.Builder.newBuilder()
            .version(PNC_PRODUCT_VERSION_2)
            .product(product)
            .generateBrewTagPrefix(
                    product.getAbbreviation(),
                    PNC_PRODUCT_VERSION_2,
                    systemConfig.getBrewTagPattern())
            .build();
    productVersion2 = productVersionRepository.save(productVersion2);
    // Milestone windows are anchored on "now" so the demo always has one past,
    // one current, and one closed milestone.
    final int DAYS_IN_A_WEEK = 7;
    final Date TODAY = Date.from(LocalDateTime.now().atZone(ZoneId.systemDefault()).toInstant());
    final Date ONE_WEEK_BEFORE_TODAY = Date
            .from(LocalDateTime.now().minusDays(DAYS_IN_A_WEEK).atZone(ZoneId.systemDefault()).toInstant());
    final Date ONE_WEEK_AFTER_TODAY = Date
            .from(LocalDateTime.now().plusDays(DAYS_IN_A_WEEK).atZone(ZoneId.systemDefault()).toInstant());
    demoProductMilestone1 = ProductMilestone.Builder.newBuilder()
            .version(PNC_PRODUCT_MILESTONE1)
            .startingDate(ONE_WEEK_BEFORE_TODAY)
            .plannedEndDate(TODAY)
            .productVersion(productVersion1)
            .build();
    demoProductMilestone1 = productMilestoneRepository.save(demoProductMilestone1);
    ProductMilestone demoProductMilestone2 = ProductMilestone.Builder.newBuilder()
            .version(PNC_PRODUCT_MILESTONE2)
            .startingDate(TODAY)
            .plannedEndDate(ONE_WEEK_AFTER_TODAY)
            .productVersion(productVersion1)
            .build();
    demoProductMilestone2 = productMilestoneRepository.save(demoProductMilestone2);
    Instant t0 = TODAY.toInstant();
    Instant successTime = t0.plus(10, ChronoUnit.MINUTES);
    // Milestone 3 has an endDate set, i.e. it is already closed.
    ProductMilestone demoProductMilestone3 = ProductMilestone.Builder.newBuilder()
            .version(PNC_PRODUCT_MILESTONE3)
            .startingDate(TODAY)
            .plannedEndDate(ONE_WEEK_AFTER_TODAY)
            .endDate(ONE_WEEK_AFTER_TODAY)
            .productVersion(productVersion1)
            .build();
    demoProductMilestone3 = productMilestoneRepository.save(demoProductMilestone3);
    // Three milestone releases in mixed states (error, failed, succeeded),
    // deliberately saved out of chronological order.
    ProductMilestoneRelease milestoneRelease1 = new ProductMilestoneRelease();
    milestoneRelease1.setId(Sequence.nextId());
    milestoneRelease1.setMilestone(demoProductMilestone3);
    // first store with latter starting date to test sort function
    milestoneRelease1.setStartingDate(Date.from(t0.plus(2, ChronoUnit.MINUTES)));
    milestoneRelease1.setStatus(MilestoneCloseStatus.SYSTEM_ERROR);
    productMilestoneReleaseRepository.save(milestoneRelease1);
    ProductMilestoneRelease milestoneRelease2 = new ProductMilestoneRelease();
    milestoneRelease2.setId(Sequence.nextId());
    milestoneRelease2.setMilestone(demoProductMilestone3);
    milestoneRelease2.setStartingDate(Date.from(t0));
    milestoneRelease2.setStatus(MilestoneCloseStatus.FAILED);
    productMilestoneReleaseRepository.save(milestoneRelease2);
    ProductMilestoneRelease milestoneRelease3 = new ProductMilestoneRelease();
    milestoneRelease3.setId(Sequence.nextId());
    milestoneRelease3.setMilestone(demoProductMilestone3);
    milestoneRelease3.setStartingDate(Date.from(successTime));
    milestoneRelease3.setStatus(MilestoneCloseStatus.SUCCEEDED);
    productMilestoneReleaseRepository.save(milestoneRelease3);
    ProductRelease productRelease = ProductRelease.Builder.newBuilder()
            .version(PNC_PRODUCT_RELEASE)
            .productMilestone(demoProductMilestone1)
            .supportLevel(SupportLevel.EARLYACCESS)
            .build();
    productRelease = productReleaseRepository.save(productRelease);
    // Mark the closed milestone as the current one and re-save the version.
    productVersion1.setCurrentProductMilestone(demoProductMilestone3);
    productVersion1 = productVersionRepository.save(productVersion1);
    // Example projects
    Project project1 = Project.Builder.newBuilder()
            .name(PNC_PROJECT_1_NAME)
            .description("Example Project for Newcastle Demo")
            .projectUrl("https://github.com/project-ncl/pnc")
            .build();
    Project project2 = Project.Builder.newBuilder()
            .name("Causeway")
            .description("Causeway - Koji integration")
            .projectUrl("https://github.com/project-ncl/causeway")
            .build();
    Project project3 = Project.Builder.newBuilder()
            .name("Pnc Build Agent")
            .description("Pnc Build Agent - remote client to execute commands.")
            .projectUrl("https://github.com/project-ncl/pnc-build-agent")
            .build();
    Project project4 = Project.Builder.newBuilder()
            .name("Dependency Analysis")
            .description("Dependency Analysis - Analise project dependencies.")
            .projectUrl("https://github.com/project-ncl/dependency-analysis")
            .build();
    Project project5 = Project.Builder.newBuilder()
            .name("termd")
            .description("Remote shell.")
            .projectUrl("https://github.com/project-ncl/termd")
            .build();
    projectRepository.save(project1);
    projectRepository.save(project2);
    projectRepository.save(project3);
    projectRepository.save(project4);
    projectRepository.save(project5);
    // One repository configuration per project; internal repo URLs come from config.
    RepositoryConfiguration repositoryConfiguration1 = createRepositoryConfiguration(
            demoDataConfig.getInternalRepo(0),
            "https://github.com/project-ncl/pnc.git");
    RepositoryConfiguration repositoryConfiguration2 = createRepositoryConfiguration(
            demoDataConfig.getInternalRepo(1),
            null);
    RepositoryConfiguration repositoryConfiguration3 = createRepositoryConfiguration(
            demoDataConfig.getInternalRepo(2),
            null);
    RepositoryConfiguration repositoryConfiguration4 = createRepositoryConfiguration(
            demoDataConfig.getInternalRepo(3),
            null);
    RepositoryConfiguration repositoryConfiguration5 = createRepositoryConfiguration(
            demoDataConfig.getInternalRepo(4),
            null);
    repositoryConfigurationRepository.save(repositoryConfiguration1);
    repositoryConfigurationRepository.save(repositoryConfiguration2);
    repositoryConfigurationRepository.save(repositoryConfiguration3);
    repositoryConfigurationRepository.save(repositoryConfiguration4);
    repositoryConfigurationRepository.save(repositoryConfiguration5);
    // Example build configurations
    Map<String, String> genericParameters = new HashMap<>();
    genericParameters.put("KEY", "VALUE");
    // IDs are drawn from the shared sequence; configs 3 and 4 declare
    // dependencies on configs 2 and 1 respectively.
    buildConfiguration1 = BuildConfiguration.Builder.newBuilder()
            .id(sequenceHandlerRepository.getNextID(BuildConfiguration.SEQUENCE_NAME).intValue())
            .name(PNC_PROJECT_BUILD_CFG_ID)
            .project(project1)
            .description("Test build config for project newcastle")
            .buildType(BuildType.MVN)
            .buildEnvironment(environment1)
            .buildScript("mvn deploy -DskipTests=true")
            .repositoryConfiguration(repositoryConfiguration1)
            .productVersion(productVersion1)
            .scmRevision("*/v0.2")
            .genericParameters(genericParameters)
            .build();
    buildConfiguration1 = buildConfigurationRepository.save(buildConfiguration1);
    buildConfiguration2 = BuildConfiguration.Builder.newBuilder()
            .id(sequenceHandlerRepository.getNextID(BuildConfiguration.SEQUENCE_NAME).intValue())
            .name("termd")
            .project(project2)
            .buildType(BuildType.MVN)
            .description("Test configueration for Termd.")
            .buildEnvironment(environment1)
            .buildScript("mvn deploy -DskipTests=true")
            .productVersion(productVersion1)
            .repositoryConfiguration(repositoryConfiguration2)
            .scmRevision("master")
            .build();
    buildConfiguration2 = buildConfigurationRepository.save(buildConfiguration2);
    BuildConfiguration buildConfiguration3 = BuildConfiguration.Builder.newBuilder()
            .id(sequenceHandlerRepository.getNextID(BuildConfiguration.SEQUENCE_NAME).intValue())
            .name("pnc-build-agent-0.4")
            .project(project3)
            .description("Test config for Pnc Build Agent.")
            .buildType(BuildType.MVN)
            .buildEnvironment(environment1)
            .buildScript("mvn deploy -DskipTests=true")
            .productVersion(productVersion2)
            .repositoryConfiguration(repositoryConfiguration3)
            .dependency(buildConfiguration2)
            .build();
    buildConfiguration3 = buildConfigurationRepository.save(buildConfiguration3);
    BuildConfiguration buildConfiguration4 = BuildConfiguration.Builder.newBuilder()
            .id(sequenceHandlerRepository.getNextID(BuildConfiguration.SEQUENCE_NAME).intValue())
            .name("dependency-analysis-1.3")
            .project(project4)
            .description("Test config for Dependency Analysis.")
            .buildType(BuildType.MVN)
            .buildEnvironment(environment1)
            .buildScript("mvn deploy -DskipTests=true")
            .repositoryConfiguration(repositoryConfiguration4)
            .dependency(buildConfiguration1)
            .build();
    buildConfiguration4 = buildConfigurationRepository.save(buildConfiguration4);
    BuildConfiguration buildConfiguration5 = BuildConfiguration.Builder.newBuilder()
            .id(sequenceHandlerRepository.getNextID(BuildConfiguration.SEQUENCE_NAME).intValue())
            .name("maven-plugin-test")
            .project(project5)
            .description("Test build for Plugins with external downloads")
            .buildType(BuildType.MVN)
            .buildEnvironment(environment1)
            .buildScript("mvn clean deploy")
            .repositoryConfiguration(repositoryConfiguration5)
            .build();
    buildConfiguration5 = buildConfigurationRepository.save(buildConfiguration5);
    // Build config set containing the three example build configs
    buildConfigurationSet1 = BuildConfigurationSet.Builder.newBuilder()
            .name("Example Build Group 1")
            .buildConfiguration(buildConfiguration1)
            .buildConfiguration(buildConfiguration2)
            .buildConfiguration(buildConfiguration3)
            .productVersion(productVersion1)
            .build();
    BuildConfigurationSet buildConfigurationSet2 = BuildConfigurationSet.Builder.newBuilder()
            .name("Fabric Build Group")
            .buildConfiguration(buildConfiguration4)
            .productVersion(productVersion1)
            .build();
    demoUser = User.Builder.newBuilder()
            .username("demo-user")
            .firstName("Demo First Name")
            .lastName("Demo Last Name")
            .email("demo-user@pnc.com")
            .build();
    pncAdminUser = User.Builder.newBuilder()
            .username("pnc-admin")
            .firstName("pnc-admin")
            .lastName("pnc-admin")
            .email("pnc-admin@pnc.com")
            .build();
    buildConfigurationSetRepository.save(buildConfigurationSet1);
    buildConfigurationSetRepository.save(buildConfigurationSet2);
    demoUser = userRepository.save(demoUser);
    pncAdminUser = userRepository.save(pncAdminUser);
}
/**
* Build record needs to be initialized in a separate transaction so that the audited build configuration can be
* set.
*/
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public void initiliazeBuildRecordDemoData() {
TargetRepository targetRepository = TargetRepository.newBuilder()
.repositoryType(RepositoryType.MAVEN)
.repositoryPath("builds-untested")
.identifier("indy-maven")
.temporaryRepo(false)
.build();
targetRepositoryRepository.save(targetRepository);
Artifact builtArtifact1 = Artifact.Builder.newBuilder()
.identifier("demo:built-artifact1:jar:1.0")
.targetRepository(targetRepository)
.filename("demo built artifact 1")
.md5("4af310bf0ef67bc7d143f35818ea1ed2")
.sha1("3a8ff25c890f2a4a283876a91037ff6c57474a14")
.sha256("1660168483cb8a05d1cc2e77c861682a42ed9517ba945159d5538950c5db00fa")
.size(10L)
.artifactQuality(ArtifactQuality.NEW)
.build();
Artifact builtArtifact2 = Artifact.Builder.newBuilder()
.identifier("demo:built-artifact2:jar:1.0")
.targetRepository(targetRepository)
.filename("demo built artifact 2")
.md5("4af310bf0ef67bc7d143f35818ea1ed2")
.sha1("61dad16e14438d2d8c8cbd18b267d62944f37898")
.sha256("2fafc2ed0f752ac2540283d48c5cd663254a853c5cb13dec02dce023fc7471a9")
.size(11L)
.artifactQuality(ArtifactQuality.NEW)
.build();
Artifact builtArtifact3 = Artifact.Builder.newBuilder()
.identifier("demo:built-artifact11:jar:1.0")
.targetRepository(targetRepository)
.filename("demo built artifact 11")
.md5("5c8e1503e77dc8e370610098e01f0a8e")
.sha1("550748f6f58ed8d4f6b63850a867ac207da30013")
.sha256("b39f88c9937f201981767e539025121971e72bc590ea20ed7fdfffafc05f55a9")
.size(10L)
.artifactQuality(ArtifactQuality.NEW)
.build();
Artifact builtArtifact4 = Artifact.Builder.newBuilder()
.identifier("demo:built-artifact22:jar:1.0")
.targetRepository(targetRepository)
.filename("demo built artifact 21")
.md5("48312fb24c7b2a116c2139d5b39bad66")
.sha1("6ce2fd75c35e7eed2c45338b943be34d0b974f16")
.sha256("61c9ccd3ba0013311ddb89cb9a29389b6761061bdcdfb48f0096bf98c7279a21")
.size(11L)
.artifactQuality(ArtifactQuality.NEW)
.build();
builtArtifact1 = artifactRepository.save(builtArtifact1);
builtArtifact2 = artifactRepository.save(builtArtifact2);
builtArtifact3 = artifactRepository.save(builtArtifact3);
builtArtifact4 = artifactRepository.save(builtArtifact4);
Artifact importedArtifact1 = Artifact.Builder.newBuilder()
.identifier("demo:imported-artifact1:jar:1.0")
.targetRepository(targetRepository)
.filename("demo imported artifact 1")
.originUrl("http://central/import1.jar")
.importDate(Date.from(Instant.now()))
.md5("md-fake-abcd1234")
.sha1("sha1-fake-abcd1234")
.sha256("sha256-fake-abcd1234")
.size(10L)
.artifactQuality(ArtifactQuality.NEW)
.deployPath("/imported1")
.build();
Artifact importedArtifact2 = Artifact.Builder.newBuilder()
.identifier("demo:imported-artifact2:jar:1.0")
.targetRepository(targetRepository)
.filename("demo imported artifact 2")
.originUrl("http://central/import2.jar")
.importDate(Date.from(Instant.now()))
.md5("md-fake-abcd1234")
.sha1("sha1-fake-abcd1234")
.sha256("sha256-fake-abcd1234")
.size(10L)
.artifactQuality(ArtifactQuality.NEW)
.deployPath("/imported2")
.build();
importedArtifact1 = artifactRepository.save(importedArtifact1);
importedArtifact2 = artifactRepository.save(importedArtifact2);
Set<BuildRecord> buildRecords = new HashSet<>();
final int INITIAL_REVISION = 1;
IdRev buildConfig1AuditIdRev = new IdRev(buildConfiguration1.getId(), INITIAL_REVISION);
BuildConfigurationAudited buildConfigAudited1 = buildConfigurationAuditedRepository
.queryById(buildConfig1AuditIdRev);
if (buildConfigAudited1 != null) {
int nextId = datastore.getNextBuildRecordId();
log.info("####nextId: " + nextId);
BuildRecord buildRecord1 = BuildRecord.Builder.newBuilder()
.id(nextId)
.buildConfigurationAudited(buildConfigAudited1)
.submitTime(Timestamp.from(Instant.now().minus(8, ChronoUnit.MINUTES)))
.startTime(Timestamp.from(Instant.now().minus(5, ChronoUnit.MINUTES)))
.endTime(Timestamp.from(Instant.now()))
.dependency(importedArtifact1)
.dependency(importedArtifact2)
.user(pncAdminUser)
.repourLog("This is a wannabe alignment log.")
.buildLog("Very short demo log: The quick brown fox jumps over the lazy dog.")
.status(BuildStatus.SUCCESS)
.productMilestone(demoProductMilestone1)
.sshCommand("ssh worker@localhost -P 9999")
.sshPassword("dontchangeme")
.buildEnvironment(buildConfigAudited1.getBuildEnvironment())
.scmRepoURL(buildConfigAudited1.getRepositoryConfiguration().getInternalUrl())
.scmRevision(buildConfigAudited1.getScmRevision())
.executionRootName("org.jboss.pnc:parent")
.executionRootVersion("1.2.3")
.temporaryBuild(false)
.build();
log.info("Saving buildRecord1: " + buildRecord1);
BuildRecord savedBuildRecord1 = buildRecordRepository.save(buildRecord1);
builtArtifact1.setBuildRecord(savedBuildRecord1);
builtArtifact2.setBuildRecord(savedBuildRecord1);
log.info(
"Saved buildRecord1: " + savedBuildRecord1 + "BuildConfigurationAuditedIdRev: "
+ savedBuildRecord1.getBuildConfigurationAuditedIdRev());
buildRecords.add(buildRecord1);
nextId = datastore.getNextBuildRecordId();
log.info("####nextId: " + nextId);
BuildRecord tempRecord1 = BuildRecord.Builder.newBuilder()
.id(nextId)
.buildConfigurationAudited(buildConfigAudited1)
.submitTime(Timestamp.from(Instant.now()))
.startTime(Timestamp.from(Instant.now()))
.endTime(Timestamp.from(Instant.now()))
.user(pncAdminUser)
.repourLog("This is a wannabe alignment log.")
.buildLog("Very short demo log: The quick brown fox jumps over the lazy dog.")
.status(BuildStatus.SUCCESS)
.buildEnvironment(buildConfigAudited1.getBuildEnvironment())
.scmRepoURL(buildConfigAudited1.getRepositoryConfiguration().getInternalUrl())
.scmRevision(buildConfigAudited1.getScmRevision())
.executionRootName("org.jboss.pnc:parent")
.executionRootVersion("1.2.3")
.temporaryBuild(true)
.build();
log.info("Saving tempRecord1: " + tempRecord1);
BuildRecord savedTempRecord1 = buildRecordRepository.save(tempRecord1);
builtArtifact3.setBuildRecord(savedTempRecord1);
builtArtifact4.setBuildRecord(savedTempRecord1);
log.info(
"Saved buildRecord1: " + savedTempRecord1 + "BuildConfigurationAuditedIdRev: "
+ savedTempRecord1.getBuildConfigurationAuditedIdRev());
buildRecords.add(tempRecord1);
}
Artifact builtArtifact5 = Artifact.Builder.newBuilder()
.identifier("demo:built-artifact3:jar:1.0")
.targetRepository(targetRepository)
.filename("demo built artifact 3")
.md5("17353a18678c6c249e3052edec2e4c5c")
.sha1("61dad16e14438d2d8c8cbd18b267d62944f37898")
.sha256("1660168483cb8a05d1cc2e77c861682a42ed9517ba945159d5538950c5db00fa")
.size(10L)
.artifactQuality(ArtifactQuality.NEW)
.deployPath("/built3")
.build();
Artifact builtArtifact6 = Artifact.Builder.newBuilder()
.identifier("demo:built-artifact4:jar:1.0")
.targetRepository(targetRepository)
.filename("demo built artifact 4")
.md5("md-fake-abcd1234")
.sha1("sha1-fake-abcd1234")
.sha256("sha256-fake-abcd1234")
.size(10L)
.artifactQuality(ArtifactQuality.NEW)
.deployPath("/built4")
.build();
Artifact builtArtifact7 = Artifact.Builder.newBuilder()
.identifier("demo:built-artifact5:jar:1.0")
.targetRepository(targetRepository)
.filename("demo built artifact 7")
.md5("adsfs6df548w1327cx78he873217df98")
.sha1("a56asdf87a3cvx231b87987fasd6f5ads4f32sdf")
.sha256("sad5f64sf87b3cvx2b1v87tr89h7d3f5g432xcz1zv87fawrv23n8796534564er")
.size(10L)
.artifactQuality(ArtifactQuality.NEW)
.deployPath("/built5")
.build();
Artifact builtArtifact8 = Artifact.Builder.newBuilder()
.identifier("demo:built-artifact6:jar:1.0")
.targetRepository(targetRepository)
.filename("demo built artifact 8")
.md5("md-fake-abcdefg1234")
.sha1("sha1-fake-abcdefg1234")
.sha256("sha256-fake-abcdefg1234")
.size(10L)
.artifactQuality(ArtifactQuality.NEW)
.deployPath("/built6")
.build();
builtArtifact5 = artifactRepository.save(builtArtifact5);
builtArtifact6 = artifactRepository.save(builtArtifact6);
builtArtifact7 = artifactRepository.save(builtArtifact7);
builtArtifact8 = artifactRepository.save(builtArtifact8);
Artifact dependencyBuiltArtifact1 = artifactRepository
.queryByPredicates(withIdentifierAndSha256(builtArtifact1.getIdentifier(), builtArtifact1.getSha256()));
// For timestamp tests where concrete timestamp is needed
Calendar calendar = Calendar.getInstance();
calendar.set(2019, Calendar.JANUARY, 10);
IdRev buildConfig2AuditIdRev = new IdRev(buildConfiguration2.getId(), INITIAL_REVISION);
BuildConfigurationAudited buildConfigAudited2 = buildConfigurationAuditedRepository
.queryById(buildConfig2AuditIdRev);
if (buildConfigAudited2 != null) {
int nextId = datastore.getNextBuildRecordId();
log.info("####nextId: " + nextId);
BuildRecord buildRecord2 = BuildRecord.Builder.newBuilder()
.id(nextId)
.buildConfigurationAudited(buildConfigAudited2)
.submitTime(Timestamp.from(Instant.now().minus(8, ChronoUnit.MINUTES)))
.startTime(Timestamp.from(Instant.now().minus(5, ChronoUnit.MINUTES)))
.endTime(Timestamp.from(Instant.now()))
.dependency(dependencyBuiltArtifact1)
.dependency(importedArtifact1)
.user(demoUser)
.buildLog("Very short demo log: The quick brown fox jumps over the lazy dog.")
.status(BuildStatus.SUCCESS)
.buildEnvironment(buildConfigAudited2.getBuildEnvironment())
.executionRootName("org.jboss.pnc:parent")
.executionRootVersion("1.2.4")
.temporaryBuild(false)
.build();
nextId = datastore.getNextBuildRecordId();
log.info("####nextId: " + nextId);
BuildRecord savedBuildRecord2 = buildRecordRepository.save(buildRecord2);
builtArtifact5.setBuildRecord(savedBuildRecord2);
builtArtifact6.setBuildRecord(savedBuildRecord2);
buildRecords.add(buildRecord2);
BuildRecord tempRecord1 = BuildRecord.Builder.newBuilder()
.id(nextId)
.buildConfigurationAudited(buildConfigAudited2)
.submitTime(Timestamp.from(calendar.toInstant().minus(8, ChronoUnit.HOURS)))
.startTime(Timestamp.from(calendar.toInstant().minus(5, ChronoUnit.HOURS)))
.endTime(Timestamp.from(calendar.toInstant()))
.user(demoUser)
.buildLog("Is it free?")
.status(BuildStatus.SUCCESS)
.buildEnvironment(buildConfigAudited2.getBuildEnvironment())
.executionRootName("org.jboss.pnc:parent")
.executionRootVersion("1.2.4")
.temporaryBuild(true)
.build();
BuildRecord savedTempRecord1 = buildRecordRepository.save(tempRecord1);
builtArtifact7.setBuildRecord(savedTempRecord1);
builtArtifact8.setBuildRecord(savedTempRecord1);
buildRecords.add(tempRecord1);
}
BuildConfigSetRecord buildConfigSetRecord1 = BuildConfigSetRecord.Builder.newBuilder()
.buildConfigurationSet(buildConfigurationSet1)
.startTime(Timestamp.from(Instant.now()))
.endTime(Timestamp.from(Instant.now()))
.user(demoUser)
.status(BuildStatus.FAILED)
.temporaryBuild(false)
.build();
buildConfigSetRecordRepository.save(buildConfigSetRecord1);
BuildConfigSetRecord buildConfigSetRecord2 = BuildConfigSetRecord.Builder.newBuilder()
.buildConfigurationSet(buildConfigurationSet1)
.buildRecords(buildRecords)
.startTime(Timestamp.from(Instant.now()))
.endTime(Timestamp.from(Instant.now()))
.user(demoUser)
.status(BuildStatus.SUCCESS)
.temporaryBuild(false)
.build();
buildConfigSetRecordRepository.save(buildConfigSetRecord2);
BuildConfigSetRecord buildConfigSetRecord3 = BuildConfigSetRecord.Builder.newBuilder()
.buildConfigurationSet(buildConfigurationSet1)
.startTime(Timestamp.from(calendar.toInstant().minus(20, ChronoUnit.DAYS)))
.endTime(Timestamp.from(calendar.toInstant().minus(20, ChronoUnit.DAYS)))
.user(demoUser)
.status(BuildStatus.SUCCESS)
.temporaryBuild(true)
.build();
buildConfigSetRecordRepository.save(buildConfigSetRecord3);
demoProductMilestone1 = productMilestoneRepository.queryById(demoProductMilestone1.getId());
demoProductMilestone1.addDistributedArtifact(builtArtifact1);
demoProductMilestone1.addDistributedArtifact(builtArtifact5);
demoProductMilestone1.addDistributedArtifact(importedArtifact2);
demoProductMilestone1 = productMilestoneRepository.save(demoProductMilestone1);
}
private RepositoryConfiguration createRepositoryConfiguration(String internalScmUrl, String externalUrl) {
return RepositoryConfiguration.Builder.newBuilder()
.internalUrl(internalScmUrl)
.externalUrl(externalUrl)
.build();
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.budgets.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Request of CreateNotification
* </p>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateNotificationRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The {@code accountId} that is associated with the budget to create a notification for. */
    private String accountId;

    /**
     * The name of the budget that Amazon Web Services notifies you about. Budget names must be
     * unique within an account.
     */
    private String budgetName;

    /** The notification to create. */
    private Notification notification;

    /**
     * The subscribers to associate with the notification. Each notification can have one SNS
     * subscriber and up to 10 email subscribers.
     */
    private java.util.List<Subscriber> subscribers;

    /**
     * Sets the {@code accountId} that is associated with the budget that you want to create a
     * notification for.
     *
     * @param accountId
     *        The {@code accountId} that is associated with the budget that you want to create a
     *        notification for.
     */
    public void setAccountId(String accountId) {
        this.accountId = accountId;
    }

    /**
     * Returns the {@code accountId} that is associated with the budget that you want to create a
     * notification for.
     *
     * @return The {@code accountId} that is associated with the budget that you want to create a
     *         notification for.
     */
    public String getAccountId() {
        return this.accountId;
    }

    /**
     * Fluent variant of {@link #setAccountId(String)}.
     *
     * @param accountId
     *        The {@code accountId} that is associated with the budget that you want to create a
     *        notification for.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateNotificationRequest withAccountId(String accountId) {
        setAccountId(accountId);
        return this;
    }

    /**
     * Sets the name of the budget that you want Amazon Web Services to notify you about. Budget
     * names must be unique within an account.
     *
     * @param budgetName
     *        The name of the budget that you want Amazon Web Services to notify you about. Budget
     *        names must be unique within an account.
     */
    public void setBudgetName(String budgetName) {
        this.budgetName = budgetName;
    }

    /**
     * Returns the name of the budget that you want Amazon Web Services to notify you about.
     *
     * @return The name of the budget that you want Amazon Web Services to notify you about. Budget
     *         names must be unique within an account.
     */
    public String getBudgetName() {
        return this.budgetName;
    }

    /**
     * Fluent variant of {@link #setBudgetName(String)}.
     *
     * @param budgetName
     *        The name of the budget that you want Amazon Web Services to notify you about. Budget
     *        names must be unique within an account.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateNotificationRequest withBudgetName(String budgetName) {
        setBudgetName(budgetName);
        return this;
    }

    /**
     * Sets the notification that you want to create.
     *
     * @param notification
     *        The notification that you want to create.
     */
    public void setNotification(Notification notification) {
        this.notification = notification;
    }

    /**
     * Returns the notification that you want to create.
     *
     * @return The notification that you want to create.
     */
    public Notification getNotification() {
        return this.notification;
    }

    /**
     * Fluent variant of {@link #setNotification(Notification)}.
     *
     * @param notification
     *        The notification that you want to create.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateNotificationRequest withNotification(Notification notification) {
        setNotification(notification);
        return this;
    }

    /**
     * Returns the list of subscribers that you want to associate with the notification. Each
     * notification can have one SNS subscriber and up to 10 email subscribers.
     *
     * @return A list of subscribers that you want to associate with the notification.
     */
    public java.util.List<Subscriber> getSubscribers() {
        return subscribers;
    }

    /**
     * Sets the list of subscribers that you want to associate with the notification. Each
     * notification can have one SNS subscriber and up to 10 email subscribers. The supplied
     * collection is defensively copied; passing {@code null} clears the list.
     *
     * @param subscribers
     *        A list of subscribers that you want to associate with the notification.
     */
    public void setSubscribers(java.util.Collection<Subscriber> subscribers) {
        this.subscribers = (subscribers == null) ? null : new java.util.ArrayList<Subscriber>(subscribers);
    }

    /**
     * Appends the given subscribers to the existing list (if any). Use
     * {@link #setSubscribers(java.util.Collection)} or {@link #withSubscribers(java.util.Collection)}
     * if you want to override the existing values.
     *
     * @param subscribers
     *        Subscribers that you want to associate with the notification. Each notification can
     *        have one SNS subscriber and up to 10 email subscribers.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateNotificationRequest withSubscribers(Subscriber... subscribers) {
        if (this.subscribers == null) {
            // Lazily initialize with the exact capacity needed for the varargs.
            this.subscribers = new java.util.ArrayList<Subscriber>(subscribers.length);
        }
        java.util.Collections.addAll(this.subscribers, subscribers);
        return this;
    }

    /**
     * Fluent variant of {@link #setSubscribers(java.util.Collection)} — replaces the existing list.
     *
     * @param subscribers
     *        A list of subscribers that you want to associate with the notification.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateNotificationRequest withSubscribers(java.util.Collection<Subscriber> subscribers) {
        setSubscribers(subscribers);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging.
     * Sensitive data will be redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getAccountId() != null) {
            sb.append("AccountId: ").append(getAccountId()).append(",");
        }
        if (getBudgetName() != null) {
            sb.append("BudgetName: ").append(getBudgetName()).append(",");
        }
        if (getNotification() != null) {
            sb.append("Notification: ").append(getNotification()).append(",");
        }
        if (getSubscribers() != null) {
            sb.append("Subscribers: ").append(getSubscribers());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof CreateNotificationRequest)) {
            return false;
        }
        CreateNotificationRequest other = (CreateNotificationRequest) obj;
        return java.util.Objects.equals(getAccountId(), other.getAccountId())
                && java.util.Objects.equals(getBudgetName(), other.getBudgetName())
                && java.util.Objects.equals(getNotification(), other.getNotification())
                && java.util.Objects.equals(getSubscribers(), other.getSubscribers());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the identical 31-based accumulation (seed 1, null -> 0) as the
        // hand-rolled prime chain it replaces, so hash values are unchanged.
        return java.util.Objects.hash(getAccountId(), getBudgetName(), getNotification(), getSubscribers());
    }

    @Override
    public CreateNotificationRequest clone() {
        return (CreateNotificationRequest) super.clone();
    }
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.worker.block;
import alluxio.Configuration;
import alluxio.PropertyKey;
import alluxio.StorageTierAssoc;
import alluxio.WorkerStorageTierAssoc;
import alluxio.exception.BlockAlreadyExistsException;
import alluxio.exception.BlockDoesNotExistException;
import alluxio.exception.ExceptionMessage;
import alluxio.exception.InvalidWorkerStateException;
import alluxio.exception.PreconditionMessage;
import alluxio.exception.WorkerOutOfSpaceException;
import alluxio.exception.status.AlluxioStatusException;
import alluxio.underfs.UfsManager;
import alluxio.underfs.UnderFileSystem;
import alluxio.underfs.options.OpenOptions;
import alluxio.util.network.NetworkAddressUtils;
import alluxio.worker.block.io.BlockReader;
import alluxio.worker.block.io.LocalFileBlockWriter;
import alluxio.worker.block.meta.UnderFileSystemBlockMeta;
import com.google.common.base.Preconditions;
import com.google.common.io.Closer;
import io.netty.buffer.ByteBuf;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import javax.annotation.concurrent.NotThreadSafe;
/**
* This class implements a {@link BlockReader} to read a block directly from UFS, and
 * optionally cache the block to the Alluxio worker if the whole block is read.
*/
@NotThreadSafe
public final class UnderFileSystemBlockReader implements BlockReader {
  private static final Logger LOG = LoggerFactory.getLogger(UnderFileSystemBlockReader.class);

  /** An object storing the mapping of tier aliases to ordinals. */
  private final StorageTierAssoc mStorageTierAssoc = new WorkerStorageTierAssoc();

  /** The initial size of the block allocated in Alluxio storage when the block is cached. */
  private final long mInitialBlockSize;
  /** The block metadata for the UFS block. */
  private final UnderFileSystemBlockMeta mBlockMeta;
  /** The Local block store. It is used to interact with Alluxio. */
  private final BlockStore mLocalBlockStore;
  /** The input stream to read from UFS. */
  private InputStream mUnderFileSystemInputStream;
  /** The block writer to write the block to Alluxio. */
  private LocalFileBlockWriter mBlockWriter;
  /** If set, the reader is closed and should not be used afterwards. */
  private boolean mClosed;
  /** The manager for different ufs. */
  private final UfsManager mUfsManager;

  /**
   * The position of mUnderFileSystemInputStream (if not null) is blockStart + mInStreamPos.
   * When mUnderFileSystemInputStream is null, this is set to -1 (an invalid position). Check
   * mUnderFileSystemInputStream directly to see whether the stream is usable instead of relying
   * on this sentinel value.
   */
  private long mInStreamPos;

  /**
   * Creates an instance of {@link UnderFileSystemBlockReader} and initializes it with a reading
   * offset.
   *
   * @param blockMeta the block meta
   * @param offset the position within the block to start the read
   * @param localBlockStore the Local block store
   * @param ufsManager the manager of ufs
   * @return the block reader
   * @throws BlockDoesNotExistException if the UFS block does not exist in the UFS block store
   */
  public static UnderFileSystemBlockReader create(UnderFileSystemBlockMeta blockMeta, long offset,
      BlockStore localBlockStore, UfsManager ufsManager)
      throws BlockDoesNotExistException, IOException {
    UnderFileSystemBlockReader ufsBlockReader =
        new UnderFileSystemBlockReader(blockMeta, localBlockStore, ufsManager);
    ufsBlockReader.init(offset);
    return ufsBlockReader;
  }

  /**
   * Creates an instance of {@link UnderFileSystemBlockReader}.
   *
   * @param blockMeta the block meta
   * @param localBlockStore the Local block store
   * @param ufsManager the manager of ufs
   */
  private UnderFileSystemBlockReader(UnderFileSystemBlockMeta blockMeta, BlockStore localBlockStore,
      UfsManager ufsManager) {
    mInitialBlockSize = Configuration.getBytes(PropertyKey.WORKER_FILE_BUFFER_SIZE);
    mBlockMeta = blockMeta;
    mLocalBlockStore = localBlockStore;
    mInStreamPos = -1;
    mUfsManager = ufsManager;
  }

  /**
   * Initializes the reader. This is only called in the factory method.
   *
   * @param offset the position within the block to start the read
   * @throws BlockDoesNotExistException if the UFS block does not exist in the UFS block store
   */
  private void init(long offset) throws BlockDoesNotExistException, IOException {
    UnderFileSystem ufs = mUfsManager.get(mBlockMeta.getMountId());
    ufs.connectFromWorker(
        NetworkAddressUtils.getConnectHost(NetworkAddressUtils.ServiceType.WORKER_RPC));
    if (!ufs.isFile(mBlockMeta.getUnderFileSystemPath())) {
      throw new BlockDoesNotExistException(
          ExceptionMessage.UFS_PATH_DOES_NOT_EXIST.getMessage(mBlockMeta.getUnderFileSystemPath()));
    }
    updateUnderFileSystemInputStream(offset);
    updateBlockWriter(offset);
  }

  @Override
  public ReadableByteChannel getChannel() {
    throw new UnsupportedOperationException("UFSFileBlockReader#getChannel is not supported");
  }

  @Override
  public long getLength() {
    return mBlockMeta.getBlockSize();
  }

  @Override
  public ByteBuffer read(long offset, long length) throws IOException {
    Preconditions.checkState(!mClosed);
    updateUnderFileSystemInputStream(offset);
    updateBlockWriter(offset);

    long bytesToRead = Math.min(length, mBlockMeta.getBlockSize() - offset);
    if (bytesToRead <= 0) {
      return ByteBuffer.allocate(0);
    }
    byte[] data = new byte[(int) bytesToRead];
    int bytesRead = 0;
    Preconditions.checkNotNull(mUnderFileSystemInputStream);
    while (bytesRead < bytesToRead) {
      int read;
      try {
        read = mUnderFileSystemInputStream.read(data, bytesRead, (int) (bytesToRead - bytesRead));
      } catch (IOException e) {
        throw AlluxioStatusException.fromIOException(e);
      }
      if (read == -1) {
        break;
      }
      bytesRead += read;
    }
    mInStreamPos += bytesRead;

    // We should always read the number of bytes as expected since the UFS file length (hence block
    // size) should be always accurate.
    Preconditions
        .checkState(bytesRead == bytesToRead, PreconditionMessage.NOT_ENOUGH_BYTES_READ.toString(),
            bytesRead, bytesToRead, mBlockMeta.getUnderFileSystemPath());
    if (mBlockWriter != null && mBlockWriter.getPosition() < mInStreamPos) {
      // Copy the portion the writer has not yet persisted so the local cache stays contiguous.
      Preconditions.checkState(mBlockWriter.getPosition() >= offset);
      ByteBuffer buffer = ByteBuffer.wrap(data, (int) (mBlockWriter.getPosition() - offset),
          (int) (mInStreamPos - mBlockWriter.getPosition()));
      mBlockWriter.append(buffer.duplicate());
    }
    return ByteBuffer.wrap(data, 0, bytesRead);
  }

  /**
   * This interface is supposed to be used for sequential block reads.
   *
   * @param buf the byte buffer
   * @return the number of bytes read, -1 if it reaches EOF and none was read
   */
  @Override
  public int transferTo(ByteBuf buf) throws IOException {
    Preconditions.checkState(!mClosed);
    if (mUnderFileSystemInputStream == null) {
      return -1;
    }
    if (mBlockMeta.getBlockSize() <= mInStreamPos) {
      return -1;
    }
    // Make a copy of the state to keep track of what we have read in this transferTo call.
    ByteBuf bufCopy = null;
    if (mBlockWriter != null) {
      bufCopy = buf.duplicate();
      bufCopy.readerIndex(bufCopy.writerIndex());
    }
    int bytesToRead =
        (int) Math.min((long) buf.writableBytes(), mBlockMeta.getBlockSize() - mInStreamPos);
    int bytesRead = buf.writeBytes(mUnderFileSystemInputStream, bytesToRead);
    if (bytesRead <= 0) {
      return bytesRead;
    }

    mInStreamPos += bytesRead;

    if (mBlockWriter != null) {
      // Mirror exactly the bytes written into buf during this call into the local block cache.
      bufCopy.writerIndex(buf.writerIndex());
      while (bufCopy.readableBytes() > 0) {
        mBlockWriter.transferFrom(bufCopy);
      }
    }

    return bytesRead;
  }

  /**
   * Closes the block reader. After this, this block reader should not be used anymore.
   * This is recommended to be called after the client finishes reading the block. It is usually
   * triggered when the client unlocks the block.
   */
  @Override
  public void close() throws IOException {
    if (mClosed) {
      return;
    }
    try {
      // This aborts the block if the block is not fully read. Even if aborting fails, the
      // writer and the UFS input stream must still be released below (previously an exception
      // here leaked both streams while mClosed was set, making the leak unrecoverable).
      try {
        updateBlockWriter(mBlockMeta.getBlockSize());
      } finally {
        Closer closer = Closer.create();
        if (mBlockWriter != null) {
          closer.register(mBlockWriter);
        }
        if (mUnderFileSystemInputStream != null) {
          closer.register(mUnderFileSystemInputStream);
        }
        closer.close();
      }
    } finally {
      mClosed = true;
    }
  }

  @Override
  public boolean isClosed() {
    return mClosed;
  }

  /**
   * Updates the UFS input stream given an offset to read.
   *
   * @param offset the read offset within the block
   */
  private void updateUnderFileSystemInputStream(long offset) throws IOException {
    if ((mUnderFileSystemInputStream != null) && offset != mInStreamPos) {
      // The stream cannot seek; drop it and reopen at the requested offset below.
      mUnderFileSystemInputStream.close();
      mUnderFileSystemInputStream = null;
      mInStreamPos = -1;
    }

    if (mUnderFileSystemInputStream == null && offset < mBlockMeta.getBlockSize()) {
      UnderFileSystem ufs = mUfsManager.get(mBlockMeta.getMountId());
      mUnderFileSystemInputStream = ufs.open(mBlockMeta.getUnderFileSystemPath(),
          OpenOptions.defaults().setOffset(mBlockMeta.getOffset() + offset));
      mInStreamPos = offset;
    }
  }

  /**
   * Updates the block writer given an offset to read. If the offset is beyond the current
   * position of the block writer, the block writer will be aborted.
   *
   * @param offset the read offset
   */
  private void updateBlockWriter(long offset) throws IOException {
    try {
      if (mBlockWriter != null && offset > mBlockWriter.getPosition()) {
        mBlockWriter.close();
        mBlockWriter = null;
        mLocalBlockStore.abortBlock(mBlockMeta.getSessionId(), mBlockMeta.getBlockId());
      }
    } catch (BlockDoesNotExistException e) {
      // This can only happen when the session is expired.
      LOG.warn("Block {} does not exist when being aborted. The session may have expired.",
          mBlockMeta.getBlockId());
    } catch (BlockAlreadyExistsException | InvalidWorkerStateException | IOException e) {
      // We cannot skip the exception here because we need to make sure that the user of this
      // reader does not commit the block if it fails to abort the block.
      throw AlluxioStatusException.fromCheckedException(e);
    }
    try {
      if (mBlockWriter == null && offset == 0 && !mBlockMeta.isNoCache()) {
        BlockStoreLocation loc = BlockStoreLocation.anyDirInTier(mStorageTierAssoc.getAlias(0));
        String blockPath = mLocalBlockStore
            .createBlock(mBlockMeta.getSessionId(), mBlockMeta.getBlockId(), loc,
                mInitialBlockSize).getPath();
        mBlockWriter = new LocalFileBlockWriter(blockPath);
      }
    } catch (IOException | BlockAlreadyExistsException | WorkerOutOfSpaceException e) {
      // This can happen when there are concurrent UFS readers who are all trying to cache to block.
      LOG.debug(
          "Failed to update block writer for UFS block [blockId: {}, ufsPath: {}, offset: {}]",
          mBlockMeta.getBlockId(), mBlockMeta.getUnderFileSystemPath(), offset, e);
      mBlockWriter = null;
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.jms.client;
import javax.jms.BytesMessage;
import javax.jms.CompletionListener;
import javax.jms.DeliveryMode;
import javax.jms.Destination;
import javax.jms.IllegalStateException;
import javax.jms.InvalidDestinationException;
import javax.jms.JMSException;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.MessageProducer;
import javax.jms.ObjectMessage;
import javax.jms.Queue;
import javax.jms.QueueSender;
import javax.jms.StreamMessage;
import javax.jms.TextMessage;
import javax.jms.Topic;
import javax.jms.TopicPublisher;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientProducer;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.api.core.client.SendAcknowledgementHandler;
import org.apache.activemq.artemis.utils.UUID;
import org.apache.activemq.artemis.utils.UUIDGenerator;
/**
* ActiveMQ Artemis implementation of a JMS MessageProducer.
*/
public class ActiveMQMessageProducer implements MessageProducer, QueueSender, TopicPublisher {
   // The connection that created this producer; supplies the client ID and the
   // thread-aware context used by close().
   private final ActiveMQConnection connection;

   // Connection identifier: the JMS client ID when set, otherwise the connection's UID.
   private final SimpleString connID;

   // The underlying core producer that performs the actual sends.
   private final ClientProducer clientProducer;

   // The core session this producer belongs to.
   private final ClientSession clientSession;

   // JMS producer attributes; defaults come from javax.jms.Message constants.
   private boolean disableMessageID = false;

   private boolean disableMessageTimestamp = false;

   private int defaultPriority = Message.DEFAULT_PRIORITY;

   private long defaultTimeToLive = Message.DEFAULT_TIME_TO_LIVE;

   private int defaultDeliveryMode = Message.DEFAULT_DELIVERY_MODE;

   private long defaultDeliveryDelay = Message.DEFAULT_DELIVERY_DELAY;

   // Destination the producer was created for; null for an anonymous producer
   // (see checkDefaultDestination / checkDestination).
   private final ActiveMQDestination defaultDestination;
// Constructors --------------------------------------------------
protected ActiveMQMessageProducer(final ActiveMQConnection connection,
final ClientProducer producer,
final ActiveMQDestination defaultDestination,
final ClientSession clientSession) throws JMSException {
this.connection = connection;
connID = connection.getClientID() != null ? new SimpleString(connection.getClientID()) : connection.getUID();
this.clientProducer = producer;
this.defaultDestination = defaultDestination;
this.clientSession = clientSession;
}
// MessageProducer implementation --------------------------------
public void setDisableMessageID(final boolean value) throws JMSException {
checkClosed();
disableMessageID = value;
}
public boolean getDisableMessageID() throws JMSException {
checkClosed();
return disableMessageID;
}
public void setDisableMessageTimestamp(final boolean value) throws JMSException {
checkClosed();
disableMessageTimestamp = value;
}
public boolean getDisableMessageTimestamp() throws JMSException {
checkClosed();
return disableMessageTimestamp;
}
public void setDeliveryMode(final int deliveryMode) throws JMSException {
checkClosed();
if (deliveryMode != DeliveryMode.NON_PERSISTENT && deliveryMode != DeliveryMode.PERSISTENT) {
throw ActiveMQJMSClientBundle.BUNDLE.illegalDeliveryMode(deliveryMode);
}
defaultDeliveryMode = deliveryMode;
}
public int getDeliveryMode() throws JMSException {
checkClosed();
return defaultDeliveryMode;
}
public void setPriority(final int defaultPriority) throws JMSException {
checkClosed();
if (defaultPriority < 0 || defaultPriority > 9) {
throw new JMSException("Illegal priority value: " + defaultPriority);
}
this.defaultPriority = defaultPriority;
}
public int getPriority() throws JMSException {
checkClosed();
return defaultPriority;
}
public void setTimeToLive(final long timeToLive) throws JMSException {
checkClosed();
defaultTimeToLive = timeToLive;
}
public long getTimeToLive() throws JMSException {
checkClosed();
return defaultTimeToLive;
}
public Destination getDestination() throws JMSException {
checkClosed();
return defaultDestination;
}
   /**
    * Closes the underlying core producer.
    * <p>
    * JMS forbids closing a producer from within one of its own CompletionListener callbacks,
    * hence the thread-context assertion before closing.
    *
    * @throws JMSException if closing the core producer fails
    */
   public void close() throws JMSException {
      connection.getThreadAwareContext().assertNotCompletionListenerThread();
      try {
         clientProducer.close();
      }
      catch (ActiveMQException e) {
         // Convert the core exception into its JMS equivalent for the caller.
         throw JMSExceptionHelper.convertFromActiveMQException(e);
      }
   }
   /**
    * Sends a message to the default destination using this producer's default delivery mode,
    * priority and time-to-live.
    *
    * @throws UnsupportedOperationException if this producer has no default destination
    */
   public void send(final Message message) throws JMSException {
      checkDefaultDestination();
      doSendx(defaultDestination, message, defaultDeliveryMode, defaultPriority, defaultTimeToLive, null);
   }

   /**
    * Sends a message to the default destination with explicit delivery mode, priority and
    * time-to-live.
    *
    * @throws UnsupportedOperationException if this producer has no default destination
    */
   public void send(final Message message,
                    final int deliveryMode,
                    final int priority,
                    final long timeToLive) throws JMSException {
      checkDefaultDestination();
      doSendx(defaultDestination, message, deliveryMode, priority, timeToLive, null);
   }

   /**
    * Sends a message to the given destination using this producer's default delivery mode,
    * priority and time-to-live. Delegates to the fully-specified overload below.
    */
   public void send(final Destination destination, final Message message) throws JMSException {
      send(destination, message, defaultDeliveryMode, defaultPriority, defaultTimeToLive);
   }

   /**
    * Sends a message to the given destination with explicit delivery mode, priority and
    * time-to-live.
    *
    * @throws InvalidDestinationException if the destination is null or foreign
    * @throws UnsupportedOperationException if this producer already has a default destination
    */
   public void send(final Destination destination,
                    final Message message,
                    final int deliveryMode,
                    final int priority,
                    final long timeToLive) throws JMSException {
      checkClosed();
      checkDestination(destination);
      doSendx((ActiveMQDestination) destination, message, deliveryMode, priority, timeToLive, null);
   }
@Override
public void setDeliveryDelay(long deliveryDelay) throws JMSException {
this.defaultDeliveryDelay = deliveryDelay;
}
@Override
public long getDeliveryDelay() throws JMSException {
return defaultDeliveryDelay;
}
   /**
    * Asynchronously sends a message to the default destination using this producer's default
    * delivery mode, priority and time-to-live; the listener is notified on completion.
    */
   @Override
   public void send(Message message, CompletionListener completionListener) throws JMSException {
      send(message, defaultDeliveryMode, defaultPriority, defaultTimeToLive, completionListener);
   }

   /**
    * Asynchronously sends a message to the default destination with explicit delivery mode,
    * priority and time-to-live.
    *
    * @throws UnsupportedOperationException if this producer has no default destination
    */
   @Override
   public void send(Message message,
                    int deliveryMode,
                    int priority,
                    long timeToLive,
                    CompletionListener completionListener) throws JMSException {
      checkCompletionListener(completionListener);
      checkDefaultDestination();
      doSendx(defaultDestination, message, deliveryMode, priority, timeToLive, completionListener);
   }

   /**
    * Asynchronously sends a message to the given destination using this producer's defaults.
    * Delegates to the fully-specified overload below.
    */
   @Override
   public void send(Destination destination,
                    Message message,
                    CompletionListener completionListener) throws JMSException {
      send(destination, message, defaultDeliveryMode, defaultPriority, defaultTimeToLive, completionListener);
   }

   /**
    * Asynchronously sends a message to the given destination with explicit delivery mode,
    * priority and time-to-live.
    *
    * @throws InvalidDestinationException if the destination is null or foreign
    */
   @Override
   public void send(Destination destination,
                    Message message,
                    int deliveryMode,
                    int priority,
                    long timeToLive,
                    CompletionListener completionListener) throws JMSException {
      checkClosed();
      checkCompletionListener(completionListener);
      checkDestination(destination);
      doSendx((ActiveMQDestination) destination, message, deliveryMode, priority, timeToLive, completionListener);
   }
   // TopicPublisher Implementation ---------------------------------

   /** @return the topic this publisher was created for */
   public Topic getTopic() throws JMSException {
      return (Topic) getDestination();
   }

   /** Publishes to the default topic using this publisher's default QoS settings. */
   public void publish(final Message message) throws JMSException {
      send(message);
   }

   /** Publishes to the given topic using this publisher's default QoS settings. */
   public void publish(final Topic topic, final Message message) throws JMSException {
      send(topic, message);
   }

   /** Publishes to the default topic with explicit delivery mode, priority and time-to-live. */
   public void publish(final Message message,
                       final int deliveryMode,
                       final int priority,
                       final long timeToLive) throws JMSException {
      send(message, deliveryMode, priority, timeToLive);
   }

   /** Publishes to the given topic with explicit delivery mode, priority and time-to-live. */
   public void publish(final Topic topic,
                       final Message message,
                       final int deliveryMode,
                       final int priority,
                       final long timeToLive) throws JMSException {
      checkDestination(topic);
      doSendx((ActiveMQDestination) topic, message, deliveryMode, priority, timeToLive, null);
   }
// QueueSender Implementation ------------------------------------
/** QueueSender.send: delegates to the generic Destination-based send. */
public void send(final Queue queue, final Message message) throws JMSException {
send((Destination) queue, message);
}
/**
 * QueueSender.send to an explicit queue with explicit QoS parameters.
 *
 * @throws JMSException if the producer is closed, the queue is invalid, or the send fails
 */
public void send(final Queue queue,
                 final Message message,
                 final int deliveryMode,
                 final int priority,
                 final long timeToLive) throws JMSException {
    // Previously this path skipped the closed check that the Destination-based
    // send performs; JMS mandates IllegalStateException for send after close().
    checkClosed();
    checkDestination(queue);
    doSendx((ActiveMQDestination) queue, message, deliveryMode, priority, timeToLive, null);
}
/**
 * {@code QueueSender} view of this producer's destination.
 * Assumes the destination actually is a Queue; a topic producer would get a
 * ClassCastException here.
 */
public Queue getQueue() throws JMSException {
return (Queue) getDestination();
}
// Public --------------------------------------------------------
/** Diagnostic representation; includes the underlying core client producer. */
@Override
public String toString() {
return "ActiveMQMessageProducer->" + clientProducer;
}
/**
 * Ensures this producer was created with a default destination before a
 * destination-less send is attempted.
 *
 * @throws UnsupportedOperationException if this is an anonymous producer
 *                                       (no default destination)
 */
private void checkDefaultDestination() {
    if (defaultDestination == null) {
        // The old message ("Cannot specify destination if producer has a default
        // destination") was copy-pasted from checkDestination() and described the
        // opposite condition; use the anonymous-producer message that doSendx
        // already uses for the same situation.
        throw new UnsupportedOperationException("Destination must be specified on send with an anonymous producer");
    }
}
/**
 * Validates a destination passed explicitly to a send/publish call.
 *
 * Rejects, in order: a non-ActiveMQ ("foreign") destination, an explicit
 * destination on a producer that already has a default one (JMS forbids it),
 * and a null destination.
 */
private void checkDestination(Destination destination) throws InvalidDestinationException {
if (destination != null && !(destination instanceof ActiveMQDestination)) {
throw new InvalidDestinationException("Foreign destination:" + destination);
}
if (destination != null && defaultDestination != null) {
throw new UnsupportedOperationException("Cannot specify destination if producer has a default destination");
}
if (destination == null) {
// NOTE(review): nullTopic() is raised even when the caller passed a Queue;
// the message wording may be misleading for queue sends — confirm intended.
throw ActiveMQJMSClientBundle.BUNDLE.nullTopic();
}
}
/**
 * Rejects a null CompletionListener, as required for the asynchronous
 * send variants (JMS 2.0).
 */
private void checkCompletionListener(CompletionListener completionListener) {
if (completionListener == null) {
throw ActiveMQJMSClientBundle.BUNDLE.nullArgumentNotAllowed("CompletionListener");
}
}
/**
 * Core send pipeline shared by every send/publish variant.
 *
 * Stamps JMS headers on the message, resolves the target address (creating /
 * validating it server-side as needed), converts foreign (non-ActiveMQ) JMS
 * messages to the native implementation, assigns a message ID, and hands the
 * resulting core message to the client producer — wrapped with a completion
 * callback when {@code completionListener} is non-null.
 *
 * @param destination        explicit target, or null to use the default destination
 * @param jmsMessage         message to send (may be a foreign JMS implementation)
 * @param deliveryMode       JMS delivery mode to stamp on the message
 * @param priority           JMS priority to stamp on the message
 * @param timeToLive         ms until expiration; 0 means never expires
 * @param completionListener listener for async sends, or null for blocking sends
 * @throws JMSException if validation, conversion, or the underlying send fails
 */
private void doSendx(ActiveMQDestination destination,
final Message jmsMessage,
final int deliveryMode,
final int priority,
final long timeToLive,
CompletionListener completionListener) throws JMSException {
jmsMessage.setJMSDeliveryMode(deliveryMode);
jmsMessage.setJMSPriority(priority);
// TTL of 0 is the JMS convention for "never expires".
if (timeToLive == 0) {
jmsMessage.setJMSExpiration(0);
}
else {
jmsMessage.setJMSExpiration(System.currentTimeMillis() + timeToLive);
}
if (!disableMessageTimestamp) {
jmsMessage.setJMSTimestamp(System.currentTimeMillis());
}
else {
jmsMessage.setJMSTimestamp(0);
}
SimpleString address = null;
if (destination == null) {
if (defaultDestination == null) {
throw new UnsupportedOperationException("Destination must be specified on send with an anonymous producer");
}
// address stays null here: clientProducer.send(null, ...) falls back to the
// address the core producer was created with.
destination = defaultDestination;
}
else {
if (defaultDestination != null) {
if (!destination.equals(defaultDestination)) {
throw new UnsupportedOperationException("Where a default destination is specified " + "for the sender and a destination is " + "specified in the arguments to the send, " + "these destinations must be equal");
}
}
address = destination.getSimpleAddress();
// Only query the server once per address; afterwards it is cached on the connection.
if (!connection.containsKnownDestination(address)) {
try {
ClientSession.AddressQuery query = clientSession.addressQuery(address);
// if it's autoCreateJMSQueue we will let the PostOffice.route to execute the creation at the server's side
// as that's a more efficient path for such operation
if (!query.isExists() && !query.isAutoCreateJmsQueues()) {
throw new InvalidDestinationException("Destination " + address + " does not exist");
}
else {
connection.addKnownDestination(address);
}
}
catch (ActiveMQException e) {
throw JMSExceptionHelper.convertFromActiveMQException(e);
}
}
}
ActiveMQMessage activeMQJmsMessage;
// Tracks whether we converted from a foreign implementation, so header changes
// can be mirrored back onto the caller's original message object.
boolean foreign = false;
// First convert from foreign message if appropriate
if (!(jmsMessage instanceof ActiveMQMessage)) {
// JMS 1.1 Sect. 3.11.4: A provider must be prepared to accept, from a client,
// a message whose implementation is not one of its own.
if (jmsMessage instanceof BytesMessage) {
activeMQJmsMessage = new ActiveMQBytesMessage((BytesMessage) jmsMessage, clientSession);
}
else if (jmsMessage instanceof MapMessage) {
activeMQJmsMessage = new ActiveMQMapMessage((MapMessage) jmsMessage, clientSession);
}
else if (jmsMessage instanceof ObjectMessage) {
activeMQJmsMessage = new ActiveMQObjectMessage((ObjectMessage) jmsMessage, clientSession);
}
else if (jmsMessage instanceof StreamMessage) {
activeMQJmsMessage = new ActiveMQStreamMessage((StreamMessage) jmsMessage, clientSession);
}
else if (jmsMessage instanceof TextMessage) {
activeMQJmsMessage = new ActiveMQTextMessage((TextMessage) jmsMessage, clientSession);
}
else {
activeMQJmsMessage = new ActiveMQMessage(jmsMessage, clientSession);
}
// Set the destination on the original message
jmsMessage.setJMSDestination(destination);
foreign = true;
}
else {
activeMQJmsMessage = (ActiveMQMessage) jmsMessage;
}
if (!disableMessageID) {
// Generate a JMS id
UUID uid = UUIDGenerator.getInstance().generateUUID();
activeMQJmsMessage.getCoreMessage().setUserID(uid);
activeMQJmsMessage.resetMessageID(null);
}
// Propagate the generated ID back to the caller's foreign message object.
if (foreign) {
jmsMessage.setJMSMessageID(activeMQJmsMessage.getJMSMessageID());
}
activeMQJmsMessage.setJMSDestination(destination);
try {
activeMQJmsMessage.doBeforeSend();
}
catch (Exception e) {
// Wrap and preserve the original cause.
JMSException je = new JMSException(e.getMessage());
je.initCause(e);
throw je;
}
// JMS 2.0 delivery delay, computed from the producer's configured default.
if (defaultDeliveryDelay > 0) {
activeMQJmsMessage.setJMSDeliveryTime(System.currentTimeMillis() + defaultDeliveryDelay);
}
ClientMessage coreMessage = activeMQJmsMessage.getCoreMessage();
// Tag the message with the owning connection for server-side bookkeeping.
coreMessage.putStringProperty(ActiveMQConnection.CONNECTION_ID_PROPERTY_NAME, connID);
try {
/**
* Using a completionListener requires wrapping using a {@link CompletionListenerWrapper},
* so we avoid it if we can.
*/
if (completionListener != null) {
clientProducer.send(address, coreMessage, new CompletionListenerWrapper(completionListener, jmsMessage, this));
}
else {
clientProducer.send(address, coreMessage);
}
}
catch (ActiveMQException e) {
throw JMSExceptionHelper.convertFromActiveMQException(e);
}
}
/**
 * Throws if either the underlying core producer or its session has been closed.
 *
 * @throws IllegalStateException (a JMSException subtype) when closed
 */
private void checkClosed() throws JMSException {
if (clientProducer.isClosed() || clientSession.isClosed()) {
throw new IllegalStateException("Producer is closed");
}
}
/**
 * Adapts the Artemis core {@link SendAcknowledgementHandler} callback onto the
 * JMS 2.0 {@link CompletionListener} supplied by the application.
 */
private static final class CompletionListenerWrapper implements SendAcknowledgementHandler {

    private final CompletionListener completionListener;
    private final Message jmsMessage;
    private final ActiveMQMessageProducer producer;

    /**
     * @param listener   application callback to invoke on acknowledgement
     * @param jmsMessage the JMS message that was sent
     * @param producer   the producer that performed the send
     */
    public CompletionListenerWrapper(CompletionListener listener,
                                     Message jmsMessage,
                                     ActiveMQMessageProducer producer) {
        this.completionListener = listener;
        this.jmsMessage = jmsMessage;
        this.producer = producer;
    }

    @Override
    public void sendAcknowledged(org.apache.activemq.artemis.api.core.Message clientMessage) {
        // Rewind readable message types so the application sees them in their
        // initial, readable state.
        resetQuietly(jmsMessage);
        try {
            // Mark the callback thread so illegal JMS calls from within the
            // listener can be detected; always clear it afterwards.
            producer.connection.getThreadAwareContext().setCurrentThread(true);
            completionListener.onCompletion(jmsMessage);
        }
        finally {
            producer.connection.getThreadAwareContext().clearCurrentThread(true);
        }
    }

    /** Resets stream/bytes messages, deliberately ignoring failures (HORNETQ-1209). */
    private static void resetQuietly(Message message) {
        if (message instanceof StreamMessage) {
            try {
                ((StreamMessage) message).reset();
            }
            catch (JMSException ignored) {
                // HORNETQ-1209 XXX ignore?
            }
        }
        if (message instanceof BytesMessage) {
            try {
                ((BytesMessage) message).reset();
            }
            catch (JMSException ignored) {
                // HORNETQ-1209 XXX ignore?
            }
        }
    }

    @Override
    public String toString() {
        return CompletionListenerWrapper.class.getSimpleName() + "( completionListener=" + completionListener + ")";
    }
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.TimeSeriesParams.MetricType;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.io.StringReader;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static java.util.Collections.emptyList;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.equalTo;
/**
 * Unit tests for {@code MappingLookup}: precedence between concrete field mappers
 * and runtime fields, analyzer wiring per field, the empty lookup, and shadowing
 * validation for time-series dimensions/metrics.
 */
public class MappingLookupTests extends ESTestCase {
/**
 * Builds a MappingLookup from the given mappers, installing the runtime fields
 * on the root object mapper. The alias-mapper list is left empty.
 */
private static MappingLookup createMappingLookup(
List<FieldMapper> fieldMappers,
List<ObjectMapper> objectMappers,
List<RuntimeField> runtimeFields
) {
RootObjectMapper.Builder builder = new RootObjectMapper.Builder("_doc");
// Key runtime fields by name; toMap would throw on duplicate names.
Map<String, RuntimeField> runtimeFieldTypes = runtimeFields.stream().collect(Collectors.toMap(RuntimeField::name, r -> r));
builder.setRuntime(runtimeFieldTypes);
Mapping mapping = new Mapping(builder.build(MapperBuilderContext.ROOT), new MetadataFieldMapper[0], Collections.emptyMap());
return MappingLookup.fromMappers(mapping, fieldMappers, objectMappers, emptyList());
}
// A lookup with only a runtime field exposes the field type but no mapper.
public void testOnlyRuntimeField() {
MappingLookup mappingLookup = createMappingLookup(
emptyList(),
emptyList(),
Collections.singletonList(new TestRuntimeField("test", "type"))
);
assertEquals(0, size(mappingLookup.fieldMappers()));
assertEquals(0, mappingLookup.objectMappers().size());
assertNull(mappingLookup.getMapper("test"));
assertThat(mappingLookup.fieldTypesLookup().get("test"), instanceOf(TestRuntimeField.TestRuntimeFieldType.class));
}
// A runtime field with the same name as a concrete field wins in the type
// lookup, while the concrete mapper remains reachable via getMapper.
public void testRuntimeFieldLeafOverride() {
MockFieldMapper fieldMapper = new MockFieldMapper("test");
MappingLookup mappingLookup = createMappingLookup(
Collections.singletonList(fieldMapper),
emptyList(),
Collections.singletonList(new TestRuntimeField("test", "type"))
);
assertThat(mappingLookup.getMapper("test"), instanceOf(MockFieldMapper.class));
assertEquals(1, size(mappingLookup.fieldMappers()));
assertEquals(0, mappingLookup.objectMappers().size());
assertThat(mappingLookup.fieldTypesLookup().get("test"), instanceOf(TestRuntimeField.TestRuntimeFieldType.class));
}
// Same override semantics hold for a dotted subfield under an object mapper.
public void testSubfieldOverride() {
MockFieldMapper fieldMapper = new MockFieldMapper("object.subfield");
ObjectMapper objectMapper = new ObjectMapper(
"object",
"object",
new Explicit<>(true, true),
ObjectMapper.Dynamic.TRUE,
Collections.singletonMap("object.subfield", fieldMapper)
);
MappingLookup mappingLookup = createMappingLookup(
Collections.singletonList(fieldMapper),
Collections.singletonList(objectMapper),
Collections.singletonList(new TestRuntimeField("object.subfield", "type"))
);
assertThat(mappingLookup.getMapper("object.subfield"), instanceOf(MockFieldMapper.class));
assertEquals(1, size(mappingLookup.fieldMappers()));
assertEquals(1, mappingLookup.objectMappers().size());
assertThat(mappingLookup.fieldTypesLookup().get("object.subfield"), instanceOf(TestRuntimeField.TestRuntimeFieldType.class));
}
// Each mapped field resolves to its own index analyzer; an unmapped field
// triggers the caller-supplied "unmapped" function (here: throws).
public void testAnalyzers() throws IOException {
FakeFieldType fieldType1 = new FakeFieldType("field1");
FieldMapper fieldMapper1 = new FakeFieldMapper(fieldType1, "index1");
FakeFieldType fieldType2 = new FakeFieldType("field2");
FieldMapper fieldMapper2 = new FakeFieldMapper(fieldType2, "index2");
MappingLookup mappingLookup = createMappingLookup(Arrays.asList(fieldMapper1, fieldMapper2), emptyList(), emptyList());
assertAnalyzes(mappingLookup.indexAnalyzer("field1", f -> null), "field1", "index1");
assertAnalyzes(mappingLookup.indexAnalyzer("field2", f -> null), "field2", "index2");
expectThrows(
IllegalArgumentException.class,
() -> mappingLookup.indexAnalyzer("field3", f -> { throw new IllegalArgumentException(); }).tokenStream("field3", "blah")
);
}
// MappingLookup.EMPTY behaves like a mapping with no fields at all.
public void testEmptyMappingLookup() {
MappingLookup mappingLookup = MappingLookup.EMPTY;
assertEquals("{\"_doc\":{}}", Strings.toString(mappingLookup.getMapping()));
assertFalse(mappingLookup.hasMappings());
assertNull(mappingLookup.getMapping().getMeta());
assertEquals(0, mappingLookup.getMapping().getMetadataMappersMap().size());
assertFalse(mappingLookup.fieldMappers().iterator().hasNext());
assertEquals(0, mappingLookup.getMatchingFieldNames("*").size());
}
// validateDoesNotShadow rejects names of time-series dimensions/metrics but
// accepts unmapped names and plain fields.
public void testValidateDoesNotShadow() {
FakeFieldType dim = new FakeFieldType("dim") {
@Override
public boolean isDimension() {
return true;
}
};
FieldMapper dimMapper = new FakeFieldMapper(dim, "index1");
MetricType metricType = randomFrom(MetricType.values());
FakeFieldType metric = new FakeFieldType("metric") {
@Override
public MetricType getMetricType() {
return metricType;
}
};
FieldMapper metricMapper = new FakeFieldMapper(metric, "index1");
FakeFieldType plain = new FakeFieldType("plain");
FieldMapper plainMapper = new FakeFieldMapper(plain, "index1");
MappingLookup mappingLookup = createMappingLookup(List.of(dimMapper, metricMapper, plainMapper), emptyList(), emptyList());
mappingLookup.validateDoesNotShadow("not_mapped");
Exception e = expectThrows(MapperParsingException.class, () -> mappingLookup.validateDoesNotShadow("dim"));
assertThat(e.getMessage(), equalTo("Field [dim] attempted to shadow a time_series_dimension"));
e = expectThrows(MapperParsingException.class, () -> mappingLookup.validateDoesNotShadow("metric"));
assertThat(e.getMessage(), equalTo("Field [metric] attempted to shadow a time_series_metric"));
mappingLookup.validateDoesNotShadow("plain");
}
// Constructing a lookup whose runtime field shadows a dimension/metric fails
// immediately with the matching message.
public void testShadowingOnConstruction() {
FakeFieldType dim = new FakeFieldType("dim") {
@Override
public boolean isDimension() {
return true;
}
};
FieldMapper dimMapper = new FakeFieldMapper(dim, "index1");
MetricType metricType = randomFrom(MetricType.values());
FakeFieldType metric = new FakeFieldType("metric") {
@Override
public MetricType getMetricType() {
return metricType;
}
};
FieldMapper metricMapper = new FakeFieldMapper(metric, "index1");
boolean shadowDim = randomBoolean();
TestRuntimeField shadowing = new TestRuntimeField(shadowDim ? "dim" : "metric", "keyword");
Exception e = expectThrows(
MapperParsingException.class,
() -> createMappingLookup(List.of(dimMapper, metricMapper), emptyList(), List.of(shadowing))
);
assertThat(
e.getMessage(),
equalTo(
shadowDim
? "Field [dim] attempted to shadow a time_series_dimension"
: "Field [metric] attempted to shadow a time_series_metric"
)
);
}
// Asserts that the analyzer emits exactly `output` as its first token for `field`.
// FakeAnalyzer ignores the reader input, so an empty string suffices.
private void assertAnalyzes(Analyzer analyzer, String field, String output) throws IOException {
try (TokenStream tok = analyzer.tokenStream(field, new StringReader(""))) {
CharTermAttribute term = tok.addAttribute(CharTermAttribute.class);
assertTrue(tok.incrementToken());
assertEquals(output, term.toString());
}
}
// Counts elements of an Iterable (fieldMappers() does not expose size()).
private static int size(Iterable<?> iterable) {
int count = 0;
for (Object obj : iterable) {
count++;
}
return count;
}
// Analyzer whose tokenizer always emits a single fixed token, regardless of input.
private static class FakeAnalyzer extends Analyzer {
private final String output;
FakeAnalyzer(String output) {
this.output = output;
}
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new Tokenizer() {
boolean incremented = false;
final CharTermAttribute term = addAttribute(CharTermAttribute.class);
@Override
public boolean incrementToken() {
// Emit the fixed token exactly once per stream.
if (incremented) {
return false;
}
term.setLength(0).append(output);
incremented = true;
return true;
}
};
return new TokenStreamComponents(tokenizer);
}
}
// Minimal indexed/searchable field type; value fetching is unsupported.
static class FakeFieldType extends TermBasedFieldType {
private FakeFieldType(String name) {
super(name, true, false, true, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap());
}
@Override
public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
throw new UnsupportedOperationException();
}
@Override
public String typeName() {
return "fake";
}
}
// Field mapper whose index analyzer is a FakeAnalyzer emitting `indexedValue`.
static class FakeFieldMapper extends FieldMapper {
final String indexedValue;
FakeFieldMapper(FakeFieldType fieldType, String indexedValue) {
super(
fieldType.name(),
fieldType,
new NamedAnalyzer("fake", AnalyzerScope.INDEX, new FakeAnalyzer(indexedValue)),
MultiFields.empty(),
CopyTo.empty()
);
this.indexedValue = indexedValue;
}
@Override
protected void parseCreateField(DocumentParserContext context) {}
@Override
protected String contentType() {
return null;
}
@Override
public Builder getMergeBuilder() {
return null;
}
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.execution;
import com.facebook.presto.OutputBuffers;
import com.facebook.presto.UnpartitionedPagePartitionFunction;
import com.facebook.presto.client.FailureInfo;
import com.facebook.presto.connector.dual.DualMetadata;
import com.facebook.presto.connector.dual.DualSplit;
import com.facebook.presto.execution.SharedBuffer.QueueState;
import com.facebook.presto.execution.StateMachine.StateChangeListener;
import com.facebook.presto.execution.TestSqlTaskManager.MockLocationFactory;
import com.facebook.presto.metadata.InMemoryNodeManager;
import com.facebook.presto.metadata.MetadataManager;
import com.facebook.presto.metadata.NodeVersion;
import com.facebook.presto.metadata.PrestoNode;
import com.facebook.presto.metadata.QualifiedTableName;
import com.facebook.presto.operator.TaskContext;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.FixedSplitSource;
import com.facebook.presto.spi.HostAddress;
import com.facebook.presto.spi.Node;
import com.facebook.presto.spi.Split;
import com.facebook.presto.spi.SplitSource;
import com.facebook.presto.spi.TableHandle;
import com.facebook.presto.sql.analyzer.Session;
import com.facebook.presto.sql.analyzer.Type;
import com.facebook.presto.sql.planner.PlanFragment;
import com.facebook.presto.sql.planner.PlanFragment.OutputPartitioning;
import com.facebook.presto.sql.planner.PlanFragment.PlanDistribution;
import com.facebook.presto.sql.planner.StageExecutionPlan;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.JoinNode.EquiJoinClause;
import com.facebook.presto.sql.planner.plan.PlanFragmentId;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.util.Threads;
import com.google.common.base.Optional;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import io.airlift.units.DataSize;
import io.airlift.units.DataSize.Unit;
import io.airlift.units.Duration;
import org.joda.time.DateTime;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import javax.annotation.concurrent.GuardedBy;
import java.net.URI;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicLong;
import static com.facebook.presto.OutputBuffers.INITIAL_EMPTY_OUTPUT_BUFFERS;
import static com.facebook.presto.sql.planner.plan.TableScanNode.GeneratedPartitions;
import static com.facebook.presto.util.Failures.toFailures;
import static com.google.common.base.Preconditions.checkNotNull;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.fail;
public class TestSqlStageExecution
{
public static final Session SESSION = new Session("user", "source", "catalog", "schema", "address", "agent");
MetadataManager metadata;
LocationFactory locationFactory = new MockLocationFactory();
@BeforeMethod
public void setUp()
throws Exception
{
metadata = new MetadataManager();
metadata.addInternalSchemaMetadata(MetadataManager.INTERNAL_CONNECTOR_ID, new DualMetadata());
}
@Test
public void testSplitAssignment()
throws Exception
{
final InMemoryNodeManager nodeManager = new InMemoryNodeManager();
ImmutableList.Builder<Node> nodeBuilder = ImmutableList.builder();
nodeBuilder.add(new PrestoNode("other1", URI.create("http://127.0.0.1:11"), NodeVersion.UNKNOWN));
nodeBuilder.add(new PrestoNode("other2", URI.create("http://127.0.0.1:12"), NodeVersion.UNKNOWN));
nodeBuilder.add(new PrestoNode("other3", URI.create("http://127.0.0.1:13"), NodeVersion.UNKNOWN));
ImmutableList<Node> nodes = nodeBuilder.build();
nodeManager.addNode("foo", nodes);
NodeScheduler nodeScheduler = new NodeScheduler(nodeManager, new NodeSchedulerConfig());
// Start sql stage execution
SqlStageExecution sqlStageExecution1 = createSqlStageExecution(nodeScheduler, 2, 20);
Future future1 = sqlStageExecution1.start();
future1.get(1, TimeUnit.SECONDS);
Map<Node, RemoteTask> tasks1 = sqlStageExecution1.getTasks();
for (Map.Entry<Node, RemoteTask> entry : tasks1.entrySet()) {
assertEquals(entry.getValue().getQueuedSplits(), 5);
}
// Add new node
Node additionalNode = new PrestoNode("other4", URI.create("http://127.0.0.1:14"), NodeVersion.UNKNOWN);
nodeManager.addNode("foo", additionalNode);
// Schedule next query
SqlStageExecution sqlStageExecution2 = createSqlStageExecution(nodeScheduler, 2, 20);
Future future2 = sqlStageExecution2.start();
future2.get(1, TimeUnit.SECONDS);
Map<Node, RemoteTask> tasks2 = sqlStageExecution2.getTasks();
RemoteTask task = tasks2.get(additionalNode);
assertNotNull(task);
for (Map.Entry<Node, RemoteTask> entry : tasks2.entrySet()) {
assertEquals(entry.getValue().getQueuedSplits(), 4);
}
}
@Test
public void testSplitAssignmentBatchSizeGreaterThanMaxPending()
throws Exception
{
final InMemoryNodeManager nodeManager = new InMemoryNodeManager();
ImmutableList.Builder<Node> nodeBuilder = ImmutableList.builder();
nodeBuilder.add(new PrestoNode("other1", URI.create("http://127.0.0.1:11"), NodeVersion.UNKNOWN));
nodeBuilder.add(new PrestoNode("other2", URI.create("http://127.0.0.1:12"), NodeVersion.UNKNOWN));
nodeBuilder.add(new PrestoNode("other3", URI.create("http://127.0.0.1:13"), NodeVersion.UNKNOWN));
ImmutableList<Node> nodes = nodeBuilder.build();
nodeManager.addNode("foo", nodes);
NodeScheduler nodeScheduler = new NodeScheduler(nodeManager, new NodeSchedulerConfig());
// Start sql stage execution
SqlStageExecution sqlStageExecution1 = createSqlStageExecution(nodeScheduler, 10, 2);
Future future1 = sqlStageExecution1.start();
// The stage scheduler will block and this will cause a timeout exception
try {
future1.get(2, TimeUnit.SECONDS);
}
catch (TimeoutException e) {
}
Map<Node, RemoteTask> tasks1 = sqlStageExecution1.getTasks();
for (Map.Entry<Node, RemoteTask> entry : tasks1.entrySet()) {
assertEquals(entry.getValue().getQueuedSplits(), 2);
}
}
private SqlStageExecution createSqlStageExecution(NodeScheduler nodeScheduler, int splitBatchSize, int maxPendingSplitsPerNode)
{
int splitCount = 20;
ExecutorService remoteTaskExecutor = Executors.newCachedThreadPool(Threads.daemonThreadsNamed("remoteTaskExecutor"));
MockRemoteTaskFactory remoteTaskFactory = new MockRemoteTaskFactory(remoteTaskExecutor);
ExecutorService executor = Executors.newCachedThreadPool(Threads.daemonThreadsNamed("stageExecutor"));
OutputBuffers outputBuffers = INITIAL_EMPTY_OUTPUT_BUFFERS
.withBuffer("out", new UnpartitionedPagePartitionFunction())
.withNoMoreBufferIds();
StageExecutionPlan tableScanPlan = createTableScanPlan("test", metadata, splitCount);
return new SqlStageExecution(new QueryId("query"),
locationFactory,
tableScanPlan,
nodeScheduler,
remoteTaskFactory,
SESSION,
splitBatchSize,
maxPendingSplitsPerNode,
8, // initialHashPartitions
executor,
outputBuffers);
}
@Test(enabled = false)
public void testYieldCausesFullSchedule()
throws Exception
{
ExecutorService executor = Executors.newCachedThreadPool(Threads.daemonThreadsNamed("test"));
SqlStageExecution stageExecution = null;
try {
MetadataManager metadata = new MetadataManager();
metadata.addInternalSchemaMetadata(MetadataManager.INTERNAL_CONNECTOR_ID, new DualMetadata());
StageExecutionPlan joinPlan = createJoinPlan("A", metadata);
InMemoryNodeManager nodeManager = new InMemoryNodeManager();
nodeManager.addNode("foo", new PrestoNode("other", URI.create("http://127.0.0.1:11"), NodeVersion.UNKNOWN));
OutputBuffers outputBuffers = INITIAL_EMPTY_OUTPUT_BUFFERS
.withBuffer("out", new UnpartitionedPagePartitionFunction())
.withNoMoreBufferIds();
stageExecution = new SqlStageExecution(new QueryId("query"),
new MockLocationFactory(),
joinPlan,
new NodeScheduler(nodeManager, new NodeSchedulerConfig()), new MockRemoteTaskFactory(executor),
SESSION,
1000,
1,
8,
executor,
outputBuffers);
Future<?> future = stageExecution.start();
long start = System.nanoTime();
while (true) {
StageInfo stageInfo = stageExecution.getStageInfo();
assertEquals(stageInfo.getState(), StageState.SCHEDULING);
StageInfo tableScanInfo = stageInfo.getSubStages().get(0);
StageState tableScanState = tableScanInfo.getState();
switch (tableScanState) {
case PLANNED:
case SCHEDULING:
case SCHEDULED:
break;
case RUNNING:
// there should be two tasks (even though only one can ever be used)
assertEquals(stageInfo.getTasks().size(), 2);
assertEquals(tableScanInfo.getTasks().size(), 1);
assertEquals(tableScanInfo.getTasks().get(0).getOutputBuffers().getState(), QueueState.NO_MORE_QUEUES);
return;
case FINISHED:
case CANCELED:
case FAILED:
fail("Unexpected state for table scan stage " + tableScanState);
break;
}
if (TimeUnit.NANOSECONDS.toSeconds(System.nanoTime() - start) > 1) {
fail("Expected test to complete within 1 second");
}
try {
future.get(50, TimeUnit.MILLISECONDS);
}
catch (TimeoutException e) {
}
}
}
finally {
if (stageExecution != null) {
stageExecution.cancel(false);
}
executor.shutdownNow();
}
}
private StageExecutionPlan createJoinPlan(String planId, MetadataManager metadata)
{
// create table scan for build data with a single split, so it is only waiting on the no-more buffers call
StageExecutionPlan build = createTableScanPlan("build", metadata, 1);
// create an exchange to read the build data
ExchangeNode buildExchange = new ExchangeNode(new PlanNodeId(planId + "-build"),
build.getFragment().getId(),
ImmutableList.copyOf(build.getFragment().getSymbols().keySet()));
// create table scan for probe data with three splits, so it will not send the no-more buffers call
StageExecutionPlan probe = createTableScanPlan("probe", metadata, 10);
// create an exchange to read the probe data
ExchangeNode probeExchange = new ExchangeNode(new PlanNodeId(planId + "-probe"),
probe.getFragment().getId(),
ImmutableList.copyOf(probe.getFragment().getSymbols().keySet()));
// join build and probe
PlanFragment joinPlan = new PlanFragment(
new PlanFragmentId(planId),
new JoinNode(new PlanNodeId(planId), JoinNode.Type.INNER, probeExchange, buildExchange, ImmutableList.<EquiJoinClause>of()),
probe.getFragment().getSymbols(), // this is wrong, but it works
PlanDistribution.SOURCE,
new PlanNodeId(planId),
OutputPartitioning.NONE,
ImmutableList.<Symbol>of());
return new StageExecutionPlan(joinPlan,
probe.getDataSource(),
ImmutableList.of(probe, build)
);
}
private StageExecutionPlan createTableScanPlan(String planId, MetadataManager metadata, int splitCount)
{
TableHandle tableHandle = metadata.getTableHandle(new QualifiedTableName("default", "default", DualMetadata.NAME)).get();
ColumnHandle columnHandle = metadata.getColumnHandle(tableHandle, DualMetadata.COLUMN_NAME).get();
Symbol symbol = new Symbol(DualMetadata.COLUMN_NAME);
// table scan with splitCount splits
Split split = new DualSplit(HostAddress.fromString("127.0.0.1"));
PlanNodeId tableScanNodeId = new PlanNodeId(planId);
PlanFragment testFragment = new PlanFragment(
new PlanFragmentId(planId),
new TableScanNode(
tableScanNodeId,
tableHandle,
ImmutableList.of(symbol),
ImmutableMap.of(symbol, columnHandle),
null,
Optional.<GeneratedPartitions>absent()),
ImmutableMap.<Symbol, Type>of(symbol, Type.VARCHAR),
PlanDistribution.SOURCE,
tableScanNodeId,
OutputPartitioning.NONE,
ImmutableList.<Symbol>of());
ImmutableList.Builder<Split> splits = ImmutableList.builder();
for (int i = 0; i < splitCount; i++) {
splits.add(new DualSplit(HostAddress.fromString("127.0.0.1")));
}
SplitSource splitSource = new FixedSplitSource(null, splits.build());
return new StageExecutionPlan(testFragment,
Optional.of(splitSource),
ImmutableList.<StageExecutionPlan>of()
);
}
private static class MockRemoteTaskFactory
implements RemoteTaskFactory
{
private final Executor executor;
private MockRemoteTaskFactory(Executor executor)
{
this.executor = executor;
}
@Override
public RemoteTask createRemoteTask(
Session session,
TaskId taskId,
Node node,
PlanFragment fragment,
Multimap<PlanNodeId, Split> initialSplits,
OutputBuffers outputBuffers)
{
return new MockRemoteTask(taskId, fragment, executor, initialSplits);
}
private static class MockRemoteTask
implements RemoteTask
{
private final AtomicLong nextTaskInfoVersion = new AtomicLong(TaskInfo.STARTING_VERSION);
private final URI location;
private final TaskStateMachine taskStateMachine;
private final TaskContext taskContext;
private final SharedBuffer sharedBuffer;
private final PlanFragment fragment;
@GuardedBy("this")
private final Set<PlanNodeId> noMoreSplits = new HashSet<>();
@GuardedBy("this")
private final Multimap<PlanNodeId, Split> splits = HashMultimap.create();
public MockRemoteTask(TaskId taskId,
PlanFragment fragment,
Executor executor,
Multimap<PlanNodeId, Split> initialSplits)
{
this.taskStateMachine = new TaskStateMachine(checkNotNull(taskId, "taskId is null"), checkNotNull(executor, "executor is null"));
Session session = new Session("user", "source", "catalog", "schema", "address", "agent");
this.taskContext = new TaskContext(taskStateMachine, executor, session, new DataSize(256, MEGABYTE), new DataSize(1, MEGABYTE), true);
this.location = URI.create("fake://task/" + taskId);
this.sharedBuffer = new SharedBuffer(taskId, executor, checkNotNull(new DataSize(1, Unit.BYTE), "maxBufferSize is null"), INITIAL_EMPTY_OUTPUT_BUFFERS);
this.fragment = checkNotNull(fragment, "fragment is null");
splits.putAll(initialSplits);
}
@Override
public String getNodeId()
{
return "node";
}
@Override
public TaskInfo getTaskInfo()
{
TaskState state = taskStateMachine.getState();
List<FailureInfo> failures = ImmutableList.of();
if (state == TaskState.FAILED) {
failures = toFailures(taskStateMachine.getFailureCauses());
}
return new TaskInfo(
taskStateMachine.getTaskId(),
nextTaskInfoVersion.getAndIncrement(),
state,
location,
DateTime.now(),
sharedBuffer.getInfo(),
ImmutableSet.<PlanNodeId>of(),
taskContext.getTaskStats(),
failures);
}
@Override
public void start()
{
}
@Override
public void addSplits(PlanNodeId sourceId, Iterable<? extends Split> splits)
{
checkNotNull(splits, "splits is null");
for (Split split : splits) {
this.splits.put(sourceId, split);
}
}
@Override
public void noMoreSplits(PlanNodeId sourceId)
{
noMoreSplits.add(sourceId);
if (noMoreSplits.containsAll(fragment.getSources())) {
taskStateMachine.finished();
}
}
@Override
public void setOutputBuffers(OutputBuffers outputBuffers)
{
sharedBuffer.setOutputBuffers(outputBuffers);
}
@Override
public void addStateChangeListener(final StateChangeListener<TaskInfo> stateChangeListener)
{
    // Adapter: the state machine reports TaskState transitions, but callers
    // want TaskInfo updates, so rebuild a fresh snapshot on every transition.
    taskStateMachine.addStateChangeListener(new StateChangeListener<TaskState>()
    {
        @Override
        public void stateChanged(TaskState newValue)
        {
            stateChangeListener.stateChanged(getTaskInfo());
        }
    });
}
@Override
public void cancel()
{
    // Delegate cancellation to the task state machine.
    taskStateMachine.cancel();
}
@Override
public Duration waitForTaskToFinish(Duration maxWait)
        throws InterruptedException
{
    // Repeatedly wait for state changes until the task reaches a terminal
    // state or the wait budget is (almost) exhausted; return what is left.
    for (TaskState observed = taskStateMachine.getState();
            maxWait.toMillis() > 1 && !observed.isDone();
            observed = taskStateMachine.getState()) {
        maxWait = taskStateMachine.waitForStateChange(observed, maxWait);
    }
    return maxWait;
}
@Override
public int getQueuedSplits()
{
    // A task in a terminal state has, by definition, nothing left queued.
    return taskStateMachine.getState().isDone() ? 0 : splits.size();
}
}
}
}
| |
package nl.entreco.reversi.game;
import android.databinding.ObservableField;
import android.databinding.ObservableInt;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.MainThread;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.google.firebase.database.DatabaseReference;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import nl.entreco.reversi.data.History;
import nl.entreco.reversi.model.Arbiter;
import nl.entreco.reversi.model.GameCallback;
import nl.entreco.reversi.model.Move;
import nl.entreco.reversi.model.Player;
import nl.entreco.reversi.model.Stone;
import nl.entreco.reversi.model.players.DrawPlayer;
import nl.entreco.reversi.model.players.RandomPlayer;
import nl.entreco.reversi.model.players.UserPlayer;
/**
 * Orchestrates a single Reversi match between two {@link Player}s: wires the
 * players to the {@link Arbiter}, exposes observable score/winner/turn state
 * for data binding, and mirrors moves and results to a remote Firebase match.
 *
 * <p>All UI-facing state changes are posted to the main-looper {@link Handler};
 * player callbacks run on a single-threaded background scheduler.</p>
 */
public class Game implements GameCallback {

    // Observable state consumed by data binding.
    @NonNull public final ObservableInt score1;
    @NonNull public final ObservableInt score2;
    @NonNull public final ObservableField<Player> winner;
    @NonNull public final ObservableField<Player> current;
    @NonNull public final ObservableField<Player> rejected;
    @NonNull public final ObservableField<Player> player1;
    @NonNull public final ObservableField<Player> player2;

    @NonNull private final BoardAdapter adapter;
    @NonNull private final Arbiter arbiter;
    @NonNull private final CreateMatchUsecase createMatchUsecase;
    @NonNull private final Handler main;
    @NonNull private final ScheduledExecutorService background;
    // Remote match node; null until startGame() has created a match.
    @Nullable private DatabaseReference matchReference;

    /**
     * @param adapter            renders the board and current-player highlight
     * @param arbiter            enforces the game rules and turn order
     * @param createMatchUsecase creates the remote (Firebase) match record
     */
    public Game(@NonNull final BoardAdapter adapter, @NonNull final Arbiter arbiter, @NonNull final
            CreateMatchUsecase createMatchUsecase) {
        this.adapter = adapter;
        this.arbiter = arbiter;
        this.createMatchUsecase = createMatchUsecase;
        this.player1 = new ObservableField<>();
        this.player2 = new ObservableField<>();
        this.score1 = new ObservableInt(0);
        this.score2 = new ObservableInt(0);
        this.winner = new ObservableField<>();
        this.current = new ObservableField<>();
        this.rejected = new ObservableField<>();
        this.main = setupHandler();
        this.background = setupScheduler();
    }

    /** Factory for the background scheduler; overridable for tests. */
    @NonNull
    ScheduledExecutorService setupScheduler() {
        return Executors.newSingleThreadScheduledExecutor();
    }

    /** Factory for the main-thread handler; overridable for tests. */
    @NonNull
    Handler setupHandler() {
        return new Handler(Looper.getMainLooper());
    }

    /**
     * Starts a new match: ensures two players exist, creates the remote match,
     * registers both players with the arbiter, and resets scores to the
     * standard Reversi opening position (2-2).
     */
    public void startGame() {
        addDefaultPlayersIfEmpty();
        matchReference = createMatchUsecase.createRemoteMatch(player1.get(), player2.get());
        this.winner.set(null);
        this.player1.get().onJoinedGame(matchReference.getKey());
        this.player2.get().onJoinedGame(matchReference.getKey());
        this.arbiter.addPlayer(player1.get());
        this.arbiter.addPlayer(player2.get());
        this.player1.get().setCallback(this);
        this.player2.get().setCallback(this);
        this.arbiter.start(this);
        this.score1.set(2);
        this.score2.set(2);
        this.adapter.start();
    }

    /**
     * Installs a default user-vs-random pairing when no players were set.
     */
    private void addDefaultPlayersIfEmpty() {
        if (player1.get() == null || player2.get() == null) {
            final Player p1 = new UserPlayer();
            p1.setStoneColor(Stone.WHITE);
            final Player p2 = new RandomPlayer();
            // Bug fix: BLACK was previously assigned to p1 (overwriting WHITE),
            // leaving the RandomPlayer without any stone color.
            p2.setStoneColor(Stone.BLACK);
            this.player1.set(p1);
            this.player2.set(p2);
        }
    }

    /**
     * Marks {@code player} as the one whose turn it is, starts its move timer,
     * and updates the board highlight. Always runs on the main thread.
     */
    @MainThread
    @Override
    public void setCurrentPlayer(@NonNull final Player player) {
        main.post(new Runnable() {
            @Override
            public void run() {
                arbiter.startTimer(player);
                current.set(player);
                adapter.setCurrentPlayer(player, Game.this);
            }
        });
    }

    /**
     * Applies {@code move} for {@code player} if the arbiter accepts it:
     * updates both scores, pushes the move to the remote history, redraws the
     * board, and hands the turn to the next player. A move that flips no
     * stones is silently ignored (the arbiter handles rejection separately).
     */
    @Override
    public void submitMove(@NonNull final Player player, @NonNull final Move move) {
        main.post(new Runnable() {
            @Override
            public void run() {
                final List<Stone> flipped = arbiter.onMoveReceived(player, move.toString());
                if (flipped.size() > 0) {
                    // +1 accounts for the newly placed stone; flipped stones
                    // move from one player's total to the other's.
                    if (player.getStoneColor() == Stone.WHITE) {
                        score1.set(score1.get() + 1 + flipped.size());
                        score2.set(score2.get() - flipped.size());
                    } else {
                        score1.set(score1.get() - flipped.size());
                        score2.set(score2.get() + 1 + flipped.size());
                    }

                    if (matchReference != null) {
                        matchReference.child("history").push().setValue(new History(player, move, score1.get(), score2.get()));
                    }

                    current.set(null);
                    adapter.update(move, player.getStoneColor());
                    arbiter.notifyNextPlayer(player);
                }
            }
        });
    }

    /**
     * Briefly flags {@code player} as rejected so the UI can flash feedback,
     * then clears the flag again after a short delay.
     */
    @Override
    public void onMoveRejected(@Nullable final Player player) {
        main.post(new Runnable() {
            @Override
            public void run() {
                rejected.set(player);
                main.postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        rejected.set(null);
                    }
                }, 10L);
            }
        });
    }

    /**
     * Publishes the final result: a {@link DrawPlayer} on a 0 score, otherwise
     * the winning player, and records the score on the remote match.
     */
    @Override
    public void onGameFinished(final int score) {
        main.post(new Runnable() {
            @Override
            public void run() {
                current.set(null);
                if (score == 0) {
                    // draw
                    winner.set(new DrawPlayer());
                } else {
                    // we have a winner
                    winner.set(score <= Stone.WHITE ? player1.get() : player2.get());
                }
                // Guard against a missing remote match, consistent with
                // submitMove(): matchReference is @Nullable.
                if (matchReference != null) {
                    matchReference.child("result").setValue(score);
                }
            }
        });
    }

    /** Tells {@code player} (after a short delay) that it is its turn. */
    @Override
    public void notifyNextPlayer(@NonNull final Player player, @NonNull final String board) {
        background.schedule(new Runnable() {
            @Override
            public void run() {
                player.yourTurn(board);
            }
        }, 100, TimeUnit.MILLISECONDS);
    }

    /** Tells {@code player} (after a short delay) that its move was rejected. */
    @Override
    public void notifyMoveRejected(@NonNull final Player player, @NonNull final String board) {
        background.schedule(new Runnable() {
            @Override
            public void run() {
                player.onMoveRejected(board);
            }
        }, 50, TimeUnit.MILLISECONDS);
    }

    /** Delivers the final per-player scores on the main thread. */
    @Override
    public void notifyPlayerGameFinished(@NonNull final Player player, final int yourScore,
                                         final int opponentScore) {
        main.post(new Runnable() {
            @Override
            public void run() {
                player.onGameFinished(yourScore, opponentScore);
            }
        });
    }

    /** Resets all observable state and detaches both players. */
    public void clear() {
        this.arbiter.clear();
        this.player1.set(null);
        this.player2.set(null);
        this.score1.set(0);
        this.score2.set(0);
        this.winner.set(null);
        this.current.set(null);
        this.rejected.set(null);
    }

    /** Assigns {@code player} the white stones and the player-1 slot. */
    public void setWhitePlayer(Player player) {
        player.setStoneColor(Stone.WHITE);
        this.player1.set(player);
    }

    /** Assigns {@code player} the black stones and the player-2 slot. */
    public void setBlackPlayer(Player player) {
        player.setStoneColor(Stone.BLACK);
        this.player2.set(player);
    }
}
| |
/**
* Copyright (C) 2004-2009 Jive Software. All rights reserved.
* Copyright (C) 2014-2015 TU Berlin. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tu_berlin.cit.rwx4j.xmpp.packet;
import org.dom4j.Element;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* Message packet.<p>
*
* A message can have one of several {@link Type Types}. For each message type,
* different message fields are typically used as follows:
*
* <table border="1">
* <tr><td> </td><td colspan="5"><b>Message type</b></td></tr>
* <tr><td><i>Field</i></td><td><b>Normal</b></td><td><b>Chat</b></td><td><b>Group Chat</b></td><td><b>Headline</b></td><td><b>Error</b></td></tr>
* <tr><td><i>subject</i></td> <td>SHOULD</td><td>SHOULD NOT</td><td>SHOULD NOT</td><td>SHOULD NOT</td><td>SHOULD NOT</td></tr>
* <tr><td><i>thread</i></td> <td>OPTIONAL</td><td>SHOULD</td><td>OPTIONAL</td><td>OPTIONAL</td><td>SHOULD NOT</td></tr>
* <tr><td><i>body</i></td> <td>SHOULD</td><td>SHOULD</td><td>SHOULD</td><td>SHOULD</td><td>SHOULD NOT</td></tr>
* <tr><td><i>error</i></td> <td>MUST NOT</td><td>MUST NOT</td><td>MUST NOT</td><td>MUST NOT</td><td>MUST</td></tr>
* </table>
*/
public class Message extends Packet {

    /**
     * Constructs a new Message.
     */
    public Message() {
        this.element = docFactory.createDocument().addElement("message");
    }

    /**
     * Constructs a new Message using an existing Element. This is useful
     * for parsing incoming message Elements into Message objects.
     *
     * @param element the message Element.
     */
    public Message(Element element) {
        super(element);
    }

    /**
     * Constructs a new Message using an existing Element. This is useful
     * for parsing incoming message Elements into Message objects. Stringprep validation
     * on the TO address can be disabled. The FROM address will not be validated since the
     * server is the one that sets that value.
     *
     * @param element the message Element.
     * @param skipValidation true if stringprep should not be applied to the TO address.
     */
    public Message(Element element, boolean skipValidation) {
        super(element, skipValidation);
    }

    /**
     * Constructs a new Message that is a copy of an existing Message.
     *
     * @param message the message packet.
     * @see #createCopy()
     */
    private Message(Message message) {
        Element elementCopy = message.element.createCopy();
        docFactory.createDocument().add(elementCopy);
        this.element = elementCopy;
        // Copy cached JIDs (for performance reasons)
        this.toJID = message.toJID;
        this.fromJID = message.fromJID;
    }

    /**
     * Returns the type of this message
     *
     * @return the message type.
     * @see Type
     */
    public Type getType() {
        String type = element.attributeValue("type");
        if (type != null) {
            return Type.valueOf(type);
        }
        else {
            // Per RFC 6121, a message with no 'type' attribute is "normal".
            return Type.normal;
        }
    }

    /**
     * Sets the type of this message.
     *
     * @param type the message type.
     * @see Type
     */
    public void setType(Type type) {
        element.addAttribute("type", type == null ? null : type.toString());
    }

    /**
     * Returns the subject of this message or <tt>null</tt> if there is no subject.
     *
     * @return the subject.
     */
    public String getSubject() {
        return element.elementText("subject");
    }

    /**
     * Sets the subject of this message. A <tt>null</tt> subject removes any
     * existing subject element.
     *
     * @param subject the subject.
     */
    public void setSubject(String subject) {
        Element subjectElement = element.element("subject");
        // If subject is null, clear the subject. (Restructured to match the
        // equivalent logic in setBody and setThread.)
        if (subject == null) {
            if (subjectElement != null) {
                element.remove(subjectElement);
            }
            return;
        }
        if (subjectElement == null) {
            subjectElement = element.addElement("subject");
        }
        subjectElement.setText(subject);
    }

    /**
     * Returns the body of this message or <tt>null</tt> if there is no body.
     *
     * @return the body.
     */
    public String getBody() {
        return element.elementText("body");
    }

    /**
     * Sets the body of this message. A <tt>null</tt> body removes any
     * existing body element.
     *
     * @param body the body.
     */
    public void setBody(String body) {
        Element bodyElement = element.element("body");
        // If body is null, clear the body.
        if (body == null) {
            if (bodyElement != null) {
                element.remove(bodyElement);
            }
            return;
        }
        if (bodyElement == null) {
            bodyElement = element.addElement("body");
        }
        bodyElement.setText(body);
    }

    /**
     * Returns the thread value of this message, an identifier that is used for
     * tracking a conversation thread ("instant messaging session")
     * between two entities. If the thread is not set, <tt>null</tt> will be
     * returned.
     *
     * @return the thread value.
     */
    public String getThread() {
        return element.elementText("thread");
    }

    /**
     * Sets the thread value of this message, an identifier that is used for
     * tracking a conversation thread ("instant messaging session")
     * between two entities.
     *
     * @param thread thread value.
     */
    public void setThread(String thread) {
        Element threadElement = element.element("thread");
        // If thread is null, clear the thread.
        if (thread == null) {
            if (threadElement != null) {
                element.remove(threadElement);
            }
            return;
        }
        if (threadElement == null) {
            threadElement = element.addElement("thread");
        }
        threadElement.setText(thread);
    }

    /**
     * Returns the first child element of this packet that matches the
     * given name and namespace. If no matching element is found,
     * <tt>null</tt> will be returned. This is a convenience method to avoid
     * manipulating this underlying packet's Element instance directly.<p>
     *
     * Child elements in extended namespaces are used to extend the features
     * of XMPP. Examples include a "user is typing" indicator and invitations to
     * group chat rooms. Although any valid XML can be included in a child element
     * in an extended namespace, many common features have been standardized
     * as <a href="http://xmpp.org/extensions/">XMPP Extension Protocols</a>
     * (XEPs).
     *
     * @param name the element name.
     * @param namespace the element namespace.
     * @return the first matching child element, or <tt>null</tt> if there
     *      is no matching child element.
     */
    @SuppressWarnings("unchecked")
    public Element getChildElement(String name, String namespace) {
        for (Iterator<Element> i = element.elementIterator(name); i.hasNext(); ) {
            Element element = i.next();
            if (element.getNamespaceURI().equals(namespace)) {
                return element;
            }
        }
        return null;
    }

    /**
     * Returns all child elements of this packet that match the given name and
     * namespace. This is a convenience method to avoid manipulating this
     * underlying packet's Element instance directly.
     *
     * @param name the element name.
     * @param namespace the element namespace.
     * @return a list of the matching child elements; empty (never null) when
     *      there are no matches.
     */
    @SuppressWarnings("unchecked")
    public List<Element> getChildElements(String name, String namespace) {
        ArrayList<Element> elements = new ArrayList<Element>();
        for (Iterator<Element> i = element.elementIterator(name); i.hasNext(); ) {
            Element element = i.next();
            if (element.getNamespaceURI().equals(namespace)) {
                elements.add(element);
            }
        }
        return elements;
    }

    /**
     * Adds a new child element to this packet with the given name and
     * namespace. The newly created Element is returned. This is a
     * convenience method to avoid manipulating this underlying packet's
     * Element instance directly.<p>
     *
     * Child elements in extended namespaces are used to extend the features
     * of XMPP. Examples include a "user is typing" indicator and invitations to
     * group chat rooms. Although any valid XML can be included in a child element
     * in an extended namespace, many common features have been standardized
     * as <a href="http://xmpp.org/extensions/">XMPP Extension Protocols</a>
     * (XEPs).
     *
     * @param name the element name.
     * @param namespace the element namespace.
     * @return the newly created child element.
     */
    public Element addChildElement(String name, String namespace) {
        return element.addElement(name, namespace);
    }

    /**
     * Returns a deep copy of this Message.
     *
     * @return a deep copy of this Message.
     */
    public Message createCopy() {
        return new Message(this);
    }

    /**
     * Type-safe enumeration for the type of a message. The types are:
     *
     * <ul>
     *      <li>{@link #normal Message.Type.normal} -- (Default) a normal text message
     *          used in email like interface.
     *      <li>{@link #chat Message.Type.chat} -- a typically short text message used
     *          in line-by-line chat interfaces.
     *      <li>{@link #groupchat Message.Type.groupchat} -- a chat message sent to a
     *          groupchat server for group chats.
     *      <li>{@link #headline Message.Type.headline} -- a text message to be displayed
     *          in scrolling marquee displays.
     *      <li>{@link #error Message.Type.error} -- indicates a messaging error.
     * </ul>
     */
    public enum Type {

        /**
         * (Default) a normal text message used in email like interface.
         */
        normal,

        /**
         * Typically short text message used in line-by-line chat interfaces.
         */
        chat,

        /**
         * Chat message sent to a groupchat server for group chats.
         */
        groupchat,

        /**
         * Text message to be displayed in scrolling marquee displays.
         */
        headline,

        /**
         * Indicates a messaging error.
         */
        error;
    }
}
| |
/*
* Licensed to GraphHopper and Peter Karich under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.http;
import com.graphhopper.PathWrapper;
import com.graphhopper.GHRequest;
import com.graphhopper.GHResponse;
import com.graphhopper.GraphHopperAPI;
import com.graphhopper.util.*;
import com.graphhopper.util.shapes.GHPoint;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Main wrapper of the GraphHopper Directions API for a simple and efficient usage.
* <p>
* @author Peter Karich
*/
public class GraphHopperWeb implements GraphHopperAPI
{
    private final Logger logger = LoggerFactory.getLogger(getClass());
    private Downloader downloader = new Downloader("GraphHopper Java Client");
    private String routeServiceUrl = "https://graphhopper.com/api/1/route";
    private String key = "";
    private boolean instructions = true;
    private boolean calcPoints = true;
    private boolean elevation = false;
    // Hint keys (lower-cased) that must NOT be copied verbatim into the URL.
    private final Set<String> ignoreSet;

    public GraphHopperWeb()
    {
        // some parameters are supported directly via Java API so ignore them when writing the getHints map
        ignoreSet = new HashSet<String>();
        ignoreSet.add("calc_points");
        ignoreSet.add("calcpoints");
        ignoreSet.add("instructions");
        ignoreSet.add("elevation");
        ignoreSet.add("key");

        // some parameters are in the request:
        ignoreSet.add("algorithm");
        ignoreSet.add("locale");
        ignoreSet.add("point");
        ignoreSet.add("vehicle");

        // some are special and need to be avoided
        ignoreSet.add("points_encoded");
        ignoreSet.add("pointsencoded");
        ignoreSet.add("type");
    }

    /**
     * Sets the HTTP downloader used to fetch responses from the routing service.
     */
    public void setDownloader( Downloader downloader )
    {
        this.downloader = downloader;
    }

    /**
     * Sets the base URL of the remote routing service.
     *
     * @return always true (the URL is only stored, never validated here)
     */
    @Override
    public boolean load( String serviceUrl )
    {
        this.routeServiceUrl = serviceUrl;
        return true;
    }

    /**
     * Sets the API key sent with every request.
     *
     * @throws IllegalStateException if key is null or empty
     */
    public GraphHopperWeb setKey( String key )
    {
        if (key == null || key.isEmpty())
            throw new IllegalStateException("Key cannot be empty");

        this.key = key;
        return this;
    }

    /**
     * Enables or disables point (geometry) calculation by default.
     */
    public GraphHopperWeb setCalcPoints( boolean calcPoints )
    {
        this.calcPoints = calcPoints;
        return this;
    }

    /**
     * Enables or disables turn instruction calculation by default.
     */
    public GraphHopperWeb setInstructions( boolean b )
    {
        instructions = b;
        return this;
    }

    /**
     * Enables or disables elevation data in returned points by default.
     */
    public GraphHopperWeb setElevation( boolean withElevation )
    {
        this.elevation = withElevation;
        return this;
    }

    /**
     * Builds the request URL, fetches the JSON response and converts it into a
     * GHResponse with one PathWrapper per returned path.
     *
     * @throws RuntimeException wrapping any download/parse failure
     */
    @Override
    public GHResponse route( GHRequest request )
    {
        try
        {
            // Use StringBuilder instead of repeated String concatenation in the
            // loop; the resulting URL text is identical.
            StringBuilder places = new StringBuilder();
            for (GHPoint p : request.getPoints())
            {
                places.append("point=").append(p.lat).append(",").append(p.lon).append("&");
            }

            boolean tmpInstructions = request.getHints().getBool("instructions", instructions);
            boolean tmpCalcPoints = request.getHints().getBool("calcPoints", calcPoints);

            if (tmpInstructions && !tmpCalcPoints)
                throw new IllegalStateException("Cannot calculate instructions without points (only points without instructions). "
                        + "Use calcPoints=false and instructions=false to disable point and instruction calculation");

            boolean tmpElevation = request.getHints().getBool("elevation", elevation);

            StringBuilder url = new StringBuilder(routeServiceUrl)
                    .append("?")
                    .append(places)
                    .append("&type=json")
                    .append("&instructions=").append(tmpInstructions)
                    .append("&points_encoded=true")
                    .append("&calc_points=").append(tmpCalcPoints)
                    .append("&algorithm=").append(request.getAlgorithm())
                    .append("&locale=").append(request.getLocale().toString())
                    .append("&elevation=").append(tmpElevation);

            if (!request.getVehicle().isEmpty())
                url.append("&vehicle=").append(request.getVehicle());

            if (!key.isEmpty())
                url.append("&key=").append(key);

            for (Entry<String, String> entry : request.getHints().toMap().entrySet())
            {
                String urlKey = entry.getKey();
                String urlValue = entry.getValue();

                // use lower case conversion for check only!
                if (ignoreSet.contains(urlKey.toLowerCase()))
                    continue;

                if (urlValue != null && !urlValue.isEmpty())
                    url.append("&").append(WebHelper.encodeURL(urlKey)).append("=").append(WebHelper.encodeURL(urlValue));
            }

            String str = downloader.downloadAsString(url.toString(), true);
            JSONObject json = new JSONObject(str);

            GHResponse res = new GHResponse();
            res.addErrors(readErrors(json));
            if (res.hasErrors())
                return res;

            JSONArray paths = json.getJSONArray("paths");
            for (int index = 0; index < paths.length(); index++)
            {
                JSONObject path = paths.getJSONObject(index);
                PathWrapper altRsp = createPathWrapper(path, tmpCalcPoints, tmpInstructions, tmpElevation);
                res.add(altRsp);
            }

            return res;
        } catch (Exception ex)
        {
            throw new RuntimeException("Problem while fetching path " + request.getPoints() + ": " + ex.getMessage(), ex);
        }
    }

    /**
     * Converts one JSON "paths" entry into a PathWrapper, optionally decoding
     * the encoded polyline and rebuilding the instruction list.
     */
    public static PathWrapper createPathWrapper( JSONObject path,
                                                 boolean tmpCalcPoints, boolean tmpInstructions, boolean tmpElevation )
    {
        PathWrapper altRsp = new PathWrapper();
        altRsp.addErrors(readErrors(path));
        if (altRsp.hasErrors())
            return altRsp;

        double distance = path.getDouble("distance");
        long time = path.getLong("time");
        if (tmpCalcPoints)
        {
            String pointStr = path.getString("points");
            PointList pointList = WebHelper.decodePolyline(pointStr, 100, tmpElevation);
            altRsp.setPoints(pointList);

            if (tmpInstructions)
            {
                JSONArray instrArr = path.getJSONArray("instructions");

                InstructionList il = new InstructionList(null);
                int viaCount = 1;
                for (int instrIndex = 0; instrIndex < instrArr.length(); instrIndex++)
                {
                    JSONObject jsonObj = instrArr.getJSONObject(instrIndex);
                    double instDist = jsonObj.getDouble("distance");
                    String text = jsonObj.getString("text");
                    long instTime = jsonObj.getLong("time");
                    int sign = jsonObj.getInt("sign");
                    JSONArray iv = jsonObj.getJSONArray("interval");
                    int from = iv.getInt(0);
                    int to = iv.getInt(1);
                    // The "interval" bounds are inclusive indices into pointList.
                    PointList instPL = new PointList(to - from, tmpElevation);
                    for (int j = from; j <= to; j++)
                    {
                        instPL.add(pointList, j);
                    }

                    InstructionAnnotation ia = InstructionAnnotation.EMPTY;
                    if (jsonObj.has("annotation_importance") && jsonObj.has("annotation_text"))
                    {
                        ia = new InstructionAnnotation(jsonObj.getInt("annotation_importance"), jsonObj.getString("annotation_text"));
                    }

                    Instruction instr;
                    if (sign == Instruction.USE_ROUNDABOUT || sign == Instruction.LEAVE_ROUNDABOUT)
                    {
                        RoundaboutInstruction ri = new RoundaboutInstruction(sign, text, ia, instPL);

                        if (jsonObj.has("exit_number"))
                        {
                            ri.setExitNumber(jsonObj.getInt("exit_number"));
                        }

                        if (jsonObj.has("turn_angle"))
                        {
                            // TODO provide setTurnAngle setter
                            double angle = jsonObj.getDouble("turn_angle");
                            ri.setDirOfRotation(angle);
                            ri.setRadian((angle < 0 ? -Math.PI : Math.PI) - angle);
                        }

                        instr = ri;
                    } else if (sign == Instruction.REACHED_VIA)
                    {
                        ViaInstruction tmpInstr = new ViaInstruction(text, ia, instPL);
                        tmpInstr.setViaCount(viaCount);
                        viaCount++;
                        instr = tmpInstr;
                    } else if (sign == Instruction.FINISH)
                    {
                        instr = new FinishInstruction(instPL, 0);
                    } else
                    {
                        instr = new Instruction(sign, text, ia, instPL);
                    }

                    // The translation is done from the routing service so just use the provided string
                    // instead of creating a combination with sign and name etc
                    instr.setUseRawName();

                    instr.setDistance(instDist).setTime(instTime);
                    il.add(instr);
                }
                altRsp.setInstructions(il);
            }
        }

        altRsp.setDistance(distance).setTime(time);
        return altRsp;
    }

    /**
     * Extracts server-side errors from a JSON response. Known exception class
     * names in "hints[].details" are mapped back to concrete exception types;
     * anything else becomes a RuntimeException. Returns an empty list when the
     * response carries no "message" field.
     */
    public static List<Throwable> readErrors( JSONObject json )
    {
        List<Throwable> errors = new ArrayList<Throwable>();
        JSONArray errorJson;

        if (json.has("message"))
        {
            if (json.has("hints"))
            {
                errorJson = json.getJSONArray("hints");
            } else
            {
                // should not happen
                errors.add(new RuntimeException(json.getString("message")));
                return errors;
            }
        } else
            return errors;

        for (int i = 0; i < errorJson.length(); i++)
        {
            JSONObject error = errorJson.getJSONObject(i);
            String exClass = "";
            if (error.has("details"))
                exClass = error.getString("details");

            String exMessage = error.getString("message");

            if (exClass.equals(UnsupportedOperationException.class.getName()))
                errors.add(new UnsupportedOperationException(exMessage));
            else if (exClass.equals(IllegalStateException.class.getName()))
                errors.add(new IllegalStateException(exMessage));
            else if (exClass.equals(RuntimeException.class.getName()))
                errors.add(new RuntimeException(exMessage));
            else if (exClass.equals(IllegalArgumentException.class.getName()))
                errors.add(new IllegalArgumentException(exMessage));
            else if (exClass.isEmpty())
                errors.add(new RuntimeException(exMessage));
            else
                errors.add(new RuntimeException(exClass + " " + exMessage));
        }

        if (json.has("message") && errors.isEmpty())
            errors.add(new RuntimeException(json.getString("message")));

        return errors;
    }
}
| |
package buoy.widget;
import buoy.event.*;
import java.awt.*;
import java.util.*;
import javax.swing.*;
/**
* A BDialog is a WidgetContainer corresponding to a dialog window. It may contain up to two child Widgets:
* a BMenuBar, and a single other Widget (usually a WidgetContainer of some sort) which fills the rest
* of the window.
* <p>
* In addition to the event types generated by all Widgets, BDialogs generate the following event types:
* <ul>
* <li>{@link buoy.event.RepaintEvent RepaintEvent}</li>
* <li>{@link buoy.event.WindowActivatedEvent WindowActivatedEvent}</li>
* <li>{@link buoy.event.WindowClosingEvent WindowClosingEvent}</li>
* <li>{@link buoy.event.WindowDeactivatedEvent WindowDeactivatedEvent}</li>
* <li>{@link buoy.event.WindowDeiconifiedEvent WindowDeiconifiedEvent}</li>
* <li>{@link buoy.event.WindowIconifiedEvent WindowIconifiedEvent}</li>
* <li>{@link buoy.event.WindowResizedEvent WindowResizedEvent}</li>
* </ul>
*
* @author Peter Eastman
*/
public class BDialog extends WindowWidget
{
    // The optional menu bar; null when no menu bar has been installed.
    private BMenuBar menubar;

    /**
     * Create a non-modal BDialog with no title or parent window.
     */
    public BDialog()
    {
        component = createComponent(null, null, false);
        initInternal();
    }

    /**
     * Create a non-modal BDialog with no parent window.
     *
     * @param title the title of the dialog
     */
    public BDialog(String title)
    {
        component = createComponent(null, null, false);
        initInternal();
        getComponent().setTitle(title);
    }

    /**
     * Create a new BDialog with no title.
     *
     * @param parent the parent window (a BFrame or BDialog) for this dialog
     * @param modal specifies whether this is a modal dialog
     */
    public BDialog(WindowWidget parent, boolean modal)
    {
        component = createComponent((Window) parent.getComponent(), null, modal);
        setParent(parent);
        initInternal();
    }

    /**
     * Create a new BDialog.
     *
     * @param parent the parent window (a BFrame or BDialog) for this dialog
     * @param title the title of the dialog
     * @param modal specifies whether this is a modal dialog
     */
    public BDialog(WindowWidget parent, String title, boolean modal)
    {
        component = createComponent((Window) parent.getComponent(), title, modal);
        setParent(parent);
        initInternal();
    }

    /**
     * Create the JDialog which serves as this Widget's Component. This method is protected so that
     * subclasses can override it.
     *
     * @param parent the parent JFrame or JDialog (may be null)
     * @param title the title of the dialog (may be null)
     * @param modal specifies whether this is a modal dialog
     * @throws IllegalArgumentException if parent is neither a Dialog nor a Frame
     */
    protected JDialog createComponent(Window parent, String title, boolean modal)
    {
        // Dispatch to the matching JDialog constructor based on the parent's type.
        if (parent instanceof Dialog)
            return new BDialogComponent((Dialog) parent, title, modal);
        else if (parent instanceof Frame)
            return new BDialogComponent((Frame) parent, title, modal);
        else if (parent == null)
            return new BDialogComponent();
        else
            throw new IllegalArgumentException("parent must be a BFrame or BDialog");
    }

    /**
     * Get the JDialog that backs this widget.
     */
    public JDialog getComponent()
    {
        return (JDialog) component;
    }

    /**
     * Perform internal initialization.
     */
    private void initInternal()
    {
        // A null layout manager: layout is driven by layoutChildren() from
        // validate() rather than by an AWT LayoutManager.
        getComponent().getContentPane().setLayout(null);
        // Closing is handled via WindowClosingEvent listeners, so Swing's
        // default close behavior is disabled.
        getComponent().setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE);
    }

    /**
     * Get the number of children in this container.
     */
    public int getChildCount()
    {
        // At most two children: the menu bar and the content widget.
        return ((menubar == null ? 0 : 1) + (content == null ? 0 : 1));
    }

    /**
     * Get a Collection containing all child Widgets of this container.
     */
    public Collection<Widget> getChildren()
    {
        ArrayList<Widget> ls = new ArrayList<Widget>(3);
        if (menubar != null)
            ls.add(menubar);
        if (content != null)
            ls.add(content);
        return ls;
    }

    /**
     * Get the BMenuBar for this window.
     */
    public BMenuBar getMenuBar()
    {
        return menubar;
    }

    /**
     * Set the BMenuBar for this window. Passing null removes the current
     * menu bar; a menu bar already owned by another container is detached
     * from it first.
     */
    public void setMenuBar(BMenuBar menus)
    {
        if (menubar != null)
            remove(menubar);
        if (menus == null)
            return;
        if (menus.getParent() != null)
            menus.getParent().remove(menus);
        menubar = menus;
        getComponent().setJMenuBar((JMenuBar) menubar.getComponent());
        setAsParent(menubar);
    }

    /**
     * Remove a child Widget from this container. Widgets that are not
     * currently a child are silently ignored.
     */
    public void remove(Widget widget)
    {
        if (menubar == widget)
        {
            getComponent().setJMenuBar(null);
            removeAsParent(menubar);
            menubar = null;
        }
        else if (content == widget)
        {
            getComponent().getContentPane().remove(widget.getComponent());
            removeAsParent(content);
            content = null;
        }
    }

    /**
     * Remove all child Widgets from this container.
     */
    public void removeAll()
    {
        if (menubar != null)
            remove(menubar);
        if (content != null)
            remove(content);
    }

    /**
     * Get the title of the dialog.
     */
    public String getTitle()
    {
        return getComponent().getTitle();
    }

    /**
     * Set the title of the dialog.
     */
    public void setTitle(String title)
    {
        getComponent().setTitle(title);
    }

    /**
     * Set whether this dialog is modal. This must be called before the dialog is made visible.
     * It is not possible to change whether a currently visible dialog is modal.
     */
    public void setModal(boolean modal)
    {
        getComponent().setModal(modal);
    }

    /**
     * Determine whether this dialog is modal.
     */
    public boolean isModal()
    {
        return getComponent().isModal();
    }

    /**
     * Determine whether this dialog may be resized by the user.
     */
    public boolean isResizable()
    {
        return getComponent().isResizable();
    }

    /**
     * Set whether this dialog may be resized by the user.
     */
    public void setResizable(boolean resizable)
    {
        getComponent().setResizable(resizable);
    }

    /**
     * Select an appropriate size for the dialog, based on the preferred size of its contents, then re-layout
     * all of its contents. If this is being called for the first time before the dialog has yet been shown,
     * it also centers the dialog relative to its parent (or the screen if it does not have a parent).
     */
    public void pack()
    {
        // A non-displayable component means the dialog has never been shown,
        // so this is the first pack() and the dialog should be centered.
        boolean center = !getComponent().isDisplayable();
        super.pack();
        if (!center)
            return;
        Rectangle bounds = getBounds();
        Widget parent = getParent();
        if (parent == null)
        {
            // No parent: center on the screen.
            Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
            bounds.x = (screenSize.width-bounds.width)/2;
            bounds.y = (screenSize.height-bounds.height)/2;
        }
        else
        {
            // Center relative to the parent window's bounds.
            Rectangle parentBounds = parent.getBounds();
            bounds.x = parentBounds.x+(parentBounds.width-bounds.width)/2;
            bounds.y = parentBounds.y+(parentBounds.height-bounds.height)/2;
        }
        setBounds(new Rectangle(bounds.x, bounds.y, bounds.width, bounds.height));
    }

    /**
     * Get the JRootPane for this Widget's component.
     */
    protected JRootPane getRootPane()
    {
        return ((JDialog) getComponent()).getRootPane();
    }

    /**
     * This is the JDialog subclass which is used as the Component for a BDialog.
     */
    private class BDialogComponent extends JDialog
    {
        public BDialogComponent()
        {
            super();
        }

        public BDialogComponent(Frame parent, String title, boolean modal)
        {
            super(parent, title, modal);
        }

        public BDialogComponent(Dialog parent, String title, boolean modal)
        {
            super(parent, title, modal);
        }

        // NOTE(review): JDialog does not declare a paintComponent method
        // (that is a JComponent method), so Swing's paint pipeline will never
        // invoke this. Verify whether RepaintEvents are actually dispatched
        // for BDialog, or whether this was meant to override paint(Graphics).
        public void paintComponent(Graphics g)
        {
            BDialog.this.dispatchEvent(new RepaintEvent(BDialog.this, (Graphics2D) g));
        }

        // Hooks Buoy's manual layout into AWT validation, and fires a
        // WindowResizedEvent (asynchronously) whenever the size changed.
        public void validate()
        {
            super.validate();
            layoutChildren();
            if (!BDialog.this.getComponent().getSize().equals(lastSize))
            {
                lastSize = BDialog.this.getComponent().getSize();
                // Dispatch later so the event fires after validation completes.
                EventQueue.invokeLater(new Runnable()
                {
                    public void run()
                    {
                        BDialog.this.dispatchEvent(new WindowResizedEvent(BDialog.this));
                    }
                });
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.curator.framework.imps;
import org.apache.curator.RetryLoop;
import org.apache.curator.TimeTrace;
import org.apache.curator.framework.api.ACLPathAndBytesable;
import org.apache.curator.framework.api.BackgroundCallback;
import org.apache.curator.framework.api.BackgroundPathAndBytesable;
import org.apache.curator.framework.api.CuratorEvent;
import org.apache.curator.framework.api.CuratorEventType;
import org.apache.curator.framework.api.PathAndBytesable;
import org.apache.curator.framework.api.SetDataBackgroundVersionable;
import org.apache.curator.framework.api.SetDataBuilder;
import org.apache.curator.framework.api.transaction.CuratorTransactionBridge;
import org.apache.curator.framework.api.transaction.OperationType;
import org.apache.curator.framework.api.transaction.TransactionSetDataBuilder;
import org.apache.zookeeper.AsyncCallback;
import org.apache.zookeeper.Op;
import org.apache.zookeeper.data.Stat;
import java.util.concurrent.Callable;
import java.util.concurrent.Executor;
/**
 * Implementation of {@link SetDataBuilder}: writes data to a ZNode either in
 * the foreground (retried via the client's retry policy) or in the background
 * through the asynchronous ZooKeeper API. An instance is configured once
 * (version / compression / backgrounding) and then executed via forPath().
 * Not thread-safe.
 */
class SetDataBuilderImpl implements SetDataBuilder, BackgroundOperation<PathAndBytes>
{
    private final CuratorFrameworkImpl client;
    private Backgrounding backgrounding;   // foreground by default; replaced by the inBackground() variants
    private int version;                   // expected ZNode version; -1 matches any version
    private boolean compress;              // when true, payload is compressed before writing

    SetDataBuilderImpl(CuratorFrameworkImpl client)
    {
        this.client = client;
        backgrounding = new Backgrounding();
        version = -1;
        compress = false;
    }

    /**
     * Adapts this builder for use inside a Curator transaction: forPath() adds
     * a setData Op to the transaction record instead of executing immediately.
     *
     * <p>NOTE(review): withVersion()/compressed() on the returned builder
     * mutate the OUTER builder's fields, so settings are shared with any other
     * use of this instance — confirm this sharing is intended.</p>
     */
    TransactionSetDataBuilder asTransactionSetDataBuilder(final CuratorTransactionImpl curatorTransaction, final CuratorMultiTransactionRecord transaction)
    {
        return new TransactionSetDataBuilder()
        {
            @Override
            public CuratorTransactionBridge forPath(String path, byte[] data) throws Exception
            {
                if ( compress )
                {
                    data = client.getCompressionProvider().compress(path, data);
                }
                // Record the operation; it executes when the transaction commits.
                // The original (un-fixed) path is kept for error reporting.
                String fixedPath = client.fixForNamespace(path);
                transaction.add(Op.setData(fixedPath, data, version), OperationType.SET_DATA, path);
                return curatorTransaction;
            }

            @Override
            public CuratorTransactionBridge forPath(String path) throws Exception
            {
                // No payload given: fall back to the client's default data.
                return forPath(path, client.getDefaultData());
            }

            @Override
            public PathAndBytesable<CuratorTransactionBridge> withVersion(int version)
            {
                SetDataBuilderImpl.this.withVersion(version);
                return this;
            }

            @Override
            public PathAndBytesable<CuratorTransactionBridge> compressed() {
                compress = true;
                return this;
            }
        };
    }

    /**
     * Enables compression and returns a narrowed view of this builder (without
     * compressed()). Every method below simply delegates back to this
     * instance.
     */
    @Override
    public SetDataBackgroundVersionable compressed()
    {
        compress = true;
        return new SetDataBackgroundVersionable()
        {
            @Override
            public PathAndBytesable<Stat> inBackground()
            {
                return SetDataBuilderImpl.this.inBackground();
            }

            @Override
            public PathAndBytesable<Stat> inBackground(BackgroundCallback callback, Object context)
            {
                return SetDataBuilderImpl.this.inBackground(callback, context);
            }

            @Override
            public PathAndBytesable<Stat> inBackground(BackgroundCallback callback, Object context, Executor executor)
            {
                return SetDataBuilderImpl.this.inBackground(callback, context, executor);
            }

            @Override
            public PathAndBytesable<Stat> inBackground(Object context)
            {
                return SetDataBuilderImpl.this.inBackground(context);
            }

            @Override
            public PathAndBytesable<Stat> inBackground(BackgroundCallback callback)
            {
                return SetDataBuilderImpl.this.inBackground(callback);
            }

            @Override
            public PathAndBytesable<Stat> inBackground(BackgroundCallback callback, Executor executor)
            {
                return SetDataBuilderImpl.this.inBackground(callback, executor);
            }

            @Override
            public Stat forPath(String path, byte[] data) throws Exception
            {
                return SetDataBuilderImpl.this.forPath(path, data);
            }

            @Override
            public Stat forPath(String path) throws Exception
            {
                return SetDataBuilderImpl.this.forPath(path);
            }

            @Override
            public BackgroundPathAndBytesable<Stat> withVersion(int version)
            {
                return SetDataBuilderImpl.this.withVersion(version);
            }
        };
    }

    @Override
    public BackgroundPathAndBytesable<Stat> withVersion(int version)
    {
        this.version = version;
        return this;
    }

    // The inBackground() overloads below each replace the Backgrounding
    // configuration wholesale; only the last call before forPath() matters.

    @Override
    public PathAndBytesable<Stat> inBackground(BackgroundCallback callback, Object context)
    {
        backgrounding = new Backgrounding(callback, context);
        return this;
    }

    @Override
    public PathAndBytesable<Stat> inBackground(BackgroundCallback callback, Object context, Executor executor)
    {
        backgrounding = new Backgrounding(client, callback, context, executor);
        return this;
    }

    @Override
    public PathAndBytesable<Stat> inBackground(BackgroundCallback callback)
    {
        backgrounding = new Backgrounding(callback);
        return this;
    }

    @Override
    public PathAndBytesable<Stat> inBackground()
    {
        backgrounding = new Backgrounding(true);
        return this;
    }

    @Override
    public PathAndBytesable<Stat> inBackground(Object context)
    {
        backgrounding = new Backgrounding(context);
        return this;
    }

    @Override
    public PathAndBytesable<Stat> inBackground(BackgroundCallback callback, Executor executor)
    {
        backgrounding = new Backgrounding(client, callback, executor);
        return this;
    }

    /**
     * Invoked by the Curator background machinery: performs the setData
     * asynchronously. The completion callback commits the trace and forwards a
     * SET_DATA event (carrying the result code and Stat) to the client for
     * dispatch to user callbacks.
     */
    @Override
    public void performBackgroundOperation(final OperationAndData<PathAndBytes> operationAndData) throws Exception
    {
        final TimeTrace trace = client.getZookeeperClient().startTracer("SetDataBuilderImpl-Background");
        client.getZooKeeper().setData
        (
            operationAndData.getData().getPath(),
            operationAndData.getData().getData(),
            version,
            new AsyncCallback.StatCallback()
            {
                @SuppressWarnings({"unchecked"})
                @Override
                public void processResult(int rc, String path, Object ctx, Stat stat)
                {
                    trace.commit();
                    CuratorEvent event = new CuratorEventImpl(client, CuratorEventType.SET_DATA, rc, path, null, ctx, stat, null, null, null, null);
                    client.processBackgroundOperation(operationAndData, event);
                }
            },
            backgrounding.getContext()
        );
    }

    @Override
    public Stat forPath(String path) throws Exception
    {
        // No payload given: fall back to the client's default data.
        return forPath(path, client.getDefaultData());
    }

    /**
     * Sets the data for the given path (compressing and namespacing it first).
     *
     * @return the resulting Stat for foreground calls; null when the operation
     *         was queued for background execution
     */
    @Override
    public Stat forPath(String path, byte[] data) throws Exception
    {
        if ( compress )
        {
            data = client.getCompressionProvider().compress(path, data);
        }
        path = client.fixForNamespace(path);

        Stat resultStat = null;
        if ( backgrounding.inBackground() )
        {
            // Queue for async execution; result arrives via the callback.
            client.processBackgroundOperation(new OperationAndData<PathAndBytes>(this, new PathAndBytes(path, data), backgrounding.getCallback(), null, backgrounding.getContext()), null);
        }
        else
        {
            resultStat = pathInForeground(path, data);
        }
        return resultStat;
    }

    // Package-private accessor for collaborators in this package.
    int getVersion()
    {
        return version;
    }

    /**
     * Performs the setData synchronously, retrying per the client's retry
     * policy. The trace is committed only when the call ultimately succeeds.
     */
    private Stat pathInForeground(final String path, final byte[] data) throws Exception
    {
        TimeTrace trace = client.getZookeeperClient().startTracer("SetDataBuilderImpl-Foreground");
        Stat resultStat = RetryLoop.callWithRetry
        (
            client.getZookeeperClient(),
            new Callable<Stat>()
            {
                @Override
                public Stat call() throws Exception
                {
                    return client.getZooKeeper().setData(path, data, version);
                }
            }
        );
        trace.commit();
        return resultStat;
    }
}
| |
package org.act.tstream.daemon.worker.metrics;
import java.util.List;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.Map;
import java.util.Map.Entry;
import java.util.HashMap;
import org.apache.log4j.Logger;
import org.act.tstream.daemon.nimbus.NimbusData;
import org.act.tstream.task.heartbeat.TaskHeartbeat;
import org.act.tstream.client.ConfigExtension;
import org.act.tstream.cluster.StormBase;
import org.act.tstream.cluster.StormClusterState;
import org.act.tstream.stats.CommonStatsData;
import org.act.tstream.task.TaskInfo;
import org.act.tstream.metric.MetricDef;
import org.act.tstream.metric.UserDefMetricData;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Metric;
/**
 * Periodic task that reads common statistics and user-defined metrics from
 * ZooKeeper and pushes them to a remote monitor through a
 * {@link MetricSendClient}. Not thread-safe; intended to be driven by a
 * single scheduler thread.
 */
public class UploadMetricFromZK implements Runnable {

    private static final Logger LOG = Logger.getLogger(UploadMetricFromZK.class);

    private NimbusData data;
    private StormClusterState clusterState;
    private MetricSendClient client;
    // Per-topology statistics cache, keyed by topology id.
    private Map<String, TopoCommStatsInfo> topologyMap;

    public UploadMetricFromZK(NimbusData data, MetricSendClient client) {
        this.data = data;
        this.client = client;
        clusterState = data.getStormClusterState();
        topologyMap = new HashMap<String, TopoCommStatsInfo>();
    }

    @Override
    public void run() {
        uploadCommStats();
        uploadUseDefMetric(clusterState);
    }

    /**
     * Removes topologies from the local cache that are no longer active.
     *
     * @return true when the active topology list could be read from ZK,
     *         false otherwise
     */
    private boolean rmObsoleteTopo() {
        boolean ret = true;
        List<String> obsoleteTopos = new ArrayList<String>();
        try {
            List<String> topologys = clusterState.active_storms();

            for (Entry<String, TopoCommStatsInfo> entry : topologyMap.entrySet()) {
                if (topologys.contains(entry.getKey()) == false) {
                    obsoleteTopos.add(entry.getKey());
                }
            }

            for (String topo : obsoleteTopos) {
                topologyMap.remove(topo);
            }
        } catch (Exception e) {
            // Fixed log-message typo ("Faild" -> "Failed").
            LOG.warn("Failed to update topology list.", e);
            ret = false;
        }
        return ret;
    }

    /**
     * Collects the common (spout/bolt/topology level) statistics of every
     * heartbeating topology and sends them to the monitor server in one batch.
     */
    private void uploadCommStats() {
        // Remove obsolete topology firstly. New topologies will be
        // added when uploading the common statistic data.
        rmObsoleteTopo();

        List<Map<String, Object>> listMapMsg = new ArrayList<Map<String, Object>>();

        try {
            TopoCommStatsInfo ret;
            List<String> topologys = clusterState.heartbeat_storms();

            for (String topologyId : topologys) {
                if (topologyMap.containsKey(topologyId) == false) {
                    StormBase base = clusterState.storm_base(topologyId, null);
                    if (base == null) {
                        topologyMap.remove(topologyId);
                        continue;
                    } else {
                        topologyMap.put(topologyId, new TopoCommStatsInfo(topologyId, base.getStormName()));
                    }
                }
                // Get common statistic data from taskbeats in ZK for a topology
                ret = getCommStatsData(topologyId);

                if (ret != null) {
                    // Build topology, spout and bolt statistic data from the
                    // statistic data of all tasks
                    buildCommStatsData(ret);
                    // Build statistic data message for the remote monitor server
                    buildComonSendMsg(ret, listMapMsg);
                }
            }

            if (listMapMsg.size() > 0) {
                // Send statistic data to remote monitor server
                sendCommStatsData(listMapMsg);
            }
        } catch (Exception e) {
            LOG.warn("Failed to upload comm statistic data to Alimonitor.", e);
        }
    }

    /**
     * Aggregates the user-defined metrics (gauge/counter/histogram/timer/meter)
     * of every active topology and sends them to the monitor server.
     *
     * @param clusterState cluster state accessor to read metrics from
     */
    public void uploadUseDefMetric(StormClusterState clusterState) {
        try {
            List<String> active_topologys = clusterState.active_storms();
            if (active_topologys == null) {
                return;
            }

            Map<String, Object> totalMsg = new HashMap<String, Object>();

            for (String topologyId : active_topologys) {
                // (Removed unused local map "compont_metrics".)
                List<String> workerIds = clusterState.monitor_user_workers(topologyId);
                if (workerIds == null)
                    continue;

                MetricKVMsg topologyMetricMsg = new MetricKVMsg();
                for (String workerId : workerIds) {
                    UserDefMetricData useWorkDefMetric = clusterState.get_userDef_metric(topologyId, workerId);
                    // Accumulate each worker's metrics into the topology message
                    topologyMetricMsg.countGangeMetric(useWorkDefMetric.getGaugeDataMap());
                    topologyMetricMsg.countCounterMetric(useWorkDefMetric.getCounterDataMap());
                    topologyMetricMsg.countHistogramMetric(useWorkDefMetric.getHistogramDataMap());
                    topologyMetricMsg.countTimerMetric(useWorkDefMetric.getTimerDataMap());
                    topologyMetricMsg.countMeterMetric(useWorkDefMetric.getMeterDataMap());
                }
                topologyMetricMsg.calcAvgTimer();
                topologyMetricMsg.emptyCountMap();
                Map<String, Object> ret = topologyMetricMsg.convertToKVMap();
                if (ret.size() > 0) totalMsg.putAll(ret);
            }

            if (totalMsg.size() > 0) {
                // For Alimonitor Client only
                if (client instanceof AlimonitorClient) {
                    ((AlimonitorClient) client).setMonitorName(
                            ConfigExtension.getAlmonUserMetricName(data.getConf()));
                    ((AlimonitorClient) client).setCollectionFlag(0);
                    ((AlimonitorClient) client).setErrorInfo("");
                }
                client.send(totalMsg);
            }
        } catch (Exception e) {
            LOG.warn("Failed to upload user define metric data", e);
        }
    }

    public void clean() {
    }

    /**
     * Reads the task heartbeats of one topology from ZK and groups them into
     * the cached {@link TopoCommStatsInfo} by component (spout vs. bolt).
     *
     * @return the populated statistics holder, or null when there is nothing
     *         to report (no heartbeats, missing cache entry, or a ZK error)
     */
    private TopoCommStatsInfo getCommStatsData(String topologyId) {
        try {
            String taskId;
            String componentId;
            TaskHeartbeat taskHb;

            TopoCommStatsInfo commStatsInfo = topologyMap.get(topologyId);
            if (commStatsInfo == null) {
                // Guard: previously this fell through and dereferenced the
                // null below, producing an NPE swallowed by the catch block.
                LOG.warn("commStatsInfo is null, topoId=" + topologyId);
                return null;
            }

            Map<String, TaskHeartbeat> heartbeats = clusterState.task_heartbeat(topologyId);
            if (heartbeats == null || heartbeats.size() == 0) return null;

            for (Entry<String, TaskHeartbeat> entry : heartbeats.entrySet()) {
                taskId = entry.getKey();
                taskHb = entry.getValue();

                TaskInfo taskInfo = clusterState.task_info(topologyId, Integer.parseInt(taskId));
                if (taskInfo == null) {
                    LOG.warn("Task information can not be found in ZK for task-" + taskId);
                    continue;
                }
                componentId = taskInfo.getComponentId();

                // Update taskHb into the corresponding component map
                if (taskHb.getComponentType().equals("spout")) {
                    commStatsInfo.addToSpoutList(componentId, taskId, taskHb);
                } else {
                    commStatsInfo.addToBoltList(componentId, taskId, taskHb);
                }
            }
            return commStatsInfo;
        } catch (Exception e) {
            LOG.warn("getCommStatsData, failed to read data from ZK.", e);
            return null;
        }
    }

    // Roll the per-task data up to bolt, spout, and topology level.
    private void buildCommStatsData(TopoCommStatsInfo commStatsInfo) {
        commStatsInfo.buildBoltStatsData();
        commStatsInfo.buildSpoutStatsData();
        commStatsInfo.buildTopoStatsData();
    }

    /** Sends the batched common-statistics messages to the monitor server. */
    private void sendCommStatsData(List<Map<String, Object>> listMapMsg) {
        try {
            // For Alimonitor Client only
            if (client instanceof AlimonitorClient) {
                ((AlimonitorClient) client).setMonitorName(
                        ConfigExtension.getAlmonTopoMetricName(data.getConf()));
                ((AlimonitorClient) client).setCollectionFlag(0);
                ((AlimonitorClient) client).setErrorInfo("");
            }
            client.send(listMapMsg);
        } catch (Exception e) {
            LOG.warn("Error when sending common statistic data.", e);
        }
    }

    /**
     * Converts the topology/spout/bolt statistics of one topology into
     * key-value messages and appends them to {@code listMapMsg}.
     */
    private void buildComonSendMsg(TopoCommStatsInfo commStatsInfo, List<Map<String, Object>> listMapMsg) {
        String topologyName = commStatsInfo.getTopoName();
        Map<String, Object> jsonMsg;

        try {
            // Build topology statistic data
            TopoCommStatsInfo.CommStatsData topoStatsData = commStatsInfo.getTopoStatsData();
            jsonMsg = topoStatsData.convertToKVMap(topologyName, topologyName);
            listMapMsg.add(jsonMsg);

            // Build spout statistic data
            Map<String, TopoCommStatsInfo.CommStatsData> spoutStatsData = commStatsInfo.getSpoutStatsData();
            for (Entry<String, TopoCommStatsInfo.CommStatsData> entry : spoutStatsData.entrySet()) {
                String componentId = entry.getKey();
                jsonMsg = entry.getValue().convertToKVMap(topologyName, componentId);
                listMapMsg.add(jsonMsg);
            }

            // Build bolt statistic data
            Map<String, TopoCommStatsInfo.CommStatsData> boltStatsData = commStatsInfo.getBoltStatsData();
            for (Entry<String, TopoCommStatsInfo.CommStatsData> entry : boltStatsData.entrySet()) {
                String componentId = entry.getKey();
                jsonMsg = entry.getValue().convertToKVMap(topologyName, componentId);
                listMapMsg.add(jsonMsg);
            }
        } catch (Exception e) {
            // Fixed log-message typo ("bulding" -> "building").
            LOG.warn("Error when building common statistic data message.", e);
        }
    }
}
| |
package edu.ou.weinmann.repsi.controller.script;
import edu.ou.weinmann.repsi.model.calibration.Calibration;
import edu.ou.weinmann.repsi.model.database.Database;
import edu.ou.weinmann.repsi.model.trial.Trial;
import edu.ou.weinmann.repsi.model.util.Global;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
/**
* Command line interface to the REPSI tool. The interface is based on the
* Jakarta Commons CLI library which simplifies the handling of command line
* parameters. The main parameter is called <code>mode</code> and this
* parameter determines the functionality to be executed:
* <ul>
* <li><code>calibration</code> perform a calibration run.</li>
* <li><code>master</code> modify the schema or instance of the master
* database.</li>
* <li><code>result_cn</code> export the results of a calibration run into an
* Excel file.</li>
 * <li><code>result_r</code> export the results of a calibration run into
 * R-compatible files.</li>
* <li><code>result_tl</code> export the results of one or all trial runs
* into an Excel file.</li>
 * <li><code>test</code> modify the schema or instance of a test database.</li>
 * <li><code>trial</code> perform a trial run.</li>
* </ul>
* <br>
* Further details can be found in the document 'REPSI Tool User Manual'.
*
* @author Walter Weinmann
*
*/
public final class Main {
// ---------------------------------------------------------------------
// Logging and shared error-message fragments
// ---------------------------------------------------------------------

/** Package-level logger used for all option-validation errors. */
private static final Logger LOGGER =
        Logger.getLogger(Main.class.getPackage().getName());

private static final String MSG_ARGUMENT_MUST_BE_AN_INTEGER =
        ": argument must be an integer";

private static final String MSG_OPTION_ALLOWS_NO_ARGUMENT =
        ": option allows no argument";

private static final String MSG_OPTION_IS_MANDATORY_WITH_MODE =
        ": option is mandatory with mode=";

private static final String MSG_OPTION_NOT_ALLOWED_WITH_MODE =
        ": option not allowed with mode=";

private static final String MSG_OPTION_REQUIRES_AN_ARGUMENT =
        ": option requires an argument";

// ---------------------------------------------------------------------
// Calibration object types (values of the -obj option)
// ---------------------------------------------------------------------

private static final String OBJECT_TYPE_NANOTIME = "nanotime";

private static final String OBJECT_TYPE_QUERY = "query";

/** Prefix used by every option-related log message. */
private static final String OPTION = "Option ";

// ---------------------------------------------------------------------
// Option codes (short command-line names) and their display names
// ---------------------------------------------------------------------

private static final String OPTION_CYC_CODE = "cyc";

private static final String OPTION_CYC_NAME = "number of cycles to run";

private static final String OPTION_DES_CODE = "des";

private static final String OPTION_DES_NAME = "description of the run";

private static final String OPTION_DI_CODE = "di";

private static final String OPTION_DI_NAME =
        "database instance (identification)";

private static final String OPTION_EFN_CODE = "efn";

private static final String OPTION_EFN_NAME = "name of the Excel file";

private static final String OPTION_EFNALL_CODE = "efnall";

private static final String OPTION_EFNALL_NAME =
        "all trial runs into the Excel file";

private static final String OPTION_EXALT_CODE = "exalt";

private static final String OPTION_EXALT_NAME =
        "execute the queries alternating without and with the application of the pattern";

private static final String OPTION_EXCON_CODE = "excon";

private static final String OPTION_EXCON_NAME =
        "execute first the query without application of the pattern and then the query with application of the pattern";

private static final String OPTION_FN_CODE = "fn";

private static final String OPTION_FN_NAME =
        "name of the file which contains DDL / DML statements";

private static final String OPTION_FNEXCEL_NAME =
        "file contains an Excel file";

private static final String OPTION_FNXML_NAME =
        "file contains an XML document";

private static final String OPTION_FS_CODE = "fs";

private static final String OPTION_FS_NAME = "fetch size";

private static final String OPTION_IDN_CODE = "idn";

private static final String OPTION_IDN_NAME = "name of the input directory";

private static final String OPTION_IGN1_CODE = "ign1";

private static final String OPTION_IGN1_NAME = "ignore the first reading";

// Processing modes (values of the -mode option).
private static final String OPTION_MODE_CALIBRATION = "calibration";

private static final String OPTION_MODE_CODE = "mode";

private static final String OPTION_MODE_MASTER = "master";

private static final String OPTION_MODE_NAME =
        "processing mode (calibration/master/result_cn/result_tl/test/trial)";

private static final String OPTION_MODE_RESULT_CN = "result_cn";

// NOTE(review): value is "result_R" while the usage text documents
// "result_r" — confirm which case the command line is expected to use.
private static final String OPTION_MODE_RESULT_R = "result_R";

private static final String OPTION_MODE_RESULT_TL = "result_tl";

private static final String OPTION_MODE_TEST = "test";

private static final String OPTION_MODE_TRIAL = "trial";

private static final String OPTION_OBJ_CODE = "obj";

private static final String OPTION_OBJ_NAME = "object to calibrate";

private static final String OPTION_ODN_CODE = "odn";

private static final String OPTION_ODN_NAME =
        "name of the output directory";

private static final String OPTION_PF_CODE = "pf";

private static final String OPTION_PF_NAME = "name of the properties file";

private static final String OPTION_PFXML_CODE = "pfxml";

private static final String OPTION_PFXML_NAME =
        "properties file contains an XML document";

private static final String OPTION_PREC_CODE = "prec";

private static final String OPTION_PREC_NAME =
        "exponent (base 10) of the time precision: 0 (nanosecond), 3 (microsecond), ...";

private static final String OPTION_TQP_CODE = "tqp";

private static final String OPTION_TQP_NAME =
        "test query pair (identification)";

private static final String OPTION_TS_CODE = "ts";

private static final String OPTION_TS_NAME = "test suite (identification)";

private static final String OPTION_VERB_CODE = "verb";

private static final String OPTION_VERB_NAME =
        "print a statistical overview";

// Exit codes.
private static final int STATUS_ERROR = 1;

private static final int STATUS_OK = 0;

// ---------------------------------------------------------------------
// Parsed argument values, populated by the checkOption* methods
// ---------------------------------------------------------------------

private static int argumentCyc;

private static int argumentDi;

private static String argumentDes;

private static String argumentEfn;

private static String[] argumentFn;

private static int argumentFs;

private static String argumentIdn;

private static String argumentMode;

private static String argumentObj;

private static String argumentOdn;

private static String argumentPf;

private static long argumentPrec;

private static int argumentTqp;

private static int argumentTs;

private static Database database;

// Boolean flags corresponding to argument-less options.
private static boolean isEfnall;

private static boolean isExalt;

private static boolean[] isFnxls;

private static boolean[] isFnxml;

private static boolean isExcon;

private static boolean isIgn1;

private static boolean isPfxml;

private static boolean isVerbose;
/**
 * Private constructor: this class is a static command-line entry point and
 * is never instantiated.
 */
private Main() {
    super();
}
/**
 * Validates every command-line option against the selected processing mode.
 *
 * @param cmdLine the parsed command line
 * @return {@code STATUS_OK} when all options are valid,
 *         {@code STATUS_ERROR} otherwise
 */
private static int checkCmdLineOptions(final CommandLine cmdLine) {
    // Short-circuit evaluation preserves the original check order: the
    // first failing validator logs its error and stops the chain.
    final boolean allValid = checkOptionMode(cmdLine, OPTION_MODE_CODE)
            && checkOptionObj(cmdLine, OPTION_OBJ_CODE)
            && checkOptionCyc(cmdLine, OPTION_CYC_CODE)
            && checkOptionDes(cmdLine, OPTION_DES_CODE)
            && checkOptionDi(cmdLine, OPTION_DI_CODE)
            && checkOptionEfn(cmdLine, OPTION_EFN_CODE)
            && checkOptionEfnall(cmdLine, OPTION_EFNALL_CODE)
            && checkOptionExalt(cmdLine, OPTION_EXALT_CODE)
            && checkOptionExcon(cmdLine, OPTION_EXCON_CODE)
            && checkOptionFn(cmdLine)
            && checkOptionFs(cmdLine, OPTION_FS_CODE)
            && checkOptionIdn(cmdLine, OPTION_IDN_CODE)
            && checkOptionIgn1(cmdLine, OPTION_IGN1_CODE)
            && checkOptionOdn(cmdLine, OPTION_ODN_CODE)
            && checkOptionPf(cmdLine, OPTION_PF_CODE)
            && checkOptionPfxml(cmdLine, OPTION_PFXML_CODE)
            && checkOptionPrec(cmdLine, OPTION_PREC_CODE)
            && checkOptionTqp(cmdLine, OPTION_TQP_CODE)
            && checkOptionTs(cmdLine, OPTION_TS_CODE)
            && checkOptionVerb(cmdLine, OPTION_VERB_CODE);

    return allValid ? STATUS_OK : STATUS_ERROR;
}
/**
 * Validates the <code>cyc</code> option (number of cycles). Defaults to 50
 * for calibration mode when absent; allowed only with calibration or trial
 * mode; must be a positive integer, and below 1000 for calibration runs.
 *
 * @return true when the option is absent or valid
 */
private static boolean checkOptionCyc(final CommandLine cmdLine,
        final String optionCode) {
    if (!cmdLine.hasOption(optionCode)) {
        // Calibration runs default to 50 cycles.
        if (OPTION_MODE_CALIBRATION.equals(argumentMode)) {
            argumentCyc = 50;
        }
        return true;
    }

    final boolean modeAllowed = OPTION_MODE_CALIBRATION.equals(argumentMode)
            || OPTION_MODE_TRIAL.equals(argumentMode);
    if (!modeAllowed) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
        return false;
    }

    final String rawCycles = cmdLine.getOptionValue(OPTION_CYC_CODE);
    if (rawCycles == null) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_OPTION_REQUIRES_AN_ARGUMENT);
        return false;
    }

    try {
        argumentCyc = Integer.parseInt(rawCycles);
    } catch (NumberFormatException e) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_ARGUMENT_MUST_BE_AN_INTEGER);
        return false;
    }

    if (argumentCyc < 1) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + ": argument must be greater than zero");
        return false;
    }
    if (OPTION_MODE_CALIBRATION.equals(argumentMode) && argumentCyc > 999) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + ": argument must be less than 1000");
        return false;
    }
    return true;
}
/**
 * Validates the <code>des</code> option (run description). Allowed only
 * with calibration or trial mode and requires a value.
 *
 * @return true when the option is absent or valid
 */
private static boolean checkOptionDes(final CommandLine cmdLine,
        final String optionCode) {
    // Absent option: nothing to validate.
    if (!cmdLine.hasOption(optionCode)) {
        return true;
    }

    final boolean modeAllowed = OPTION_MODE_CALIBRATION.equals(argumentMode)
            || OPTION_MODE_TRIAL.equals(argumentMode);
    if (!modeAllowed) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
        return false;
    }

    argumentDes = cmdLine.getOptionValue(optionCode);
    if (argumentDes == null) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_OPTION_REQUIRES_AN_ARGUMENT);
        return false;
    }
    return true;
}
/**
 * Validates the <code>di</code> option (database instance id). Mandatory
 * for calibration, test, and trial modes; forbidden for all other modes;
 * its value must parse as an integer.
 *
 * @return true when the option is consistent with the current mode
 */
private static boolean checkOptionDi(final CommandLine cmdLine,
        final String optionCode) {
    // The set of modes that require the option equals the set that allows it.
    final boolean modeUsesDi = OPTION_MODE_CALIBRATION.equals(argumentMode)
            || OPTION_MODE_TEST.equals(argumentMode)
            || OPTION_MODE_TRIAL.equals(argumentMode);

    if (!cmdLine.hasOption(optionCode)) {
        if (modeUsesDi) {
            LOGGER.log(Level.SEVERE, OPTION + optionCode
                    + MSG_OPTION_IS_MANDATORY_WITH_MODE + argumentMode);
            return false;
        }
        return true;
    }

    if (!modeUsesDi) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
        return false;
    }

    final String rawInstanceId = cmdLine.getOptionValue(optionCode);
    if (rawInstanceId == null) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_OPTION_REQUIRES_AN_ARGUMENT);
        return false;
    }

    try {
        argumentDi = Integer.parseInt(rawInstanceId);
    } catch (NumberFormatException e) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_ARGUMENT_MUST_BE_AN_INTEGER);
        return false;
    }
    return true;
}
/**
 * Validates the <code>efn</code> option (Excel output file). Supplies a
 * mode-specific default when absent; allowed only with the result_cn and
 * result_tl modes.
 *
 * @return true when the option is absent or valid
 */
private static boolean checkOptionEfn(final CommandLine cmdLine,
        final String optionCode) {
    final boolean isResultCn = OPTION_MODE_RESULT_CN.equals(argumentMode);
    final boolean isResultTl = OPTION_MODE_RESULT_TL.equals(argumentMode);

    if (!cmdLine.hasOption(optionCode)) {
        // Fall back to the per-mode default output file.
        if (isResultCn) {
            argumentEfn = "out/CalibrationData.xls";
        } else if (isResultTl) {
            argumentEfn = "out/TrialRunData.xls";
        }
        return true;
    }

    if (!isResultCn && !isResultTl) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
        return false;
    }

    argumentEfn = cmdLine.getOptionValue(optionCode);
    if (argumentEfn == null) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_OPTION_REQUIRES_AN_ARGUMENT);
        return false;
    }
    return true;
}
/**
 * Validates the <code>efnall</code> flag (export all trial runs). A pure
 * flag: presence alone enables it; only allowed with result_tl mode and
 * must not carry an argument.
 *
 * @return true when the flag is absent or valid
 */
private static boolean checkOptionEfnall(final CommandLine cmdLine,
        final String optionCode) {
    isEfnall = cmdLine.hasOption(optionCode);
    if (isEfnall) {
        if (!OPTION_MODE_RESULT_TL.equals(argumentMode)) {
            LOGGER.log(Level.SEVERE, OPTION + optionCode
                    + MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
            return false;
        }
        if (cmdLine.getOptionValue(optionCode) != null) {
            LOGGER.log(Level.SEVERE, OPTION + optionCode
                    + MSG_OPTION_ALLOWS_NO_ARGUMENT);
            return false;
        }
    }
    return true;
}
/**
 * Validates the <code>exalt</code> flag (alternate query execution). Only
 * allowed for calibration mode with obj=query, and must carry no argument.
 *
 * @return true when the flag is absent or valid
 */
private static boolean checkOptionExalt(final CommandLine cmdLine,
        final String optionCode) {
    isExalt = cmdLine.hasOption(optionCode);
    if (isExalt) {
        if (!OPTION_MODE_CALIBRATION.equals(argumentMode)) {
            LOGGER.log(Level.SEVERE, OPTION + optionCode
                    + MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
            return false;
        }
        if (!OBJECT_TYPE_QUERY.equals(argumentObj)) {
            LOGGER.log(Level.SEVERE, OPTION + optionCode
                    + ": option not allowed with obj=" + argumentObj);
            return false;
        }
        if (cmdLine.getOptionValue(optionCode) != null) {
            LOGGER.log(Level.SEVERE, OPTION + optionCode
                    + MSG_OPTION_ALLOWS_NO_ARGUMENT);
            return false;
        }
    }
    return true;
}
/**
 * Validates the <code>excon</code> flag (consecutive query execution).
 * When calibrating queries, either <code>exalt</code> or <code>excon</code>
 * must be present; when set, the flag is only allowed for calibration mode
 * with obj=query and must carry no argument.
 *
 * @return true when the flag is valid or legitimately absent
 */
private static boolean checkOptionExcon(final CommandLine cmdLine,
        final String optionCode) {
    isExcon = cmdLine.hasOption(optionCode);

    if (!isExcon) {
        // Calibrating queries needs one of the two execution strategies.
        final boolean strategyMissing = !isExalt
                && OPTION_MODE_CALIBRATION.equals(argumentMode)
                && OBJECT_TYPE_QUERY.equals(argumentObj);
        if (strategyMissing) {
            LOGGER.log(Level.SEVERE, OPTION + optionCode
                    + ": option exalt or excon mandatory with mode="
                    + argumentMode + " and obj=" + argumentObj);
            return false;
        }
        return true;
    }

    if (!OPTION_MODE_CALIBRATION.equals(argumentMode)) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
        return false;
    }
    if (!OBJECT_TYPE_QUERY.equals(argumentObj)) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + ": option not allowed with obj=" + argumentObj);
        return false;
    }
    if (cmdLine.getOptionValue(optionCode) != null) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_OPTION_ALLOWS_NO_ARGUMENT);
        return false;
    }
    return true;
}
/**
 * Validates the numbered file-name options (<code>fn0</code>,
 * <code>fn1</code>, ...) together with their companion format flags
 * <code>fn&lt;i&gt;xls</code> (Excel) and <code>fn&lt;i&gt;xml</code> (XML).
 * The options are only allowed — and at least one occurrence is required —
 * when mode is <code>master</code> or <code>test</code>; for each index at
 * most one of the two format flags may be given.
 *
 * @param parCmdLine the parsed command line
 * @return true when all fn-related options are consistent
 */
private static boolean checkOptionFn(final CommandLine parCmdLine) {
    int lvNumber = 0;

    for (int i = 0; i < argumentFn.length; i++) {
        // Skip indices for which none of fn<i>, fn<i>xls, fn<i>xml was given.
        if (!parCmdLine.hasOption(OPTION_FN_CODE + i)
                && !parCmdLine.hasOption(OPTION_FN_CODE + i
                        + Global.FILE_TYPE_EXCEL)
                && !parCmdLine.hasOption(OPTION_FN_CODE + i
                        + Global.FILE_TYPE_XML)) {
            continue;
        }

        // fn options are only valid in the master/test maintenance modes;
        // report whichever of the three variants was actually supplied.
        if (!(OPTION_MODE_MASTER.equals(argumentMode) || OPTION_MODE_TEST
                .equals(argumentMode))) {
            if (parCmdLine.hasOption(OPTION_FN_CODE + i)) {
                LOGGER.log(Level.SEVERE, OPTION + OPTION_FN_CODE + i
                        + MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
                return false;
            }
            if (parCmdLine.hasOption(OPTION_FN_CODE + i
                    + Global.FILE_TYPE_EXCEL)) {
                LOGGER.log(Level.SEVERE, OPTION + OPTION_FN_CODE + i
                        + "xls: option not allowed with mode="
                        + argumentMode);
                return false;
            }
            LOGGER.log(Level.SEVERE, OPTION + OPTION_FN_CODE + i
                    + "xml: option not allowed with mode=" + argumentMode);
            return false;
        }

        // A format flag without its matching fn<i> option is an error.
        if (!parCmdLine.hasOption(OPTION_FN_CODE + i)) {
            LOGGER.log(Level.SEVERE, OPTION + OPTION_FN_CODE + i
                    + MSG_OPTION_IS_MANDATORY_WITH_MODE + argumentMode);
            return false;
        }

        argumentFn[i] = parCmdLine.getOptionValue(OPTION_FN_CODE + i);
        if (argumentFn[i] == null) {
            LOGGER.log(Level.SEVERE, OPTION + OPTION_FN_CODE + i
                    + MSG_OPTION_REQUIRES_AN_ARGUMENT);
            return false;
        }

        lvNumber++;

        // Remember the declared file format; the two flags are mutually
        // exclusive per index.
        isFnxls[i] =
                parCmdLine.hasOption(OPTION_FN_CODE + i
                        + Global.FILE_TYPE_EXCEL);
        isFnxml[i] =
                parCmdLine.hasOption(OPTION_FN_CODE + i
                        + Global.FILE_TYPE_XML);

        if (isFnxls[i] && isFnxml[i]) {
            LOGGER.log(Level.SEVERE, "Either option " + OPTION_FN_CODE + i
                    + "xls or option " + OPTION_FN_CODE + i
                    + "xml - but not both together");
            return false;
        }
    }

    // master/test runs need at least one file to process.
    if (lvNumber == 0
            && (OPTION_MODE_MASTER.equals(argumentMode) || OPTION_MODE_TEST
                    .equals(argumentMode))) {
        LOGGER.log(Level.SEVERE, OPTION + OPTION_FN_CODE
                + ".: at least one occurence required with mode="
                + argumentMode);
        return false;
    }
    return true;
}
/**
 * Validates the <code>fs</code> option (JDBC fetch size). Defaults to 10
 * for calibration/trial mode when absent; allowed only for those two modes;
 * must parse as an integer.
 *
 * @return true when the option is absent or valid
 */
private static boolean checkOptionFs(final CommandLine cmdLine,
        final String optionCode) {
    final boolean modeAllowed = OPTION_MODE_CALIBRATION.equals(argumentMode)
            || OPTION_MODE_TRIAL.equals(argumentMode);

    if (!cmdLine.hasOption(optionCode)) {
        if (modeAllowed) {
            // Default fetch size for runs that execute queries.
            argumentFs = 10;
        }
        return true;
    }

    if (!modeAllowed) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
        return false;
    }

    final String rawFetchSize = cmdLine.getOptionValue(optionCode);
    if (rawFetchSize == null) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_OPTION_REQUIRES_AN_ARGUMENT);
        return false;
    }

    try {
        argumentFs = Integer.parseInt(rawFetchSize);
    } catch (NumberFormatException e) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_ARGUMENT_MUST_BE_AN_INTEGER);
        return false;
    }
    return true;
}
/**
 * Validates the <code>idn</code> option (input directory). Defaults to
 * "in/R" when absent; only allowed with the result_R mode.
 *
 * @return true when the option is absent or valid
 */
private static boolean checkOptionIdn(final CommandLine cmdLine,
        final String optionCode) {
    if (!cmdLine.hasOption(optionCode)) {
        // Default input directory for R result export.
        argumentIdn = "in/R";
        return true;
    }

    if (!OPTION_MODE_RESULT_R.equals(argumentMode)) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
        return false;
    }

    argumentIdn = cmdLine.getOptionValue(optionCode);
    if (argumentIdn == null) {
        LOGGER.log(Level.SEVERE, OPTION + optionCode
                + MSG_OPTION_REQUIRES_AN_ARGUMENT);
        return false;
    }
    return true;
}
/**
 * Validates the <code>ign1</code> flag (ignore the first reading). A pure
 * flag: only allowed with calibration mode and must carry no argument.
 *
 * @return true when the flag is absent or valid
 */
private static boolean checkOptionIgn1(final CommandLine cmdLine,
        final String optionCode) {
    isIgn1 = cmdLine.hasOption(optionCode);
    if (isIgn1) {
        if (!OPTION_MODE_CALIBRATION.equals(argumentMode)) {
            LOGGER.log(Level.SEVERE, OPTION + optionCode
                    + MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
            return false;
        }
        if (cmdLine.getOptionValue(optionCode) != null) {
            LOGGER.log(Level.SEVERE, OPTION + optionCode
                    + MSG_OPTION_ALLOWS_NO_ARGUMENT);
            return false;
        }
    }
    return true;
}
private static boolean checkOptionMode(final CommandLine parCmdLine,
final String parOptionCode) {
if (!parCmdLine.hasOption(parOptionCode)) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_IS_MANDATORY_WITH_MODE + argumentMode);
return false;
}
argumentMode = parCmdLine.getOptionValue(parOptionCode);
if (argumentMode == null) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_REQUIRES_AN_ARGUMENT);
return false;
}
if (OPTION_MODE_CALIBRATION.equals(argumentMode)
|| OPTION_MODE_MASTER.equals(argumentMode)
|| OPTION_MODE_RESULT_CN.equals(argumentMode)
|| OPTION_MODE_RESULT_R.equals(argumentMode)
|| OPTION_MODE_RESULT_TL.equals(argumentMode)
|| OPTION_MODE_TEST.equals(argumentMode)
|| OPTION_MODE_TRIAL.equals(argumentMode)) {
return true;
}
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ ": option has wrong argument");
return false;
}
private static boolean checkOptionObj(final CommandLine parCmdLine,
final String parOptionCode) {
if (!parCmdLine.hasOption(parOptionCode)) {
if (OPTION_MODE_CALIBRATION.equals(argumentMode)) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_IS_MANDATORY_WITH_MODE + argumentMode);
return false;
}
return true;
}
if (!OPTION_MODE_CALIBRATION.equals(argumentMode)) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
return false;
}
argumentObj = parCmdLine.getOptionValue(parOptionCode);
if (argumentObj == null) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_REQUIRES_AN_ARGUMENT);
return false;
}
if (!(OBJECT_TYPE_NANOTIME.equals(argumentObj) || OBJECT_TYPE_QUERY
.equals(argumentObj))) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ ": option has wrong argument");
return false;
}
return true;
}
private static boolean checkOptionOdn(final CommandLine parCmdLine,
final String parOptionCode) {
if (!parCmdLine.hasOption(parOptionCode)) {
argumentOdn = "out/R";
return true;
}
if (!OPTION_MODE_RESULT_R.equals(argumentMode)) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
return false;
}
argumentOdn = parCmdLine.getOptionValue(parOptionCode);
if (argumentOdn == null) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_REQUIRES_AN_ARGUMENT);
return false;
}
return true;
}
private static boolean checkOptionPf(final CommandLine parCmdLine,
final String parOptionCode) {
if (!parCmdLine.hasOption(parOptionCode)) {
return true;
}
argumentPf = parCmdLine.getOptionValue(parOptionCode);
if ("".equals(argumentPf)) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_REQUIRES_AN_ARGUMENT);
return false;
}
return true;
}
private static boolean checkOptionPfxml(final CommandLine parCmdLine,
final String parOptionCode) {
isPfxml = parCmdLine.hasOption(parOptionCode);
if (!isPfxml) {
return true;
}
if (parCmdLine.getOptionValue(parOptionCode) != null) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_ALLOWS_NO_ARGUMENT);
return false;
}
if ("".equals(argumentPf)) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ ": option only allowed in connection with option pf");
return false;
}
return true;
}
private static boolean checkOptionPrec(final CommandLine parCmdLine,
final String parOptionCode) {
if (!parCmdLine.hasOption(parOptionCode)) {
return true;
}
if (!(OPTION_MODE_CALIBRATION.equals(argumentMode) || OPTION_MODE_TRIAL
.equals(argumentMode))) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
return false;
}
int lvPrecision;
final String lvPrecisionIn = parCmdLine.getOptionValue(parOptionCode);
if (lvPrecisionIn == null) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_REQUIRES_AN_ARGUMENT);
return false;
}
try {
lvPrecision = Integer.parseInt(lvPrecisionIn);
} catch (NumberFormatException e) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_ARGUMENT_MUST_BE_AN_INTEGER);
return false;
}
if (lvPrecision < 0) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ ": argument must be positive");
return false;
}
argumentPrec =
Double
.valueOf(
Math
.pow(10., Double
.parseDouble(lvPrecisionIn)))
.intValue();
return true;
}
private static boolean checkOptionTqp(final CommandLine parCmdLine,
final String parOptionCode) {
if (!parCmdLine.hasOption(parOptionCode)) {
if (OPTION_MODE_CALIBRATION.equals(argumentMode)
&& OBJECT_TYPE_QUERY.equals(argumentObj)) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_IS_MANDATORY_WITH_MODE + argumentMode
+ " and obj=" + argumentObj);
return false;
}
return true;
}
if (!OPTION_MODE_CALIBRATION.equals(argumentMode)) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
return false;
}
if (!OBJECT_TYPE_QUERY.equals(argumentObj)) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ ": option not allowed with obj=" + argumentObj);
return false;
}
final String lvTestQueryPairIdIn =
parCmdLine.getOptionValue(parOptionCode);
if (lvTestQueryPairIdIn == null) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_REQUIRES_AN_ARGUMENT);
return false;
}
try {
argumentTqp = Integer.parseInt(lvTestQueryPairIdIn);
} catch (NumberFormatException e) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_ARGUMENT_MUST_BE_AN_INTEGER);
return false;
}
return true;
}
private static boolean checkOptionTs(final CommandLine parCmdLine,
final String parOptionCode) {
if (!parCmdLine.hasOption(parOptionCode)) {
if (OPTION_MODE_TRIAL.equals(argumentMode)) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_IS_MANDATORY_WITH_MODE + argumentMode);
return false;
}
return true;
}
if (!OPTION_MODE_TRIAL.equals(argumentMode)) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
return false;
}
final String lvTestSuiteIdIn = parCmdLine.getOptionValue(parOptionCode);
if (lvTestSuiteIdIn == null) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_REQUIRES_AN_ARGUMENT);
return false;
}
try {
argumentTs = Integer.parseInt(lvTestSuiteIdIn);
} catch (NumberFormatException e) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_ARGUMENT_MUST_BE_AN_INTEGER);
return false;
}
return true;
}
private static boolean checkOptionVerb(final CommandLine parCmdLine,
final String parOptionCode) {
isVerbose = parCmdLine.hasOption(parOptionCode);
if (!isVerbose) {
return true;
}
if (!OPTION_MODE_CALIBRATION.equals(argumentMode)) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_NOT_ALLOWED_WITH_MODE + argumentMode);
return false;
}
if (parCmdLine.getOptionValue(parOptionCode) != null) {
LOGGER.log(Level.SEVERE, OPTION + parOptionCode
+ MSG_OPTION_ALLOWS_NO_ARGUMENT);
return false;
}
return true;
}
private static Options defineCmdLineOptions() {
final Options lvOptions = new Options();
lvOptions.addOption(OPTION_EXALT_CODE, false, OPTION_EXALT_NAME);
lvOptions.addOption(OPTION_EXCON_CODE, false, OPTION_EXCON_NAME);
lvOptions.addOption(OPTION_CYC_CODE, true, OPTION_CYC_NAME);
lvOptions.addOption(OPTION_DES_CODE, true, OPTION_DES_NAME);
lvOptions.addOption(OPTION_DI_CODE, true, OPTION_DI_NAME);
lvOptions.addOption(OPTION_EFN_CODE, true, OPTION_EFN_NAME);
lvOptions.addOption(OPTION_EFNALL_CODE, false, OPTION_EFNALL_NAME);
for (int i = 0; i < 10; i++) {
lvOptions.addOption(OPTION_FN_CODE + i, true, OPTION_FN_NAME);
lvOptions.addOption(OPTION_FN_CODE + i + Global.FILE_TYPE_EXCEL,
false, OPTION_FNEXCEL_NAME);
lvOptions.addOption(OPTION_FN_CODE + i + Global.FILE_TYPE_XML,
false, OPTION_FNXML_NAME);
}
lvOptions.addOption(OPTION_FS_CODE, true, OPTION_FS_NAME);
lvOptions.addOption(OPTION_IDN_CODE, true, OPTION_IDN_NAME);
lvOptions.addOption(OPTION_IGN1_CODE, false, OPTION_IGN1_NAME);
lvOptions.addOption(OPTION_MODE_CODE, true, OPTION_MODE_NAME);
lvOptions.addOption(OPTION_OBJ_CODE, true, OPTION_OBJ_NAME);
lvOptions.addOption(OPTION_ODN_CODE, true, OPTION_ODN_NAME);
lvOptions.addOption(OPTION_PF_CODE, true, OPTION_PF_NAME);
lvOptions.addOption(OPTION_PFXML_CODE, false, OPTION_PFXML_NAME);
lvOptions.addOption(OPTION_PREC_CODE, true, OPTION_PREC_NAME);
lvOptions.addOption(OPTION_TQP_CODE, true, OPTION_TQP_NAME);
lvOptions.addOption(OPTION_TS_CODE, true, OPTION_TS_NAME);
lvOptions.addOption(OPTION_VERB_CODE, false, OPTION_VERB_NAME);
return lvOptions;
}
private static Calibration determineCalibration() {
if ("".equals(argumentPf)) {
return new Calibration();
}
return new Calibration(argumentPf, isPfxml);
}
private static Database determineDatabase(final CommandLine parCmdLine) {
if (parCmdLine.hasOption(OPTION_PF_CODE)) {
return new Database(parCmdLine.getOptionValue(OPTION_PF_CODE),
parCmdLine.hasOption(OPTION_PFXML_CODE));
}
return new Database();
}
private static Trial determineTrial(final CommandLine parCmdLine) {
if (parCmdLine.hasOption(OPTION_PF_CODE)) {
return new Trial(parCmdLine.getOptionValue(OPTION_PF_CODE),
parCmdLine.hasOption(OPTION_PFXML_CODE));
}
return new Trial();
}
private static int execute(final CommandLine parCmdLine) {
if (OPTION_MODE_CALIBRATION.equals(argumentMode)) {
return executeCalibration();
}
if (OPTION_MODE_MASTER.equals(argumentMode)
|| OPTION_MODE_TEST.equals(argumentMode)) {
return executeDatabase(parCmdLine);
}
if (OPTION_MODE_RESULT_CN.equals(argumentMode)) {
return executeResultCalibration();
}
if (OPTION_MODE_RESULT_R.equals(argumentMode)) {
return executeResultR();
}
if (OPTION_MODE_RESULT_TL.equals(argumentMode)) {
return executeResultTrial(parCmdLine);
}
if (OPTION_MODE_TRIAL.equals(argumentMode)) {
return executeTrial(parCmdLine);
}
LOGGER.log(Level.SEVERE, OPTION + OPTION_MODE_CODE
+ ": mode not yet implemented");
return STATUS_ERROR;
}
private static int executeCalibration() {
if (OBJECT_TYPE_NANOTIME.equals(argumentObj)) {
if (!determineCalibration().calibrateSimpleMethod(argumentObj,
argumentCyc, argumentDi, argumentDes, isIgn1, argumentPrec,
isVerbose)) {
return STATUS_ERROR;
}
} else if (OBJECT_TYPE_QUERY.equals(argumentObj)) {
if (!determineCalibration().calibrateQuery(argumentTqp,
argumentCyc, argumentDi, argumentDes, isExalt, isExcon,
argumentFs, isIgn1, argumentPrec, isVerbose)) {
return STATUS_ERROR;
}
} else {
LOGGER.log(Level.SEVERE, OPTION + OPTION_OBJ_CODE
+ ": not yet implemented");
return STATUS_ERROR;
}
return STATUS_OK;
}
private static int executeDatabase(final CommandLine parCmdLine) {
database = determineDatabase(parCmdLine);
// Process a file with DDL or DML statements (-fn9 xxx -fn9xml) ********
for (int i = 0; i < 10; i++) {
if ("".equals(argumentFn[i])) {
continue;
}
if (OPTION_MODE_MASTER.equals(argumentMode)) {
if (isFnxls[i]) {
if (!database.modifyMasterDatabase(argumentFn[i],
Global.FILE_TYPE_EXCEL)) {
return STATUS_ERROR;
}
} else if (isFnxml[i]) {
if (!database.modifyMasterDatabase(argumentFn[i],
Global.FILE_TYPE_XML)) {
return STATUS_ERROR;
}
} else {
if (!database.modifyMasterDatabase(argumentFn[i], "")) {
return STATUS_ERROR;
}
}
} else {
if (isFnxls[i]) {
if (!database.modifyTestDatabase(argumentDi, argumentFn[i],
Global.FILE_TYPE_EXCEL)) {
return STATUS_ERROR;
}
} else if (isFnxml[i]) {
if (!database.modifyTestDatabase(argumentDi, argumentFn[i],
Global.FILE_TYPE_XML)) {
return STATUS_ERROR;
}
} else {
if (!database.modifyTestDatabase(argumentDi, argumentFn[i],
"")) {
return STATUS_ERROR;
}
}
}
}
return STATUS_OK;
}
private static int executeResultCalibration() {
if (!determineCalibration().calibrationDataToExcel(argumentEfn)) {
return STATUS_ERROR;
}
return STATUS_OK;
}
private static int executeResultR() {
if (!determineCalibration().calibrationDataToR(argumentIdn)) {
return STATUS_ERROR;
}
return STATUS_OK;
}
private static int executeResultTrial(final CommandLine parCmdLine) {
if (!determineTrial(parCmdLine).trialDataToExcel(argumentEfn, isEfnall)) {
return STATUS_ERROR;
}
return STATUS_OK;
}
private static int executeTrial(final CommandLine parCmdLine) {
if (!determineTrial(parCmdLine).runTrial(argumentDi, argumentTs,
argumentDes, argumentFs, argumentCyc, argumentPrec)) {
return STATUS_ERROR;
}
return STATUS_OK;
}
/**
* Command line interface to the REPSI tool. The interface is based on the
* Jakarta Commons CLI library which simplifies the handling of command line
* parameters. The main parameter is called <code>mode</code> and this
* parameter determines the functionality to be executed:
* <ul>
* <li><code>calibration</code> perform a calibration run.</li>
* <li><code>master</code> modify the schema or instance of the master
* database.</li>
* <li><code>result_cn</code> export the results of a calibration run
* into an Excel file.</li>
* <li><code>result_r</code> export the results of a calibration run into
* a R compatible files.</li>
* <li><code>result_tl</code> export the results of one or all trial runs
* into an Excel file.</li>
* <li><code>test modify</code> the schema or instance of a test
* database.</li>
* <li>t<code>rial</code> perform a trial run.</li>
* </ul>
* <br>
* Further details can be found in the document 'REPSI Tool User Manual'.
*
* @param args here not supported
*
*/
public static void main(final String[] args) {
if (LOGGER.isLoggable(Level.FINE)) {
LOGGER.entering(Main.class.getName(), "main", args);
}
int lvStatus = STATUS_OK;
resetOptionsAndArguments();
// Create and initialise an Options object *****************************
final Options lvOptions = defineCmdLineOptions();
// Create the parser and parse the command line arguments **************
CommandLine parCmdLine = null;
try {
// Parse the command line arguments
parCmdLine = new GnuParser().parse(lvOptions, args);
lvStatus += checkCmdLineOptions(parCmdLine);
if (lvStatus == STATUS_OK) {
lvStatus += execute(parCmdLine);
}
} catch (ParseException e) {
LOGGER.log(Level.SEVERE,
"Parsing of the command line arguments failed", e);
lvStatus++;
}
if (LOGGER.isLoggable(Level.FINE)) {
LOGGER.exiting(Main.class.getName(), "main", Integer
.valueOf(lvStatus));
}
if (lvStatus == STATUS_OK) {
LOGGER.log(Level.INFO, "Task completed successfully");
} else {
LOGGER.log(Level.SEVERE, "Task due to errors aborted");
}
System.exit(lvStatus);
}
private static void resetOptionsAndArguments() {
argumentCyc = 1;
argumentDi = 0;
argumentDes = "n/a";
argumentEfn = "";
argumentFn = new String[] { "", "", "", "", "", "", "", "", "", "", };
argumentFs = 10;
argumentMode = "";
argumentObj = "";
argumentPf = "";
argumentPrec = 1L;
argumentTqp = 0;
argumentTs = 0;
isEfnall = false;
isExalt = false;
isFnxls =
new boolean[] { false, false, false, false, false, false,
false, false, false, false, };
isFnxml =
new boolean[] { false, false, false, false, false, false,
false, false, false, false, };
isExcon = false;
isIgn1 = false;
isPfxml = false;
isVerbose = false;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.HiveMetaStore;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hive.jdbc.miniHS2.MiniHS2;
import org.apache.hive.service.cli.HiveSQLException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * This unit test is for testing HIVE-13884 with more complex queries and
 * hive.metastore.limit.partition.request enabled.
 * It covers cases when the query predicates can be pushed down and the
 * number of partitions can be retrieved via directSQL.
 * It also covers cases when the number of partitions cannot be retrieved
 * via directSQL, so it falls back to ORM.
 */
public class TestMetaStoreLimitPartitionRequest {

  private static final String DB_NAME = "max_partition_test_db";
  private static final String TABLE_NAME = "max_partition_test_table";
  // Maximum number of partitions one metastore request may fetch in this test.
  private static final int PARTITION_REQUEST_LIMIT = 4;
  private static MiniHS2 miniHS2 = null;
  private static HiveConf conf;
  private Connection hs2Conn = null;
  private Statement stmt;

  /** Starts a MiniHS2 instance configured with the partition request limit. */
  @BeforeClass
  public static void beforeTest() throws Exception {
    Class.forName(MiniHS2.getJdbcDriverName());
    conf = new HiveConf();
    DriverManager.setLoginTimeout(0);
    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
    conf.setIntVar(HiveConf.ConfVars.METASTORE_LIMIT_PARTITION_REQUEST, PARTITION_REQUEST_LIMIT);
    conf.setBoolVar(HiveConf.ConfVars.METASTORE_INTEGER_JDO_PUSHDOWN, true);
    conf.setBoolVar(HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL, true);
    conf.setBoolVar(HiveConf.ConfVars.DYNAMICPARTITIONING, true);
    conf.setBoolVar(HiveConf.ConfVars.HIVE_CBO_ENABLED, false);
    miniHS2 = new MiniHS2.Builder().withConf(conf).build();
    Map<String, String> overlayProps = new HashMap<String, String>();
    miniHS2.start(overlayProps);
    createDb();
  }

  /** Drops and recreates the test database. */
  private static void createDb() throws Exception {
    // try-with-resources closes the connection and statement even when one
    // of the DDL statements fails.
    try (Connection conn =
            DriverManager.getConnection(miniHS2.getJdbcURL(), System.getProperty("user.name"), "bar");
        Statement stmt2 = conn.createStatement()) {
      stmt2.execute("DROP DATABASE IF EXISTS " + DB_NAME + " CASCADE");
      stmt2.execute("CREATE DATABASE " + DB_NAME);
    }
  }

  /** Opens a fresh connection and populates the partitioned test table. */
  @Before
  public void setUp() throws Exception {
    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL(DB_NAME),
        System.getProperty("user.name"), "bar");
    stmt = hs2Conn.createStatement();
    stmt.execute("USE " + DB_NAME);
    createTable();
  }

  /**
   * Creates the partitioned test table and loads twelve rows spread over
   * twelve (num, ds) partitions via dynamic partitioning.
   */
  private void createTable() throws Exception {
    String tmpTableName = TABLE_NAME + "_tmp";
    stmt.execute("CREATE TABLE " + tmpTableName
        + " (id string, value string, num string, ds date) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LINES TERMINATED BY '\n' STORED AS TEXTFILE");
    stmt.execute("INSERT OVERWRITE TABLE " + tmpTableName
        + " VALUES ('1', 'value1', '25', '2008-04-09'), ('2', 'value2', '30', '2008-04-09'), "
        + "('3', 'value3', '35', '2008-04-09'), ('4', 'value4', '40', '2008-04-09'), "
        + "('5', 'value5', '25', '2008-05-09'), ('6', 'value6', '30', '2008-05-09'), "
        + "('7', 'value7', '35', '2008-05-09'), ('8', 'value8', '40', '2008-05-09'), "
        + "('9', 'value9', '25', '2009-04-09'), ('10', 'value10', '30', '2009-04-09'), "
        + "('11', 'value11', '35', '2009-04-09'), ('12', 'value12', '40', '2009-04-09')");
    stmt.execute("CREATE TABLE " + TABLE_NAME + " (id string, value string) PARTITIONED BY (num string, ds date)");
    stmt.execute("INSERT OVERWRITE TABLE " + TABLE_NAME + " PARTITION (num, ds) SELECT id, value, num, ds FROM " + tmpTableName);
  }

  /** Drops the tables created by the test and closes the connection. */
  @After
  public void tearDown() throws Exception {
    String tmpTableName = TABLE_NAME + "_tmp";
    stmt.execute("DROP TABLE IF EXISTS " + TABLE_NAME);
    stmt.execute("DROP TABLE IF EXISTS " + tmpTableName);
    stmt.execute("DROP TABLE IF EXISTS " + TABLE_NAME + "_num_tmp");
    if (hs2Conn != null) {
      hs2Conn.close();
    }
  }

  @AfterClass
  public static void afterTest() throws Exception {
    if (miniHS2 != null && miniHS2.isStarted()) {
      miniHS2.stop();
    }
  }

  /* Tests with queries which can be pushed down and executed with directSQL */
  @Test
  public void testSimpleQueryWithDirectSql() throws Exception {
    String queryString = "select value from %s where num='25' and ds='2008-04-09'";
    executeQuery(queryString, "value1");
  }

  @Test
  public void testMoreComplexQueryWithDirectSql() throws Exception {
    String queryString = "select value from %s where (ds between '2009-01-01' and '2009-12-31' and num='25') or (ds between '2008-01-01' and '2008-12-31' and num='30')";
    executeQuery(queryString, "value2", "value6", "value9");
  }

  /*
   * Tests with queries which can be pushed down and executed with directSQL, but the number of
   * partitions which should be fetched is bigger than the maximum set by the
   * hive.metastore.limit.partition.request parameter.
   */
  @Test
  public void testSimpleQueryWithDirectSqlTooManyPartitions() throws Exception {
    String queryString = "select value from %s where ds>'2008-04-20'";
    executeQueryExceedPartitionLimit(queryString, 8);
  }

  @Test
  public void testMoreComplexQueryWithDirectSqlTooManyPartitions() throws Exception {
    String queryString = "select value from %s where num='25' or (num='30' and ds between '2008-01-01' and '2008-12-31')";
    executeQueryExceedPartitionLimit(queryString, 5);
  }

  /*
   * Tests with queries which cannot be executed with directSQL, because of type mismatch. The type
   * of the num column is string, but the parameters used in the where clause are numbers. After
   * falling back to ORM, the number of partitions can be fetched by the
   * ObjectStore.getNumPartitionsViaOrmFilter method.
   */
  @Test
  public void testQueryWithFallbackToORM1() throws Exception {
    String queryString = "select value from %s where num!=25 and num!=35 and num!=40";
    executeQuery(queryString, "value2", "value6", "value10");
  }

  @Test
  public void testQueryWithFallbackToORMTooManyPartitions1() throws Exception {
    String queryString = "select value from %s where num=30 or num=25";
    executeQueryExceedPartitionLimit(queryString, 6);
  }

  /*
   * Tests with queries which cannot be executed with directSQL, because of type mismatch. The type
   * of the num column is string, but the parameters used in the where clause are numbers. After
   * falling back to ORM the number of partitions cannot be fetched by the
   * ObjectStore.getNumPartitionsViaOrmFilter method. They are fetched by the
   * ObjectStore.getPartitionNamesPrunedByExprNoTxn method.
   */
  @Test
  public void testQueryWithFallbackToORM2() throws Exception {
    String queryString = "select value from %s where num!=25 and ds='2008-04-09'";
    executeQuery(queryString, "value2", "value3", "value4");
  }

  @Test
  public void testQueryWithFallbackToORM3() throws Exception {
    String queryString = "select value from %s where num between 26 and 31";
    executeQuery(queryString, "value2", "value6", "value10");
  }

  @Test
  public void testQueryWithFallbackToORMTooManyPartitions2() throws Exception {
    String queryString = "select value from %s where num!=25 and (ds='2008-04-09' or ds='2008-05-09')";
    executeQueryExceedPartitionLimit(queryString, 6);
  }

  @Test
  public void testQueryWithFallbackToORMTooManyPartitions3() throws Exception {
    String queryString = "select value from %s where num>=30";
    executeQueryExceedPartitionLimit(queryString, 9);
  }

  @Test
  public void testQueryWithFallbackToORMTooManyPartitions4() throws Exception {
    String queryString = "select value from %s where num between 20 and 50";
    executeQueryExceedPartitionLimit(queryString, 12);
  }

  /*
   * Tests with queries which cannot be executed with directSQL, because they contain LIKE or IN.
   * After falling back to ORM the number of partitions cannot be fetched by the
   * ObjectStore.getNumPartitionsViaOrmFilter method. They are fetched by the
   * ObjectStore.getPartitionNamesPrunedByExprNoTxn method.
   */
  @Test
  public void testQueryWithInWithFallbackToORM() throws Exception {
    setupNumTmpTable();
    String queryString = "select value from %s a where ds='2008-04-09' and a.num in (select value from " + TABLE_NAME + "_num_tmp)";
    executeQuery(queryString, "value1", "value2");
  }

  @Test
  public void testQueryWithInWithFallbackToORMTooManyPartitions() throws Exception {
    setupNumTmpTable();
    String queryString = "select value from %s a where a.num in (select value from " + TABLE_NAME + "_num_tmp)";
    executeQueryExceedPartitionLimit(queryString, 12);
  }

  @Test
  public void testQueryWithInWithFallbackToORMTooManyPartitions2() throws Exception {
    setupNumTmpTable();
    String queryString = "select value from %s a where a.num in (select value from " + TABLE_NAME + "_num_tmp where value='25')";
    executeQueryExceedPartitionLimit(queryString, 12);
  }

  @Test
  public void testQueryWithLikeWithFallbackToORMTooManyPartitions() throws Exception {
    String queryString = "select value from %s where num like '3%%'";
    executeQueryExceedPartitionLimit(queryString, 6);
  }

  /** Creates the helper table used by the IN-subquery tests. */
  private void setupNumTmpTable() throws SQLException {
    stmt.execute("CREATE TABLE " + TABLE_NAME + "_num_tmp (value string)");
    stmt.execute("INSERT INTO " + TABLE_NAME + "_num_tmp VALUES ('25')");
    stmt.execute("INSERT INTO " + TABLE_NAME + "_num_tmp VALUES ('30')");
  }

  /**
   * Runs the query (with TABLE_NAME substituted for %s) and asserts that the
   * result set contains exactly the expected values.
   */
  private void executeQuery(String query, String... expectedValues) throws SQLException {
    String queryStr = String.format(query, TABLE_NAME);
    ResultSet result = stmt.executeQuery(queryStr);
    assertTrue(result != null);
    Set<String> expectedValueSet = new HashSet<>(Arrays.asList(expectedValues));
    Set<String> resultValues = getResultValues(result);
    String errorMsg = getWrongResultErrorMsg(queryStr, expectedValueSet.toString(), resultValues.toString());
    assertTrue(errorMsg, resultValues.equals(expectedValueSet));
  }

  /** Collects the first column of every row into a set. */
  private Set<String> getResultValues(ResultSet result) throws SQLException {
    Set<String> resultValues = new HashSet<>();
    while(result.next()) {
      resultValues.add(result.getString(1));
    }
    return resultValues;
  }

  /**
   * Runs the query and asserts that it fails because the number of requested
   * partitions exceeds the configured limit.
   */
  private void executeQueryExceedPartitionLimit(String query, int expectedPartitionNumber) throws Exception {
    try {
      String queryStr = String.format(query, TABLE_NAME);
      stmt.executeQuery(queryStr);
      fail("The query should have failed, because the number of requested partitions are bigger than "
          + PARTITION_REQUEST_LIMIT);
    } catch (HiveSQLException e) {
      String exceedLimitMsg = String.format(HiveMetaStore.PARTITION_NUMBER_EXCEED_LIMIT_MSG, expectedPartitionNumber,
          TABLE_NAME, PARTITION_REQUEST_LIMIT, MetastoreConf.ConfVars.LIMIT_PARTITION_REQUEST.toString());
      assertTrue(getWrongExceptionMessage(exceedLimitMsg, e.getMessage()),
          e.getMessage().contains(exceedLimitMsg));
    }
  }

  /** Builds the assertion message for an unexpected query result. */
  private String getWrongResultErrorMsg(String query, String expectedValues, String resultValues) {
    StringBuilder errorMsg = new StringBuilder();
    errorMsg.append("The query '");
    errorMsg.append(query);
    errorMsg.append("' returned wrong values. It returned the values ");
    errorMsg.append(resultValues);
    errorMsg.append(" instead of the expected ");
    errorMsg.append(expectedValues);
    return errorMsg.toString();
  }

  /** Builds the assertion message for an unexpected exception message. */
  private String getWrongExceptionMessage(String exceedLimitMsg, String exceptionMessage) {
    StringBuilder errorMsg = new StringBuilder();
    errorMsg.append("The message of the exception doesn't contain the expected '");
    errorMsg.append(exceedLimitMsg);
    errorMsg.append("'. It is: ");
    errorMsg.append(exceptionMessage);
    return errorMsg.toString();
  }
}
| |
package org.knowm.xchange.bitcoinde;
import java.math.BigDecimal;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import org.knowm.xchange.bitcoinde.dto.account.BitcoindeAccountWrapper;
import org.knowm.xchange.bitcoinde.dto.account.BitcoindeBalance;
import org.knowm.xchange.bitcoinde.dto.marketdata.BitcoindeOrder;
import org.knowm.xchange.bitcoinde.dto.marketdata.BitcoindeOrderbookWrapper;
import org.knowm.xchange.bitcoinde.dto.marketdata.BitcoindeTrade;
import org.knowm.xchange.bitcoinde.dto.marketdata.BitcoindeTradesWrapper;
import org.knowm.xchange.bitcoinde.trade.BitcoindeMyOpenOrdersWrapper;
import org.knowm.xchange.bitcoinde.trade.BitcoindeMyOrder;
import org.knowm.xchange.currency.Currency;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;
import org.knowm.xchange.dto.Order.OrderType;
import org.knowm.xchange.dto.account.AccountInfo;
import org.knowm.xchange.dto.account.Balance;
import org.knowm.xchange.dto.account.Wallet;
import org.knowm.xchange.dto.marketdata.OrderBook;
import org.knowm.xchange.dto.marketdata.Trade;
import org.knowm.xchange.dto.marketdata.Trades;
import org.knowm.xchange.dto.marketdata.Trades.TradeSortType;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.OpenOrders;
import org.knowm.xchange.utils.DateUtils;
import org.knowm.xchange.utils.jackson.CurrencyPairDeserializer;
/** @author matthewdowney & frank kaiser */
public final class BitcoindeAdapters {
public static final Comparator<LimitOrder> ASK_COMPARATOR =
new Comparator<LimitOrder>() {
@Override
public int compare(LimitOrder o1, LimitOrder o2) {
return o1.getLimitPrice().compareTo(o2.getLimitPrice());
}
};
public static final Comparator<LimitOrder> BID_COMPARATOR =
new Comparator<LimitOrder>() {
@Override
public int compare(LimitOrder o1, LimitOrder o2) {
return o2.getLimitPrice().compareTo(o1.getLimitPrice());
}
};
/** Private constructor. */
private BitcoindeAdapters() {}
/**
* Adapt a org.knowm.xchange.bitcoinde.dto.marketdata.BitcoindeOrderBook object to an OrderBook
* object.
*
* @param bitcoindeOrderbookWrapper the exchange specific OrderBook object
* @param currencyPair (e.g. BTC/USD)
* @return The XChange OrderBook
*/
public static OrderBook adaptOrderBook(
BitcoindeOrderbookWrapper bitcoindeOrderbookWrapper, CurrencyPair currencyPair) {
// System.out.println("bitcoindeOrderbookWrapper = " +
// bitcoindeOrderbookWrapper);
// System.out.println("credits = " + bitcoindeOrderbookWrapper.getCredits());
List<LimitOrder> asks =
createOrders(
currencyPair,
Order.OrderType.ASK,
bitcoindeOrderbookWrapper.getBitcoindeOrders().getAsks());
List<LimitOrder> bids =
createOrders(
currencyPair,
Order.OrderType.BID,
bitcoindeOrderbookWrapper.getBitcoindeOrders().getBids());
Collections.sort(bids, BID_COMPARATOR);
Collections.sort(asks, ASK_COMPARATOR);
return new OrderBook(null, asks, bids);
}
/**
* Adapt a org.knowm.xchange.bitcoinde.dto.marketdata.BitcoindeAccount object to an AccountInfo
* object.
*
* @param bitcoindeAccount
* @return
*/
public static AccountInfo adaptAccountInfo(BitcoindeAccountWrapper bitcoindeAccount) {
// This adapter is not complete yet
BitcoindeBalance btc = bitcoindeAccount.getData().getBalances().getBtc();
BitcoindeBalance eth = bitcoindeAccount.getData().getBalances().getEth();
BigDecimal eur = bitcoindeAccount.getData().getFidorReservation().getAvailableAmount();
Balance btcBalance = new Balance(Currency.BTC, btc.getAvailableAmount());
Balance ethBalance = new Balance(Currency.ETH, eth.getAvailableAmount());
Balance eurBalance = new Balance(Currency.EUR, eur);
Wallet wallet = Wallet.Builder.from(Arrays.asList(btcBalance, ethBalance, eurBalance)).build();
return new AccountInfo(wallet);
}
/** Create a list of orders from a list of asks or bids. */
public static List<LimitOrder> createOrders(
CurrencyPair currencyPair, Order.OrderType orderType, BitcoindeOrder[] orders) {
List<LimitOrder> limitOrders = new ArrayList<>();
for (BitcoindeOrder order : orders) {
limitOrders.add(createOrder(currencyPair, order, orderType, null, null));
}
return limitOrders;
}
/** Create an individual order. */
public static LimitOrder createOrder(
CurrencyPair currencyPair,
BitcoindeOrder bitcoindeOrder,
Order.OrderType orderType,
String orderId,
Date timeStamp) {
return new LimitOrder(
orderType,
bitcoindeOrder.getAmount(),
currencyPair,
orderId,
timeStamp,
bitcoindeOrder.getPrice());
}
/**
 * Adapt a org.knowm.xchange.bitcoinde.dto.marketdata.BitcoindeTrade[] object to a Trades object.
 *
 * <p>The returned {@link Trades} carries the highest trade id seen, so callers
 * can poll incrementally.
 *
 * @param bitcoindeTradesWrapper Exchange specific trades
 * @param currencyPair (e.g. BTC/USD)
 * @return The XChange Trades, sorted by id
 */
public static Trades adaptTrades(
    BitcoindeTradesWrapper bitcoindeTradesWrapper, CurrencyPair currencyPair) {
  long maxTradeId = 0;
  final List<Trade> trades = new ArrayList<>();
  for (BitcoindeTrade raw : bitcoindeTradesWrapper.getTrades()) {
    final long tid = raw.getTid();
    maxTradeId = Math.max(maxTradeId, tid);
    trades.add(
        new Trade.Builder()
            .originalAmount(raw.getAmount())
            .currencyPair(currencyPair)
            .price(raw.getPrice())
            // exchange reports seconds since epoch; convert to millis
            .timestamp(DateUtils.fromMillisUtc(raw.getDate() * 1000L))
            .id(String.valueOf(tid))
            .build());
  }
  return new Trades(trades, maxTradeId, TradeSortType.SortByID);
}
/**
 * Converts the exchange-specific open-orders wrapper into an XChange {@link OpenOrders}.
 *
 * @param bitcoindeOpenOrdersWrapper wrapper holding the user's open orders
 * @return the user's open orders as XChange {@link LimitOrder}s
 */
public static OpenOrders adaptOpenOrders(
    BitcoindeMyOpenOrdersWrapper bitcoindeOpenOrdersWrapper) {
  // removed leftover System.out.println debug output that polluted stdout on every call
  List<BitcoindeMyOrder> bitcoindeMyOrders = bitcoindeOpenOrdersWrapper.getOrders();
  List<LimitOrder> orders = new ArrayList<>(bitcoindeMyOrders.size());
  for (BitcoindeMyOrder bitcoindeMyOrder : bitcoindeMyOrders) {
    CurrencyPair tradingPair =
        CurrencyPairDeserializer.getCurrencyPairFromString(bitcoindeMyOrder.getTradingPair());
    // created_at is RFC 3339; a missing or malformed value yields a null timestamp
    Date timestamp = fromRfc3339DateStringQuietly(bitcoindeMyOrder.getCreatedAt());
    OrderType otype = "buy".equals(bitcoindeMyOrder.getType()) ? OrderType.BID : OrderType.ASK;
    orders.add(
        new LimitOrder(
            otype,
            bitcoindeMyOrder.getMaxAmount(),
            tradingPair,
            bitcoindeMyOrder.getOrderId(),
            timestamp,
            bitcoindeMyOrder.getPrice()));
  }
  return new OpenOrders(orders);
}
/**
 * Parses an RFC 3339 timestamp (e.g. {@code 2018-01-01T12:00:00+01:00}).
 *
 * @param timestamp the timestamp string; may be null
 * @return the parsed {@link Date}, or null if the input is null or unparseable
 */
private static Date fromRfc3339DateStringQuietly(String timestamp) {
  if (timestamp == null) {
    // SimpleDateFormat.parse(null) throws NPE, not ParseException — without this
    // guard a null created_at would escape the "quietly" contract and crash the caller.
    return null;
  }
  try {
    // new instance per call: SimpleDateFormat is not thread-safe, so no shared state
    SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX");
    return simpleDateFormat.parse(timestamp);
  } catch (ParseException e) {
    return null;
  }
}
}
| |
/*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.cache;
import static com.google.common.cache.CacheBuilder.EMPTY_STATS;
import static com.google.common.cache.TestingCacheLoaders.identityLoader;
import static com.google.common.truth.Truth.assertThat;
import java.lang.Thread.UncaughtExceptionHandler;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.guava.CaffeinatedGuava;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.testing.NullPointerTester;
import com.google.common.util.concurrent.MoreExecutors;
import junit.framework.TestCase;
/**
 * Tests of the Caffeine-backed implementation of Guava's {@code LoadingCache}
 * (stats accounting, the {@code asMap} view contract, and recursive loads).
 *
 * @author Charles Fry
 */
public class LocalLoadingCacheTest extends TestCase {

  /** Builds a loading cache through the Guava-compat adapter. */
  private static <K, V> LoadingCache<K, V> makeCache(
      Caffeine<K, V> builder, CacheLoader<? super K, V> loader) {
    return CaffeinatedGuava.build(builder, loader);
  }

  /** Default builder: same-thread executor (deterministic eviction) plus stats recording. */
  private Caffeine<Object, Object> createCacheBuilder() {
    return Caffeine.newBuilder().executor(MoreExecutors.directExecutor()).recordStats();
  }

  // constructor tests

  /** Smoke test: a cache can be built around a trivial loader. */
  public void testComputingFunction() {
    CacheLoader<Object, Object> loader = new CacheLoader<Object, Object>() {
      @Override
      public Object load(Object from) {
        return new Object();
      }
    };
    makeCache(createCacheBuilder(), loader);
  }

  // null parameters test

  /** Every public cache method must reject null arguments. */
  public void testNullParameters() throws Exception {
    NullPointerTester tester = new NullPointerTester();
    CacheLoader<Object, Object> loader = identityLoader();
    tester.testAllPublicInstanceMethods(makeCache(createCacheBuilder(), loader));
  }

  // stats tests

  /** Verifies hit/miss/load/eviction counters across a sequence of lookups. */
  public void testStats() {
    Caffeine<Object, Object> builder = createCacheBuilder().maximumSize(2);
    LoadingCache<Object, Object> cache = makeCache(builder, identityLoader());
    assertEquals(EMPTY_STATS, cache.stats());

    // first lookup: one request, one miss, one load
    Object one = new Object();
    cache.getUnchecked(one);
    CacheStats stats = cache.stats();
    assertEquals(1, stats.requestCount());
    assertEquals(0, stats.hitCount());
    assertEquals(0.0, stats.hitRate());
    assertEquals(1, stats.missCount());
    assertEquals(1.0, stats.missRate());
    assertEquals(1, stats.loadCount());
    long totalLoadTime = stats.totalLoadTime();
    assertTrue(totalLoadTime >= 0);
    assertTrue(stats.averageLoadPenalty() >= 0.0);
    assertEquals(0, stats.evictionCount());

    // same key again: a hit, no new load
    cache.getUnchecked(one);
    stats = cache.stats();
    assertEquals(2, stats.requestCount());
    assertEquals(1, stats.hitCount());
    assertEquals(1.0/2, stats.hitRate());
    assertEquals(1, stats.missCount());
    assertEquals(1.0/2, stats.missRate());
    assertEquals(1, stats.loadCount());
    assertEquals(0, stats.evictionCount());

    // second key: another miss/load; still within maximumSize(2)
    Object two = new Object();
    cache.getUnchecked(two);
    stats = cache.stats();
    assertEquals(3, stats.requestCount());
    assertEquals(1, stats.hitCount());
    assertEquals(1.0/3, stats.hitRate());
    assertEquals(2, stats.missCount());
    assertEquals(2.0/3, stats.missRate());
    assertEquals(2, stats.loadCount());
    assertTrue(stats.totalLoadTime() >= totalLoadTime);
    totalLoadTime = stats.totalLoadTime();
    assertTrue(stats.averageLoadPenalty() >= 0.0);
    assertEquals(0, stats.evictionCount());

    // third key exceeds maximumSize(2), so one entry must have been evicted
    Object three = new Object();
    cache.getUnchecked(three);
    stats = cache.stats();
    assertEquals(4, stats.requestCount());
    assertEquals(1, stats.hitCount());
    assertEquals(1.0/4, stats.hitRate());
    assertEquals(3, stats.missCount());
    assertEquals(3.0/4, stats.missRate());
    assertEquals(3, stats.loadCount());
    assertTrue(stats.totalLoadTime() >= totalLoadTime);
    totalLoadTime = stats.totalLoadTime();
    assertTrue(stats.averageLoadPenalty() >= 0.0);
    assertEquals(1, stats.evictionCount());
  }

  /** Direct mutations through the asMap() view must not touch the cache statistics. */
  public void testStatsNoops() {
    Caffeine<Object, Object> builder = createCacheBuilder();
    LoadingCache<Object, Object> cache = makeCache(builder, identityLoader());
    ConcurrentMap<Object, Object> map = cache.asMap(); // modifiable map view
    assertEquals(EMPTY_STATS, cache.stats());

    Object one = new Object();
    assertNull(map.put(one, one));
    assertSame(one, map.get(one));
    assertTrue(map.containsKey(one));
    assertTrue(map.containsValue(one));
    Object two = new Object();
    assertSame(one, map.replace(one, two));
    assertTrue(map.containsKey(one));
    assertFalse(map.containsValue(one));
    Object three = new Object();
    assertTrue(map.replace(one, two, three));
    assertTrue(map.remove(one, three));
    assertFalse(map.containsKey(one));
    assertFalse(map.containsValue(one));
    assertNull(map.putIfAbsent(two, three));
    assertSame(three, map.remove(two));
    assertNull(map.put(three, one));
    assertNull(map.put(one, two));
    assertThat(map).containsEntry(three, one);
    assertThat(map).containsEntry(one, two);

    //TODO(user): Confirm with fry@ that this is a reasonable substitute.
    //Set<Map.Entry<Object, Object>> entries = map.entrySet();
    //assertThat(entries).containsExactly(
    //    Maps.immutableEntry(three, one), Maps.immutableEntry(one, two));
    //Set<Object> keys = map.keySet();
    //assertThat(keys).containsExactly(one, three);
    //Collection<Object> values = map.values();
    //assertThat(values).containsExactly(one, two);

    map.clear();

    // none of the direct map operations above counts as a cache request
    assertEquals(EMPTY_STATS, cache.stats());
  }

  /** Without recordStats(), every operation leaves the stats at EMPTY_STATS. */
  public void testNoStats() {
    Caffeine<Object, Object> builder = Caffeine.newBuilder().maximumSize(2);
    LoadingCache<Object, Object> cache = makeCache(builder, identityLoader());
    assertEquals(EMPTY_STATS, cache.stats());

    Object one = new Object();
    cache.getUnchecked(one);
    assertEquals(EMPTY_STATS, cache.stats());

    cache.getUnchecked(one);
    assertEquals(EMPTY_STATS, cache.stats());

    Object two = new Object();
    cache.getUnchecked(two);
    assertEquals(EMPTY_STATS, cache.stats());

    Object three = new Object();
    cache.getUnchecked(three);
    assertEquals(EMPTY_STATS, cache.stats());
  }

  /** With recordStats(), hit and miss counters track each lookup. */
  public void testRecordStats() {
    Caffeine<Object, Object> builder = createCacheBuilder()
        .recordStats()
        .maximumSize(2);
    LoadingCache<Object, Object> cache = makeCache(builder, identityLoader());
    assertEquals(0, cache.stats().hitCount());
    assertEquals(0, cache.stats().missCount());

    Object one = new Object();
    cache.getUnchecked(one);
    assertEquals(0, cache.stats().hitCount());
    assertEquals(1, cache.stats().missCount());

    cache.getUnchecked(one);
    assertEquals(1, cache.stats().hitCount());
    assertEquals(1, cache.stats().missCount());

    Object two = new Object();
    cache.getUnchecked(two);
    assertEquals(1, cache.stats().hitCount());
    assertEquals(2, cache.stats().missCount());

    Object three = new Object();
    cache.getUnchecked(three);
    assertEquals(1, cache.stats().hitCount());
    assertEquals(3, cache.stats().missCount());
  }

  // asMap tests

  /** Exercises the ConcurrentMap contract of the asMap() view. */
  public void testAsMap() {
    Caffeine<Object, Object> builder = createCacheBuilder();
    LoadingCache<Object, Object> cache = makeCache(builder, identityLoader());
    assertEquals(EMPTY_STATS, cache.stats());

    Object one = new Object();
    Object two = new Object();
    Object three = new Object();

    // put / putAll / putIfAbsent / replace semantics
    ConcurrentMap<Object, Object> map = cache.asMap();
    assertNull(map.put(one, two));
    assertSame(two, map.get(one));
    map.putAll(ImmutableMap.of(two, three));
    assertSame(three, map.get(two));
    assertSame(two, map.putIfAbsent(one, three));
    assertSame(two, map.get(one));
    assertNull(map.putIfAbsent(three, one));
    assertSame(one, map.get(three));
    assertSame(two, map.replace(one, three));
    assertSame(three, map.get(one));
    assertFalse(map.replace(one, two, three));
    assertSame(three, map.get(one));
    assertTrue(map.replace(one, three, two));
    assertSame(two, map.get(one));
    assertEquals(3, map.size());

    map.clear();
    assertTrue(map.isEmpty());
    assertEquals(0, map.size());

    // entries loaded through the cache are visible in the view, and vice versa
    cache.getUnchecked(one);
    assertEquals(1, map.size());
    assertSame(one, map.get(one));
    assertTrue(map.containsKey(one));
    assertTrue(map.containsValue(one));
    assertSame(one, map.remove(one));
    assertEquals(0, map.size());

    cache.getUnchecked(one);
    assertEquals(1, map.size());
    assertFalse(map.remove(one, two));
    assertTrue(map.remove(one, one));
    assertEquals(0, map.size());

    // equality of the view and its entry/key/value collections
    cache.getUnchecked(one);
    Map<Object, Object> newMap = ImmutableMap.of(one, one);
    assertEquals(newMap, map);
    assertEquals(newMap.entrySet(), map.entrySet());
    assertEquals(newMap.keySet(), map.keySet());
    Set<Object> expectedValues = ImmutableSet.of(one);
    Set<Object> actualValues = ImmutableSet.copyOf(map.values());
    assertEquals(expectedValues, actualValues);
  }

  // Bug in JDK8; fixed but not released as of 1.8.0_25-b17
  public void disabled_testRecursiveComputation() throws InterruptedException {
    final AtomicReference<LoadingCache<Integer, String>> cacheRef =
        new AtomicReference<LoadingCache<Integer, String>>();
    CacheLoader<Integer, String> recursiveLoader = new CacheLoader<Integer, String>() {
      @Override
      public String load(Integer key) {
        if (key > 0) {
          return key + ", " + cacheRef.get().getUnchecked(key - 1);
        } else {
          return "0";
        }
      }
    };

    // bounded recursion: loading key n depends on the entry for key n - 1
    LoadingCache<Integer, String> recursiveCache = CaffeinatedGuava.build(Caffeine.newBuilder()
        .weakKeys()
        .weakValues(), recursiveLoader);
    cacheRef.set(recursiveCache);
    assertEquals("3, 2, 1, 0", recursiveCache.getUnchecked(3));

    // self-recursive load: computing a key requires the key itself and can never finish
    recursiveLoader = new CacheLoader<Integer, String>() {
      @Override
      public String load(Integer key) {
        return cacheRef.get().getUnchecked(key);
      }
    };
    recursiveCache = CaffeinatedGuava.build(Caffeine.newBuilder()
        .weakKeys()
        .weakValues(), recursiveLoader);
    cacheRef.set(recursiveCache);

    // tells the test when the computation has completed
    final CountDownLatch doneSignal = new CountDownLatch(1);
    Thread thread = new Thread() {
      @Override
      public void run() {
        try {
          cacheRef.get().getUnchecked(3);
        } finally {
          doneSignal.countDown();
        }
      }
    };
    // the recursive load is expected to fail; swallow the worker's exception
    thread.setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
      @Override
      public void uncaughtException(Thread t, Throwable e) {}
    });
    thread.start();

    boolean done = doneSignal.await(1, TimeUnit.SECONDS);
    if (!done) {
      // the worker is stuck: report its stack trace in the failure message
      StringBuilder builder = new StringBuilder();
      for (StackTraceElement trace : thread.getStackTrace()) {
        builder.append("\tat ").append(trace).append('\n');
      }
      fail(builder.toString());
    }
  }
}
| |
package us.kbase.userandjobstate.docserver;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.io.IOUtils;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import us.kbase.common.service.JsonServerServlet;
import us.kbase.common.service.JsonServerSyslog;
import us.kbase.common.service.JsonServerSyslog.RpcInfo;
import us.kbase.common.service.JsonServerSyslog.SyslogOutput;
/** A document server that serves documentation for another service.
 * This code is configured for the UJS service, but is easy to configure
 * for other services by changing DEFAULT_COMPANION_SERVICE_NAME.
 * @author gaprice@lbl.gov
 *
 */
public class DocServer extends HttpServlet {

    //TODO DOCSERV move to common, make default service name changable by static method

    /**
     * The name of the service that this document server is serving documents
     * for. This name will be used to find the appropriate section of the
     * KBase deploy.cfg configuration file if the name is not specified in the
     * environment.
     */
    public static final String DEFAULT_COMPANION_SERVICE_NAME =
            "UserAndJobState";
    /**
     * The name of this document server, used for logging purposes.
     */
    public static final String DEFAULT_SERVICE_NAME = "DocServ";
    /**
     * Location of the documents this service will serve in relation to the
     * classpath.
     */
    public static final String DEFAULT_DOCS_LOC = "/server_docs";

    // deploy.cfg keys overriding the defaults above
    private static final String CFG_SERVICE_NAME = "doc-server-name";
    private static final String CFG_DOCS_LOC = "doc-server-docs-location";

    private static final String X_FORWARDED_FOR = "X-Forwarded-For";
    private static final String USER_AGENT = "User-Agent";

    // resolved configuration for this instance
    private final String docsLoc;
    private final JsonServerSyslog logger;
    private final Map<String, String> config;

    // test hooks; must be set before the servlet is constructed
    private static String defaultDocsLoc = DEFAULT_DOCS_LOC;
    private static SyslogOutput sysLogOut = null;

    private static final String SERVER_CONTEXT_LOC = "/docs/*";
    private Integer jettyPort = null;
    private Server jettyServer = null;

    private static final long serialVersionUID = 1L;

    // could make custom 404 page at some point
    // http://www.eclipse.org/jetty/documentation/current/custom-error-pages.html

    /**
     * Creates a new document server, reading its configuration from the
     * KBase deploy.cfg section for the companion service.
     */
    public DocServer() {
        super();
        /* really should try and get the companion service name from the env
         * here, but not worth the effort
         */
        // temporary logger, only used while reading the configuration
        JsonServerSyslog templogger = new JsonServerSyslog(
                DEFAULT_COMPANION_SERVICE_NAME, JsonServerServlet.KB_DEP,
                JsonServerSyslog.LOG_LEVEL_INFO, false);
        if (sysLogOut != null) {
            templogger.changeOutput(sysLogOut);
        }
        // getConfig() gets the service name from the env if it exists
        config = JsonServerServlet.getConfig(DEFAULT_COMPANION_SERVICE_NAME,
                templogger);

        String serverName = config.get(CFG_SERVICE_NAME);
        if (serverName == null || serverName.isEmpty()) {
            serverName = DEFAULT_SERVICE_NAME;
        }
        // docs location: config value wins; normalize to a leading slash so it
        // can be used directly as a classpath resource prefix
        final String dlog = config.get(CFG_DOCS_LOC);
        if (dlog == null || dlog.isEmpty()) {
            docsLoc = defaultDocsLoc;
        } else {
            if (!dlog.startsWith("/")) {
                docsLoc = "/" + dlog;
            } else {
                docsLoc = dlog;
            }
        }
        logger = new JsonServerSyslog(serverName, JsonServerServlet.KB_DEP,
                JsonServerSyslog.LOG_LEVEL_INFO, false);
        if (sysLogOut != null) {
            logger.changeOutput(sysLogOut);
        }
    }

    @Override
    protected void doOptions(
            final HttpServletRequest request,
            final HttpServletResponse response)
            throws ServletException, IOException {
        // CORS preflight support: headers only, empty body
        JsonServerServlet.setupResponseHeaders(request, response);
        response.setContentLength(0);
        response.getOutputStream().print("");
        response.getOutputStream().flush();
    }

    @Override
    protected void doGet(
            final HttpServletRequest request,
            final HttpServletResponse response)
            throws ServletException, IOException {
        // record per-request info for the syslog entries below
        final RpcInfo rpc = JsonServerSyslog.getCurrentRpcInfo();
        rpc.setId(("" + Math.random()).substring(2));
        rpc.setIp(JsonServerServlet.getIpAddress(request, config));
        rpc.setMethod("GET");
        logHeaders(request);

        String path = request.getPathInfo();
        if (path == null) { // e.g. /docs
            handle404(request, response);
            return;
        }
        if (path.endsWith("/")) { // e.g. /docs/
            path = path + "index.html";
        }
        // the path is already normalized by the framework, so no need to
        // normalize here
        path = docsLoc + path;
        // FIX: try-with-resources — the classpath resource stream was
        // previously never closed, leaking a file/jar-entry handle per request
        try (final InputStream is = getClass().getResourceAsStream(path)) {
            if (is == null) {
                handle404(request, response);
                return;
            }
            final byte[] page = IOUtils.toByteArray(is);
            response.getOutputStream().write(page);
        } catch (IOException ioe) {
            logger.logErr(request.getRequestURI() + " 500 " +
                    request.getHeader(USER_AGENT));
            logger.logErr(ioe);
            response.sendError(500);
            return;
        }
        logger.logInfo(request.getRequestURI() + " 200 " +
                request.getHeader(USER_AGENT));
    }

    /** Logs and sends a 404 response for the given request. */
    private void handle404(final HttpServletRequest request,
            final HttpServletResponse response) throws IOException {
        logger.logErr(request.getRequestURI() + " 404 " +
                request.getHeader(USER_AGENT));
        response.sendError(404);
    }

    /** Logs the X-Forwarded-For header, if present, for proxy debugging. */
    private void logHeaders(final HttpServletRequest req) {
        final String xFF = req.getHeader(X_FORWARDED_FOR);
        if (xFF != null && !xFF.isEmpty()) {
            logger.logInfo(X_FORWARDED_FOR + ": " + xFF);
        }
    }

    /** Test method to test logging. Call before creating a server.
     * @param output where logger output is to be sent.
     */
    public static void setLoggerOutput(final SyslogOutput output) {
        sysLogOut = output;
    }

    /**
     * Location of the documents this service will serve in relation to the
     * classpath. Call before creating a server.
     * @param path documents location
     */
    public static void setDefaultDocsLocation(final String path) {
        defaultDocsLoc = path;
    }

    /**
     * Starts a test jetty doc server on an OS-determined port at /docs. Blocks
     * until the server is terminated.
     * @throws Exception if the server couldn't be started.
     */
    public void startupServer() throws Exception {
        startupServer(0);
    }

    /**
     * Starts a test jetty doc server at /docs. Blocks until the
     * server is terminated.
     * @param port the port to which the server will connect.
     * @throws Exception if the server couldn't be started.
     */
    public void startupServer(int port) throws Exception {
        jettyServer = new Server(port);
        ServletContextHandler context =
                new ServletContextHandler(ServletContextHandler.SESSIONS);
        context.setContextPath("/");
        jettyServer.setHandler(context);
        context.addServlet(new ServletHolder(this), SERVER_CONTEXT_LOC);
        jettyServer.start();
        // record the actual port (relevant when port == 0, i.e. OS-assigned)
        jettyPort = jettyServer.getConnectors()[0].getLocalPort();
        jettyServer.join();
    }

    /**
     * Get the jetty test server port. Returns null if the server is not
     * running or starting up.
     * @return the port
     */
    public Integer getServerPort() {
        return jettyPort;
    }

    /**
     * Stops the test jetty server.
     * @throws Exception if there was an error stopping the server.
     */
    public void stopServer() throws Exception {
        jettyServer.stop();
        jettyServer = null;
        jettyPort = null;
    }

    public static void main(String[] args) throws Exception {
        new DocServer().startupServer(10000);
    }
}
| |
package org.zstack.network.service.lb;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.core.Platform;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.db.SimpleQuery;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.core.thread.ChainTask;
import org.zstack.core.thread.SyncTaskChain;
import org.zstack.core.thread.ThreadFacade;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.core.workflow.ShareFlow;
import org.zstack.header.core.Completion;
import org.zstack.header.core.NoErrorCompletion;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.network.l3.L3NetworkVO;
import org.zstack.header.network.service.NetworkServiceL3NetworkRefVO;
import org.zstack.header.vm.VmNicInventory;
import org.zstack.header.vm.VmNicVO;
import org.zstack.header.vm.VmNicVO_;
import org.zstack.identity.AccountManager;
import org.zstack.network.service.vip.VipInventory;
import org.zstack.network.service.vip.VipManager;
import org.zstack.network.service.vip.VipVO;
import org.zstack.tag.TagManager;
import org.zstack.utils.CollectionUtils;
import org.zstack.utils.DebugUtils;
import org.zstack.utils.function.Function;
import javax.persistence.TypedQuery;
import java.util.*;
/**
* Created by frank on 8/8/2015.
*/
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE)
public class LoadBalancerBase {
@Autowired
private CloudBus bus;
@Autowired
private DatabaseFacade dbf;
@Autowired
private LoadBalancerManager lbMgr;
@Autowired
private ThreadFacade thdf;
@Autowired
private ErrorFacade errf;
@Autowired
private AccountManager acntMgr;
@Autowired
private TagManager tagMgr;
@Autowired
private VipManager vipMgr;
private LoadBalancerVO self;
/** Sync-queue signature: all operations on this load balancer are serialized under it. */
private String getSyncId() {
    return "operate-lb-" + self.getUuid();
}
/** Returns an inventory view of the in-memory {@code self} VO (no DB reload). */
protected LoadBalancerInventory getInventory() {
    return LoadBalancerInventory.valueOf(self);
}
/** Re-reads {@code self} from the database, then returns its inventory view. */
private LoadBalancerInventory reloadAndGetInventory() {
    self = dbf.reload(self);
    return getInventory();
}
/** Wraps the given load balancer VO; all message handling operates on it. */
public LoadBalancerBase(LoadBalancerVO self) {
    this.self = self;
}
/** Entry point: routes an incoming message to the API or internal handler. */
void handleMessage(Message msg) {
    if (!(msg instanceof APIMessage)) {
        handleLocalMessage(msg);
    } else {
        handleApiMessage((APIMessage) msg);
    }
}
/**
 * Dispatches internal (non-API) messages to the matching handler; anything
 * unrecognized is handed back to the bus.
 */
private void handleLocalMessage(Message msg) {
    if (msg instanceof LoadBalancerActiveVmNicMsg) {
        handle((LoadBalancerActiveVmNicMsg) msg);
    } else if (msg instanceof LoadBalancerDeactiveVmNicMsg) {
        handle((LoadBalancerDeactiveVmNicMsg) msg);
    } else if (msg instanceof LoadBalancerRemoveVmNicMsg) {
        handle((LoadBalancerRemoveVmNicMsg) msg);
    } else if (msg instanceof RefreshLoadBalancerMsg) {
        handle((RefreshLoadBalancerMsg) msg);
    } else if (msg instanceof DeleteLoadBalancerMsg) {
        handle((DeleteLoadBalancerMsg) msg);
    } else {
        bus.dealWithUnknownMessage(msg);
    }
}
/** Deletes the load balancer in response to an internal message; serialized per LB. */
private void handle(final DeleteLoadBalancerMsg msg) {
    final DeleteLoadBalancerReply reply = new DeleteLoadBalancerReply();
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public String getName() {
            return "delete-lb";
        }

        @Override
        public void run(final SyncTaskChain chain) {
            delete(new Completion(msg, chain) {
                @Override
                public void success() {
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    reply.setError(errorCode);
                    bus.reply(msg, reply);
                    chain.next();
                }
            });
        }
    });
}
/**
 * Re-applies the load balancer configuration on the backend and replies with
 * the refreshed inventory.
 */
private void handle(final RefreshLoadBalancerMsg msg) {
    final RefreshLoadBalancerReply reply = new RefreshLoadBalancerReply();
    // FIX: pass msg to ChainTask, consistent with every other handler in this
    // class, so the queued task is associated with the triggering message.
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public void run(final SyncTaskChain chain) {
            refresh(new Completion(msg, chain) {
                @Override
                public void success() {
                    reply.setInventory(getInventory());
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    reply.setError(errorCode);
                    bus.reply(msg, reply);
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            return "refresh-lb";
        }
    });
}
/**
 * Pushes the current load balancer configuration to the backend provider.
 *
 * @param completion signalled when the backend refresh succeeds or fails
 */
private void refresh(final Completion completion) {
    LoadBalancerBackend bkd = getBackend();
    bkd.refresh(makeStruct(), completion);
}
/** Removes vm nics from the load balancer; work is serialized on the LB's sync queue. */
private void handle(final LoadBalancerRemoveVmNicMsg msg) {
    final LoadBalancerRemoveVmNicReply reply = new LoadBalancerRemoveVmNicReply();
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public String getName() {
            return "remove-nic-from-lb";
        }

        @Override
        public void run(final SyncTaskChain chain) {
            removeNics(msg.getVmNicUuids(), new Completion(msg, chain) {
                @Override
                public void success() {
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    reply.setError(errorCode);
                    bus.reply(msg, reply);
                    chain.next();
                }
            });
        }
    });
}
/**
 * Verifies that every given vm nic uuid has been added to some listener of
 * this load balancer.
 *
 * @param nicUuids the nic uuids to validate
 * @throws CloudRuntimeException if any uuid is not attached to a listener
 */
private void checkIfNicIsAdded(List<String> nicUuids) {
    // collect into a Set: the original List.contains inside the loop made this
    // accidentally O(n*m); HashSet membership tests are O(1)
    Set<String> allNicUuids = new HashSet<String>();
    for (LoadBalancerListenerVO l : self.getListeners()) {
        allNicUuids.addAll(CollectionUtils.transformToList(l.getVmNicRefs(), new Function<String, LoadBalancerListenerVmNicRefVO>() {
            @Override
            public String call(LoadBalancerListenerVmNicRefVO arg) {
                return arg.getVmNicUuid();
            }
        }));
    }

    for (String nicUuid : nicUuids) {
        if (!allNicUuids.contains(nicUuid)) {
            throw new CloudRuntimeException(String.format("the load balancer[uuid: %s] doesn't have a vm nic[uuid: %s] added", self.getUuid(), nicUuid));
        }
    }
}
/**
 * Deactivates vm nics on a listener: marks the nic refs {@code Inactive} in
 * the DB, then removes the nics from the backend load balancer. Replies to
 * {@code msg} from the flow's done/error handler.
 */
private void handle(final LoadBalancerDeactiveVmNicMsg msg) {
    checkIfNicIsAdded(msg.getVmNicUuids());

    // locate the listener referenced by the message
    // NOTE(review): CollectionUtils.find returns null when nothing matches,
    // which would NPE below; presumably the listener uuid is validated
    // upstream - confirm.
    LoadBalancerListenerVO l = CollectionUtils.find(self.getListeners(), new Function<LoadBalancerListenerVO, LoadBalancerListenerVO>() {
        @Override
        public LoadBalancerListenerVO call(LoadBalancerListenerVO arg) {
            return arg.getUuid().equals(msg.getListenerUuid()) ? arg : null;
        }
    });

    // the subset of the listener's nic refs named in the message
    final List<LoadBalancerListenerVmNicRefVO> refs = CollectionUtils.transformToList(l.getVmNicRefs(), new Function<LoadBalancerListenerVmNicRefVO, LoadBalancerListenerVmNicRefVO>() {
        @Override
        public LoadBalancerListenerVmNicRefVO call(LoadBalancerListenerVmNicRefVO arg) {
            return msg.getVmNicUuids().contains(arg.getVmNicUuid()) ? arg : null;
        }
    });

    final LoadBalancerDeactiveVmNicReply reply = new LoadBalancerDeactiveVmNicReply();

    FlowChain chain = FlowChainBuilder.newShareFlowChain();
    chain.setName(String.format("deactive-vm-nics-on-lb-%s", self.getUuid()));
    chain.then(new ShareFlow() {
        @Override
        public void setup() {
            flow(new Flow() {
                String __name__ = "set-nics-to-inactive-in-db";

                @Override
                public void run(FlowTrigger trigger, Map data) {
                    for (LoadBalancerListenerVmNicRefVO ref : refs) {
                        ref.setStatus(LoadBalancerVmNicStatus.Inactive);
                        dbf.update(ref);
                    }

                    trigger.next();
                }

                // rollback: restore the refs to Active if a later flow fails
                @Override
                public void rollback(FlowTrigger trigger, Map data) {
                    for (LoadBalancerListenerVmNicRefVO ref : refs) {
                        ref.setStatus(LoadBalancerVmNicStatus.Active);
                        dbf.update(ref);
                    }

                    trigger.rollback();
                }
            });

            flow(new NoRollbackFlow() {
                String __name__ = "deactive-nics-on-backend";

                @Override
                public void run(final FlowTrigger trigger, Map data) {
                    // load the VmNicVOs for the selected refs, then remove them
                    // from the backend load balancer
                    SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class);
                    q.add(VmNicVO_.uuid, Op.IN, CollectionUtils.transformToList(refs, new Function<String, LoadBalancerListenerVmNicRefVO>() {
                        @Override
                        public String call(LoadBalancerListenerVmNicRefVO arg) {
                            return arg.getVmNicUuid();
                        }
                    }));
                    List<VmNicVO> nicvos = q.list();

                    LoadBalancerBackend bkd = getBackend();
                    bkd.removeVmNics(makeStruct(), VmNicInventory.valueOf(nicvos), new Completion(trigger) {
                        @Override
                        public void success() {
                            trigger.next();
                        }

                        @Override
                        public void fail(ErrorCode errorCode) {
                            trigger.fail(errorCode);
                        }
                    });
                }
            });

            done(new FlowDoneHandler(msg) {
                @Override
                public void handle(Map data) {
                    bus.reply(msg, reply);
                }
            });

            error(new FlowErrorHandler(msg) {
                @Override
                public void handle(ErrorCode errCode, Map data) {
                    reply.setError(errCode);
                    bus.reply(msg, reply);
                }
            });
        }
    }).start();
}
/**
 * Despite its name, this method ACTIVATES the given vm nics on a listener: it
 * sets the nic refs to {@code Active} in the DB and adds the nics on the
 * backend ({@code bkd.addVmNics}). It is invoked from the
 * {@code LoadBalancerActiveVmNicMsg} handler; consider renaming.
 *
 * <p>Replies to {@code msg} and signals {@code completion} from the flow's
 * done/error handlers.
 */
private void deactiveVmNic(final LoadBalancerActiveVmNicMsg msg, final NoErrorCompletion completion) {
    checkIfNicIsAdded(msg.getVmNicUuids());

    // locate the listener referenced by the message
    // NOTE(review): CollectionUtils.find returns null when nothing matches,
    // which would NPE below; presumably validated upstream - confirm.
    LoadBalancerListenerVO l = CollectionUtils.find(self.getListeners(), new Function<LoadBalancerListenerVO, LoadBalancerListenerVO>() {
        @Override
        public LoadBalancerListenerVO call(LoadBalancerListenerVO arg) {
            return arg.getUuid().equals(msg.getListenerUuid()) ? arg : null;
        }
    });

    // the subset of the listener's nic refs named in the message
    final List<LoadBalancerListenerVmNicRefVO> refs = CollectionUtils.transformToList(l.getVmNicRefs(), new Function<LoadBalancerListenerVmNicRefVO, LoadBalancerListenerVmNicRefVO>() {
        @Override
        public LoadBalancerListenerVmNicRefVO call(LoadBalancerListenerVmNicRefVO arg) {
            return msg.getVmNicUuids().contains(arg.getVmNicUuid()) ? arg : null;
        }
    });

    final LoadBalancerActiveVmNicReply reply = new LoadBalancerActiveVmNicReply();

    FlowChain chain = FlowChainBuilder.newShareFlowChain();
    chain.setName(String.format("active-vm-nics-on-lb-%s", self.getUuid()));
    chain.then(new ShareFlow() {
        @Override
        public void setup() {
            flow(new Flow() {
                String __name__ = "set-nics-to-active-in-db";

                @Override
                public void run(FlowTrigger trigger, Map data) {
                    for (LoadBalancerListenerVmNicRefVO ref : refs) {
                        ref.setStatus(LoadBalancerVmNicStatus.Active);
                        dbf.update(ref);
                    }

                    trigger.next();
                }

                // rollback: put the refs back to Inactive if a later flow fails
                @Override
                public void rollback(FlowTrigger trigger, Map data) {
                    for (LoadBalancerListenerVmNicRefVO ref : refs) {
                        ref.setStatus(LoadBalancerVmNicStatus.Inactive);
                        dbf.update(ref);
                    }

                    trigger.rollback();
                }
            });

            flow(new NoRollbackFlow() {
                String __name__ = "active-nics-on-backend";

                @Override
                public void run(final FlowTrigger trigger, Map data) {
                    // load the VmNicVOs for the selected refs, then add them on
                    // the backend load balancer
                    SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class);
                    q.add(VmNicVO_.uuid, Op.IN, CollectionUtils.transformToList(refs, new Function<String, LoadBalancerListenerVmNicRefVO>() {
                        @Override
                        public String call(LoadBalancerListenerVmNicRefVO arg) {
                            return arg.getVmNicUuid();
                        }
                    }));
                    List<VmNicVO> nicvos = q.list();

                    LoadBalancerBackend bkd = getBackend();
                    bkd.addVmNics(makeStruct(), VmNicInventory.valueOf(nicvos), new Completion(trigger) {
                        @Override
                        public void success() {
                            trigger.next();
                        }

                        @Override
                        public void fail(ErrorCode errorCode) {
                            trigger.fail(errorCode);
                        }
                    });
                }
            });

            done(new FlowDoneHandler(msg) {
                @Override
                public void handle(Map data) {
                    bus.reply(msg, reply);
                    completion.done();
                }
            });

            error(new FlowErrorHandler(msg) {
                @Override
                public void handle(ErrorCode errCode, Map data) {
                    reply.setError(errCode);
                    bus.reply(msg, reply);
                    completion.done();
                }
            });
        }
    }).start();
}
/**
 * Activates vm nics on the load balancer; serialized on the LB's sync queue.
 *
 * <p>Note: the helper it delegates to is confusingly named
 * {@code deactiveVmNic}, but it sets the nic refs to Active and adds the nics
 * on the backend.
 */
private void handle(final LoadBalancerActiveVmNicMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public void run(final SyncTaskChain chain) {
            deactiveVmNic(msg, new NoErrorCompletion(msg, chain) {
                @Override
                public void done() {
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            // FIX: this task activates nics; the old name "deactive-nic" was a
            // copy-paste error that made task logs misleading
            return "active-nic";
        }
    });
}
/**
 * Dispatches API messages to the matching handler; anything unrecognized is
 * handed back to the bus.
 */
private void handleApiMessage(APIMessage msg) {
    if (msg instanceof APICreateLoadBalancerListenerMsg) {
        handle((APICreateLoadBalancerListenerMsg) msg);
    } else if (msg instanceof APIAddVmNicToLoadBalancerMsg) {
        handle((APIAddVmNicToLoadBalancerMsg) msg);
    } else if (msg instanceof APIRemoveVmNicFromLoadBalancerMsg) {
        handle((APIRemoveVmNicFromLoadBalancerMsg) msg);
    } else if (msg instanceof APIDeleteLoadBalancerListenerMsg) {
        handle((APIDeleteLoadBalancerListenerMsg) msg);
    } else if (msg instanceof APIDeleteLoadBalancerMsg) {
        handle((APIDeleteLoadBalancerMsg) msg);
    } else if (msg instanceof APIRefreshLoadBalancerMsg) {
        handle((APIRefreshLoadBalancerMsg) msg);
    } else {
        bus.dealWithUnknownMessage(msg);
    }
}
/** API variant of refresh: re-applies backend config and publishes an event. */
private void handle(final APIRefreshLoadBalancerMsg msg) {
    final APIRefreshLoadBalancerEvent evt = new APIRefreshLoadBalancerEvent(msg.getId());
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public String getName() {
            return "refresh-lb";
        }

        @Override
        public void run(final SyncTaskChain chain) {
            refresh(new Completion(msg, chain) {
                @Override
                public void success() {
                    evt.setInventory(getInventory());
                    bus.publish(evt);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    evt.setErrorCode(errorCode);
                    bus.publish(evt);
                    chain.next();
                }
            });
        }
    });
}
/**
 * Deletes this load balancer, serialized on the sync queue. The event is
 * published on success (after {@link #delete}) or with the error on failure;
 * the chain always advances so subsequent tasks run.
 */
private void handle(final APIDeleteLoadBalancerMsg msg) {
    final APIDeleteLoadBalancerEvent evt = new APIDeleteLoadBalancerEvent(msg.getId());
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public void run(final SyncTaskChain chain) {
            delete(new Completion(msg, chain) {
                @Override
                public void success() {
                    // The inventory is gone after deletion, so the event
                    // carries no payload.
                    bus.publish(evt);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    evt.setErrorCode(errorCode);
                    bus.publish(evt);
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            return "delete-lb";
        }
    });
}
/**
 * Destroys the load balancer: tears it down on the backend (if it was ever
 * initialized), unlocks its VIP, and finally removes the database record.
 * The flow is rollback-free; on any step failure the completion fails and
 * the DB record is kept.
 */
private void delete(final Completion completion) {
    FlowChain chain = FlowChainBuilder.newShareFlowChain();
    chain.setName(String.format("delete-lb-%s", self.getUuid()));
    chain.then(new ShareFlow() {
        @Override
        public void setup() {
            flow(new NoRollbackFlow() {
                String __name__ = "delete-lb";

                @Override
                public void run(final FlowTrigger trigger, Map data) {
                    if (self.getProviderType() == null) {
                        // No backend was ever chosen, so there is nothing
                        // to destroy remotely.
                        trigger.next();
                        // not initialized yet
                        return;
                    }

                    LoadBalancerBackend bkd = getBackend();
                    bkd.destroyLoadBalancer(makeStruct(), new Completion(trigger) {
                        @Override
                        public void success() {
                            trigger.next();
                        }

                        @Override
                        public void fail(ErrorCode errorCode) {
                            trigger.fail(errorCode);
                        }
                    });
                }
            });

            flow(new NoRollbackFlow() {
                String __name__ = "unlock-vip";

                @Override
                public void run(FlowTrigger trigger, Map data) {
                    // Release the VIP so it can be reused by other services.
                    VipInventory vip = VipInventory.valueOf(dbf.findByUuid(self.getVipUuid(), VipVO.class));
                    vipMgr.unlockVip(vip);
                    trigger.next();
                }
            });

            done(new FlowDoneHandler(completion) {
                @Override
                public void handle(Map data) {
                    // DB removal happens last, only after the backend and
                    // VIP cleanup both succeeded.
                    dbf.remove(self);
                    completion.success();
                }
            });

            error(new FlowErrorHandler(completion) {
                @Override
                public void handle(ErrorCode errCode, Map data) {
                    completion.fail(errCode);
                }
            });
        }
    }).start();
}
/**
 * Serializes listener deletion on this load balancer's sync queue; the real
 * work (and event publishing) happens in {@code deleteListener}.
 */
private void handle(final APIDeleteLoadBalancerListenerMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getName() {
            return "delete-listener";
        }

        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public void run(final SyncTaskChain chain) {
            deleteListener(msg, new NoErrorCompletion(msg, chain) {
                @Override
                public void done() {
                    // Always advance the queue; deleteListener publishes
                    // the API event itself.
                    chain.next();
                }
            });
        }
    });
}
/**
 * Builds the backend struct for a listener-removal operation: a snapshot of
 * the current load balancer state with the given listener filtered out.
 */
private LoadBalancerStruct removeListenerStruct(LoadBalancerListenerInventory listener) {
    LoadBalancerStruct struct = makeStruct();
    for (Iterator<LoadBalancerListenerInventory> it = struct.getListeners().iterator(); it.hasNext();) {
        LoadBalancerListenerInventory candidate = it.next();
        if (candidate.getUuid().equals(listener.getUuid())) {
            it.remove();
        }
    }
    return struct;
}
/**
 * Deletes one listener. Three cases:
 *  1. VO already gone — publish the current inventory and finish.
 *  2. No backend action needed (never initialized / no active nics) —
 *     plain DB delete.
 *  3. Otherwise remove the listener from the backend first; the DB record
 *     is deleted only after the backend removal succeeds.
 */
private void deleteListener(APIDeleteLoadBalancerListenerMsg msg, final NoErrorCompletion completion) {
    final APIDeleteLoadBalancerListenerEvent evt = new APIDeleteLoadBalancerListenerEvent(msg.getId());
    final LoadBalancerListenerVO vo = dbf.findByUuid(msg.getUuid(), LoadBalancerListenerVO.class);
    if (vo == null) {
        // Nothing to delete; treat as success.
        evt.setInventory(getInventory());
        bus.publish(evt);
        completion.done();
        return;
    }

    if (!needAction()) {
        // The backend has no state for this listener; DB-only delete.
        dbf.remove(vo);
        evt.setInventory(reloadAndGetInventory());
        bus.publish(evt);
        completion.done();
        return;
    }

    LoadBalancerListenerInventory listener = LoadBalancerListenerInventory.valueOf(vo);
    LoadBalancerBackend bkd = getBackend();
    bkd.removeListener(removeListenerStruct(listener), listener, new Completion(msg, completion) {
        @Override
        public void success() {
            dbf.remove(vo);
            evt.setInventory(reloadAndGetInventory());
            bus.publish(evt);
            completion.done();
        }

        @Override
        public void fail(ErrorCode errorCode) {
            // Keep the DB record so the listener stays visible for retry.
            evt.setErrorCode(errorCode);
            bus.publish(evt);
            completion.done();
        }
    });
}
/**
 * Serializes vm-nic removal on this load balancer's sync queue; the actual
 * removal and event publishing are delegated to {@code removeNic}.
 */
private void handle(final APIRemoveVmNicFromLoadBalancerMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getName() {
            return "remove-nic";
        }

        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public void run(final SyncTaskChain chain) {
            removeNic(msg, new NoErrorCompletion(msg, chain) {
                @Override
                public void done() {
                    // Release the queue; removeNic handles the API event.
                    chain.next();
                }
            });
        }
    });
}
/**
 * Builds the backend struct for a nic-removal operation: a snapshot of the
 * current state with every reference to the given nic uuids stripped from
 * all listeners.
 */
private LoadBalancerStruct removeNicStruct(List<String> nicUuids) {
    LoadBalancerStruct struct = makeStruct();
    for (LoadBalancerListenerInventory listener : struct.getListeners()) {
        for (Iterator<LoadBalancerListenerVmNicRefInventory> it = listener.getVmNicRefs().iterator(); it.hasNext();) {
            LoadBalancerListenerVmNicRefInventory ref = it.next();
            if (nicUuids.contains(ref.getVmNicUuid())) {
                it.remove();
            }
        }
    }
    return struct;
}
/**
 * Removes the given vm nics from the backend, then deletes their
 * listener-ref records from the database. The DB cleanup happens only after
 * the backend reports success, so a backend failure leaves the refs intact
 * for retry.
 */
private void removeNics(final List<String> vmNicUuids, final Completion completion) {
    SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class);
    q.add(VmNicVO_.uuid, Op.IN, vmNicUuids);
    List<VmNicVO> vos = q.list();
    List<VmNicInventory> nics = VmNicInventory.valueOf(vos);

    LoadBalancerBackend bkd = getBackend();
    bkd.removeVmNics(removeNicStruct(vmNicUuids), nics, new Completion(completion) {
        @Override
        public void success() {
            // Backend is clean; now drop the listener<->nic refs from DB.
            SimpleQuery<LoadBalancerListenerVmNicRefVO> q = dbf.createQuery(LoadBalancerListenerVmNicRefVO.class);
            q.add(LoadBalancerListenerVmNicRefVO_.vmNicUuid, Op.IN, vmNicUuids);
            List<LoadBalancerListenerVmNicRefVO> refs = q.list();
            dbf.removeCollection(refs, LoadBalancerListenerVmNicRefVO.class);
            completion.success();
        }

        @Override
        public void fail(ErrorCode errorCode) {
            completion.fail(errorCode);
        }
    });
}
/**
 * API-level wrapper for {@link #removeNics}: performs the removal and
 * publishes the API event with either the refreshed inventory or the error.
 */
private void removeNic(APIRemoveVmNicFromLoadBalancerMsg msg, final NoErrorCompletion completion) {
    final APIRemoveVmNicFromLoadBalancerEvent evt = new APIRemoveVmNicFromLoadBalancerEvent(msg.getId());
    removeNics(msg.getVmNicUuids(), new Completion(msg, completion) {
        @Override
        public void success() {
            evt.setInventory(reloadAndGetInventory());
            bus.publish(evt);
            completion.done();
        }

        @Override
        public void fail(ErrorCode errorCode) {
            evt.setErrorCode(errorCode);
            bus.publish(evt);
            completion.done();
        }
    });
}
/**
 * Resolves the network-service provider type that supplies the load balancer
 * service on the L3 network of the given vm nic.
 *
 * @param nicUuid uuid of the vm nic to look up
 * @return the provider type, or {@code null} when the nic's L3 network does
 *         not have the load balancer service enabled
 *
 * NOTE(review): getSingleResult() throws if the nic uuid is unknown —
 * presumably callers only pass validated uuids; confirm.
 */
@Transactional(readOnly = true)
private String findProviderTypeByVmNicUuid(String nicUuid) {
    // Join the nic to its L3 network in one query.
    String sql = "select l3 from L3NetworkVO l3, VmNicVO nic where nic.l3NetworkUuid = l3.uuid and nic.uuid = :uuid";
    TypedQuery<L3NetworkVO> q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class);
    q.setParameter("uuid", nicUuid);
    L3NetworkVO l3 = q.getSingleResult();
    for (NetworkServiceL3NetworkRefVO ref : l3.getNetworkServices()) {
        if (LoadBalancerConstants.LB_NETWORK_SERVICE_TYPE_STRING.equals(ref.getNetworkServiceType())) {
            // Found the LB service ref; map its provider uuid to a type.
            sql = "select p.type from NetworkServiceProviderVO p where p.uuid = :uuid";
            TypedQuery<String> nq = dbf.getEntityManager().createQuery(sql, String.class);
            nq.setParameter("uuid", ref.getNetworkServiceProviderUuid());
            return nq.getSingleResult();
        }
    }
    return null;
}
/**
 * Serializes adding vm nics to a listener on this load balancer's sync
 * queue; the actual work is delegated to {@code addVmNicToListener}.
 */
private void handle(final APIAddVmNicToLoadBalancerMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public void run(final SyncTaskChain chain) {
            // Pass msg for consistency with the other handlers so the
            // completion is correlated with the triggering message.
            addVmNicToListener(msg, new NoErrorCompletion(msg, chain) {
                @Override
                public void done() {
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            // Fix: every sibling handler returns a descriptive task name
            // ("remove-nic", "delete-listener", ...); returning the sync
            // signature here made pending-task logs ambiguous.
            return "add-nic-to-lb";
        }
    });
}
/**
 * Adds vm nics to a listener. Steps:
 *  1. Resolve the provider type from the first nic's L3 network; fail fast
 *     if the network has no LB service.
 *  2. check-provider-type: on first use, stamp the provider type onto the
 *     load balancer (rolled back on failure); otherwise require it matches.
 *  3. write-nic-to-db: persist Pending refs (rolled back on failure).
 *  4. add-nic-to-lb: push the change to the backend; init=true tells the
 *     backend this is the LB's first materialization.
 * On overall success the refs flip to Active and the event is published.
 */
private void addVmNicToListener(final APIAddVmNicToLoadBalancerMsg msg, final NoErrorCompletion completion) {
    final APIAddVmNicToLoadBalancerEvent evt = new APIAddVmNicToLoadBalancerEvent(msg.getId());
    final String providerType = findProviderTypeByVmNicUuid(msg.getVmNicUuids().get(0));
    if (providerType == null) {
        throw new OperationFailureException(errf.stringToOperationError(
                String.format("the L3 network of vm nic[uuid:%s] doesn't have load balancer service enabled", msg.getVmNicUuids().get(0))
        ));
    }

    // Load the full nic inventories once; reused by the backend call.
    SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class);
    q.add(VmNicVO_.uuid, Op.IN, msg.getVmNicUuids());
    List<VmNicVO> nicVOs = q.list();
    final List<VmNicInventory> nics = VmNicInventory.valueOf(nicVOs);

    FlowChain chain = FlowChainBuilder.newShareFlowChain();
    chain.setName(String.format("add-vm-nic-to-lb-listener-%s", msg.getListenerUuid()));
    chain.then(new ShareFlow() {
        // Refs persisted by write-nic-to-db; updated to Active on success.
        List<LoadBalancerListenerVmNicRefVO> refs = new ArrayList<LoadBalancerListenerVmNicRefVO>();
        // True when this flow initialized the LB's provider type.
        boolean init = false;

        @Override
        public void setup() {
            flow(new Flow() {
                String __name__ = "check-provider-type";

                @Override
                public void run(FlowTrigger trigger, Map data) {
                    if (self.getProviderType() == null) {
                        // First nic ever added: bind the LB to this provider.
                        self.setProviderType(providerType);
                        self = dbf.updateAndRefresh(self);
                        init = true;
                    } else {
                        if (!providerType.equals(self.getProviderType())) {
                            throw new OperationFailureException(errf.stringToOperationError(
                                    String.format("service provider type mismatching. The load balancer[uuid:%s] is provided by the service provider[type:%s]," +
                                                    " but the L3 network of vm nic[uuid:%s] is enabled with the service provider[type: %s]", self.getUuid(), self.getProviderType(),
                                            msg.getVmNicUuids().get(0), providerType)
                            ));
                        }
                    }

                    trigger.next();
                }

                @Override
                public void rollback(FlowTrigger trigger, Map data) {
                    if (init) {
                        // Undo the provider binding we created above.
                        self = dbf.reload(self);
                        self.setProviderType(null);
                        dbf.update(self);
                    }

                    trigger.rollback();
                }
            });

            flow(new Flow() {
                String __name__ = "write-nic-to-db";

                // Tracks whether the refs were actually persisted.
                boolean s = false;

                @Override
                public void run(FlowTrigger trigger, Map data) {
                    for (String nicUuid : msg.getVmNicUuids()) {
                        LoadBalancerListenerVmNicRefVO ref = new LoadBalancerListenerVmNicRefVO();
                        ref.setListenerUuid(msg.getListenerUuid());
                        ref.setVmNicUuid(nicUuid);
                        // Pending until the backend confirms the change.
                        ref.setStatus(LoadBalancerVmNicStatus.Pending);
                        refs.add(ref);
                    }

                    dbf.persistCollection(refs);
                    s = true;
                    trigger.next();
                }

                @Override
                public void rollback(FlowTrigger trigger, Map data) {
                    if (s) {
                        dbf.removeCollection(refs, LoadBalancerListenerVmNicRefVO.class);
                    }

                    trigger.rollback();
                }
            });

            flow(new NoRollbackFlow() {
                String __name__ = "add-nic-to-lb";

                @Override
                public void run(final FlowTrigger trigger, Map data) {
                    LoadBalancerBackend bkd = getBackend();
                    LoadBalancerStruct s = makeStruct();
                    // Tell the backend whether this is the LB's very first
                    // materialization on the provider.
                    s.setInit(init);
                    bkd.addVmNics(s, nics, new Completion(trigger) {
                        @Override
                        public void success() {
                            trigger.next();
                        }

                        @Override
                        public void fail(ErrorCode errorCode) {
                            trigger.fail(errorCode);
                        }
                    });
                }
            });

            done(new FlowDoneHandler(msg, completion) {
                @Override
                public void handle(Map data) {
                    // Promote the refs now that the backend accepted them.
                    for (LoadBalancerListenerVmNicRefVO ref : refs) {
                        ref.setStatus(LoadBalancerVmNicStatus.Active);
                    }
                    dbf.updateCollection(refs);

                    evt.setInventory(LoadBalancerListenerInventory.valueOf(dbf.findByUuid(msg.getListenerUuid(), LoadBalancerListenerVO.class)));
                    bus.publish(evt);
                    completion.done();
                }
            });

            error(new FlowErrorHandler(msg, completion) {
                @Override
                public void handle(ErrorCode errCode, Map data) {
                    evt.setErrorCode(errCode);
                    bus.publish(evt);
                    completion.done();
                }
            });
        }
    }).start();
}
/**
 * Tells whether a backend operation is required: true only when the load
 * balancer has been bound to a provider AND at least one listener still
 * references a nic in Active or Pending status.
 */
private boolean needAction() {
    if (self.getProviderType() == null) {
        // Never materialized on any backend yet; nothing to do remotely.
        return false;
    }

    LoadBalancerListenerVmNicRefVO activeNic = CollectionUtils.find(self.getListeners(), new Function<LoadBalancerListenerVmNicRefVO, LoadBalancerListenerVO>() {
        @Override
        public LoadBalancerListenerVmNicRefVO call(LoadBalancerListenerVO arg) {
            for (LoadBalancerListenerVmNicRefVO ref : arg.getVmNicRefs()) {
                if (ref.getStatus() == LoadBalancerVmNicStatus.Active || ref.getStatus() == LoadBalancerVmNicStatus.Pending) {
                    return ref;
                }
            }
            return null;
        }
    });

    return activeNic != null;
}
/**
 * Looks up the backend implementation for this load balancer's provider
 * type; asserts the provider type has been initialized first.
 */
private LoadBalancerBackend getBackend() {
    final String providerType = self.getProviderType();
    DebugUtils.Assert(providerType != null, "providerType cannot be null");
    return lbMgr.getBackend(providerType);
}
/**
 * Builds the struct handed to backend implementations: the current LB
 * inventory, its listeners, and a uuid->inventory map of every nic that is
 * Active or Pending on any listener.
 */
private LoadBalancerStruct makeStruct() {
    LoadBalancerStruct struct = new LoadBalancerStruct();
    struct.setLb(reloadAndGetInventory());

    // Collect nic uuids that are Active or Pending across all listeners;
    // transformToList drops the nulls returned for other statuses.
    List<String> activeNicUuids = new ArrayList<String>();
    for (LoadBalancerListenerVO l : self.getListeners()) {
        activeNicUuids.addAll(CollectionUtils.transformToList(l.getVmNicRefs(), new Function<String, LoadBalancerListenerVmNicRefVO>() {
            @Override
            public String call(LoadBalancerListenerVmNicRefVO arg) {
                return arg.getStatus() == LoadBalancerVmNicStatus.Active || arg.getStatus() == LoadBalancerVmNicStatus.Pending ? arg.getVmNicUuid() : null;
            }
        }));
    }

    if (activeNicUuids.isEmpty()) {
        struct.setVmNics(new HashMap<String, VmNicInventory>());
    } else {
        // Resolve the uuids to full nic inventories in one query.
        SimpleQuery<VmNicVO> nq = dbf.createQuery(VmNicVO.class);
        nq.add(VmNicVO_.uuid, Op.IN, activeNicUuids);
        List<VmNicVO> nicvos = nq.list();
        Map<String, VmNicInventory> m = new HashMap<String, VmNicInventory>();
        for (VmNicVO n : nicvos) {
            m.put(n.getUuid(), VmNicInventory.valueOf(n));
        }
        struct.setVmNics(m);
    }

    struct.setListeners(LoadBalancerListenerInventory.valueOf(self.getListeners()));
    return struct;
}
/**
 * Serializes listener creation on this load balancer's sync queue; the
 * actual persistence and event publishing happen in {@code createListener}.
 */
private void handle(final APICreateLoadBalancerListenerMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getName() {
            return "create-listener";
        }

        @Override
        public String getSyncSignature() {
            return getSyncId();
        }

        @Override
        public void run(final SyncTaskChain chain) {
            createListener(msg, new NoErrorCompletion(chain) {
                @Override
                public void done() {
                    // Release the queue; createListener already replied.
                    chain.next();
                }
            });
        }
    });
}
/**
 * Persists a new listener for this load balancer, creates its account ref
 * and system tags, then publishes the creation event. No backend operation
 * is needed until a vm nic is attached to the listener.
 */
private void createListener(final APICreateLoadBalancerListenerMsg msg, final NoErrorCompletion completion) {
    final APICreateLoadBalancerListenerEvent evt = new APICreateLoadBalancerListenerEvent(msg.getId());
    LoadBalancerListenerVO vo = new LoadBalancerListenerVO();
    vo.setLoadBalancerUuid(self.getUuid());
    vo.setUuid(msg.getResourceUuid() == null ? Platform.getUuid() : msg.getResourceUuid());
    // Bug fix: take the description from the API message. The old code
    // called vo.setDescription(vo.getDescription()), copying the VO's own
    // still-null description onto itself and losing the user's input.
    vo.setDescription(msg.getDescription());
    vo.setName(msg.getName());
    vo.setInstancePort(msg.getInstancePort());
    vo.setLoadBalancerPort(msg.getLoadBalancerPort());
    vo.setProtocol(msg.getProtocol());
    vo = dbf.persistAndRefresh(vo);

    acntMgr.createAccountResourceRef(msg.getSession().getAccountUuid(), vo.getUuid(), LoadBalancerListenerVO.class);
    tagMgr.createNonInherentSystemTags(msg.getSystemTags(), vo.getUuid(), LoadBalancerListenerVO.class.getSimpleName());
    evt.setInventory(LoadBalancerListenerInventory.valueOf(vo));
    bus.publish(evt);
    completion.done();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.federation.router;
import static org.apache.hadoop.hdfs.server.federation.router.RouterRpcServer.merge;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.server.federation.resolver.ActiveNamenodeResolver;
import org.apache.hadoop.hdfs.server.federation.resolver.FederationNamespaceInfo;
import org.apache.hadoop.hdfs.server.federation.resolver.RemoteLocation;
import org.apache.hadoop.hdfs.server.namenode.NameNode.OperationCategory;
/**
 * Module that implements all the RPC calls in
 * {@link org.apache.hadoop.hdfs.protocol.ClientProtocol} related to
 * Erasure Coding in the {@link RouterRpcServer}.
 */
public class ErasureCoding {

  /** RPC server to receive client calls. */
  private final RouterRpcServer rpcServer;
  /** RPC clients to connect to the Namenodes. */
  private final RouterRpcClient rpcClient;
  /** Interface to identify the active NN for a nameservice or blockpool ID. */
  private final ActiveNamenodeResolver namenodeResolver;

  public ErasureCoding(RouterRpcServer server) {
    this.rpcServer = server;
    this.rpcClient = this.rpcServer.getRPCClient();
    this.namenodeResolver = this.rpcClient.getNamenodeResolver();
  }

  /**
   * Collects the erasure coding policies from every namespace and merges
   * them into a single array.
   */
  public ErasureCodingPolicyInfo[] getErasureCodingPolicies()
      throws IOException {
    rpcServer.checkOperation(OperationCategory.READ);

    final RemoteMethod remoteMethod =
        new RemoteMethod("getErasureCodingPolicies");
    final Set<FederationNamespaceInfo> namespaces =
        namenodeResolver.getNamespaces();
    final Map<FederationNamespaceInfo, ErasureCodingPolicyInfo[]> responses =
        rpcClient.invokeConcurrent(
            namespaces, remoteMethod, true, false,
            ErasureCodingPolicyInfo[].class);
    return merge(responses, ErasureCodingPolicyInfo.class);
  }

  /** Union of the codec-to-coder maps reported by all namespaces. */
  public Map<String, String> getErasureCodingCodecs() throws IOException {
    rpcServer.checkOperation(OperationCategory.READ);

    final RemoteMethod remoteMethod =
        new RemoteMethod("getErasureCodingCodecs");
    final Set<FederationNamespaceInfo> namespaces =
        namenodeResolver.getNamespaces();
    @SuppressWarnings("rawtypes")
    Map<FederationNamespaceInfo, Map> rawResponses =
        rpcClient.invokeConcurrent(
            namespaces, remoteMethod, true, false, Map.class);

    // The RPC layer can only return raw maps; restore the generic type
    // through an Object hop before merging the per-namespace tables.
    Object untyped = rawResponses;
    @SuppressWarnings("unchecked")
    Map<FederationNamespaceInfo, Map<String, String>> typedResponses =
        (Map<FederationNamespaceInfo, Map<String, String>>) untyped;

    Map<String, String> codecs = new HashMap<>();
    for (Map<String, String> nsCodecs : typedResponses.values()) {
      codecs.putAll(nsCodecs);
    }
    return codecs;
  }

  /** Adds the given policies on every namespace and merges the responses. */
  public AddErasureCodingPolicyResponse[] addErasureCodingPolicies(
      ErasureCodingPolicy[] policies) throws IOException {
    rpcServer.checkOperation(OperationCategory.WRITE);

    final RemoteMethod remoteMethod = new RemoteMethod(
        "addErasureCodingPolicies",
        new Class<?>[] {ErasureCodingPolicy[].class}, new Object[] {policies});
    final Set<FederationNamespaceInfo> namespaces =
        namenodeResolver.getNamespaces();
    final Map<FederationNamespaceInfo, AddErasureCodingPolicyResponse[]>
        responses = rpcClient.invokeConcurrent(
            namespaces, remoteMethod, true, false,
            AddErasureCodingPolicyResponse[].class);
    return merge(responses, AddErasureCodingPolicyResponse.class);
  }

  public void removeErasureCodingPolicy(String ecPolicyName)
      throws IOException {
    rpcServer.checkOperation(OperationCategory.WRITE);
    invokePolicyNameMethodEverywhere("removeErasureCodingPolicy",
        ecPolicyName);
  }

  public void disableErasureCodingPolicy(String ecPolicyName)
      throws IOException {
    rpcServer.checkOperation(OperationCategory.WRITE);
    invokePolicyNameMethodEverywhere("disableErasureCodingPolicy",
        ecPolicyName);
  }

  public void enableErasureCodingPolicy(String ecPolicyName)
      throws IOException {
    rpcServer.checkOperation(OperationCategory.WRITE);
    invokePolicyNameMethodEverywhere("enableErasureCodingPolicy",
        ecPolicyName);
  }

  /**
   * Invokes a ClientProtocol method that takes a single policy-name String
   * on every known namespace.
   */
  private void invokePolicyNameMethodEverywhere(String methodName,
      String ecPolicyName) throws IOException {
    final RemoteMethod remoteMethod = new RemoteMethod(methodName,
        new Class<?>[] {String.class}, ecPolicyName);
    final Set<FederationNamespaceInfo> namespaces =
        namenodeResolver.getNamespaces();
    rpcClient.invokeConcurrent(namespaces, remoteMethod, true, false);
  }

  /** Resolves the EC policy of a path via its owning subcluster(s). */
  public ErasureCodingPolicy getErasureCodingPolicy(String src)
      throws IOException {
    rpcServer.checkOperation(OperationCategory.READ);

    final List<RemoteLocation> locations =
        rpcServer.getLocationsForPath(src, true);
    final RemoteMethod remoteMethod = new RemoteMethod(
        "getErasureCodingPolicy",
        new Class<?>[] {String.class}, new RemoteParam());
    ErasureCodingPolicy policy = rpcClient.invokeSequential(
        locations, remoteMethod, null, null);
    return policy;
  }

  public void setErasureCodingPolicy(String src, String ecPolicyName)
      throws IOException {
    rpcServer.checkOperation(OperationCategory.WRITE);

    final List<RemoteLocation> locations =
        rpcServer.getLocationsForPath(src, true);
    final RemoteMethod remoteMethod = new RemoteMethod(
        "setErasureCodingPolicy",
        new Class<?>[] {String.class, String.class},
        new RemoteParam(), ecPolicyName);
    rpcClient.invokeSequential(locations, remoteMethod, null, null);
  }

  public void unsetErasureCodingPolicy(String src) throws IOException {
    rpcServer.checkOperation(OperationCategory.WRITE);

    final List<RemoteLocation> locations =
        rpcServer.getLocationsForPath(src, true);
    final RemoteMethod remoteMethod = new RemoteMethod(
        "unsetErasureCodingPolicy",
        new Class<?>[] {String.class}, new RemoteParam());
    rpcClient.invokeSequential(locations, remoteMethod, null, null);
  }

  /**
   * Aggregates EC block group statistics across all namespaces by summing
   * each counter. The "highest priority low redundancy" counter is only
   * reported when at least one namespace reported it.
   */
  public ECBlockGroupStats getECBlockGroupStats() throws IOException {
    rpcServer.checkOperation(OperationCategory.READ);

    final RemoteMethod remoteMethod = new RemoteMethod("getECBlockGroupStats");
    final Set<FederationNamespaceInfo> namespaces =
        namenodeResolver.getNamespaces();
    final Map<FederationNamespaceInfo, ECBlockGroupStats> perNamespaceStats =
        rpcClient.invokeConcurrent(
            namespaces, remoteMethod, true, false, ECBlockGroupStats.class);

    // Merge the stats from all the namespaces.
    long lowRedundancyBlockGroups = 0;
    long corruptBlockGroups = 0;
    long missingBlockGroups = 0;
    long bytesInFutureBlockGroups = 0;
    long pendingDeletionBlocks = 0;
    long highestPriorityLowRedundancyBlocks = 0;
    boolean hasHighestPriorityLowRedundancyBlocks = false;
    for (ECBlockGroupStats nsStats : perNamespaceStats.values()) {
      lowRedundancyBlockGroups += nsStats.getLowRedundancyBlockGroups();
      corruptBlockGroups += nsStats.getCorruptBlockGroups();
      missingBlockGroups += nsStats.getMissingBlockGroups();
      bytesInFutureBlockGroups += nsStats.getBytesInFutureBlockGroups();
      pendingDeletionBlocks += nsStats.getPendingDeletionBlocks();
      if (nsStats.hasHighestPriorityLowRedundancyBlocks()) {
        hasHighestPriorityLowRedundancyBlocks = true;
        highestPriorityLowRedundancyBlocks +=
            nsStats.getHighestPriorityLowRedundancyBlocks();
      }
    }
    if (hasHighestPriorityLowRedundancyBlocks) {
      return new ECBlockGroupStats(lowRedundancyBlockGroups,
          corruptBlockGroups, missingBlockGroups, bytesInFutureBlockGroups,
          pendingDeletionBlocks, highestPriorityLowRedundancyBlocks);
    }
    return new ECBlockGroupStats(lowRedundancyBlockGroups, corruptBlockGroups,
        missingBlockGroups, bytesInFutureBlockGroups, pendingDeletionBlocks);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.api.rewriter.runtime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Queue;
import java.util.concurrent.Future;
import java.util.concurrent.Semaphore;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.ActivityId;
import org.apache.hyracks.api.dataflow.EnforceFrameWriter;
import org.apache.hyracks.api.dataflow.IActivity;
import org.apache.hyracks.api.dataflow.IConnectorDescriptor;
import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.JobFlag;
/**
* The runtime of a SuperActivity, which internally executes a DAG of one-to-one
* connected activities in a single thread.
*/
public class SuperActivityOperatorNodePushable implements IOperatorNodePushable {
    // Runtime pushable of every activity in the super-activity, keyed by id.
    private final Map<ActivityId, IOperatorNodePushable> operatorNodePushables = new HashMap<>();
    // The same pushables in BFS discovery order; used to fan out parallel
    // initialize/deinitialize calls.
    private final List<IOperatorNodePushable> operatorNodePushablesBFSOrder = new ArrayList<>();
    private final Map<ActivityId, IActivity> startActivities;
    private final SuperActivity parent;
    private final IHyracksTaskContext ctx;
    private final IRecordDescriptorProvider recordDescProvider;
    private final int partition;
    private final int nPartitions;
    // Sum of the input arities of the source activities.
    private int inputArity = 0;

    public SuperActivityOperatorNodePushable(SuperActivity parent, Map<ActivityId, IActivity> startActivities,
            IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
        this.parent = parent;
        this.startActivities = startActivities;
        this.ctx = ctx;
        this.recordDescProvider = recordDescProvider;
        this.partition = partition;
        this.nPartitions = nPartitions;

        /*
         * initialize the writer-relationship for the internal DAG of operator
         * node pushables
         */
        try {
            init();
        } catch (Exception e) {
            throw new IllegalStateException(e);
        }
    }

    @Override
    public void initialize() throws HyracksDataException {
        // Initialize every internal pushable concurrently.
        runInParallel(IOperatorNodePushable::initialize);
    }

    @Override
    public void deinitialize() throws HyracksDataException {
        // Deinitialize every internal pushable concurrently.
        runInParallel(IOperatorNodePushable::deinitialize);
    }

    /**
     * Creates the push runtimes for all activities and wires producer
     * outputs to consumer inputs by BFS over the connector graph, starting
     * from the source activities.
     */
    private void init() throws HyracksDataException {
        Queue<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> childQueue = new LinkedList<>();
        List<IConnectorDescriptor> outputConnectors;
        final boolean enforce = ctx.getJobFlags().contains(JobFlag.ENFORCE_CONTRACT);

        /*
         * Set up the source operators
         */
        for (Entry<ActivityId, IActivity> entry : startActivities.entrySet()) {
            IOperatorNodePushable opPushable =
                    entry.getValue().createPushRuntime(ctx, recordDescProvider, partition, nPartitions);
            operatorNodePushablesBFSOrder.add(opPushable);
            operatorNodePushables.put(entry.getKey(), opPushable);
            inputArity += opPushable.getInputArity();
            // Seed the BFS queue with every outgoing edge of the sources.
            outputConnectors =
                    MapUtils.getObject(parent.getActivityOutputMap(), entry.getKey(), Collections.emptyList());
            for (IConnectorDescriptor conn : outputConnectors) {
                childQueue.add(parent.getConnectorActivityMap().get(conn.getConnectorId()));
            }
        }

        /*
         * Using BFS (breadth-first search) to construct to runtime execution DAG...
         */
        while (!childQueue.isEmpty()) {
            /*
             * construct the source to destination information
             */
            Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> channel = childQueue.poll();
            ActivityId sourceId = channel.getLeft().getLeft().getActivityId();
            int outputChannel = channel.getLeft().getRight();
            ActivityId destId = channel.getRight().getLeft().getActivityId();
            int inputChannel = channel.getRight().getRight();
            IOperatorNodePushable sourceOp = operatorNodePushables.get(sourceId);
            IOperatorNodePushable destOp = operatorNodePushables.get(destId);
            if (destOp == null) {
                // First visit of this destination activity: create its runtime.
                destOp = channel.getRight().getLeft().createPushRuntime(ctx, recordDescProvider, partition,
                        nPartitions);
                operatorNodePushablesBFSOrder.add(destOp);
                operatorNodePushables.put(destId, destOp);
            }

            /*
             * construct the dataflow connection from a producer to a consumer
             */
            IFrameWriter writer = destOp.getInputFrameWriter(inputChannel);
            writer = enforce ? EnforceFrameWriter.enforce(writer) : writer;
            sourceOp.setOutputFrameWriter(outputChannel, writer,
                    recordDescProvider.getInputRecordDescriptor(destId, inputChannel));

            /*
             * traverse to the child of the current activity
             */
            outputConnectors = MapUtils.getObject(parent.getActivityOutputMap(), destId, Collections.emptyList());

            /*
             * expand the executing activities further to the downstream
             */
            for (IConnectorDescriptor conn : outputConnectors) {
                if (conn != null) {
                    childQueue.add(parent.getConnectorActivityMap().get(conn.getConnectorId()));
                }
            }
        }
    }

    @Override
    public int getInputArity() {
        return inputArity;
    }

    @Override
    public void setOutputFrameWriter(int clusterOutputIndex, IFrameWriter writer, RecordDescriptor recordDesc)
            throws HyracksDataException {
        /*
         * set the right output frame writer
         */
        Pair<ActivityId, Integer> activityIdOutputIndex = parent.getActivityIdOutputIndex(clusterOutputIndex);
        IOperatorNodePushable opPushable = operatorNodePushables.get(activityIdOutputIndex.getLeft());
        opPushable.setOutputFrameWriter(activityIdOutputIndex.getRight(), writer, recordDesc);
    }

    @Override
    public IFrameWriter getInputFrameWriter(final int index) {
        /*
         * get the right IFrameWriter from the cluster input index
         */
        Pair<ActivityId, Integer> activityIdInputIndex = parent.getActivityIdInputIndex(index);
        IOperatorNodePushable operatorNodePushable = operatorNodePushables.get(activityIdInputIndex.getLeft());
        return operatorNodePushable.getInputFrameWriter(activityIdInputIndex.getRight());
    }

    @Override
    public String getDisplayName() {
        return "Super Activity " + parent.getActivityMap().values().toString();
    }

    // Action applied to each internal pushable by runInParallel.
    @FunctionalInterface
    interface OperatorNodePushableAction {
        void run(IOperatorNodePushable op) throws HyracksDataException;
    }

    /**
     * Runs the action on every internal pushable in parallel and waits for
     * all of them. Both semaphores start at (1 - n) permits so a single
     * acquire succeeds only after all n tasks have released — i.e. after all
     * have started (startSemaphore) or all have finished (completeSemaphore).
     */
    private void runInParallel(OperatorNodePushableAction action) throws HyracksDataException {
        List<Future<Void>> tasks = new ArrayList<>();
        final Semaphore startSemaphore = new Semaphore(1 - operatorNodePushablesBFSOrder.size());
        final Semaphore completeSemaphore = new Semaphore(1 - operatorNodePushablesBFSOrder.size());
        try {
            for (final IOperatorNodePushable op : operatorNodePushablesBFSOrder) {
                tasks.add(ctx.getExecutorService().submit(() -> {
                    // Mark this task as started before doing any work.
                    startSemaphore.release();
                    try {
                        action.run(op);
                    } finally {
                        completeSemaphore.release();
                    }
                    return null;
                }));
            }
            for (Future<Void> task : tasks) {
                task.get();
            }
        } catch (InterruptedException e) {
            cancelTasks(tasks, startSemaphore, completeSemaphore);
            // Restore the interrupt status before surfacing the failure.
            Thread.currentThread().interrupt();
            throw HyracksDataException.create(e);
        } catch (Exception e) {
            cancelTasks(tasks, startSemaphore, completeSemaphore);
            throw HyracksDataException.create(e);
        }
    }

    /**
     * Cancels outstanding tasks after a failure: waits until every task has
     * started (so none can start after being "cancelled"), cancels them all,
     * then waits until every task has completed.
     */
    private void cancelTasks(List<Future<Void>> tasks, Semaphore startSemaphore, Semaphore completeSemaphore) {
        try {
            startSemaphore.acquireUninterruptibly();
            for (Future<Void> task : tasks) {
                task.cancel(true);
            }
        } finally {
            completeSemaphore.acquireUninterruptibly();
        }
    }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.extractor;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.upstream.Allocation;
import com.google.android.exoplayer.upstream.Allocator;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.ParsableByteArray;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.LinkedBlockingDeque;
/**
* A rolling buffer of sample data and corresponding sample information.
*/
/* package */ final class RollingSampleBuffer {
private static final int INITIAL_SCRATCH_SIZE = 32;
private final Allocator allocator;
private final int allocationLength;
private final InfoQueue infoQueue;
private final LinkedBlockingDeque<Allocation> dataQueue;
private final SampleExtrasHolder extrasHolder;
private final ParsableByteArray scratch;
// Accessed only by the consuming thread.
private long totalBytesDropped;
// Accessed only by the loading thread.
private long totalBytesWritten;
private Allocation lastAllocation;
private int lastAllocationOffset;
/**
 * @param allocator An {@link Allocator} from which allocations for sample data can be obtained.
 */
public RollingSampleBuffer(Allocator allocator) {
    this.allocator = allocator;
    // Each Allocation handed out by this allocator has this fixed length.
    allocationLength = allocator.getIndividualAllocationLength();
    infoQueue = new InfoQueue();
    dataQueue = new LinkedBlockingDeque<Allocation>();
    extrasHolder = new SampleExtrasHolder();
    scratch = new ParsableByteArray(INITIAL_SCRATCH_SIZE);
    // Start at allocationLength (== "last allocation full"), matching the
    // convention used in dropUpstreamFrom — presumably so the write path
    // obtains a fresh allocation on first use; confirm against the writer.
    lastAllocationOffset = allocationLength;
}
// Called by the consuming thread, but only when there is no loading thread.
/**
* Clears the buffer, returning all allocations to the allocator.
*/
public void clear() {
infoQueue.clear();
while (!dataQueue.isEmpty()) {
allocator.release(dataQueue.remove());
}
totalBytesDropped = 0;
totalBytesWritten = 0;
lastAllocation = null;
lastAllocationOffset = allocationLength;
}
/**
* Returns the current absolute write index.
*/
public int getWriteIndex() {
return infoQueue.getWriteIndex();
}
/**
* Discards samples from the write side of the buffer.
*
* @param discardFromIndex The absolute index of the first sample to be discarded.
*/
public void discardUpstreamSamples(int discardFromIndex) {
totalBytesWritten = infoQueue.discardUpstreamSamples(discardFromIndex);
dropUpstreamFrom(totalBytesWritten);
}
/**
* Discards data from the write side of the buffer. Data is discarded from the specified absolute
* position. Any allocations that are fully discarded are returned to the allocator.
*
* @param absolutePosition The absolute position (inclusive) from which to discard data.
*/
private void dropUpstreamFrom(long absolutePosition) {
int relativePosition = (int) (absolutePosition - totalBytesDropped);
// Calculate the index of the allocation containing the position, and the offset within it.
int allocationIndex = relativePosition / allocationLength;
int allocationOffset = relativePosition % allocationLength;
// We want to discard any allocations after the one at allocationIdnex.
int allocationDiscardCount = dataQueue.size() - allocationIndex - 1;
if (allocationOffset == 0) {
// If the allocation at allocationIndex is empty, we should discard that one too.
allocationDiscardCount++;
}
// Discard the allocations.
for (int i = 0; i < allocationDiscardCount; i++) {
allocator.release(dataQueue.removeLast());
}
// Update lastAllocation and lastAllocationOffset to reflect the new position.
lastAllocation = dataQueue.peekLast();
lastAllocationOffset = allocationOffset == 0 ? allocationLength : allocationOffset;
}
// Called by the consuming thread.
/**
* Returns the current absolute read index.
*/
public int getReadIndex() {
return infoQueue.getReadIndex();
}
/**
* Fills {@code holder} with information about the current sample, but does not write its data.
* <p>
* The fields set are {@link SampleHolder#size}, {@link SampleHolder#timeUs} and
* {@link SampleHolder#flags}.
*
* @param holder The holder into which the current sample information should be written.
* @return True if the holder was filled. False if there is no current sample.
*/
public boolean peekSample(SampleHolder holder) {
return infoQueue.peekSample(holder, extrasHolder);
}
/**
* Skips the current sample.
*/
public void skipSample() {
long nextOffset = infoQueue.moveToNextSample();
dropDownstreamTo(nextOffset);
}
/**
* Attempts to skip to the keyframe before the specified time, if it's present in the buffer.
*
* @param timeUs The seek time.
* @return True if the skip was successful. False otherwise.
*/
public boolean skipToKeyframeBefore(long timeUs) {
long nextOffset = infoQueue.skipToKeyframeBefore(timeUs);
if (nextOffset == -1) {
return false;
}
dropDownstreamTo(nextOffset);
return true;
}
/**
* Reads the current sample, advancing the read index to the next sample.
*
* @param sampleHolder The holder into which the current sample should be written.
* @return True if a sample was read. False if there is no current sample.
*/
public boolean readSample(SampleHolder sampleHolder) {
// Write the sample information into the holder and extrasHolder.
boolean haveSample = infoQueue.peekSample(sampleHolder, extrasHolder);
if (!haveSample) {
return false;
}
// Read encryption data if the sample is encrypted.
if (sampleHolder.isEncrypted()) {
readEncryptionData(sampleHolder, extrasHolder);
}
// Write the sample data into the holder.
if (sampleHolder.data == null || sampleHolder.data.capacity() < sampleHolder.size) {
sampleHolder.replaceBuffer(sampleHolder.size);
}
if (sampleHolder.data != null) {
readData(extrasHolder.offset, sampleHolder.data, sampleHolder.size);
}
// Advance the read head.
long nextOffset = infoQueue.moveToNextSample();
dropDownstreamTo(nextOffset);
return true;
}
/**
* Reads encryption data for the current sample.
* <p>
* The encryption data is written into {@code sampleHolder.cryptoInfo}, and
* {@code sampleHolder.size} is adjusted to subtract the number of bytes that were read. The
* same value is added to {@code extrasHolder.offset}.
*
* @param sampleHolder The holder into which the encryption data should be written.
* @param extrasHolder The extras holder whose offset should be read and subsequently adjusted.
*/
private void readEncryptionData(SampleHolder sampleHolder, SampleExtrasHolder extrasHolder) {
long offset = extrasHolder.offset;
// Read the signal byte.
readData(offset, scratch.data, 1);
offset++;
byte signalByte = scratch.data[0];
boolean subsampleEncryption = (signalByte & 0x80) != 0;
int ivSize = signalByte & 0x7F;
// Read the initialization vector.
if (sampleHolder.cryptoInfo.iv == null) {
sampleHolder.cryptoInfo.iv = new byte[16];
}
readData(offset, sampleHolder.cryptoInfo.iv, ivSize);
offset += ivSize;
// Read the subsample count, if present.
int subsampleCount;
if (subsampleEncryption) {
readData(offset, scratch.data, 2);
offset += 2;
scratch.setPosition(0);
subsampleCount = scratch.readUnsignedShort();
} else {
subsampleCount = 1;
}
// Write the clear and encrypted subsample sizes.
int[] clearDataSizes = sampleHolder.cryptoInfo.numBytesOfClearData;
if (clearDataSizes == null || clearDataSizes.length < subsampleCount) {
clearDataSizes = new int[subsampleCount];
}
int[] encryptedDataSizes = sampleHolder.cryptoInfo.numBytesOfEncryptedData;
if (encryptedDataSizes == null || encryptedDataSizes.length < subsampleCount) {
encryptedDataSizes = new int[subsampleCount];
}
if (subsampleEncryption) {
int subsampleDataLength = 6 * subsampleCount;
ensureCapacity(scratch, subsampleDataLength);
readData(offset, scratch.data, subsampleDataLength);
offset += subsampleDataLength;
scratch.setPosition(0);
for (int i = 0; i < subsampleCount; i++) {
clearDataSizes[i] = scratch.readUnsignedShort();
encryptedDataSizes[i] = scratch.readUnsignedIntToInt();
}
} else {
clearDataSizes[0] = 0;
encryptedDataSizes[0] = sampleHolder.size - (int) (offset - extrasHolder.offset);
}
// Populate the cryptoInfo.
sampleHolder.cryptoInfo.set(subsampleCount, clearDataSizes, encryptedDataSizes,
extrasHolder.encryptionKeyId, sampleHolder.cryptoInfo.iv, C.CRYPTO_MODE_AES_CTR);
// Adjust the offset and size to take into account the bytes read.
int bytesRead = (int) (offset - extrasHolder.offset);
extrasHolder.offset += bytesRead;
sampleHolder.size -= bytesRead;
}
/**
* Reads data from the front of the rolling buffer.
*
* @param absolutePosition The absolute position from which data should be read.
* @param target The buffer into which data should be written.
* @param length The number of bytes to read.
*/
private void readData(long absolutePosition, ByteBuffer target, int length) {
int remaining = length;
while (remaining > 0) {
dropDownstreamTo(absolutePosition);
int positionInAllocation = (int) (absolutePosition - totalBytesDropped);
int toCopy = Math.min(remaining, allocationLength - positionInAllocation);
Allocation allocation = dataQueue.peek();
target.put(allocation.data, allocation.translateOffset(positionInAllocation), toCopy);
absolutePosition += toCopy;
remaining -= toCopy;
}
}
/**
* Reads data from the front of the rolling buffer.
*
* @param absolutePosition The absolute position from which data should be read.
* @param target The array into which data should be written.
* @param length The number of bytes to read.
*/
// TODO: Consider reducing duplication of this method and the one above.
private void readData(long absolutePosition, byte[] target, int length) {
int bytesRead = 0;
while (bytesRead < length) {
dropDownstreamTo(absolutePosition);
int positionInAllocation = (int) (absolutePosition - totalBytesDropped);
int toCopy = Math.min(length - bytesRead, allocationLength - positionInAllocation);
Allocation allocation = dataQueue.peek();
System.arraycopy(allocation.data, allocation.translateOffset(positionInAllocation), target,
bytesRead, toCopy);
absolutePosition += toCopy;
bytesRead += toCopy;
}
}
/**
* Discard any allocations that hold data prior to the specified absolute position, returning
* them to the allocator.
*
* @param absolutePosition The absolute position up to which allocations can be discarded.
*/
private void dropDownstreamTo(long absolutePosition) {
int relativePosition = (int) (absolutePosition - totalBytesDropped);
int allocationIndex = relativePosition / allocationLength;
for (int i = 0; i < allocationIndex; i++) {
allocator.release(dataQueue.remove());
totalBytesDropped += allocationLength;
}
}
/**
* Ensure that the passed {@link ParsableByteArray} is of at least the specified limit.
*/
private static void ensureCapacity(ParsableByteArray byteArray, int limit) {
if (byteArray.limit() < limit) {
byteArray.reset(new byte[limit], limit);
}
}
// Called by the loading thread.
/**
* Returns the current write position in the rolling buffer.
*
* @return The current write position.
*/
public long getWritePosition() {
return totalBytesWritten;
}
/**
* Appends data to the rolling buffer.
*
* @param dataSource The source from which to read.
* @param length The maximum length of the read, or {@link C#LENGTH_UNBOUNDED} if the caller does
* not wish to impose a limit.
* @return The number of bytes appended.
* @throws IOException If an error occurs reading from the source.
*/
public int appendData(DataSource dataSource, int length) throws IOException {
ensureSpaceForWrite();
int remainingAllocationCapacity = allocationLength - lastAllocationOffset;
length = length != C.LENGTH_UNBOUNDED ? Math.min(length, remainingAllocationCapacity)
: remainingAllocationCapacity;
int bytesRead = dataSource.read(lastAllocation.data,
lastAllocation.translateOffset(lastAllocationOffset), length);
if (bytesRead == C.RESULT_END_OF_INPUT) {
return C.RESULT_END_OF_INPUT;
}
lastAllocationOffset += bytesRead;
totalBytesWritten += bytesRead;
return bytesRead;
}
/**
* Appends data to the rolling buffer.
*
* @param input The source from which to read.
* @param length The maximum length of the read.
* @return The number of bytes appended.
* @throws IOException If an error occurs reading from the source.
*/
public int appendData(ExtractorInput input, int length) throws IOException, InterruptedException {
ensureSpaceForWrite();
int thisWriteLength = Math.min(length, allocationLength - lastAllocationOffset);
input.readFully(lastAllocation.data, lastAllocation.translateOffset(lastAllocationOffset),
thisWriteLength);
lastAllocationOffset += thisWriteLength;
totalBytesWritten += thisWriteLength;
return thisWriteLength;
}
/**
* Appends data to the rolling buffer.
*
* @param buffer A buffer containing the data to append.
* @param length The length of the data to append.
*/
public void appendData(ParsableByteArray buffer, int length) {
int remainingWriteLength = length;
while (remainingWriteLength > 0) {
ensureSpaceForWrite();
int thisWriteLength = Math.min(remainingWriteLength, allocationLength - lastAllocationOffset);
buffer.readBytes(lastAllocation.data, lastAllocation.translateOffset(lastAllocationOffset),
thisWriteLength);
lastAllocationOffset += thisWriteLength;
remainingWriteLength -= thisWriteLength;
}
totalBytesWritten += length;
}
/**
* Indicates the end point for the current sample, making it available for consumption.
*
* @param sampleTimeUs The sample timestamp.
* @param flags Flags that accompany the sample. See {@link SampleHolder#flags}.
* @param position The position of the sample data in the rolling buffer.
* @param size The size of the sample, in bytes.
* @param encryptionKey The encryption key associated with the sample, or null.
*/
public void commitSample(long sampleTimeUs, int flags, long position, int size,
byte[] encryptionKey) {
infoQueue.commitSample(sampleTimeUs, flags, position, size, encryptionKey);
}
/**
* Ensures at least one byte can be written, obtaining an additional allocation if necessary.
*/
private void ensureSpaceForWrite() {
if (lastAllocationOffset == allocationLength) {
lastAllocationOffset = 0;
lastAllocation = allocator.allocate();
dataQueue.add(lastAllocation);
}
}
/**
* Holds information about the samples in the rolling buffer.
*/
private static final class InfoQueue {
private static final int SAMPLE_CAPACITY_INCREMENT = 1000;
private int capacity;
private long[] offsets;
private int[] sizes;
private int[] flags;
private long[] timesUs;
private byte[][] encryptionKeys;
private int queueSize;
private int absoluteReadIndex;
private int relativeReadIndex;
private int relativeWriteIndex;
public InfoQueue() {
capacity = SAMPLE_CAPACITY_INCREMENT;
offsets = new long[capacity];
timesUs = new long[capacity];
flags = new int[capacity];
sizes = new int[capacity];
encryptionKeys = new byte[capacity][];
}
// Called by the consuming thread, but only when there is no loading thread.
/**
* Clears the queue.
*/
public void clear() {
absoluteReadIndex = 0;
relativeReadIndex = 0;
relativeWriteIndex = 0;
queueSize = 0;
}
/**
* Returns the current absolute write index.
*/
public int getWriteIndex() {
return absoluteReadIndex + queueSize;
}
/**
* Discards samples from the write side of the buffer.
*
* @param discardFromIndex The absolute index of the first sample to be discarded.
* @return The reduced total number of bytes written, after the samples have been discarded.
*/
public long discardUpstreamSamples(int discardFromIndex) {
int discardCount = getWriteIndex() - discardFromIndex;
Assertions.checkArgument(0 <= discardCount && discardCount <= queueSize);
if (discardCount == 0) {
if (absoluteReadIndex == 0) {
// queueSize == absoluteReadIndex == 0, so nothing has been written to the queue.
return 0;
}
int lastWriteIndex = (relativeWriteIndex == 0 ? capacity : relativeWriteIndex) - 1;
return offsets[lastWriteIndex] + sizes[lastWriteIndex];
}
queueSize -= discardCount;
relativeWriteIndex = (relativeWriteIndex + capacity - discardCount) % capacity;
return offsets[relativeWriteIndex];
}
// Called by the consuming thread.
/**
* Returns the current absolute read index.
*/
public int getReadIndex() {
return absoluteReadIndex;
}
/**
* Fills {@code holder} with information about the current sample, but does not write its data.
* The first entry in {@code offsetHolder} is filled with the absolute position of the sample's
* data in the rolling buffer.
* <p>
* The fields set are {SampleHolder#size}, {SampleHolder#timeUs}, {SampleHolder#flags} and
* {@code offsetHolder[0]}.
*
* @param holder The holder into which the current sample information should be written.
* @param extrasHolder The holder into which extra sample information should be written.
* @return True if the holders were filled. False if there is no current sample.
*/
public synchronized boolean peekSample(SampleHolder holder, SampleExtrasHolder extrasHolder) {
if (queueSize == 0) {
return false;
}
holder.timeUs = timesUs[relativeReadIndex];
holder.size = sizes[relativeReadIndex];
holder.flags = flags[relativeReadIndex];
extrasHolder.offset = offsets[relativeReadIndex];
extrasHolder.encryptionKeyId = encryptionKeys[relativeReadIndex];
return true;
}
/**
* Advances the read index to the next sample.
*
* @return The absolute position of the first byte in the rolling buffer that may still be
* required after advancing the index. Data prior to this position can be dropped.
*/
public synchronized long moveToNextSample() {
queueSize--;
int lastReadIndex = relativeReadIndex++;
absoluteReadIndex++;
if (relativeReadIndex == capacity) {
// Wrap around.
relativeReadIndex = 0;
}
return queueSize > 0 ? offsets[relativeReadIndex]
: (sizes[lastReadIndex] + offsets[lastReadIndex]);
}
/**
* Attempts to locate the keyframe before the specified time, if it's present in the buffer.
*
* @param timeUs The seek time.
* @return The offset of the keyframe's data if the keyframe was present. -1 otherwise.
*/
public synchronized long skipToKeyframeBefore(long timeUs) {
if (queueSize == 0 || timeUs < timesUs[relativeReadIndex]) {
return -1;
}
int lastWriteIndex = (relativeWriteIndex == 0 ? capacity : relativeWriteIndex) - 1;
long lastTimeUs = timesUs[lastWriteIndex];
if (timeUs > lastTimeUs) {
return -1;
}
// TODO: This can be optimized further using binary search, although the fact that the array
// is cyclic means we'd need to implement the binary search ourselves.
int sampleCount = 0;
int sampleCountToKeyframe = -1;
int searchIndex = relativeReadIndex;
while (searchIndex != relativeWriteIndex) {
if (timesUs[searchIndex] > timeUs) {
// We've gone too far.
break;
} else if ((flags[searchIndex] & C.SAMPLE_FLAG_SYNC) != 0) {
// We've found a keyframe, and we're still before the seek position.
sampleCountToKeyframe = sampleCount;
}
searchIndex = (searchIndex + 1) % capacity;
sampleCount++;
}
if (sampleCountToKeyframe == -1) {
return -1;
}
queueSize -= sampleCountToKeyframe;
relativeReadIndex = (relativeReadIndex + sampleCountToKeyframe) % capacity;
absoluteReadIndex += sampleCountToKeyframe;
return offsets[relativeReadIndex];
}
// Called by the loading thread.
public synchronized void commitSample(long timeUs, int sampleFlags, long offset, int size,
byte[] encryptionKey) {
timesUs[relativeWriteIndex] = timeUs;
offsets[relativeWriteIndex] = offset;
sizes[relativeWriteIndex] = size;
flags[relativeWriteIndex] = sampleFlags;
encryptionKeys[relativeWriteIndex] = encryptionKey;
// Increment the write index.
queueSize++;
if (queueSize == capacity) {
// Increase the capacity.
int newCapacity = capacity + SAMPLE_CAPACITY_INCREMENT;
long[] newOffsets = new long[newCapacity];
long[] newTimesUs = new long[newCapacity];
int[] newFlags = new int[newCapacity];
int[] newSizes = new int[newCapacity];
byte[][] newEncryptionKeys = new byte[newCapacity][];
int beforeWrap = capacity - relativeReadIndex;
System.arraycopy(offsets, relativeReadIndex, newOffsets, 0, beforeWrap);
System.arraycopy(timesUs, relativeReadIndex, newTimesUs, 0, beforeWrap);
System.arraycopy(flags, relativeReadIndex, newFlags, 0, beforeWrap);
System.arraycopy(sizes, relativeReadIndex, newSizes, 0, beforeWrap);
System.arraycopy(encryptionKeys, relativeReadIndex, newEncryptionKeys, 0, beforeWrap);
int afterWrap = relativeReadIndex;
System.arraycopy(offsets, 0, newOffsets, beforeWrap, afterWrap);
System.arraycopy(timesUs, 0, newTimesUs, beforeWrap, afterWrap);
System.arraycopy(flags, 0, newFlags, beforeWrap, afterWrap);
System.arraycopy(sizes, 0, newSizes, beforeWrap, afterWrap);
System.arraycopy(encryptionKeys, 0, newEncryptionKeys, beforeWrap, afterWrap);
offsets = newOffsets;
timesUs = newTimesUs;
flags = newFlags;
sizes = newSizes;
encryptionKeys = newEncryptionKeys;
relativeReadIndex = 0;
relativeWriteIndex = capacity;
queueSize = capacity;
capacity = newCapacity;
} else {
relativeWriteIndex++;
if (relativeWriteIndex == capacity) {
// Wrap around.
relativeWriteIndex = 0;
}
}
}
}
/**
* Holds additional sample information not held by {@link SampleHolder}.
*/
private static final class SampleExtrasHolder {
public long offset;
public byte[] encryptionKeyId;
}
}
| |
/*
* Copyright 2009-2018, Acciente LLC
*
* Acciente LLC licenses this file to you under the
* Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in
* writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.acciente.oacc;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.hasItems;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
/**
 * Tests for {@code AccessControlContext.createDomain}: root-level, child and grandchild creation,
 * authorization checks, whitespace/case handling, and failure cases (duplicates, null/blank names,
 * non-existent parents, missing authorization).
 */
public class TestAccessControl_createDomain extends TestAccessControlBase {
   @Test
   public void createDomain_rootLevel() throws Exception {
      authenticateSystemResource();
      // because we don't have a getter for *all* domains, I'm creating unique test domains for each test run
      final String domainName_one = generateUniqueDomainName();
      assertThat(accessControlContext.getDomainDescendants(domainName_one).isEmpty(), is(true));
      final String domainName_two = generateUniqueDomainName();
      assertThat(accessControlContext.getDomainDescendants(domainName_two).isEmpty(), is(true));
      accessControlContext.createDomain(domainName_one);
      accessControlContext.createDomain(domainName_two);
      // each root domain's descendant set contains only itself
      assertThat(accessControlContext.getDomainDescendants(domainName_one).size(), is(1));
      assertThat(accessControlContext.getDomainDescendants(domainName_one), hasItem(domainName_one));
      assertThat(accessControlContext.getDomainDescendants(domainName_two).size(), is(1));
      assertThat(accessControlContext.getDomainDescendants(domainName_two), hasItem(domainName_two));
   }
   @Test
   public void createDomain_childLevel() throws Exception {
      authenticateSystemResource();
      // because we don't have a getter for *all* domains, I'm creating unique test domains for each test run
      final String domainName_parent = generateUniqueDomainName();
      final String domainName_child1 = "rd_child1Of-" + domainName_parent;
      final String domainName_child2 = "rd_child2Of-" + domainName_parent;
      assertThat(accessControlContext.getDomainDescendants(domainName_parent).isEmpty(), is(true));
      assertThat(accessControlContext.getDomainDescendants(domainName_child1).isEmpty(), is(true));
      assertThat(accessControlContext.getDomainDescendants(domainName_child2).isEmpty(), is(true));
      accessControlContext.createDomain(domainName_parent);
      accessControlContext.createDomain(domainName_child1, domainName_parent);
      accessControlContext.createDomain(domainName_child2, domainName_parent);
      // parent's descendants include itself and both children; each child only itself
      assertThat(accessControlContext.getDomainDescendants(domainName_parent).size(), is(3));
      assertThat(accessControlContext.getDomainDescendants(domainName_parent), hasItems(domainName_parent, domainName_child1, domainName_child2));
      assertThat(accessControlContext.getDomainDescendants(domainName_child1).size(), is(1));
      assertThat(accessControlContext.getDomainDescendants(domainName_child1), hasItems(domainName_child1));
      assertThat(accessControlContext.getDomainDescendants(domainName_child2).size(), is(1));
      assertThat(accessControlContext.getDomainDescendants(domainName_child2), hasItems(domainName_child2));
   }
   @Test
   public void createDomain_grandchildLevel() throws Exception {
      authenticateSystemResource();
      // because we don't have a getter for *all* domains, I'm creating unique test domains for each test run
      final String domainName_parent = generateUniqueDomainName();
      final String domainName_child1 = "rd_child1Of-" + domainName_parent;
      final String domainName_grandchild1 = "rd_grandchild1Of-" + domainName_child1;
      assertThat(accessControlContext.getDomainDescendants(domainName_parent).isEmpty(), is(true));
      assertThat(accessControlContext.getDomainDescendants(domainName_child1).isEmpty(), is(true));
      assertThat(accessControlContext.getDomainDescendants(domainName_grandchild1).isEmpty(), is(true));
      accessControlContext.createDomain(domainName_parent);
      accessControlContext.createDomain(domainName_child1, domainName_parent);
      accessControlContext.createDomain(domainName_grandchild1, domainName_child1);
      // descendant sets are transitive: parent sees child and grandchild
      assertThat(accessControlContext.getDomainDescendants(domainName_parent).size(), is(3));
      assertThat(accessControlContext.getDomainDescendants(domainName_parent), hasItems(domainName_parent, domainName_child1, domainName_grandchild1));
      assertThat(accessControlContext.getDomainDescendants(domainName_child1).size(), is(2));
      assertThat(accessControlContext.getDomainDescendants(domainName_child1), hasItems(domainName_child1, domainName_grandchild1));
      assertThat(accessControlContext.getDomainDescendants(domainName_grandchild1).size(), is(1));
      assertThat(accessControlContext.getDomainDescendants(domainName_grandchild1), hasItem(domainName_grandchild1));
   }
   @Test
   public void createDomain_onlyRootLevelAsAuthorized() throws Exception {
      // set up an authenticatable resource with domain create permission
      final Resource authenticatedResource = generateResourceAndAuthenticate();
      grantDomainCreatePermission(authenticatedResource);
      final String domainName_parent = generateUniqueDomainName();
      final String domainName_child1 = "rd_child1Of-" + domainName_parent;
      assertThat(accessControlContext.getDomainDescendants(domainName_parent).isEmpty(), is(true));
      assertThat(accessControlContext.getDomainDescendants(domainName_child1).isEmpty(), is(true));
      // create domain and verify
      accessControlContext.createDomain(domainName_parent);
      assertThat(accessControlContext.getDomainDescendants(domainName_parent).size(), is(1));
      assertThat(accessControlContext.getDomainDescendants(domainName_parent), hasItem(domainName_parent));
      // attempt to create a child domain without authorization
      try {
         accessControlContext.createDomain(domainName_child1, domainName_parent);
         fail("create child domain without CREATE_CHILD_DOMAIN authorization should have failed");
      }
      catch (NotAuthorizedException e) {
         assertThat(e.getMessage().toLowerCase(), containsString(String.valueOf(authenticatedResource).toLowerCase()
                                                                       + " is not authorized to create child domain"));
      }
      // the failed child creation must not have altered the parent's descendants
      assertThat(accessControlContext.getDomainDescendants(domainName_parent).size(), is(1));
      assertThat(accessControlContext.getDomainDescendants(domainName_parent), hasItem(domainName_parent));
   }
   @Test
   public void createDomain_childLevelAsAuthorized() throws Exception {
      // set up an authenticatable resource with domain create and child domain create permissions
      final Resource authenticatedResource = generateResourceAndAuthenticate();
      grantDomainAndChildCreatePermission(authenticatedResource);
      final String domainName_parent = generateUniqueDomainName();
      final String domainName_child1 = "rd_child1Of-" + domainName_parent;
      assertThat(accessControlContext.getDomainDescendants(domainName_parent).isEmpty(), is(true));
      assertThat(accessControlContext.getDomainDescendants(domainName_child1).isEmpty(), is(true));
      // create domain and verify
      accessControlContext.createDomain(domainName_parent);
      accessControlContext.createDomain(domainName_child1, domainName_parent);
      assertThat(accessControlContext.getDomainDescendants(domainName_parent).size(), is(2));
      assertThat(accessControlContext.getDomainDescendants(domainName_parent), hasItems(domainName_parent, domainName_child1));
      assertThat(accessControlContext.getDomainDescendants(domainName_child1).size(), is(1));
      assertThat(accessControlContext.getDomainDescendants(domainName_child1), hasItem(domainName_child1));
   }
   @Test
   public void createDomain_whitespaceConsistent() throws Exception {
      authenticateSystemResource();
      // a name created with surrounding whitespace should resolve to the trimmed name
      final String domainName = generateUniqueDomainName().trim();
      final String domainNameWhitespaced = " " + domainName + "\t";
      assertThat(accessControlContext.getDomainDescendants(domainName).isEmpty(), is(true));
      accessControlContext.createDomain(domainNameWhitespaced);
      assertThat(accessControlContext.getDomainDescendants(domainName).size(), is(1));
      assertThat(accessControlContext.getDomainDescendants(domainName), hasItem(domainName));
      assertThat(accessControlContext.getDomainDescendants(domainNameWhitespaced).size(), is(1));
      assertThat(accessControlContext.getDomainDescendants(domainNameWhitespaced), hasItem(domainName));
      // the same should hold for child domains and whitespaced parent references
      final String parentDomain = generateDomain();
      final String parentDomainWhitespaced = " " + parentDomain + "\t";
      final String childDomainName = generateUniqueDomainName().trim();
      final String childDomainNameWhitespaced = " " + childDomainName + "\t";
      accessControlContext.createDomain(childDomainNameWhitespaced, parentDomainWhitespaced);
      assertThat(accessControlContext.getDomainDescendants(parentDomain).size(), is(2));
      assertThat(accessControlContext.getDomainDescendants(parentDomain), hasItem(parentDomain));
      assertThat(accessControlContext.getDomainDescendants(parentDomain), hasItem(childDomainName));
      assertThat(accessControlContext.getDomainDescendants(childDomainName).size(), is(1));
      assertThat(accessControlContext.getDomainDescendants(childDomainName), hasItem(childDomainName));
      assertThat(accessControlContext.getDomainDescendants(childDomainNameWhitespaced).size(), is(1));
      assertThat(accessControlContext.getDomainDescendants(childDomainNameWhitespaced), hasItem(childDomainName));
   }
   @Test
   public void createDomain_caseSensitiveConsistent() throws Exception {
      authenticateSystemResource();
      final String domainNameBase = generateUniqueDomainName();
      final String domainName_lower = domainNameBase + "_ddd";
      final String domainName_UPPER = domainNameBase + "_DDD";
      assertThat(accessControlContext.getDomainDescendants(domainName_lower).isEmpty(), is(true));
      assertThat(accessControlContext.getDomainDescendants(domainName_UPPER).isEmpty(), is(true));
      accessControlContext.createDomain(domainName_lower);
      // expected behavior depends on the backing database's case-sensitivity
      if (isDatabaseCaseSensitive()) {
         accessControlContext.createDomain(domainName_UPPER);
         assertThat(accessControlContext.getDomainDescendants(domainName_lower).size(), is(1));
         assertThat(accessControlContext.getDomainDescendants(domainName_lower), hasItem(domainName_lower));
         assertThat(accessControlContext.getDomainDescendants(domainName_UPPER).size(), is(1));
         assertThat(accessControlContext.getDomainDescendants(domainName_UPPER), hasItem(domainName_UPPER));
      }
      else {
         try {
            accessControlContext.createDomain(domainName_UPPER);
            fail("creating a domain with the name of an existing domain that differs in case only should have failed for case-insensitive databases");
         }
         catch (IllegalArgumentException e) {
            assertThat(e.getMessage().toLowerCase(), containsString("duplicate domain"));
         }
      }
   }
   @Test
   public void createDomain_duplicateDomainName_shouldFail() throws Exception {
      authenticateSystemResource();
      final String domainName = generateUniqueDomainName();
      final String otherDomainName = generateDomain();
      assertThat(accessControlContext.getDomainDescendants(domainName).isEmpty(), is(true));
      accessControlContext.createDomain(domainName);
      // attempt to create duplicate domain
      try {
         accessControlContext.createDomain(domainName);
         fail("creating a duplicate domain should fail");
      }
      catch (IllegalArgumentException e) {
         assertThat(e.getMessage().toLowerCase(), containsString("duplicate"));
      }
      // duplicate as a child of itself
      try {
         accessControlContext.createDomain(domainName, domainName);
         fail("creating a duplicate child domain should fail");
      }
      catch (IllegalArgumentException e) {
         assertThat(e.getMessage().toLowerCase(), containsString("duplicate"));
      }
      // name already used elsewhere in the hierarchy, nested under an unrelated parent
      try {
         accessControlContext.createDomain(otherDomainName, domainName);
         fail("creating a duplicate nested (but unrelated) domain should fail");
      }
      catch (IllegalArgumentException e) {
         assertThat(e.getMessage().toLowerCase(), containsString("duplicate"));
      }
      // none of the failed attempts should have created anything
      assertThat(accessControlContext.getDomainDescendants(domainName).size(), is(1));
      assertThat(accessControlContext.getDomainDescendants(domainName), hasItem(domainName));
      assertThat(accessControlContext.getDomainDescendants(otherDomainName).size(), is(1));
      assertThat(accessControlContext.getDomainDescendants(otherDomainName), hasItem(otherDomainName));
   }
   @Test
   public void createDomain_null_shouldFail() throws Exception {
      authenticateSystemResource();
      // attempt to create domain with null name
      try {
         accessControlContext.createDomain(null);
         fail("creating a null domain should fail");
      }
      catch (NullPointerException e) {
         assertThat(e.getMessage().toLowerCase(), containsString("none specified"));
      }
   }
   @Test
   public void createDomain_blankDomainName_shouldFail() throws Exception {
      authenticateSystemResource();
      // attempt to create domain with empty or blank name
      try {
         accessControlContext.createDomain("");
         fail("creating a domain with empty name should fail");
      }
      catch (IllegalArgumentException e) {
         assertThat(e.getMessage().toLowerCase(), containsString("none specified"));
      }
      try {
         accessControlContext.createDomain(" \t");
         // fixed copy-paste: this case exercises a blank (whitespace-only) name, not an empty one
         fail("creating a domain with blank name should fail");
      }
      catch (IllegalArgumentException e) {
         assertThat(e.getMessage().toLowerCase(), containsString("none specified"));
      }
   }
   @Test
   public void createDomain_nonExistentReferences_shouldFail() throws Exception {
      authenticateSystemResource();
      final String domainName = generateUniqueDomainName();
      assertThat(accessControlContext.getDomainDescendants(domainName).isEmpty(), is(true));
      // attempt to create a child of a parent domain that was never created
      try {
         accessControlContext.createDomain(domainName, "invalid_domain_name");
         fail("creating a child domain with non-existent parent domain reference should fail");
      }
      catch (IllegalArgumentException e) {
         assertThat(e.getMessage().toLowerCase(), containsString("parent domain"));
         assertThat(e.getMessage().toLowerCase(), containsString("not found"));
      }
      assertThat(accessControlContext.getDomainDescendants(domainName).isEmpty(), is(true));
   }
   @Test
   public void createDomain_notAuthorized_shouldFail() throws Exception {
      final String domainName = generateUniqueDomainName();
      // attempt to create domain without authorization
      final Resource resource = generateResourceAndAuthenticate();
      try {
         accessControlContext.createDomain(domainName);
         fail("creating a domain without authorization should fail");
      }
      catch (NotAuthorizedException e) {
         assertThat(e.getMessage().toLowerCase(), containsString(String.valueOf(resource).toLowerCase()
                                                                       + " is not authorized to create domain"));
      }
      assertThat(accessControlContext.getDomainDescendants(domainName).isEmpty(), is(true));
   }
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.machinelearning.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateBatchPredictionRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** A user-supplied ID that uniquely identifies the <code>BatchPrediction</code>. */
    private String batchPredictionId;

    /**
     * A user-supplied name or description of the <code>BatchPrediction</code>;
     * restricted to the UTF-8 character set.
     */
    private String batchPredictionName;

    /** The ID of the <code>MLModel</code> that will generate predictions for the group of observations. */
    private String mLModelId;

    /** The ID of the <code>DataSource</code> that points to the group of observations to predict. */
    private String batchPredictionDataSourceId;

    /**
     * The Amazon Simple Storage Service (Amazon S3) bucket or directory that receives the batch
     * prediction results. The substrings ':', '//', '/./' and '/../' are not allowed in the
     * <code>s3 key</code> portion of the <code>outputURI</code> field. Amazon ML needs permissions
     * to store and retrieve the logs on your behalf; for information about how to set permissions,
     * see the <a href="http://docs.aws.amazon.com/machine-learning/latest/dg">Amazon Machine
     * Learning Developer Guide</a>.
     */
    private String outputUri;

    /**
     * @param batchPredictionId
     *        a user-supplied ID that uniquely identifies the <code>BatchPrediction</code>
     */
    public void setBatchPredictionId(String batchPredictionId) {
        this.batchPredictionId = batchPredictionId;
    }

    /** @return the user-supplied ID that uniquely identifies the <code>BatchPrediction</code> */
    public String getBatchPredictionId() {
        return batchPredictionId;
    }

    /**
     * Fluent variant of {@link #setBatchPredictionId(String)}.
     *
     * @return this request, so that method calls can be chained together
     */
    public CreateBatchPredictionRequest withBatchPredictionId(String batchPredictionId) {
        setBatchPredictionId(batchPredictionId);
        return this;
    }

    /**
     * @param batchPredictionName
     *        a user-supplied name or description of the <code>BatchPrediction</code>;
     *        restricted to the UTF-8 character set
     */
    public void setBatchPredictionName(String batchPredictionName) {
        this.batchPredictionName = batchPredictionName;
    }

    /** @return the user-supplied name or description of the <code>BatchPrediction</code> */
    public String getBatchPredictionName() {
        return batchPredictionName;
    }

    /**
     * Fluent variant of {@link #setBatchPredictionName(String)}.
     *
     * @return this request, so that method calls can be chained together
     */
    public CreateBatchPredictionRequest withBatchPredictionName(String batchPredictionName) {
        setBatchPredictionName(batchPredictionName);
        return this;
    }

    /**
     * @param mLModelId
     *        the ID of the <code>MLModel</code> that will generate predictions for the group of observations
     */
    public void setMLModelId(String mLModelId) {
        this.mLModelId = mLModelId;
    }

    /** @return the ID of the <code>MLModel</code> that will generate predictions for the group of observations */
    public String getMLModelId() {
        return mLModelId;
    }

    /**
     * Fluent variant of {@link #setMLModelId(String)}.
     *
     * @return this request, so that method calls can be chained together
     */
    public CreateBatchPredictionRequest withMLModelId(String mLModelId) {
        setMLModelId(mLModelId);
        return this;
    }

    /**
     * @param batchPredictionDataSourceId
     *        the ID of the <code>DataSource</code> that points to the group of observations to predict
     */
    public void setBatchPredictionDataSourceId(String batchPredictionDataSourceId) {
        this.batchPredictionDataSourceId = batchPredictionDataSourceId;
    }

    /** @return the ID of the <code>DataSource</code> that points to the group of observations to predict */
    public String getBatchPredictionDataSourceId() {
        return batchPredictionDataSourceId;
    }

    /**
     * Fluent variant of {@link #setBatchPredictionDataSourceId(String)}.
     *
     * @return this request, so that method calls can be chained together
     */
    public CreateBatchPredictionRequest withBatchPredictionDataSourceId(String batchPredictionDataSourceId) {
        setBatchPredictionDataSourceId(batchPredictionDataSourceId);
        return this;
    }

    /**
     * @param outputUri
     *        the Amazon S3 bucket or directory that receives the batch prediction results; the
     *        substrings ':', '//', '/./' and '/../' are not allowed in the <code>s3 key</code>
     *        portion of the <code>outputURI</code> field
     */
    public void setOutputUri(String outputUri) {
        this.outputUri = outputUri;
    }

    /** @return the Amazon S3 location that receives the batch prediction results */
    public String getOutputUri() {
        return outputUri;
    }

    /**
     * Fluent variant of {@link #setOutputUri(String)}.
     *
     * @return this request, so that method calls can be chained together
     */
    public CreateBatchPredictionRequest withOutputUri(String outputUri) {
        setOutputUri(outputUri);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("{");
        if (getBatchPredictionId() != null)
            sb.append("BatchPredictionId: ").append(getBatchPredictionId()).append(",");
        if (getBatchPredictionName() != null)
            sb.append("BatchPredictionName: ").append(getBatchPredictionName()).append(",");
        if (getMLModelId() != null)
            sb.append("MLModelId: ").append(getMLModelId()).append(",");
        if (getBatchPredictionDataSourceId() != null)
            sb.append("BatchPredictionDataSourceId: ").append(getBatchPredictionDataSourceId()).append(",");
        if (getOutputUri() != null)
            sb.append("OutputUri: ").append(getOutputUri());
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof CreateBatchPredictionRequest))
            return false;
        final CreateBatchPredictionRequest other = (CreateBatchPredictionRequest) obj;
        return eq(getBatchPredictionId(), other.getBatchPredictionId())
                && eq(getBatchPredictionName(), other.getBatchPredictionName())
                && eq(getMLModelId(), other.getMLModelId())
                && eq(getBatchPredictionDataSourceId(), other.getBatchPredictionDataSourceId())
                && eq(getOutputUri(), other.getOutputUri());
    }

    // Null-safe equality: both null, or equal by equals().
    private static boolean eq(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation (null contributes 0) as the generated original.
        int hashCode = 1;
        hashCode = 31 * hashCode + hash(getBatchPredictionId());
        hashCode = 31 * hashCode + hash(getBatchPredictionName());
        hashCode = 31 * hashCode + hash(getMLModelId());
        hashCode = 31 * hashCode + hash(getBatchPredictionDataSourceId());
        hashCode = 31 * hashCode + hash(getOutputUri());
        return hashCode;
    }

    // Null-safe hash helper.
    private static int hash(Object o) {
        return o == null ? 0 : o.hashCode();
    }

    @Override
    public CreateBatchPredictionRequest clone() {
        return (CreateBatchPredictionRequest) super.clone();
    }
}
| |
package com.mapswithme.maps.routing;
import android.animation.ValueAnimator;
import android.app.Activity;
import android.os.Build;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.RadioGroup;
import android.widget.TextView;
import com.mapswithme.maps.Framework;
import com.mapswithme.maps.MwmApplication;
import com.mapswithme.maps.R;
import com.mapswithme.maps.widget.RotateDrawable;
import com.mapswithme.maps.widget.ToolbarController;
import com.mapswithme.maps.widget.WheelProgressView;
import com.mapswithme.util.UiUtils;
import com.mapswithme.util.statistics.AlohaHelper;
import com.mapswithme.util.statistics.Statistics;
/**
 * Controller for the route-planning panel: wires up the from/to slot frame,
 * the vehicle/pedestrian router switch, the build-progress wheels, and the
 * resulting time/distance/arrival summary.
 */
public class RoutingPlanController extends ToolbarController
{
  // Duration (ms) of the slot-frame expand/collapse animation, from resources.
  static final int ANIM_TOGGLE = MwmApplication.get().getResources().getInteger(R.integer.anim_slots_toggle);
  protected final View mFrame;
  private final ImageView mToggle;
  private final SlotFrame mSlotFrame;
  private final RadioGroup mRouterTypes;
  private final WheelProgressView mProgressVehicle;
  private final WheelProgressView mProgressPedestrian;
  private final View mPlanningLabel;
  private final View mErrorLabel;
  private final View mDetailsFrame;
  private final View mNumbersFrame;
  private final TextView mNumbersTime;
  private final TextView mNumbersDistance;
  private final TextView mNumbersArrival;
  // Rotating chevron drawn on the toggle button; angle tracks the open/close state.
  private final RotateDrawable mToggleImage = new RotateDrawable(R.drawable.ic_down);
  // Cached measured heights; 0 until the first layout pass (see checkFrameHeight()).
  private int mFrameHeight;
  private int mToolbarHeight;
  // Whether the slot frame is currently expanded.
  private boolean mOpen;
  /**
   * Binds all panel sub-views found under {@code root} and installs click
   * listeners for slot selection, router-type switching and the open/close toggle.
   */
  public RoutingPlanController(View root, Activity activity)
  {
    super(root, activity);
    mFrame = root;
    mToggle = (ImageView) mToolbar.findViewById(R.id.toggle);
    mSlotFrame = (SlotFrame) root.findViewById(R.id.slots);
    mSlotFrame.setOnSlotClickListener(new SlotFrame.OnSlotClickListener()
    {
      @Override
      public void OnSlotClick(int slotId)
      {
        RoutingController.get().searchPoi(slotId);
      }
    });
    View planFrame = root.findViewById(R.id.planning_frame);
    mRouterTypes = (RadioGroup) planFrame.findViewById(R.id.route_type);
    mRouterTypes.findViewById(R.id.vehicle).setOnClickListener(new View.OnClickListener()
    {
      @Override
      public void onClick(View v)
      {
        AlohaHelper.logClick(AlohaHelper.ROUTING_VEHICLE_SET);
        Statistics.INSTANCE.trackEvent(Statistics.EventName.ROUTING_VEHICLE_SET);
        RoutingController.get().setRouterType(Framework.ROUTER_TYPE_VEHICLE);
      }
    });
    mRouterTypes.findViewById(R.id.pedestrian).setOnClickListener(new View.OnClickListener()
    {
      @Override
      public void onClick(View v)
      {
        AlohaHelper.logClick(AlohaHelper.ROUTING_PEDESTRIAN_SET);
        Statistics.INSTANCE.trackEvent(Statistics.EventName.ROUTING_PEDESTRIAN_SET);
        RoutingController.get().setRouterType(Framework.ROUTER_TYPE_PEDESTRIAN);
      }
    });
    View progressFrame = planFrame.findViewById(R.id.progress_frame);
    mProgressVehicle = (WheelProgressView) progressFrame.findViewById(R.id.progress_vehicle);
    mProgressPedestrian = (WheelProgressView) progressFrame.findViewById(R.id.progress_pedestrian);
    mPlanningLabel = planFrame.findViewById(R.id.planning);
    mErrorLabel = planFrame.findViewById(R.id.error);
    mDetailsFrame = planFrame.findViewById(R.id.details_frame);
    mNumbersFrame = planFrame.findViewById(R.id.numbers);
    mNumbersTime = (TextView) mNumbersFrame.findViewById(R.id.time);
    mNumbersDistance = (TextView) mNumbersFrame.findViewById(R.id.distance);
    mNumbersArrival = (TextView) mNumbersFrame.findViewById(R.id.arrival);
    // On Lollipop+ the frame casts its own shadow, so the explicit divider is hidden.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
    {
      View divider = planFrame.findViewById(R.id.details_divider);
      if (divider != null)
        UiUtils.invisible(divider);
    }
    setTitle(R.string.p2p_route_planning);
    mToggle.setImageDrawable(mToggleImage);
    mToggle.setOnClickListener(new View.OnClickListener()
    {
      @Override
      public void onClick(View v)
      {
        toggleSlots();
      }
    });
  }
  /** Toolbar "up" press cancels the current planning session. */
  @Override
  public void onUpClick()
  {
    AlohaHelper.logClick(AlohaHelper.ROUTING_CANCEL);
    Statistics.INSTANCE.trackEvent(Statistics.EventName.ROUTING_CANCEL);
    RoutingController.get().cancelPlanning();
  }
  /**
   * Lazily caches the slot-frame and toolbar heights.
   *
   * @return true once the slot frame has been measured (height &gt; 0)
   */
  private boolean checkFrameHeight()
  {
    if (mFrameHeight > 0)
      return true;
    mFrameHeight = mSlotFrame.getHeight();
    mToolbarHeight = mToolbar.getHeight();
    return (mFrameHeight > 0);
  }
  // Slides the slot frame by adjusting its top margin relative to the toolbar.
  private void animateSlotFrame(int offset)
  {
    ViewGroup.MarginLayoutParams lp = (ViewGroup.MarginLayoutParams) mSlotFrame.getLayoutParams();
    lp.topMargin = (mToolbarHeight - offset);
    mSlotFrame.setLayoutParams(lp);
  }
  /** Refreshes the from/to slot contents from the routing state. */
  public void updatePoints()
  {
    mSlotFrame.update();
  }
  /**
   * Shows/hides the planning, error and result labels to match the current
   * build state, and fills in the time/distance/arrival numbers once a route
   * has been built.
   */
  private void updateProgressLabels()
  {
    RoutingController.BuildState buildState = RoutingController.get().getBuildState();
    boolean idle = (RoutingController.get().isPlanning() &&
                    buildState == RoutingController.BuildState.NONE);
    if (mDetailsFrame != null)
      UiUtils.showIf(!idle, mDetailsFrame);
    boolean ready = (buildState == RoutingController.BuildState.BUILT);
    UiUtils.showIf(ready, mNumbersFrame);
    UiUtils.showIf(RoutingController.get().isBuilding(), mPlanningLabel);
    UiUtils.showIf(buildState == RoutingController.BuildState.ERROR, mErrorLabel);
    if (!ready)
      return;
    RoutingInfo rinfo = RoutingController.get().getCachedRoutingInfo();
    mNumbersTime.setText(RoutingController.formatRoutingTime(rinfo.totalTimeInSeconds, R.dimen.text_size_routing_number));
    mNumbersDistance.setText(rinfo.distToTarget + " " + rinfo.targetUnits);
    if (mNumbersArrival != null)
      mNumbersArrival.setText(MwmApplication.get().getString(R.string.routing_arrive,
                                                             RoutingController.formatArrivalTime(rinfo.totalTimeInSeconds)));
  }
  /**
   * Updates the progress wheel of the active router type.
   *
   * @param progress build progress in percent
   * @param router   one of Framework.ROUTER_TYPE_* constants
   */
  public void updateBuildProgress(int progress, int router)
  {
    updateProgressLabels();
    boolean vehicle = (router == Framework.ROUTER_TYPE_VEHICLE);
    mRouterTypes.check(vehicle ? R.id.vehicle : R.id.pedestrian);
    if (!RoutingController.get().isBuilding())
    {
      UiUtils.hide(mProgressVehicle, mProgressPedestrian);
      return;
    }
    UiUtils.visibleIf(vehicle, mProgressVehicle);
    UiUtils.visibleIf(!vehicle, mProgressPedestrian);
    if (vehicle)
      mProgressVehicle.setProgress(progress);
    else
      mProgressPedestrian.setProgress(progress);
  }
  // Flips the open/closed state in response to the toggle button.
  private void toggleSlots()
  {
    AlohaHelper.logClick(AlohaHelper.ROUTING_TOGGLE);
    Statistics.INSTANCE.trackEvent(Statistics.EventName.ROUTING_TOGGLE);
    showSlots(!mOpen, true);
  }
  /**
   * Expands or collapses the slot frame.
   * If the frame has not been measured yet, retries after the next layout pass.
   *
   * @param show    true to expand, false to collapse
   * @param animate true to animate the transition and the toggle chevron
   */
  protected void showSlots(final boolean show, final boolean animate)
  {
    if (!checkFrameHeight())
    {
      // Heights unknown yet — post to run again after layout.
      mFrame.post(new Runnable()
      {
        @Override
        public void run()
        {
          showSlots(show, animate);
        }
      });
      return;
    }
    mOpen = show;
    if (animate)
    {
      // Animate fraction from fully-hidden to fully-shown (or back);
      // the chevron angle runs in the opposite direction.
      ValueAnimator animator = ValueAnimator.ofFloat(mOpen ? 1.0f : 0, mOpen ? 0 : 1.0f);
      animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener()
      {
        @Override
        public void onAnimationUpdate(ValueAnimator animation)
        {
          float fraction = (float)animation.getAnimatedValue();
          animateSlotFrame((int)(fraction * mFrameHeight));
          mToggleImage.setAngle((1.0f - fraction) * 180.0f);
        }
      });
      animator.setDuration(ANIM_TOGGLE);
      animator.start();
      mSlotFrame.fadeSlots(!mOpen);
    }
    else
    {
      animateSlotFrame(mOpen ? 0 : mFrameHeight);
      mToggleImage.setAngle(mOpen ? 180.0f : 0.0f);
      mSlotFrame.unfadeSlots();
    }
  }
  /** Hides the toggle button and pins the slot frame open (no animation). */
  public void disableToggle()
  {
    UiUtils.hide(mToggle);
    showSlots(true, false);
  }
  /** @return whether the slot frame is currently expanded */
  public boolean isOpen()
  {
    return mOpen;
  }
}
| |
package org.literacyapp.contentprovider.model.content.multimedia;
import org.greenrobot.greendao.DaoException;
import org.greenrobot.greendao.annotation.Convert;
import org.greenrobot.greendao.annotation.Entity;
import org.greenrobot.greendao.annotation.Generated;
import org.greenrobot.greendao.annotation.Id;
import org.greenrobot.greendao.annotation.JoinEntity;
import org.greenrobot.greendao.annotation.NotNull;
import org.greenrobot.greendao.annotation.ToMany;
import org.literacyapp.contentprovider.dao.AudioDao;
import org.literacyapp.contentprovider.dao.DaoSession;
import org.literacyapp.contentprovider.dao.LetterDao;
import org.literacyapp.contentprovider.dao.NumberDao;
import org.literacyapp.contentprovider.dao.WordDao;
import org.literacyapp.contentprovider.dao.converter.AudioFormatConverter;
import org.literacyapp.contentprovider.dao.converter.CalendarConverter;
import org.literacyapp.contentprovider.dao.converter.ContentStatusConverter;
import org.literacyapp.contentprovider.dao.converter.LocaleConverter;
import org.literacyapp.contentprovider.dao.converter.StringSetConverter;
import org.literacyapp.contentprovider.model.content.Letter;
import org.literacyapp.contentprovider.model.content.Number;
import org.literacyapp.contentprovider.model.content.Word;
import org.literacyapp.model.enums.Locale;
import org.literacyapp.model.enums.content.AudioFormat;
import org.literacyapp.model.enums.content.ContentStatus;
import org.literacyapp.model.enums.content.LiteracySkill;
import org.literacyapp.model.enums.content.NumeracySkill;
import java.util.Calendar;
import java.util.List;
import java.util.Set;
/**
 * greenDAO entity for an audio recording, with lazily-resolved to-many
 * relations to {@link Letter}, {@link Number} and {@link Word}.
 * Based on {@link org.literacyapp.model.gson.content.multimedia.AudioGson}.
 *
 * <p>NOTE: methods annotated with {@code @Generated} are maintained by the
 * greenDAO code generator — do not edit them by hand, or the generator's
 * hash checks will fail on the next build.</p>
 */
@Entity
public class Audio {
    @Id
    private Long id;
    // Language/locale of the recording, persisted as a String column.
    @NotNull
    @Convert(converter = LocaleConverter.class, columnType = String.class)
    private Locale locale;
    // Last-modified timestamp, persisted as epoch millis.
    @Convert(converter = CalendarConverter.class, columnType = Long.class)
    private Calendar timeLastUpdate;
    @NotNull
    private Integer revisionNumber; // [1, 2, 3, ...]
    @NotNull
    @Convert(converter = ContentStatusConverter.class, columnType = String.class)
    private ContentStatus contentStatus;
    @NotNull
    private String contentType; // MIME type
    // Skill tags, persisted as a single delimited String column.
    @Convert(converter = StringSetConverter.class, columnType = String.class)
    private Set<LiteracySkill> literacySkills;
    @Convert(converter = StringSetConverter.class, columnType = String.class)
    private Set<NumeracySkill> numeracySkills;
    // Many-to-many relations via join entities; resolved lazily (see getters).
    @ToMany
    @JoinEntity(entity = JoinAudiosWithLetters.class, sourceProperty = "audioId", targetProperty = "letterId")
    private List<Letter> letters;
    @ToMany
    @JoinEntity(entity = JoinAudiosWithNumbers.class, sourceProperty = "audioId", targetProperty = "numberId")
    private List<Number> numbers;
    @ToMany
    @JoinEntity(entity = JoinAudiosWithWords.class, sourceProperty = "audioId", targetProperty = "wordId")
    private List<Word> words;
    @NotNull
    private String transcription;
    @NotNull
    @Convert(converter = AudioFormatConverter.class, columnType = String.class)
    private AudioFormat audioFormat;
    /** Used to resolve relations */
    @Generated(hash = 2040040024)
    private transient DaoSession daoSession;
    /** Used for active entity operations. */
    @Generated(hash = 226033729)
    private transient AudioDao myDao;
    @Generated(hash = 1550087614)
    public Audio(Long id, @NotNull Locale locale, Calendar timeLastUpdate, @NotNull Integer revisionNumber,
            @NotNull ContentStatus contentStatus, @NotNull String contentType, Set<LiteracySkill> literacySkills,
            Set<NumeracySkill> numeracySkills, @NotNull String transcription, @NotNull AudioFormat audioFormat) {
        this.id = id;
        this.locale = locale;
        this.timeLastUpdate = timeLastUpdate;
        this.revisionNumber = revisionNumber;
        this.contentStatus = contentStatus;
        this.contentType = contentType;
        this.literacySkills = literacySkills;
        this.numeracySkills = numeracySkills;
        this.transcription = transcription;
        this.audioFormat = audioFormat;
    }
    @Generated(hash = 1642629471)
    public Audio() {
    }
    public Long getId() {
        return this.id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public Locale getLocale() {
        return this.locale;
    }
    public void setLocale(Locale locale) {
        this.locale = locale;
    }
    public Calendar getTimeLastUpdate() {
        return this.timeLastUpdate;
    }
    public void setTimeLastUpdate(Calendar timeLastUpdate) {
        this.timeLastUpdate = timeLastUpdate;
    }
    public Integer getRevisionNumber() {
        return this.revisionNumber;
    }
    public void setRevisionNumber(Integer revisionNumber) {
        this.revisionNumber = revisionNumber;
    }
    public ContentStatus getContentStatus() {
        return this.contentStatus;
    }
    public void setContentStatus(ContentStatus contentStatus) {
        this.contentStatus = contentStatus;
    }
    public String getContentType() {
        return this.contentType;
    }
    public void setContentType(String contentType) {
        this.contentType = contentType;
    }
    public Set<LiteracySkill> getLiteracySkills() {
        return this.literacySkills;
    }
    public void setLiteracySkills(Set<LiteracySkill> literacySkills) {
        this.literacySkills = literacySkills;
    }
    public Set<NumeracySkill> getNumeracySkills() {
        return this.numeracySkills;
    }
    public void setNumeracySkills(Set<NumeracySkill> numeracySkills) {
        this.numeracySkills = numeracySkills;
    }
    public String getTranscription() {
        return this.transcription;
    }
    public void setTranscription(String transcription) {
        this.transcription = transcription;
    }
    public AudioFormat getAudioFormat() {
        return this.audioFormat;
    }
    public void setAudioFormat(AudioFormat audioFormat) {
        this.audioFormat = audioFormat;
    }
    /**
     * To-many relationship, resolved on first access (and after reset).
     * Changes to to-many relations are not persisted, make changes to the target entity.
     */
    @Generated(hash = 764856619)
    public List<Letter> getLetters() {
        if (letters == null) {
            final DaoSession daoSession = this.daoSession;
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            LetterDao targetDao = daoSession.getLetterDao();
            List<Letter> lettersNew = targetDao._queryAudio_Letters(id);
            // double-checked assignment so a concurrent resolver does not clobber the list
            synchronized (this) {
                if (letters == null) {
                    letters = lettersNew;
                }
            }
        }
        return letters;
    }
    /** Resets a to-many relationship, making the next get call to query for a fresh result. */
    @Generated(hash = 520859111)
    public synchronized void resetLetters() {
        letters = null;
    }
    /**
     * To-many relationship, resolved on first access (and after reset).
     * Changes to to-many relations are not persisted, make changes to the target entity.
     */
    @Generated(hash = 1767884772)
    public List<Number> getNumbers() {
        if (numbers == null) {
            final DaoSession daoSession = this.daoSession;
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            NumberDao targetDao = daoSession.getNumberDao();
            List<Number> numbersNew = targetDao._queryAudio_Numbers(id);
            synchronized (this) {
                if (numbers == null) {
                    numbers = numbersNew;
                }
            }
        }
        return numbers;
    }
    /** Resets a to-many relationship, making the next get call to query for a fresh result. */
    @Generated(hash = 1968814974)
    public synchronized void resetNumbers() {
        numbers = null;
    }
    /**
     * To-many relationship, resolved on first access (and after reset).
     * Changes to to-many relations are not persisted, make changes to the target entity.
     */
    @Generated(hash = 763524340)
    public List<Word> getWords() {
        if (words == null) {
            final DaoSession daoSession = this.daoSession;
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            WordDao targetDao = daoSession.getWordDao();
            List<Word> wordsNew = targetDao._queryAudio_Words(id);
            synchronized (this) {
                if (words == null) {
                    words = wordsNew;
                }
            }
        }
        return words;
    }
    /** Resets a to-many relationship, making the next get call to query for a fresh result. */
    @Generated(hash = 1954400333)
    public synchronized void resetWords() {
        words = null;
    }
    /**
     * Convenient call for {@link org.greenrobot.greendao.AbstractDao#delete(Object)}.
     * Entity must be attached to an entity context.
     */
    @Generated(hash = 128553479)
    public void delete() {
        if (myDao == null) {
            throw new DaoException("Entity is detached from DAO context");
        }
        myDao.delete(this);
    }
    /**
     * Convenient call for {@link org.greenrobot.greendao.AbstractDao#refresh(Object)}.
     * Entity must be attached to an entity context.
     */
    @Generated(hash = 1942392019)
    public void refresh() {
        if (myDao == null) {
            throw new DaoException("Entity is detached from DAO context");
        }
        myDao.refresh(this);
    }
    /**
     * Convenient call for {@link org.greenrobot.greendao.AbstractDao#update(Object)}.
     * Entity must be attached to an entity context.
     */
    @Generated(hash = 713229351)
    public void update() {
        if (myDao == null) {
            throw new DaoException("Entity is detached from DAO context");
        }
        myDao.update(this);
    }
    /** called by internal mechanisms, do not call yourself. */
    @Generated(hash = 1261206123)
    public void __setDaoSession(DaoSession daoSession) {
        this.daoSession = daoSession;
        myDao = daoSession != null ? daoSession.getAudioDao() : null;
    }
}
| |
/*
* Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.semaphore;
import com.hazelcast.client.test.TestHazelcastFactory;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.ISemaphore;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static com.hazelcast.test.HazelcastTestSupport.randomString;
import static com.hazelcast.test.HazelcastTestSupport.sleepSeconds;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class ClientSemaphoreTest {

    private final TestHazelcastFactory hazelcastFactory = new TestHazelcastFactory();
    private HazelcastInstance client;

    @After
    public void tearDown() {
        hazelcastFactory.terminateAll();
    }

    @Before
    public void setup() {
        // One member plus one client; each test works on a uniquely named semaphore.
        hazelcastFactory.newHazelcastInstance();
        client = hazelcastFactory.newHazelcastClient();
    }

    @Test
    public void testSemaphoreInit() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        assertTrue(semaphore.init(10));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testSemaphoreNegInit() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(-1);
    }

    @Test
    public void testRelease() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(0);
        semaphore.release();
        assertEquals(1, semaphore.availablePermits());
    }

    @Test
    public void testdrainPermits() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(10);
        assertEquals(10, semaphore.drainPermits());
    }

    @Test
    public void testAvailablePermits_AfterDrainPermits() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(10);
        semaphore.drainPermits();
        assertEquals(0, semaphore.availablePermits());
    }

    @Test
    public void testTryAcquire_whenDrainPermits() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(10);
        semaphore.drainPermits();
        assertFalse(semaphore.tryAcquire());
    }

    @Test
    public void testAvailablePermits() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(10);
        assertEquals(10, semaphore.availablePermits());
    }

    @Test
    public void testAvailableReducePermits() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(10);
        semaphore.reducePermits(5);
        assertEquals(5, semaphore.availablePermits());
    }

    @Test
    public void testAvailableReducePermits_WhenZero() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(0);
        semaphore.reducePermits(1);
        assertEquals(0, semaphore.availablePermits());
    }

    @Test
    public void testTryAcquire_whenAvailable() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(1);
        assertTrue(semaphore.tryAcquire());
    }

    @Test
    public void testTryAcquire_whenUnAvailable() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(0);
        assertFalse(semaphore.tryAcquire());
    }

    @Test
    public void testTryAcquire_whenAvailableWithTimeOut() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(1);
        assertTrue(semaphore.tryAcquire(1, TimeUnit.MILLISECONDS));
    }

    @Test
    public void testTryAcquire_whenUnAvailableWithTimeOut() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(0);
        assertFalse(semaphore.tryAcquire(1, TimeUnit.MILLISECONDS));
    }

    @Test
    public void testTryAcquireMultiPermits_whenAvailable() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(10);
        assertTrue(semaphore.tryAcquire(5));
    }

    @Test
    public void testTryAcquireMultiPermits_whenUnAvailable() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(5);
        assertFalse(semaphore.tryAcquire(10));
    }

    @Test
    public void testTryAcquireMultiPermits_whenAvailableWithTimeOut() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(10);
        assertTrue(semaphore.tryAcquire(5, 1, TimeUnit.MILLISECONDS));
    }

    @Test
    public void testTryAcquireMultiPermits_whenUnAvailableWithTimeOut() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(5);
        assertFalse(semaphore.tryAcquire(10, 1, TimeUnit.MILLISECONDS));
    }

    @Test
    public void testTryAcquire_afterRelease() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(0);
        semaphore.release();
        assertTrue(semaphore.tryAcquire());
    }

    // NOTE(review): method name keeps its historical "Mulit" typo so external
    // tooling keyed on test names is unaffected.
    @Test
    public void testMulitReleaseTryAcquire() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(0);
        semaphore.release(5);
        assertTrue(semaphore.tryAcquire(5));
    }

    @Test
    public void testAcquire_Threaded() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(0);
        final CountDownLatch latch = new CountDownLatch(1);
        new Thread() {
            @Override
            public void run() {
                try {
                    semaphore.acquire();
                    latch.countDown();
                } catch (InterruptedException e) {
                    // Restore the interrupt flag instead of swallowing it;
                    // the latch timeout below will surface the failure.
                    Thread.currentThread().interrupt();
                }
            }
        }.start();
        // Give the acquiring thread time to block before releasing permits.
        sleepSeconds(1);
        semaphore.release(2);
        assertTrue(latch.await(30, TimeUnit.SECONDS));
        assertEquals(1, semaphore.availablePermits());
    }

    @Test
    public void tryAcquire_Threaded() throws Exception {
        final ISemaphore semaphore = client.getSemaphore(randomString());
        semaphore.init(0);
        final CountDownLatch latch = new CountDownLatch(1);
        new Thread() {
            @Override
            public void run() {
                try {
                    if (semaphore.tryAcquire(1, 5, TimeUnit.SECONDS)) {
                        latch.countDown();
                    }
                } catch (InterruptedException e) {
                    // Restore the interrupt flag instead of swallowing it.
                    Thread.currentThread().interrupt();
                }
            }
        }.start();
        semaphore.release(2);
        assertTrue(latch.await(30, TimeUnit.SECONDS));
        assertEquals(1, semaphore.availablePermits());
    }
}
| |
/*
* Copyright 2015 Key Bridge LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package us.gov.dod.standard.ssrf._3_1.metadata.lists;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlEnumValue;
import javax.xml.bind.annotation.XmlType;
import us.gov.dod.standard.ssrf._3_1.receiver.Baseband;
import us.gov.dod.standard.ssrf._3_1.receiver.RxModulation;
import us.gov.dod.standard.ssrf._3_1.transmitter.ObservedMOPAnalysis;
import us.gov.dod.standard.ssrf._3_1.transmitter.TxModulation;
/**
* Enumerated values for fields using the ListCMO type.
* <p>
* Used in
* {@link Baseband}, {@link ObservedMOPAnalysis}, {@link RxModulation}, {@link TxModulation}
* <p>
* @author Key Bridge LLC <developer@keybridge.ch>
* @version 3.1.0, 04/02/2015
*/
@XmlType(name = "ListCMO")
@XmlEnum
public enum ListCMO {
  @XmlEnumValue("8-Tone")
  EIGHT_TONE("8-Tone"),
  @XmlEnumValue("16-Tone")
  TONE_16("16-Tone"),
  @XmlEnumValue("32-Tone")
  TONE_32("32-Tone"),
  @XmlEnumValue("AM Clear Voice")
  AM_CLEAR_VOICE("AM Clear Voice"),
  @XmlEnumValue("AM Secure Voice")
  AM_SECURE_VOICE("AM Secure Voice"),
  @XmlEnumValue("ASK/OOK")
  ASKOOK("ASK/OOK"),
  @XmlEnumValue("Audio FSK")
  AUDIO_FSK("Audio FSK"),
  @XmlEnumValue("Binary FSK")
  BINARY_FSK("Binary FSK"),
  @XmlEnumValue("Binary Phase Shift Key")
  BINARY_PHASE_SHIFT_KEY("Binary Phase Shift Key"),
  @XmlEnumValue("Code Division Multiplex")
  CODE_DIVISION_MULTIPLEX("Code Division Multiplex"),
  @XmlEnumValue("COFDM")
  COFDM("COFDM"),
  @XmlEnumValue("Coherent FSK")
  COHERENT_FSK("Coherent FSK"),
  @XmlEnumValue("Coherent MSK")
  COHERENT_MSK("Coherent MSK"),
  @XmlEnumValue("Coherent Quadrature PSK")
  COHERENT_QUADRATURE_PSK("Coherent Quadrature PSK"),
  @XmlEnumValue("Continuous Wave")
  CONTINUOUS_WAVE("Continuous Wave"),
  @XmlEnumValue("CPFSK")
  CPFSK("CPFSK"),
  @XmlEnumValue("Data")
  DATA("Data"),
  @XmlEnumValue("Differential Binary PSK")
  DIFFERENTIAL_BINARY_PSK("Differential Binary PSK"),
  @XmlEnumValue("Differential Gaussian MSK")
  DIFFERENTIAL_GAUSSIAN_MSK("Differential Gaussian MSK"),
  @XmlEnumValue("Differential PSK")
  DIFFERENTIAL_PSK("Differential PSK"),
  @XmlEnumValue("Differential Quadrature PSK")
  DIFFERENTIAL_QUADRATURE_PSK("Differential Quadrature PSK"),
  @XmlEnumValue("Differential Raised Cosine MSK")
  DIFFERENTIAL_RAISED_COSINE_MSK("Differential Raised Cosine MSK"),
  @XmlEnumValue("Direct Sequence Binary FSK")
  DIRECT_SEQUENCE_BINARY_FSK("Direct Sequence Binary FSK"),
  @XmlEnumValue("Direct Sequence Binary PSK")
  DIRECT_SEQUENCE_BINARY_PSK("Direct Sequence Binary PSK"),
  @XmlEnumValue("Direct Sequence Complementary Code Keying")
  DIRECT_SEQUENCE_COMPLEMENTARY_CODE_KEYING("Direct Sequence Complementary Code Keying"),
  @XmlEnumValue("Direct Sequence FSK")
  DIRECT_SEQUENCE_FSK("Direct Sequence FSK"),
  @XmlEnumValue("Direct Sequence MSK")
  DIRECT_SEQUENCE_MSK("Direct Sequence MSK"),
  @XmlEnumValue("Direct Sequence Offset Quadrature PSK")
  DIRECT_SEQUENCE_OFFSET_QUADRATURE_PSK("Direct Sequence Offset Quadrature PSK"),
  @XmlEnumValue("Direct Sequence PSK")
  DIRECT_SEQUENCE_PSK("Direct Sequence PSK"),
  @XmlEnumValue("Direct Sequence Quadrature PSK")
  DIRECT_SEQUENCE_QUADRATURE_PSK("Direct Sequence Quadrature PSK"),
  @XmlEnumValue("Doppler Frequency-Shift")
  DOPPLER_FREQUENCY_SHIFT("Doppler Frequency-Shift"),
  @XmlEnumValue("DTMF")
  DTMF("DTMF"),
  @XmlEnumValue("Electronic Attack")
  ELECTRONIC_ATTACK("Electronic Attack"),
  @XmlEnumValue("Feher QPSK-B")
  FEHER_QPSK_B("Feher QPSK-B"),
  @XmlEnumValue("Feher QPSK-JR")
  FEHER_QPSK_JR("Feher QPSK-JR"),
  @XmlEnumValue("FM Clear Voice")
  FM_CLEAR_VOICE("FM Clear Voice"),
  @XmlEnumValue("FM Secure Voice")
  FM_SECURE_VOICE("FM Secure Voice"),
  @XmlEnumValue("Frequency Division Multiplex")
  FREQUENCY_DIVISION_MULTIPLEX("Frequency Division Multiplex"),
  @XmlEnumValue("FSK")
  FSK("FSK"),
  @XmlEnumValue("Gaussian FSK")
  GAUSSIAN_FSK("Gaussian FSK"),
  @XmlEnumValue("Gaussian MSK")
  GAUSSIAN_MSK("Gaussian MSK"),
  @XmlEnumValue("Minimum Shift Keying")
  MINIMUM_SHIFT_KEYING("Minimum Shift Keying"),
  @XmlEnumValue("Multichannel")
  MULTICHANNEL("Multichannel"),
  @XmlEnumValue("Multichannel Data")
  MULTICHANNEL_DATA("Multichannel Data"),
  @XmlEnumValue("Multichannel PCM Voice")
  MULTICHANNEL_PCM_VOICE("Multichannel PCM Voice"),
  @XmlEnumValue("Multi-channel PCM Voice")
  MULTI_CHANNEL_PCM_VOICE("Multi-channel PCM Voice"),
  @XmlEnumValue("Multichannel Voice")
  MULTICHANNEL_VOICE("Multichannel Voice"),
  @XmlEnumValue("Multichannel Voice/Data")
  MULTICHANNEL_VOICEDATA("Multichannel Voice/Data"),
  @XmlEnumValue("Multi-index Continuous Phase Modulation")
  MULTI_INDEX_CONTINUOUS_PHASE_MODULATION("Multi-index Continuous Phase Modulation"),
  @XmlEnumValue("NOISE")
  NOISE("NOISE"),
  @XmlEnumValue("OFDM")
  OFDM("OFDM"),
  @XmlEnumValue("Offset Quadrature PSK")
  OFFSET_QUADRATURE_PSK("Offset Quadrature PSK"),
  @XmlEnumValue("PSK")
  PSK("PSK"),
  @XmlEnumValue("Pulsed")
  PULSED("Pulsed"),
  @XmlEnumValue("QAM")
  QAM("QAM"),
  @XmlEnumValue("Quad Tone")
  QUAD_TONE("Quad Tone"),
  @XmlEnumValue("Quadrature FSK")
  QUADRATURE_FSK("Quadrature FSK"),
  @XmlEnumValue("Quadrature Partial Response Signaling")
  QUADRATURE_PARTIAL_RESPONSE_SIGNALING("Quadrature Partial Response Signaling"),
  @XmlEnumValue("Quadrature PSK")
  QUADRATURE_PSK("Quadrature PSK"),
  @XmlEnumValue("Secure Data")
  SECURE_DATA("Secure Data"),
  @XmlEnumValue("Shaped Binary PSK")
  SHAPED_BINARY_PSK("Shaped Binary PSK"),
  @XmlEnumValue("Shaped FSK")
  SHAPED_FSK("Shaped FSK"),
  @XmlEnumValue("Shaped MSK")
  SHAPED_MSK("Shaped MSK"),
  @XmlEnumValue("Shaped Offset Quadrature PSK")
  SHAPED_OFFSET_QUADRATURE_PSK("Shaped Offset Quadrature PSK"),
  @XmlEnumValue("Single Channel")
  SINGLE_CHANNEL("Single Channel"),
  @XmlEnumValue("Single Secure Voice Channel")
  SINGLE_SECURE_VOICE_CHANNEL("Single Secure Voice Channel"),
  @XmlEnumValue("Single Voice Channel")
  SINGLE_VOICE_CHANNEL("Single Voice Channel"),
  @XmlEnumValue("Time Division Multiplex")
  TIME_DIVISION_MULTIPLEX("Time Division Multiplex"),
  @XmlEnumValue("Tuned Frequency Modulation")
  TUNED_FREQUENCY_MODULATION("Tuned Frequency Modulation"),
  @XmlEnumValue("Video")
  VIDEO("Video"),
  /**
   * If selected, a clarifying remark SHOULD be entered
   */
  @XmlEnumValue("Other")
  OTHER("Other");

  /** The XML string representation of this constant. */
  private final String value;

  ListCMO(String v) {
    value = v;
  }

  /**
   * Returns the XML string representation of this constant.
   *
   * @return the XML string value
   */
  public String value() {
    return value;
  }

  /**
   * Looks up the enumerated constant matching the given XML string value.
   *
   * @param v the XML string value
   * @return the matching constant
   * @throws IllegalArgumentException if no constant matches the given value
   */
  public static ListCMO fromValue(String v) {
    for (ListCMO c : ListCMO.values()) {
      if (c.value.equals(v)) {
        return c;
      }
    }
    // Include the type name so unmarshalling failures are self-explanatory.
    throw new IllegalArgumentException("No ListCMO constant for value: " + v);
  }
}
| |
//========================================================================
//Copyright 2012 David Yu
//------------------------------------------------------------------------
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//http://www.apache.org/licenses/LICENSE-2.0
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//========================================================================
package com.dyuproject.protostuff.runtime;
import com.dyuproject.protostuff.AbstractTest;
import com.dyuproject.protostuff.ProtostuffIOUtil;
import com.dyuproject.protostuff.Schema;
/**
* Tests for {@link RuntimeView}.
*
* @author David Yu
* @created Nov 9, 2012
*/
public class RuntimeViewTest extends AbstractTest
{
    //                          id  name  timestamp
    static final Baz BAZ = newBaz( 128, "baz", 0);

    /** Serialized size of BAZ with all three fields written. */
    static final int EXPECT_BAZ_LEN = 1+2 + 1+1+3 + 1+1;
    static final int ID_LEN = 3;
    static final int NAME_LEN = 5;
    static final int TIMESTAMP_LEN = 2;
    static final int WITHOUT_ID_LEN = EXPECT_BAZ_LEN - ID_LEN;
    static final int WITHOUT_NAME_LEN = EXPECT_BAZ_LEN - NAME_LEN;
    static final int WITHOUT_TIMESTAMP_LEN = EXPECT_BAZ_LEN - TIMESTAMP_LEN;

    // Field numbers of Baz; final so they cannot drift from the STR_FN_* twins below.
    static final int FN_ID = 1, FN_NAME = 2, FN_TIMESTAMP = 3;
    static final String STR_FN_ID = "1";
    static final String STR_FN_NAME = "2";
    static final String STR_FN_TIMESTAMP = "3";

    /** Builds a fully populated Baz message. */
    static Baz newBaz(int id, String name, long timestamp)
    {
        Baz message = new Baz();
        message.setId(id);
        message.setName(name);
        message.setTimestamp(timestamp);
        return message;
    }

    /** Serializes BAZ with the given schema. */
    static byte[] ser(Schema<Baz> schema)
    {
        return ProtostuffIOUtil.toByteArray(BAZ, schema, buf());
    }

    /** Serialized length of BAZ under the given schema. */
    static int len(Schema<Baz> schema)
    {
        return ser(schema).length;
    }

    /** The full runtime schema for Baz (no view applied). */
    static RuntimeSchema<Baz> rs()
    {
        return (RuntimeSchema<Baz>)RuntimeSchema.getSchema(Baz.class);
    }

    /** View excluding the named fields. */
    static Schema<Baz> ex1(String ... args)
    {
        return RuntimeView.createFrom(rs(),
                RuntimeView.Factories.EXCLUDE,
                null, args);
    }

    /** View excluding the named fields, merge-optimized variant. */
    static Schema<Baz> ex2(String ... args)
    {
        return RuntimeView.createFrom(rs(),
                RuntimeView.Factories.EXCLUDE_OPTIMIZED_FOR_MERGE_ONLY,
                null, args);
    }

    /** View excluding the field-number range [min, max]. */
    static Schema<Baz> ex3(int min, int max)
    {
        return RuntimeView.createFrom(rs(),
                RuntimeView.Factories.EXCLUDE,
                new Predicate.RANGE(min, max), (String[])null);
    }

    /** View including only the named fields. */
    static Schema<Baz> in1(String ... args)
    {
        return RuntimeView.createFrom(rs(),
                RuntimeView.Factories.INCLUDE,
                null, args);
    }

    /** View including only the named fields, merge-optimized variant. */
    static Schema<Baz> in2(String ... args)
    {
        return RuntimeView.createFrom(rs(),
                RuntimeView.Factories.INCLUDE_OPTIMIZED_FOR_MERGE_ONLY,
                null, args);
    }

    /** Predicate view: field number equals the given arg. */
    static Schema<Baz> EQ(String ... args)
    {
        return RuntimeView.createFrom(rs(),
                RuntimeView.Factories.PREDICATE,
                Predicate.Factories.EQ, args);
    }

    /** Predicate view: field number differs from the given arg. */
    static Schema<Baz> NOTEQ(String ... args)
    {
        return RuntimeView.createFrom(rs(),
                RuntimeView.Factories.PREDICATE,
                Predicate.Factories.NOTEQ, args);
    }

    /** Predicate view: field number greater than the given arg. */
    static Schema<Baz> GT(String ... args)
    {
        return RuntimeView.createFrom(rs(),
                RuntimeView.Factories.PREDICATE,
                Predicate.Factories.GT, args);
    }

    /** Predicate view: field number less than the given arg. */
    static Schema<Baz> LT(String ... args)
    {
        return RuntimeView.createFrom(rs(),
                RuntimeView.Factories.PREDICATE,
                Predicate.Factories.LT, args);
    }

    /** Predicate view: field number within the given inclusive range. */
    static Schema<Baz> RANGE(String ... args)
    {
        return RuntimeView.createFrom(rs(),
                RuntimeView.Factories.PREDICATE,
                Predicate.Factories.RANGE, args);
    }

    /** Predicate view: field number outside the given inclusive range. */
    static Schema<Baz> NOTRANGE(String ... args)
    {
        return RuntimeView.createFrom(rs(),
                RuntimeView.Factories.PREDICATE,
                Predicate.Factories.NOTRANGE, args);
    }

    // tests

    public void testLen()
    {
        assertEquals(EXPECT_BAZ_LEN, len(rs()));
    }

    public void testExcludeBazId()
    {
        assertEquals(WITHOUT_ID_LEN, len(ex1("id")));
        assertEquals(WITHOUT_ID_LEN, len(ex2("id")));
        assertEquals(WITHOUT_ID_LEN, len(ex3(FN_ID, FN_ID)));
        assertEquals(WITHOUT_ID_LEN, len(in1("name", "timestamp")));
        assertEquals(WITHOUT_ID_LEN, len(in2("name", "timestamp")));
        assertEquals(WITHOUT_ID_LEN, len(NOTEQ(STR_FN_ID)));
        assertEquals(WITHOUT_ID_LEN, len(GT(STR_FN_ID)));
        assertEquals(WITHOUT_ID_LEN, len(RANGE(STR_FN_NAME, STR_FN_TIMESTAMP)));
    }

    public void testExcludeBazName()
    {
        assertEquals(WITHOUT_NAME_LEN, len(ex1("name")));
        assertEquals(WITHOUT_NAME_LEN, len(ex2("name")));
        assertEquals(WITHOUT_NAME_LEN, len(ex3(FN_NAME, FN_NAME)));
        assertEquals(WITHOUT_NAME_LEN, len(in1("id", "timestamp")));
        assertEquals(WITHOUT_NAME_LEN, len(in2("id", "timestamp")));
        assertEquals(WITHOUT_NAME_LEN, len(NOTEQ(STR_FN_NAME)));
        assertEquals(WITHOUT_NAME_LEN, len(NOTRANGE(STR_FN_NAME, STR_FN_NAME)));
    }

    public void testExcludeBazTimestamp()
    {
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(ex1("timestamp")));
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(ex2("timestamp")));
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(ex3(FN_TIMESTAMP, FN_TIMESTAMP)));
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(in1("id", "name")));
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(in2("id", "name")));
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(NOTEQ(STR_FN_TIMESTAMP)));
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(LT(STR_FN_TIMESTAMP)));
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(RANGE(STR_FN_ID, STR_FN_NAME)));
    }

    public void testIncludeOnlyBazId()
    {
        assertEquals(ID_LEN, len(ex1("name", "timestamp")));
        assertEquals(ID_LEN, len(ex2("name", "timestamp")));
        assertEquals(ID_LEN, len(ex3(FN_NAME, FN_TIMESTAMP)));
        assertEquals(ID_LEN, len(in1("id")));
        assertEquals(ID_LEN, len(in2("id")));
        assertEquals(ID_LEN, len(EQ(STR_FN_ID)));
        assertEquals(ID_LEN, len(LT(STR_FN_NAME)));
        assertEquals(ID_LEN, len(RANGE(STR_FN_ID, STR_FN_ID)));
    }

    public void testIncludeOnlyBazName()
    {
        assertEquals(NAME_LEN, len(ex1("id", "timestamp")));
        assertEquals(NAME_LEN, len(ex2("id", "timestamp")));
        assertEquals(NAME_LEN, len(in1("name")));
        assertEquals(NAME_LEN, len(in2("name")));
        assertEquals(NAME_LEN, len(EQ(STR_FN_NAME)));
        assertEquals(NAME_LEN, len(RANGE(STR_FN_NAME, STR_FN_NAME)));
    }

    public void testIncludeOnlyBazTimestamp()
    {
        assertEquals(TIMESTAMP_LEN, len(ex1("id", "name")));
        assertEquals(TIMESTAMP_LEN, len(ex2("id", "name")));
        assertEquals(TIMESTAMP_LEN, len(ex3(FN_ID, FN_NAME)));
        assertEquals(TIMESTAMP_LEN, len(in1("timestamp")));
        assertEquals(TIMESTAMP_LEN, len(in2("timestamp")));
        assertEquals(TIMESTAMP_LEN, len(EQ(STR_FN_TIMESTAMP)));
        assertEquals(TIMESTAMP_LEN, len(GT(STR_FN_NAME)));
        assertEquals(TIMESTAMP_LEN, len(RANGE(STR_FN_TIMESTAMP, STR_FN_TIMESTAMP)));
    }
}
| |
package net.minecraft.src.nuclearcontrol;
import java.util.List;
import java.util.Vector;
import net.minecraft.src.EntityPlayer;
import net.minecraft.src.Facing;
import net.minecraft.src.NBTTagCompound;
import net.minecraft.src.TileEntity;
import net.minecraft.src.mod_IC2NuclearControl;
import net.minecraft.src.ic2.api.INetworkDataProvider;
import net.minecraft.src.ic2.api.INetworkUpdateListener;
import net.minecraft.src.ic2.api.IWrenchable;
import net.minecraft.src.ic2.api.NetworkHelper;
public class TileEntityHowlerAlarm extends TileEntity implements INetworkDataProvider, INetworkUpdateListener, IWrenchable, IRedstoneConsumer
{
    // True once initial data has been requested (client) or sampled (server).
    private boolean init;
    // Last facing value broadcast over the network.
    private short prevFacing;
    public short facing;
    // Countdown until the next client-side status check.
    private int updateTicker;
    // Ticks between client-side status checks; -1 means check every tick.
    protected int tickRate;
    public boolean powered;
    public boolean prevPowered;
    // Handle of the currently playing alarm sound, or null when silent.
    private String soundId;

    public TileEntityHowlerAlarm()
    {
        facing = 0;
        prevFacing = 0;
        init = false;
        tickRate = 2;
        updateTicker = 0;
        powered = false;
        prevPowered = false;
    }

    /**
     * Lazy first-tick initialization: clients request the server's state,
     * the server samples the current redstone signal.
     */
    private void initData()
    {
        if(worldObj.isRemote){
            NetworkHelper.requestInitialData(this);
        }
        else
        {
            RedstoneHelper.checkPowered(worldObj, this);
        }
        init = true;
    }

    /** Starts the alarm sound centered on this block and remembers its handle. */
    private void startAlarm()
    {
        soundId = SoundHelper.playAlarm(xCoord + 0.5D, yCoord + 0.5D, zCoord + 0.5D, "ic2nuclearControl.alarm", mod_IC2NuclearControl.alarmRange);
    }

    /** Stops the alarm sound if one is playing and clears its handle. */
    private void stopAlarm()
    {
        if(soundId != null)
        {
            SoundHelper.stopAlarm(soundId);
            soundId = null;
        }
    }

    @Override
    public short getFacing()
    {
        return (short)Facing.faceToSide[facing];
    }

    @Override
    public void setFacing(short f)
    {
        setSide((short)Facing.faceToSide[f]);
    }

    /** Applies a new facing and broadcasts it when it actually changed. */
    private void setSide(short f)
    {
        facing = f;
        if (prevFacing != f)
        {
            NetworkHelper.updateTileEntityField(this, "facing");
        }
        prevFacing = f;
    }

    @Override
    public boolean getPowered()
    {
        return powered;
    }

    @Override
    public void invalidate()
    {
        // Make sure the looping alarm does not outlive this tile entity.
        stopAlarm();
        super.invalidate();
    }

    /**
     * Sets the powered state, toggling the alarm sound, and broadcasts the
     * change over the network when the state actually changed.
     *
     * @param value the new powered state
     */
    @Override
    public void setPowered(boolean value)
    {
        boolean changed = prevPowered != value;
        setPoweredNoNotify(value);
        if (changed)
        {
            NetworkHelper.updateTileEntityField(this, "powered");
        }
    }

    /**
     * Sets the powered state and toggles the alarm sound without sending a
     * network update (used when applying a remote update locally).
     *
     * @param value the new powered state
     */
    public void setPoweredNoNotify(boolean value)
    {
        powered = value;
        if (prevPowered != value)
        {
            if(powered)
            {
                if(soundId == null)
                    startAlarm();
            }
            else
            {
                stopAlarm();
            }
        }
        prevPowered = value;
    }

    @Override
    public boolean wrenchCanSetFacing(EntityPlayer entityPlayer, int side)
    {
        return false;
    }

    @Override
    public boolean wrenchCanRemove(EntityPlayer entityPlayer)
    {
        return true;
    }

    @Override
    public float getWrenchDropRate()
    {
        return 1;
    }

    @Override
    public void onNetworkUpdate(String field)
    {
        if (field.equals("facing") && prevFacing != facing)
        {
            worldObj.markBlockNeedsUpdate(xCoord, yCoord, zCoord);
            prevFacing = facing;
        }
        if (field.equals("powered") && prevPowered != powered)
        {
            // Apply locally without echoing another network update.
            setPoweredNoNotify(powered);
            worldObj.markBlockNeedsUpdate(xCoord, yCoord, zCoord);
        }
    }

    @Override
    public List<String> getNetworkedFields()
    {
        Vector<String> vector = new Vector<String>(2);
        vector.add("facing");
        vector.add("powered");
        return vector;
    }

    @Override
    public void updateEntity()
    {
        if (!init)
        {
            initData();
        }
        super.updateEntity();
        if (mod_IC2NuclearControl.isClient())
        {
            // Throttle the client-side sound check to once per tickRate ticks.
            if (tickRate != -1 && updateTicker-- > 0)
                return;
            updateTicker = tickRate;
            checkStatus();
        }
    }

    @Override
    public void readFromNBT(NBTTagCompound nbttagcompound)
    {
        super.readFromNBT(nbttagcompound);
        prevFacing = facing = nbttagcompound.getShort("facing");
    }

    @Override
    public void writeToNBT(NBTTagCompound nbttagcompound)
    {
        super.writeToNBT(nbttagcompound);
        nbttagcompound.setShort("facing", facing);
    }

    /** Restarts the alarm if it should be sounding but is not playing. */
    protected void checkStatus()
    {
        if(powered && (soundId==null || !SoundHelper.isPlaying(soundId))){
            startAlarm();
        }
    }
}
| |
package org.sbolstandard.core2;
import static org.sbolstandard.core2.URIcompliance.createCompliantURI;
import java.net.URI;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
import javax.xml.namespace.QName;
import org.joda.time.DateTime;
/**
 * Represents an Activity object in the SBOL data model.
*
* @author Chris Myers
* @version 2.2
*/
public class Activity extends TopLevel{
private Set<URI> types;
private DateTime startedAtTime;
private DateTime endedAtTime;
private Set<URI> wasInformedBys;
private HashMap<URI, Association> associations;
private HashMap<URI, Usage> usages;
/**
* @param identity
* @throws SBOLValidationException if either of the following condition is satisfied:
* <ul>
* <li>if an SBOL validation rule violation occurred in {@link TopLevel#TopLevel(URI)}, or</li>
* <li>the following SBOL validation rule was violated: XXXXX.</li>
* </ul>
*/
Activity(URI identity) throws SBOLValidationException {
super(identity);
this.types = new HashSet<>();
startedAtTime = null;
endedAtTime = null;
wasInformedBys = new HashSet<>();
associations = new HashMap<>();
usages = new HashMap<>();
}
	/**
	 * Copy constructor: copies the shared {@link TopLevel} state of the given
	 * activity. Child collections (types, associations, usages, ...) are
	 * populated separately via {@link #copy(Activity)}.
	 *
	 * @param activity the activity to copy
	 * @throws SBOLValidationException if an SBOL validation rule violation
	 * occurred in {@link TopLevel#TopLevel(TopLevel)}
	 */
	private Activity(Activity activity) throws SBOLValidationException {
		super(activity);
		//this.setRDFType(genericTopLevel.getRDFType());
	}
	/**
	 * Copies all locally owned state (types, start/end times, associations,
	 * usages and wasInformedBy links) from the given activity into this one,
	 * on top of the shared {@link TopLevel} state.
	 *
	 * @param activity the activity to copy values from
	 * @throws SBOLValidationException if an SBOL validation rule violation
	 * occurred while creating child associations or usages
	 */
	void copy(Activity activity) throws SBOLValidationException {
		((TopLevel)this).copy((TopLevel)activity);
		for (URI type : activity.getTypes()) {
			// Fresh URI instance, mirroring the defensive-copy pattern used below.
			this.addType(URI.create(type.toString()));
		}
		if (activity.isSetStartedAtTime()) {
			this.setStartedAtTime(activity.getStartedAtTime());
		}
		if (activity.isSetEndedAtTime()) {
			this.setEndedAtTime(activity.getEndedAtTime());
		}
		for (Association association : activity.getAssociations()) {
			// Recreate each child under this activity's identity, then copy its fields.
			String displayId = URIcompliance.findDisplayId(association);
			Association newAssociation = this.createAssociation(displayId, association.getAgentURI());
			newAssociation.copy(association);
		}
		for (Usage usage : activity.getUsages()) {
			String displayId = URIcompliance.findDisplayId(usage);
			Usage newUsage = this.createUsage(displayId, usage.getEntityURI());
			newUsage.copy(usage);
		}
		for (URI wasInformedBy : activity.getWasInformedByURIs()) {
			this.addWasInformedBy(URI.create(wasInformedBy.toString()));
		}
	}
/**
* Adds the given type URI to this activity's set of type URIs.
*
* @param typeURI the given type URI
* @return {@code true} if this set did not already contain the given type URI, {@code false} otherwise.
*/
public boolean addType(URI typeURI) {
return types.add(typeURI);
}
/**
* Removes the given type URI from the set of types.
*
* @param typeURI the specified type URI
* @return {@code true} if the matching type reference was removed successfully, {@code false} otherwise.
*/
public boolean removeType(URI typeURI) {
return types.remove(typeURI);
}
/**
* Clears the existing set of types first, then adds the given
* set of the types to this activity.
*
* @param types the set of types to set to
*/
public void setTypes(Set<URI> types) {
clearTypes();
for (URI type : types) {
addType(type);
}
}
/**
* Returns the set of type URIs owned by this activity.
*
* @return the set of type URIs owned by this actviity
*/
public Set<URI> getTypes() {
Set<URI> result = new HashSet<>();
result.addAll(types);
return result;
}
/**
* Checks if the given type URI is included in this activity's
* set of type URIs.
*
* @param typeURI the type URI to be checked
* @return {@code true} if this set contains the given type URI, {@code false} otherwise.
*/
public boolean containsType(URI typeURI) {
return types.contains(typeURI);
}
	/**
	 * Removes every entry from this activity's set of types; the set is
	 * empty after this call returns.
	 */
	private void clearTypes() {
		types.clear();
	}
private Association createAssociation(URI identity, URI agent) throws SBOLValidationException {
Association association = new Association(identity, agent);
addAssociation(association);
return association;
}
/**
* Creates a child association for this activity with the given arguments,
* and then adds to this activity's list of associations.
* <p>
* This method first creates a compliant URI for the child association to be created.
* This URI starts with this activity's persistent identity,
* followed by the given display ID and ends with this activity's version.
*
* @param displayId the display ID for the association to be created
* @param agent URI for the agent associated with this activity
* @return the created association
* @throws SBOLValidationException if any of the following SBOL validation rules was violated:
* 12602, 12604, 12605, 12606
*/
public Association createAssociation(String displayId, URI agent) throws SBOLValidationException {
String URIprefix = this.getPersistentIdentity().toString();
String version = this.getVersion();
Association a = createAssociation(createCompliantURI(URIprefix, displayId, version),agent);
a.setPersistentIdentity(createCompliantURI(URIprefix, displayId, ""));
a.setDisplayId(displayId);
a.setVersion(version);
return a;
}
private Usage createUsage(URI identity, URI entity) throws SBOLValidationException {
Usage usage = new Usage(identity, entity);
addUsage(usage);
return usage;
}
/**
* Creates a child usage for this activity with the given arguments,
* and then adds to this activity's list of usages.
* <p>
* This method first creates a compliant URI for the child usage to be created.
* This URI starts with this activity's persistent identity,
* followed by the given display ID and ends with this activity's version.
*
* @param displayId the display ID for the usage to be created
* @param entity URI reference to the entity used
* @return the created usage
* @throws SBOLValidationException if any of the following SBOL validation rules was violated:
* 12502, 12503
*/
public Usage createUsage(String displayId, URI entity) throws SBOLValidationException {
String URIprefix = this.getPersistentIdentity().toString();
String version = this.getVersion();
Usage u = createUsage(createCompliantURI(URIprefix, displayId, version),entity);
u.setPersistentIdentity(createCompliantURI(URIprefix, displayId, ""));
u.setDisplayId(displayId);
u.setVersion(version);
return u;
}
@Override
public int hashCode() {
final int prime = 31;
int result = super.hashCode();
result = prime * result + ((startedAtTime == null) ? 0 : startedAtTime.hashCode());
result = prime * result + ((endedAtTime == null) ? 0 : endedAtTime.hashCode());
result = prime * result + ((wasInformedBys == null) ? 0 : wasInformedBys.hashCode());
result = prime * result + ((associations == null) ? 0 : associations.hashCode());
result = prime * result + ((usages == null) ? 0 : usages.hashCode());
result = prime * result + ((types == null) ? 0 : types.hashCode());
return result;
}
	/**
	 * Equality over the {@link TopLevel} state plus every locally owned
	 * field. The wasInformedBys comparison falls back to comparing
	 * identities when the URI sets differ (see inline note below).
	 */
	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (!super.equals(obj))
			return false;
		if (getClass() != obj.getClass())
			return false;
		Activity other = (Activity) obj;
		if (startedAtTime == null) {
			if (other.startedAtTime != null)
				return false;
		} else if (!startedAtTime.equals(other.startedAtTime))
			return false;
		if (endedAtTime == null) {
			if (other.endedAtTime != null)
				return false;
		} else if (!endedAtTime.equals(other.endedAtTime))
			return false;
		if (wasInformedBys == null) {
			if (other.wasInformedBys != null)
				return false;
		} else if (!wasInformedBys.equals(other.wasInformedBys)) {
			// URI sets differ: still consider the activities equal when both
			// sides resolve one identity per URI and those identity sets match
			// — presumably to tolerate URI-vs-identity representation drift;
			// TODO(review): confirm intent against other wasInformedBy users.
			if (getWasInformedByIdentities().size()!=getWasInformedByURIs().size() ||
					other.getWasInformedByIdentities().size()!=other.getWasInformedByURIs().size() ||
					!getWasInformedByIdentities().equals(other.getWasInformedByIdentities())) {
				return false;
			}
		}
		if (associations == null) {
			if (other.associations != null)
				return false;
		} else if (!associations.equals(other.associations))
			return false;
		if (usages == null) {
			if (other.usages != null)
				return false;
		} else if (!usages.equals(other.usages))
			return false;
		if (types == null) {
			if (other.types != null)
				return false;
		} else if (!types.equals(other.types))
			return false;
		return true;
	}
	/* (non-Javadoc)
	 * @see org.sbolstandard.core2.TopLevel#deepCopy()
	 */
	/**
	 * Creates a deep copy of this activity via the private copy constructor
	 * {@link #Activity(Activity)}.
	 *
	 * @throws SBOLValidationException if an SBOL validation rule was violated
	 * in {@link #Activity(Activity)}.
	 */
	@Override
	Activity deepCopy() throws SBOLValidationException {
		return new Activity(this);
	}
/* (non-Javadoc)
 * @see org.sbolstandard.core2.abstract_classes.TopLevel#copy(java.lang.String, java.lang.String, java.lang.String)
 */
/**
 * Returns a deep copy of this activity rebased to the given URI prefix,
 * display ID, and version.
 *
 * @throws SBOLValidationException if an SBOL validation rule violation occurred in
 * any of the following methods:
 * <ul>
 * <li>{@link #deepCopy()},</li>
 * <li>{@link URIcompliance#createCompliantURI(String, String, String)},</li>
 * <li>{@link #setDisplayId(String)},</li>
 * <li>{@link #setVersion(String)},</li>
 * <li>{@link #setWasDerivedFrom(URI)}, or</li>
 * <li>{@link #setIdentity(URI)}.</li>
 * </ul>
 */
@Override
Activity copy(String URIprefix, String displayId, String version) throws SBOLValidationException {
	Activity cloned = this.deepCopy();
	// Persistent identity carries no version component.
	cloned.setPersistentIdentity(createCompliantURI(URIprefix,displayId,""));
	cloned.setDisplayId(displayId);
	cloned.setVersion(version);
	URI newIdentity = createCompliantURI(URIprefix,displayId,version);
	// If the copy gets a different identity, record the original as provenance;
	// otherwise carry over the existing wasDerivedFrom set unchanged.
	if (!this.getIdentity().equals(newIdentity)) {
		cloned.addWasDerivedFrom(this.getIdentity());
	} else {
		cloned.setWasDerivedFroms(this.getWasDerivedFroms());
	}
	// Set the new identity last so the comparison above sees the old one.
	cloned.setIdentity(newIdentity);
	return cloned;
}
/* (non-Javadoc)
 * @see org.sbolstandard.core2.abstract_classes.TopLevel#checkDescendantsURIcompliance()
 */
/**
 * Intentionally a no-op: URI-form compliance checking is disabled for
 * activities. The original check is kept below, commented out, in case it
 * is ever reinstated.
 */
@Override
void checkDescendantsURIcompliance() {//throws SBOLValidationException {
	//URIcompliance.isTopLevelURIformCompliant(this.getIdentity());
}
/* (non-Javadoc)
 * @see org.sbolstandard.core2.Identified#toString()
 */
/**
 * Renders this activity as "Activity [...]", appending each optional field
 * only when it is set (times) or non-empty (collections).
 */
@Override
public String toString() {
	StringBuilder sb = new StringBuilder("Activity [");
	sb.append(super.toString());
	if (this.isSetStartedAtTime()) {
		sb.append(", startedAtTime =").append(startedAtTime);
	}
	if (this.isSetEndedAtTime()) {
		sb.append(", endedAtTime =").append(endedAtTime);
	}
	if (associations.size() > 0) {
		sb.append(", associations=").append(associations);
	}
	if (usages.size() > 0) {
		sb.append(", usages=").append(usages);
	}
	if (wasInformedBys.size() > 0) {
		sb.append(", wasInformedBys=").append(wasInformedBys);
	}
	sb.append("]");
	return sb.toString();
}
/**
 * Reports whether a start time has been recorded for this activity.
 *
 * @return {@code true} if startedAtTime is not {@code null}, {@code false} otherwise
 */
public boolean isSetStartedAtTime() {
	return null != startedAtTime;
}

/**
 * Returns the time this activity started, or {@code null} if unset.
 *
 * @return the startedAtTime
 */
public DateTime getStartedAtTime() {
	return startedAtTime;
}

/**
 * Records the time this activity started.
 *
 * @param startedAtTime the startedAtTime to set (may be {@code null} to unset)
 */
public void setStartedAtTime(DateTime startedAtTime) {
	this.startedAtTime = startedAtTime;
}

/**
 * Reports whether an end time has been recorded for this activity.
 *
 * @return {@code true} if endedAtTime is not {@code null}, {@code false} otherwise
 */
public boolean isSetEndedAtTime() {
	return null != endedAtTime;
}

/**
 * Returns the time this activity ended, or {@code null} if unset.
 *
 * @return the endedAtTime
 */
public DateTime getEndedAtTime() {
	return endedAtTime;
}

/**
 * Records the time this activity ended.
 *
 * @param endedAtTime the endedAtTime to set (may be {@code null} to unset)
 */
public void setEndedAtTime(DateTime endedAtTime) {
	this.endedAtTime = endedAtTime;
}
/**
 * Adds the URI of the given Activity instance to this Activity's set of
 * wasInformedBy URIs, delegating to {@link #addWasInformedBy(URI)}.
 *
 * @param activity the Activity instance whose identity URI is to be added
 * @return {@code true} if this set did not already contain the identity URI of the given Activity, {@code false} otherwise.
 * @throws SBOLValidationException if the following SBOL validation rule was violated: 12407.
 */
public boolean addWasInformedBy(Activity activity) throws SBOLValidationException {
	SBOLDocument doc = this.getSBOLDocument();
	// In a "complete" document every referenced activity must already be present.
	if (doc != null && doc.isComplete() && doc.getActivity(activity.getIdentity()) == null) {
		throw new SBOLValidationException("sbol-12407", this);
	}
	return this.addWasInformedBy(activity.getIdentity());
}
/**
 * Adds the given activity URI to this Activity's set of wasInformedBy URIs.
 *
 * @param activityURI the identity URI of the Activity to be added
 * @return {@code true} if this set did not already contain the given activity's URI, {@code false} otherwise.
 * @throws SBOLValidationException if the following SBOL validation rule was violated: 12407.
 */
public boolean addWasInformedBy(URI activityURI) throws SBOLValidationException {
	SBOLDocument doc = this.getSBOLDocument();
	// A complete document must be able to resolve every referenced activity URI.
	if (doc != null && doc.isComplete() && doc.getActivity(activityURI) == null) {
		throw new SBOLValidationException("sbol-12407",this);
	}
	return wasInformedBys.add(activityURI);
}
/**
 * Constructs a compliant activity URI with the given display ID and version, and then adds this URI
 * to this activity's set of wasInformedBy URIs.
 * <p>
 * This method creates a compliant activity URI with the default
 * URI prefix, which was set in the SBOLDocument instance hosting this activity, the given
 * display ID and version. It then calls {@link #addWasInformedBy(URI)} with this Activity URI.
 *
 * @param displayId the display ID of the activity whose identity URI is to be added
 * @param version version of the activity whose identity URI is to be added
 * @return {@code true} if this set did not already contain the given activity's URI, {@code false} otherwise.
 * @throws SBOLValidationException see {@link #addWasInformedBy(URI)}
 */
public boolean addWasInformedBy(String displayId,String version) throws SBOLValidationException {
	// BUG FIX: the compliant URI must carry the Activity top-level type, not
	// TopLevel.SEQUENCE (a copy-paste remnant from the Sequence variant of this
	// method). With types-in-URIs enabled the old code built a sequence-typed
	// URI that could never match an Activity.
	URI activityURI = URIcompliance.createCompliantURI(this.getSBOLDocument().getDefaultURIprefix(),
			TopLevel.ACTIVITY, displayId, version, this.getSBOLDocument().isTypesInURIs());
	return addWasInformedBy(activityURI);
}
/**
 * Constructs a compliant activity URI using the given activity display ID, and then adds this URI to
 * this activity's set of wasInformedBy URIs. This method calls {@link #addWasInformedBy(String, String)} with
 * the given activity display ID and an empty string as its version.
 *
 * @param displayId the display ID of the activity whose identity URI is to be added
 * @return {@code true} if this set did not already contain the given activity's URI, {@code false} otherwise.
 * @throws SBOLValidationException see {@link #addWasInformedBy(String, String)}
 */
public boolean addWasInformedBy(String displayId) throws SBOLValidationException {
	// Empty string denotes "no version".
	return this.addWasInformedBy(displayId, "");
}
/**
 * Returns the set of wasInformedBy URIs referenced by this activity.
 * The returned set is a defensive copy; mutating it does not affect this activity.
 *
 * @return the set of wasInformedBy URIs referenced by this activity
 */
public Set<URI> getWasInformedByURIs() {
	return new HashSet<>(wasInformedBys);
}
/**
 * Returns the set of wasInformedBy identities referenced by this activity,
 * resolved through the hosting SBOLDocument. URIs that do not resolve to an
 * Activity are silently skipped.
 *
 * @return the resolved identities, or {@code null} when this activity has no hosting document
 */
public Set<URI> getWasInformedByIdentities() {
	SBOLDocument doc = this.getSBOLDocument();
	if (doc == null) {
		return null;
	}
	Set<URI> identities = new HashSet<>();
	for (URI wasInformedBy : wasInformedBys) {
		Activity resolvedActivity = doc.getActivity(wasInformedBy);
		if (resolvedActivity != null) {
			identities.add(resolvedActivity.getIdentity());
		}
	}
	return identities;
}
/**
 * Returns the set of Activity instances referenced by this activity's
 * wasInformedBy URIs, resolved through the hosting SBOLDocument. URIs that
 * do not resolve are silently skipped.
 *
 * @return the resolved activities, or {@code null} when this activity has no hosting document
 */
public Set<Activity> getWasInformedBys() {
	SBOLDocument doc = this.getSBOLDocument();
	if (doc == null) {
		return null;
	}
	Set<Activity> activities = new HashSet<>();
	for (URI wasInformedBy : wasInformedBys) {
		Activity resolvedActivity = doc.getActivity(wasInformedBy);
		if (resolvedActivity != null) {
			activities.add(resolvedActivity);
		}
	}
	return activities;
}
/**
 * Removes every URI from this activity's set of wasInformedBy references.
 * The set is empty after this call returns.
 */
public void clearWasInformedBys() {
	this.wasInformedBys.clear();
}
/**
 * Replaces this activity's set of wasInformedBy URIs with the given set.
 * Each URI is added individually through {@link #addWasInformedBy(URI)} so
 * that validation rule 12407 is applied; the internal set is never aliased
 * to the caller's collection.
 *
 * @param wasInformedBys the wasInformedBys to set; {@code null} simply clears the set
 * @throws SBOLValidationException if an SBOL validation rule violation occurred in {@link #addWasInformedBy(URI)}
 */
public void setWasInformedBys(Set<URI> wasInformedBys) throws SBOLValidationException {
	clearWasInformedBys();
	if (wasInformedBys==null) return;
	for (URI wasInformedBy : wasInformedBys) {
		addWasInformedBy(wasInformedBy);
	}
	// BUG FIX: the old code ended with "this.wasInformedBys = wasInformedBys;",
	// which discarded the validated copy built above and aliased the internal
	// set to the caller's mutable collection (external mutations would then
	// bypass validation). The internal copy is kept instead.
}
/**
 * Returns the association matching the given association's display ID.
 * <p>
 * The lookup URI is built from this activity's persistent identity, the given
 * display ID, and this activity's version.
 *
 * @param displayId the display ID of the association to be retrieved
 * @return the matching association if present, or {@code null} otherwise.
 */
public Association getAssociation(String displayId) {
	try {
		URI associationURI = createCompliantURI(this.getPersistentIdentity().toString(), displayId, this.getVersion());
		return associations.get(associationURI);
	} catch (SBOLValidationException e) {
		// A non-compliant display ID simply means "not found".
		return null;
	}
}
/**
 * Returns the instance matching the given association's identity URI.
 *
 * @param associationURI the identity URI of the association to be retrieved
 * @return the matching association if present, or {@code null} otherwise.
 */
public Association getAssociation(URI associationURI) {
	return this.associations.get(associationURI);
}
/**
 * Returns the set of associations owned by this activity.
 * The returned set is a defensive copy.
 *
 * @return the set of associations owned by this activity.
 */
public Set<Association> getAssociations() {
	return new HashSet<>(this.associations.values());
}
/**
 * Adds the given association to the list of associations, binding it to this
 * activity's hosting document first. The sibling {@code usages} map is passed
 * to {@code addChildSafely} so the new child's URI is checked for collisions
 * against both child collections.
 * @throws SBOLValidationException if either of the following condition is satisfied:
 * <ul>
 * <li>any of the following SBOL validation rules was violated: 10604, 10605, 10803</li>
 * <li>an SBOL validation rule violation occurred in {@link Identified#addChildSafely(Identified, java.util.Map, String, java.util.Map...)}</li>
 * </ul>
 */
private void addAssociation(Association association) throws SBOLValidationException {
	association.setSBOLDocument(this.getSBOLDocument());
	addChildSafely(association, associations, "association", usages);
}
/**
 * Removes the given association from the list of associations.
 *
 * @param association the given association
 * @return {@code true} if the matching association was removed successfully,
 * {@code false} otherwise.
 */
public boolean removeAssociation(Association association) {
	return removeChildSafely(association, associations);
}
/**
 * Removes all entries of this activity's list of associations.
 * The list will be empty after this call returns. Each entry is removed
 * through {@link #removeAssociation(Association)}; iteration happens over a
 * snapshot array so removal does not invalidate the loop.
 */
public void clearAssociations() {
	for (Association association : associations.values().toArray(new Association[0])) {
		removeAssociation(association);
	}
}
/**
 * Replaces this activity's associations with the given set, adding each one
 * through {@link #addAssociation(Association)} so document binding and URI
 * collision checks are applied.
 *
 * @param associations the associations to set; {@code null} simply clears the list
 * @throws SBOLValidationException if an SBOL validation rule violation occurred in {@link #addAssociation(Association)}
 */
void setAssociations(Set<Association> associations) throws SBOLValidationException {
	clearAssociations();
	// Consistency/robustness fix: tolerate null the same way setWasInformedBys
	// does, instead of throwing a NullPointerException from the loop below.
	if (associations == null) return;
	for (Association association : associations) {
		addAssociation(association);
	}
}
/**
 * Returns the usage matching the given usage's display ID.
 * <p>
 * The lookup URI is built from this activity's persistent identity, the given
 * display ID, and this activity's version.
 *
 * @param displayId the display ID of the usage to be retrieved
 * @return the matching usage if present, or {@code null} otherwise.
 */
public Usage getUsage(String displayId) {
	try {
		URI usageURI = createCompliantURI(this.getPersistentIdentity().toString(), displayId, this.getVersion());
		return usages.get(usageURI);
	} catch (SBOLValidationException e) {
		// A non-compliant display ID simply means "not found".
		return null;
	}
}
/**
 * Returns the instance matching the given usage's identity URI.
 *
 * @param usageURI the identity URI of the usage to be retrieved
 * @return the matching usage if present, or {@code null} otherwise.
 */
public Usage getUsage(URI usageURI) {
	return this.usages.get(usageURI);
}
/**
 * Returns the set of usages owned by this activity.
 * The returned set is a defensive copy.
 *
 * @return the set of usages owned by this activity.
 */
public Set<Usage> getUsages() {
	return new HashSet<>(this.usages.values());
}
/**
 * Adds the given usage to the list of usages, binding it to this activity's
 * hosting document first. The sibling {@code associations} map is passed to
 * {@code addChildSafely} so the new child's URI is checked for collisions
 * against both child collections.
 * @throws SBOLValidationException if either of the following condition is satisfied:
 * <ul>
 * <li>any of the following SBOL validation rules was violated: 10604, 10605, 10803</li>
 * <li>an SBOL validation rule violation occurred in {@link Identified#addChildSafely(Identified, java.util.Map, String, java.util.Map...)}</li>
 * </ul>
 */
private void addUsage(Usage usage) throws SBOLValidationException {
	usage.setSBOLDocument(this.getSBOLDocument());
	addChildSafely(usage, usages, "usage", associations);
}
/**
 * Removes the given usage from the list of usages.
 *
 * @param usage the given usage
 * @return {@code true} if the matching usage was removed successfully,
 * {@code false} otherwise.
 */
public boolean removeUsages(Usage usage) {
	return removeChildSafely(usage, usages);
}
/**
 * Removes all entries of this activity's list of usages.
 * The list will be empty after this call returns. Each entry is removed
 * through {@link #removeUsages(Usage)}; iteration happens over a snapshot
 * array so removal does not invalidate the loop.
 */
public void clearUsages() {
	for (Usage usage : usages.values().toArray(new Usage[0])) {
		removeUsages(usage);
	}
}
/**
 * Replaces this activity's usages with the given set, adding each one through
 * {@link #addUsage(Usage)} so document binding and URI collision checks are
 * applied.
 *
 * @param usages the usages to set; {@code null} simply clears the list
 * @throws SBOLValidationException if an SBOL validation rule violation occurred in {@link #addUsage(Usage)}
 */
void setUsages(Set<Usage> usages) throws SBOLValidationException {
	clearUsages();
	// Consistency/robustness fix: tolerate null the same way setWasInformedBys
	// does, instead of throwing a NullPointerException from the loop below.
	if (usages == null) return;
	for (Usage usage : usages) {
		addUsage(usage);
	}
}
}
| |
/*
* Copyright (c) 2008-2013 David Soergel <dev@davidsoergel.com>
* Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0
*/
package com.davidsoergel.s3napback;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.DeleteObjectsResult;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.amazonaws.services.s3.transfer.Download;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.Upload;
import com.amazonaws.services.s3.transfer.model.UploadResult;
import com.amazonaws.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sun.security.provider.MD5;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Base64;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* Provide command-line interface for S3 put/get/delete/list operations-- just enough to support s3napback needs, no more.
*
* @author <a href="mailto:dev@davidsoergel.com">David Soergel</a>
* @version $Id$
*/
public class S3ops
{
	private static final Logger logger = LoggerFactory.getLogger(S3ops.class);

	/**
	 * CLI entry point. Positional arguments:
	 * {@code keyfile command bucket [filename] [chunkSize]}
	 * where command is one of upload | download | delete | list.
	 */
	public static void main( String[] argv )
	{
		int chunkSize = 25000000; // default 25 MB
		// hacky positional arguments, whatever
		String keyfileName = argv[0];
		String command = argv[1];
		String bucket = argv[2];
		// BUG FIX: "list" takes no filename, so only read argv[3] when present
		// (the old code indexed argv[3] unconditionally).
		String filename = argv.length > 3 ? argv[3] : null;
		// BUG FIX: the optional chunk size is the fifth argument (argv[4]);
		// the old code re-parsed argv[3], i.e. the filename.
		if (argv.length > 4) { chunkSize = Integer.parseInt(argv[4]); }

		Properties props = new Properties();
		try
		{
			props.load(new FileInputStream(keyfileName));
		}
		catch (IOException e) // FileNotFoundException is an IOException
		{
			// Without credentials nothing below can work; fail fast instead of
			// continuing with null keys as the old code did.
			logger.error("Error", e);
			System.exit(1);
		}
		String accessKey = props.getProperty("key");
		String secretKey = props.getProperty("secret");
		AWSCredentials myCredentials = new BasicAWSCredentials(accessKey, secretKey);
		StreamingTransferManager tx = new StreamingTransferManager(myCredentials);
		try
		{
			if (command.equals("upload"))
			{
				upload(tx, bucket, filename, chunkSize);
			}
			else if (command.equals("download"))
			{
				download(tx, bucket, filename);
			}
			else if (command.equals("delete"))
			{
				delete(tx, bucket, filename);
			}
			else if (command.equals("list"))
			{
				list(tx, bucket);
			}
			else
			{
				logger.error("Unknown command: " + command);
			}
		}
		catch (InterruptedException e)
		{
			logger.error("Error", e);
			Thread.currentThread().interrupt(); // restore interrupt status
			System.exit(1);
		}
		catch (IOException e)
		{
			logger.error("Error", e);
			System.exit(1);
		}
		finally
		{
			// BUG FIX: always release the transfer manager's worker threads,
			// even when a command throws.
			tx.shutdownNow();
		}
	}

	/**
	 * Deletes every object in the bucket whose key starts with the given prefix.
	 *
	 * @param tx the transfer manager providing the S3 client
	 * @param bucket the bucket to delete from
	 * @param fileprefix key prefix selecting the objects to delete
	 */
	public static void delete( TransferManager tx, String bucket, String fileprefix ) throws InterruptedException
	{
		logger.info("Deleting " + fileprefix);
		List<DeleteObjectsRequest.KeyVersion> keys = new ArrayList<DeleteObjectsRequest.KeyVersion>();
		ObjectListing objectListing = tx.getAmazonS3Client().listObjects(new ListObjectsRequest().withBucketName(bucket).withPrefix(fileprefix));
		for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries())
		{
			keys.add(new DeleteObjectsRequest.KeyVersion(objectSummary.getKey()));
		}
		// BUG FIX: a DeleteObjectsRequest with no keys is rejected by S3, so
		// skip the call when the prefix matched nothing.
		if (keys.isEmpty())
		{
			logger.info("Nothing to delete for prefix " + fileprefix);
			return;
		}
		DeleteObjectsRequest req = new DeleteObjectsRequest(bucket);
		req.setKeys(keys);
		tx.getAmazonS3Client().deleteObjects(req);
	}

	/**
	 * Uploads stdin to S3 as a sequence of chunk objects named "filename:N".
	 * Each chunk is fully read into memory first so its exact length and MD5
	 * digest are known before the PUT.
	 *
	 * @param tx the transfer manager to upload through
	 * @param bucket destination bucket
	 * @param filename base object name; chunk number is appended after a colon
	 * @param chunkSize maximum bytes per chunk object
	 */
	public static void upload( TransferManager tx, String bucket, String filename, int chunkSize ) throws InterruptedException, IOException
	{
		InputStream in = new BufferedInputStream(System.in);
		int chunkNum = 0;
		while (in.available() > 0)
		{
			byte[] buf = new byte[chunkSize];
			int bytesRead = in.read(buf);
			if (bytesRead <= 0)
			{
				break; // end of stream
			}
			// BUG FIX: compute a real MD5 over the bytes actually read and
			// Base64-encode it, as ObjectMetadata.setContentMD5 requires. The
			// old code constructed the internal sun.security.provider.MD5
			// class from a byte[], which does not even compile.
			String md5;
			try
			{
				MessageDigest digest = MessageDigest.getInstance("MD5");
				digest.update(buf, 0, bytesRead);
				md5 = Base64.getEncoder().encodeToString(digest.digest());
			}
			catch (NoSuchAlgorithmException e)
			{
				throw new IOException("MD5 algorithm unavailable", e);
			}
			ObjectMetadata meta = new ObjectMetadata();
			meta.setContentLength(bytesRead);
			meta.setContentMD5(md5);
			// Bound the stream to bytesRead so a short final chunk does not
			// upload trailing garbage from the buffer.
			Upload myUpload = tx.upload(bucket, filename + ":" + chunkNum,
			                            new ByteArrayInputStream(buf, 0, bytesRead), meta);
			// waitForUploadResult() blocks until the transfer completes, so the
			// old polling loop after it was dead code.
			myUpload.waitForUploadResult();
			// BUG FIX: chunkNum was never incremented, so every chunk
			// overwrote "filename:0".
			chunkNum++;
		}
	}

	/**
	 * Lists all backup files in the bucket, one summary line per file
	 * (bucket, name, chunk count, total size, last-modified) on stderr.
	 */
	public static void list( StreamingTransferManager tx, String bucket ) throws InterruptedException
	{
		// Collect every object summary, following pagination. BUG FIX: the old
		// code called listNextBatchOfObjects unconditionally (even for a
		// non-truncated listing) and re-added the final batch after the loop,
		// producing duplicate entries.
		ObjectListing listing = tx.getAmazonS3Client().listObjects(new ListObjectsRequest().withBucketName(bucket));
		List<S3ObjectSummary> keyList = new ArrayList<S3ObjectSummary>(listing.getObjectSummaries());
		while (listing.isTruncated())
		{
			listing = tx.getAmazonS3Client().listNextBatchOfObjects(listing);
			keyList.addAll(listing.getObjectSummaries());
		}
		// Group chunks by filename; keys look like "filename:chunknum".
		// TreeMaps keep both files and chunks sorted.
		SortedMap<String, SortedMap<String, S3ObjectSummary>> blocks = new TreeMap<String, SortedMap<String, S3ObjectSummary>>();
		for (S3ObjectSummary objectSummary : keyList)
		{
			String[] c = objectSummary.getKey().split(":");
			if (c.length != 2)
			{ logger.warn("ignoring malformed filename " + objectSummary.getKey()); }
			else
			{
				String filename = c[0];
				String chunknum = c[1];
				SortedMap<String, S3ObjectSummary> chunks = blocks.get(filename);
				if (chunks == null)
				{
					chunks = new TreeMap<String, S3ObjectSummary>();
					blocks.put(filename, chunks);
				}
				chunks.put(chunknum, objectSummary);
			}
		}
		// Emit one summary line per file.
		for (Map.Entry<String, SortedMap<String, S3ObjectSummary>> blockEntry : blocks.entrySet())
		{
			String filename = blockEntry.getKey();
			SortedMap<String, S3ObjectSummary> chunks = blockEntry.getValue();
			long totalsize = 0;
			Date lastModified = null;
			for (Map.Entry<String, S3ObjectSummary> entry : chunks.entrySet())
			{
				totalsize += entry.getValue().getSize();
				lastModified = entry.getValue().getLastModified();
			}
			String[] line = { bucket, filename, "" + chunks.keySet().size(), "" + totalsize, lastModified.toString() };
			System.err.println(StringUtils.join("\t", line));
			// 2008-04-10 04:07:50 - dev.davidsoergel.com.backup1:MySQL/all-0 - 153.38k in 1 data blocks
		}
	}

	// ** download todo: use a TarInputStream, choose files. Any hope of random access to needed chunks? Ooh, maybe so,
	// just using the tar index and a random-access file facade!
	//** todo: download tar indexes only for all archives, list dates on which a given file is available
	/**
	 * Streams every chunk with the given prefix to stdout, in key order,
	 * logging transfer progress. Closes stdout when done.
	 */
	public static void download( StreamingTransferManager tx, String bucket, String fileprefix ) throws InterruptedException, IOException
	{
		// First list the matching chunk objects; the TreeMap yields them in key order.
		SortedMap<String, S3ObjectSummary> chunks = new TreeMap<String, S3ObjectSummary>();
		ObjectListing objectListing = tx.getAmazonS3Client().listObjects(new ListObjectsRequest().withBucketName(bucket).withPrefix(fileprefix));
		for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries())
		{
			chunks.put(objectSummary.getKey(), objectSummary);
		}
		logger.info("Downloading " + fileprefix);
		Date start = new Date();
		long totalBytes = 0;
		BufferedOutputStream out = new BufferedOutputStream(System.out);
		for (Map.Entry<String, S3ObjectSummary> entry : chunks.entrySet())
		{
			String key = entry.getKey();
			logger.info("Downloading " + key);
			Download myDownload = tx.download(bucket, key, out);
			// Poll until this chunk finishes, reporting cumulative throughput.
			while (!myDownload.isDone())
			{
				long bytes = totalBytes + myDownload.getProgress().getBytesTransfered();
				double mb = (double) bytes / 1024. / 1024.;
				double sec = (new Date().getTime() - start.getTime()) / 1000.;
				double rate = mb / sec;
				logger.info(String.format("%.2f MB, %.2fMB/s", mb, rate));
				Thread.sleep(500);
			}
			totalBytes += myDownload.getProgress().getBytesTransfered();
		}
		// Intentionally closes System.out: the program is done writing data.
		out.close();
		double mb = (double) totalBytes / 1024. / 1024.;
		double sec = (new Date().getTime() - start.getTime()) / 1000.;
		double rate = mb / sec;
		logger.info(String.format("Downloaded %s to stdout, %d bytes, %.2f sec, %.2fMB/s", fileprefix, totalBytes, sec, rate));
	}
}
| |
/*******************************************************************************
* Copyright (c) 2006-2010 eBay Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*******************************************************************************/
package org.ebayopensource.turmeric.runtime.common.exceptions;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.net.URL;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.logging.Logger;
import org.ebayopensource.turmeric.common.v1.types.CommonErrorData;
import org.ebayopensource.turmeric.runtime.common.errors.ErrorDataProvider;
import org.ebayopensource.turmeric.runtime.common.errors.ErrorTextResolver;
import org.ebayopensource.turmeric.runtime.common.utils.Preconditions;
/**
 * This is the property based Error Library implementation of the
 * ErrorDataProvider.
 *
 * @author ana, wdeng
 *
 */
public class PropertyFileBasedErrorProvider implements ErrorDataProvider{
	private final static Logger LOG = Logger.getLogger(PropertyFileBasedErrorProvider.class.getName());
	// Resource-bundle base name holding the localized error texts.
	private final static String BUNDLE_NAME = "Errors";
	// NOTE(review): "CALSSNAME" is a misspelling of "CLASSNAME"; the constants are
	// kept as-is because they may be referenced beyond this view.
	private final static String ERRORDATACOLLECTION_CALSSNAME = "ErrorDataCollection";
	private final static String ERRORCONSTANTS_CALSSNAME = "ErrorConstants";
	private final static String ERRORDOMAINCONSTANTNAME = "ERRORDOMAIN";
	// Suffix used for message keys in the properties bundle (<ErrorName>.message).
	private final static String MESSAGE_PROPERTY = ".message";
	private final static String DEFAULT_CLASS_PACKAGE = "com.ebay.errorlibrary";
	private final static String DEFAULT_RESOURCE_PACKAGE = "META-INF.errorlibrary";
	// Cache of (error name, domain) -> error data, filled lazily by initialize().
	private static Map<ErrorNameDomain, CommonErrorData> s_domainErrorDataMap = new HashMap<ErrorNameDomain, CommonErrorData>();
	// Cache of domain -> generated-class package name (lower-cased).
	private static ConcurrentMap<String, String> s_domainPackageMap = new ConcurrentHashMap<String, String>();
	// ErrorDataCollection classes already processed, to make initialize() idempotent.
	private static List<Class> errorTypesClassList = new ArrayList<Class>();
	// Per-domain resolvers for localized message/resolution text.
	private static Map<String,ErrorTextResolver> s_bundleResolvers =
		new HashMap<String,ErrorTextResolver>();
	// Lazily created singleton; see getInstance().
	private static PropertyFileBasedErrorProvider s_propertyFileBasedErrorProvider;
	// Private constructor: instances are only obtained through getInstance().
	private PropertyFileBasedErrorProvider(){
	}
/**
 * Returns the singleton PropertyFileBasedErrorProvider instance, creating it
 * on first use.
 * <p>
 * BUG FIX: the method is now {@code synchronized}; the previous unsynchronized
 * lazy initialization allowed two racing threads to create and observe
 * distinct instances.
 *
 * @return The singleton PropertyFileBasedErrorProvider.
 */
public static synchronized PropertyFileBasedErrorProvider getInstance(){
	if(s_propertyFileBasedErrorProvider == null)
		s_propertyFileBasedErrorProvider =new PropertyFileBasedErrorProvider();
	return s_propertyFileBasedErrorProvider;
}
/**
 * Initial setup for a domain. This method will load the ErrorData for the given
 * domain: it reflects over the generated ErrorDataCollection class's public
 * CommonErrorData fields and caches each one in the static
 * (error name, domain) map, then validates that every error constant has a
 * corresponding entry in the domain's properties bundle. Idempotent per
 * domain; synchronized because it mutates shared static caches.
 *
 * @param domain The error domain for which CommonErrorDatas are loaded.
 */
public synchronized void initialize(String domain) {
	Class errorDataClass = getRequiredClass(getErrorDataClassPackage(domain), ERRORDATACOLLECTION_CALSSNAME);
	try {
		// Skip domains whose collection class was already processed.
		if(errorDataClass != null && !errorTypesClassList.contains(errorDataClass)){
			errorTypesClassList.add(errorDataClass);
			populateResourceBundleResolver(domain);
			Field[] allFields = errorDataClass.getFields();
			for (Field field : allFields) {
				// Only the static CommonErrorData fields describe errors.
				if (field != null && field.getType().equals(CommonErrorData.class)) {
					CommonErrorData errorData = (CommonErrorData) field.get(null);
					ErrorNameDomain errorNameDomain = new ErrorNameDomain(field.getName(), errorData.getDomain());
					s_domainErrorDataMap.put(errorNameDomain, errorData);
				}
			}
			validate(domain);
		}
	} catch (Exception exception) {
		Object[] arguments = new Object[] { domain };
		ExceptionUtils.throwServiceRuntimeException(ErrorLibraryBaseErrors.el_initialization_failed, arguments, exception);
	}
}
/**
 * Resolves "&lt;packageName&gt;.&lt;className&gt;" through the context class
 * loader via {@link ExceptionUtils#loadClass}.
 */
private Class getRequiredClass(String packageName, String className){
	String fullyQualifiedName = packageName + "." + className;
	return ExceptionUtils.loadClass(fullyQualifiedName, null,
			Thread.currentThread().getContextClassLoader());
}
/**
 * Returns the package name of the generated error classes for the given
 * domain, reading it from the domain's ErrorData.xml on first access and
 * caching it (lower-cased) afterwards.
 */
private static String getErrorDataClassPackage(String domain){
	String packageName = s_domainPackageMap.get(domain);
	if(packageName == null){
		URL errorDataXmlUrl = ExceptionUtils.getErrordataXMLURL(domain);
		if(errorDataXmlUrl != null){
			InputStream inputStream = null;
			try {
				inputStream = errorDataXmlUrl.openStream();
				packageName = ExceptionUtils.getPackageNameFromXML(inputStream);
				if(packageName != null)
					s_domainPackageMap.putIfAbsent(domain, packageName.toLowerCase());
			} catch (Exception exception) {
				// BUG FIX: log through the class logger instead of printStackTrace().
				LOG.warning("Failed to read ErrorData.xml for domain " + domain + ": " + exception);
			} finally {
				// BUG FIX: the stream was previously never closed (resource leak).
				if (inputStream != null) {
					try { inputStream.close(); } catch (Exception ignored) { /* best effort */ }
				}
			}
		}
	}
	// NOTE(review): on the very first resolution this returns the original-case
	// name while the cache stores it lower-cased, so later calls may return a
	// different casing. Preserved as-is to avoid changing lookup behavior —
	// confirm whether package names are always lower-case in practice.
	return packageName;
}
/**
 * Verifies that every error declared in the domain's generated ErrorConstants
 * class has a corresponding "&lt;ErrorName&gt;.message" entry in the domain's
 * Errors.properties bundle, logging the missing keys and raising
 * el_validation_failed when any are absent.
 */
private void validate(String domain){
	Class errorConstantsClass = getRequiredClass(getErrorDataClassPackage(domain), ERRORCONSTANTS_CALSSNAME);
	Set<String> constantsClassErrorSet = new HashSet<String>();
	Set<String> propertiesErrorSet = new HashSet<String>();
	String errorBundleName = getErrorBundleName(domain);
	// Populating the set with the error names from ErrorConstants.java and Errors.properties.
	// The key in the properties file is <ErrorName>.message and hence populating the set from
	// ErrorConstants.java with <ErrorName>.message and check for consistencies. The constraint here
	// is Errors.properties must contain all the errors defined in ErrorConstants.java
	try {
		if(errorConstantsClass != null){
			Field[] allFields = errorConstantsClass.getFields();
			for (Field field : allFields)
				// ERRORDOMAIN is a meta-constant, not an error name; skip it.
				if(field != null && !field.getName().equals(ERRORDOMAINCONSTANTNAME)){
					String propertyValue = (String)field.get(null);
					if(propertyValue != null)
						constantsClassErrorSet.add(propertyValue + MESSAGE_PROPERTY);
				}
			Locale locale2 = Locale.US;
			ResourceBundle rBundle = ResourceBundle.getBundle(errorBundleName, locale2, Thread.currentThread().getContextClassLoader());
			propertiesErrorSet = rBundle.keySet();
		}
	} catch (Exception exception) {
		Object[] arguments = new Object[] {"ErrorData.xml" };
		ExceptionUtils.throwServiceRuntimeException(ErrorLibraryBaseErrors.el_io_error, arguments, exception);
	}
	// Whatever remains after the removal is declared in code but missing from the bundle.
	constantsClassErrorSet.removeAll(propertiesErrorSet);
	if(!constantsClassErrorSet.isEmpty()){
		StringBuilder logmsg = new StringBuilder();
		logmsg.append("Not all found error properties found in bundle \"");
		logmsg.append(errorBundleName);
		logmsg.append("\" : Missing [");
		boolean delim = false;
		for(String constantsClassError: constantsClassErrorSet) {
			if(delim) {
				logmsg.append(", ");
			}
			logmsg.append(constantsClassError);
			delim = true;
		}
		logmsg.append("]");
		LOG.warning(logmsg.toString());
		Object[] arguments = new Object[] {domain};
		ExceptionUtils.throwServiceRuntimeException(ErrorLibraryBaseErrors.el_validation_failed, arguments);
	}
}
/**
 * Builds the resource-bundle name for the domain:
 * {@code META-INF.errorlibrary.<domain>.Errors}.
 */
private String getErrorBundleName(String domain){
	StringBuilder bundleName = new StringBuilder(DEFAULT_RESOURCE_PACKAGE);
	bundleName.append('.').append(domain).append('.').append(BUNDLE_NAME);
	return bundleName.toString();
}
/* (non-Javadoc)
 * @see org.ebayopensource.turmeric.runtime.common.errors.ErrorDataProvider#getCommonErrorData(org.ebayopensource.turmeric.runtime.common.errors.ErrorDataProvider.ErrorDataKey, java.lang.Object[], java.util.Locale)
 */
/**
 * Looks up the CommonErrorData for the given key, lazily initializing the
 * key's domain on a cache miss, then clones it and fills in the localized
 * message and resolution text.
 *
 * @param key identifies the error name and its domain (bundle)
 * @param args message-format arguments
 * @param locale the desired locale; may be {@code null}, in which case the
 *               English default text is used
 * @return the populated CommonErrorData clone
 */
@Override
public CommonErrorData getCommonErrorData(ErrorDataKey key, Object[] args,
		Locale locale) {
	ErrorNameDomain errorNameDomain = new ErrorNameDomain(key.getErrorName(), key.getBundle());
	CommonErrorData tempErrorData = s_domainErrorDataMap.get(errorNameDomain);
	if(tempErrorData == null){
		// First lookup for this domain: load and retry.
		initialize(key.getBundle());
		tempErrorData = s_domainErrorDataMap.get(errorNameDomain);
	}
	if(tempErrorData == null){
		Object[] arguments = new Object[] {key.getErrorName(), key.getBundle() };
		ExceptionUtils.throwServiceRuntimeException(ErrorLibraryBaseErrors.el_no_such_error_defined, arguments);
	}
	CommonErrorData commonErrorData = ExceptionUtils.cloneErrorData(key, tempErrorData, args);
	// BUG FIX: guard against a null locale (previously locale.getLanguage()
	// threw a NullPointerException); buildMessageAndResolution already falls
	// back to the English text when the language is null.
	buildMessageAndResolution(commonErrorData, locale == null ? null : locale.getLanguage(), args);
	return commonErrorData;
}
/* (non-Javadoc)
 * @see org.ebayopensource.turmeric.runtime.common.errors.ErrorDataProvider#getCustomErrorData(org.ebayopensource.turmeric.runtime.common.errors.ErrorDataProvider.ErrorDataKey, java.lang.Object[], java.lang.Class, java.util.Locale)
 */
/**
 * Typed variant of {@link #getCommonErrorData}; the lookup is identical and
 * the result is simply cast to the requested subtype.
 */
@SuppressWarnings("unchecked")
@Override
public <T extends CommonErrorData> T getCustomErrorData(ErrorDataKey key,
		Object[] args, Class<T> clazz, Locale locale) {
	CommonErrorData errorData = getCommonErrorData(key, args, locale);
	return (T) errorData;
}
/** Delegates to {@link #getCommonErrorData(ErrorDataKey, Object[], Locale)}. */
@Override
public CommonErrorData getErrorData(ErrorDataKey key, Object[] args, Locale locale) {
	return getCommonErrorData(key, args, locale);
}
/* (non-Javadoc)
 * @see org.ebayopensource.turmeric.runtime.common.errors.ErrorDataProvider#init()
 */
/** No eager initialization required; domains are loaded lazily on first lookup. */
@Override
public void init() {
	// intentionally empty
}
/**
 * This method builds the message and resolution for a given ErrorData,
 * resolving the localized text from the domain's resource bundle and applying
 * {@link MessageFormat} substitution.
 *
 * @param errorData The CommonErrorData where the message and resolution are both
 *           constructed and localized.
 * @param locale The language the message will be converted to; {@code null}
 *           selects the English default text.
 * @param params parameters to be used for message formatting
 */
public void buildMessageAndResolution(CommonErrorData errorData, String locale, Object[] params) {
	String message = null;
	String resolution = null;
	// The parameter is already a CommonErrorData, so the old instanceof check
	// was redundant; a plain null check preserves the old no-op on null input.
	CommonErrorData commonErrorData = errorData;
	if(commonErrorData != null && commonErrorData.getDomain() != null){
		ErrorTextResolver errorTextResolver = s_bundleResolvers.get(commonErrorData.getDomain());
		String messageId = commonErrorData.getErrorName() + ".message";
		String resolutionId = commonErrorData.getErrorName() + ".resolution";
		if(errorTextResolver == null){
			populateResourceBundleResolver(commonErrorData.getDomain());
			// BUG FIX: re-read the resolver after populating it. Previously the
			// local variable stayed null, so the first call for a domain never
			// produced any message text.
			errorTextResolver = s_bundleResolvers.get(commonErrorData.getDomain());
		}
		if(errorTextResolver != null){
			// Get localized version here, to keep caching logic below working
			if (locale != null) {
				message = errorTextResolver.getErrorText(messageId, commonErrorData.getDomain(), locale);
				resolution = errorTextResolver.getErrorText(resolutionId, commonErrorData.getDomain(), locale);
			}
			// If localized text not found, get the English version
			if (message == null || message.isEmpty()) {
				message = errorTextResolver.getErrorText(messageId, commonErrorData.getDomain(), null);
				// If the English version is not found, it is an error.
				if (message == null) {
					message = "Unable to retrieve the message. Error message not defined in" +
							" the bundle for Error " + commonErrorData.getDomain() + "." + commonErrorData.getErrorName();
					commonErrorData.setMessage(message);
					return;
				}
			}
			if (resolution == null || resolution.isEmpty()) {
				resolution = errorTextResolver.getErrorText(resolutionId, commonErrorData.getDomain(), null);
			}
			try {
				message = MessageFormat.format(message, params);
			} catch (Exception e) {
				message = "Error Message Formatting error for Error ID " + commonErrorData.getDomain() + "." + commonErrorData.getErrorId() + ". Exception: " + e.toString();
			}
		}
	}
	if (commonErrorData != null) {
		commonErrorData.setMessage(message);
		commonErrorData.setResolution(resolution);
	}
}
/**
 * Creates and caches the {@link ErrorTextResolver} for the given error domain
 * in {@code s_bundleResolvers}, if one is not already registered.
 * <p>
 * When the domain's error-collection class is present in
 * {@code errorTypesClassList}, its class loader is used to load the resource
 * bundle; otherwise the system class loader (when a bundle package is known)
 * or the thread context class loader is used.
 *
 * @param domain the error domain whose bundle resolver should be cached
 */
private synchronized void populateResourceBundleResolver(String domain)
{
    String bundlePackage = getErrorDataClassPackage(domain);
    String errorBundleName = getErrorBundleName(domain);
    ErrorTextResolver result = s_bundleResolvers.get(domain);
    if (result == null) {
        ClassLoader loader = null;
        if (bundlePackage != null) {
            String errorCollectionClass = bundlePackage + "." + ERRORDATACOLLECTION_CALSSNAME;
            // Use the wildcard Class<?> instead of the raw Class type, and
            // brace the loop/condition bodies.
            for (Class<?> errorType : errorTypesClassList) {
                if (errorType.getName().equals(errorCollectionClass)) {
                    loader = errorType.getClassLoader();
                    break;
                }
            }
            if (loader == null) {
                // we're in system class loader
                loader = ClassLoader.getSystemClassLoader();
            }
        } else {
            loader = Thread.currentThread().getContextClassLoader();
        }
        result = new ResourceBundleErrorTextResolver(errorBundleName, loader);
        s_bundleResolvers.put(domain, result);
    }
}
/**
 * {@link ErrorTextResolver} backed by a {@link ResourceBundle} loaded through
 * a fixed class loader. Missing bundles, missing keys, or non-String values
 * are all reported as "text not found" ({@code null}) so callers can fall
 * back to a default bundle.
 */
private static class ResourceBundleErrorTextResolver implements ErrorTextResolver {
    private final String m_bundleName;
    private final ClassLoader m_loader;

    ResourceBundleErrorTextResolver(String bundleName, ClassLoader loader) {
        Preconditions.checkNotNull(bundleName);
        Preconditions.checkNotNull(loader);
        m_bundleName = bundleName;
        m_loader = loader;
    }

    /**
     * Looks up the text for {@code id} in this resolver's bundle.
     *
     * @param id     the bundle key, e.g. {@code <errorName>.message}
     * @param domain the error domain (unused here; part of the interface)
     * @param locale language code, or {@code null} for {@link Locale#US}
     * @return the resolved text, or {@code null} when it cannot be found
     */
    public String getErrorText(String id, String domain, String locale) {
        Locale bundleLocale = (locale == null) ? Locale.US : new Locale(locale);
        try {
            ResourceBundle rb = ResourceBundle.getBundle(m_bundleName, bundleLocale, m_loader);
            return rb.getString(id);
        } catch (MissingResourceException | ClassCastException e) {
            // Collapsed duplicate catch blocks into a multi-catch: either way
            // the text is simply "not found" and null is returned.
            return null;
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jdbc;
import java.util.Map;
import javax.sql.DataSource;
import org.apache.camel.Category;
import org.apache.camel.Component;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.DefaultEndpoint;
/**
 * Access databases through SQL and JDBC.
 */
@UriEndpoint(firstVersion = "1.2.0", scheme = "jdbc", title = "JDBC", syntax = "jdbc:dataSourceName", producerOnly = true,
        category = { Category.DATABASE, Category.SQL })
public class JdbcEndpoint extends DefaultEndpoint {

    // DataSource used by the producer; injected via constructor or setter.
    private DataSource dataSource;

    @UriPath
    @Metadata(required = true)
    private String dataSourceName;
    @UriParam
    private int readSize;
    @UriParam
    private boolean transacted;
    @UriParam(defaultValue = "true")
    private boolean resetAutoCommit = true;
    // Extra java.sql.Statement properties collected from URI options carrying
    // the "statement." prefix (e.g. statement.maxRows=10), set via reflection.
    @UriParam(prefix = "statement.", multiValue = true)
    private Map<String, Object> parameters;
    @UriParam(defaultValue = "true")
    private boolean useJDBC4ColumnNameAndLabelSemantics = true;
    @UriParam
    private boolean useGetBytesForBlob;
    @UriParam(label = "advanced")
    private JdbcPrepareStatementStrategy prepareStatementStrategy = new DefaultJdbcPrepareStatementStrategy();
    @UriParam(defaultValue = "true")
    private boolean allowNamedParameters = true;
    @UriParam
    private boolean useHeadersAsParameters;
    @UriParam(defaultValue = "SelectList")
    private JdbcOutputType outputType = JdbcOutputType.SelectList;
    @UriParam
    private String outputClass;
    @UriParam(label = "advanced")
    private BeanRowMapper beanRowMapper = new DefaultBeanRowMapper();

    public JdbcEndpoint() {
    }

    public JdbcEndpoint(String endpointUri, Component component, DataSource dataSource) {
        super(endpointUri, component);
        this.dataSource = dataSource;
    }

    /**
     * Always throws: this is a producer-only endpoint
     * (see {@code producerOnly = true} on the class annotation).
     */
    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        throw new UnsupportedOperationException("Not supported");
    }

    /**
     * Creates the producer, handing it the configured data source, read size
     * and extra statement parameters.
     */
    @Override
    public Producer createProducer() throws Exception {
        return new JdbcProducer(this, dataSource, readSize, parameters);
    }

    public String getDataSourceName() {
        return dataSourceName;
    }

    /**
     * Name of DataSource to lookup in the Registry. If the name is dataSource or default, then Camel will attempt to
     * lookup a default DataSource from the registry, meaning if there is a only one instance of DataSource found, then
     * this DataSource will be used.
     */
    public void setDataSourceName(String dataSourceName) {
        this.dataSourceName = dataSourceName;
    }

    public int getReadSize() {
        return readSize;
    }

    /**
     * The default maximum number of rows that can be read by a polling query. The default value is 0.
     */
    public void setReadSize(int readSize) {
        this.readSize = readSize;
    }

    public boolean isTransacted() {
        return transacted;
    }

    /**
     * Whether transactions are in use.
     */
    public void setTransacted(boolean transacted) {
        this.transacted = transacted;
    }

    public boolean isResetAutoCommit() {
        return resetAutoCommit;
    }

    /**
     * Camel will set the autoCommit on the JDBC connection to be false, commit the change after executed the statement
     * and reset the autoCommit flag of the connection at the end, if the resetAutoCommit is true. If the JDBC
     * connection doesn't support to reset the autoCommit flag, you can set the resetAutoCommit flag to be false, and
     * Camel will not try to reset the autoCommit flag. When used with XA transactions you most likely need to set it to
     * false so that the transaction manager is in charge of committing this tx.
     */
    public void setResetAutoCommit(boolean resetAutoCommit) {
        this.resetAutoCommit = resetAutoCommit;
    }

    public DataSource getDataSource() {
        return dataSource;
    }

    /**
     * The data source to use
     */
    public void setDataSource(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    public Map<String, Object> getParameters() {
        return parameters;
    }

    /**
     * Optional parameters to the {@link java.sql.Statement}.
     * <p/>
     * For example to set maxRows, fetchSize etc.
     *
     * @param parameters parameters which will be set using reflection
     */
    public void setParameters(Map<String, Object> parameters) {
        this.parameters = parameters;
    }

    public boolean isUseJDBC4ColumnNameAndLabelSemantics() {
        return useJDBC4ColumnNameAndLabelSemantics;
    }

    /**
     * Sets whether to use JDBC 4 or JDBC 3.0 or older semantic when retrieving column name.
     * <p/>
     * JDBC 4.0 uses columnLabel to get the column name where as JDBC 3.0 uses both columnName or columnLabel.
     * Unfortunately JDBC drivers behave differently so you can use this option to work out issues around your JDBC
     * driver if you get problem using this component
     * <p/>
     * This option is default <tt>true</tt>.
     *
     * @param useJDBC4ColumnNameAndLabelSemantics <tt>true</tt> to use JDBC 4.0 semantics, <tt>false</tt> to use JDBC
     *            3.0.
     */
    public void setUseJDBC4ColumnNameAndLabelSemantics(boolean useJDBC4ColumnNameAndLabelSemantics) {
        this.useJDBC4ColumnNameAndLabelSemantics = useJDBC4ColumnNameAndLabelSemantics;
    }

    public JdbcPrepareStatementStrategy getPrepareStatementStrategy() {
        return prepareStatementStrategy;
    }

    /**
     * Allows the plugin to use a custom org.apache.camel.component.jdbc.JdbcPrepareStatementStrategy to control
     * preparation of the query and prepared statement.
     */
    public void setPrepareStatementStrategy(JdbcPrepareStatementStrategy prepareStatementStrategy) {
        this.prepareStatementStrategy = prepareStatementStrategy;
    }

    public boolean isAllowNamedParameters() {
        return allowNamedParameters;
    }

    /**
     * Whether to allow using named parameters in the queries.
     */
    public void setAllowNamedParameters(boolean allowNamedParameters) {
        this.allowNamedParameters = allowNamedParameters;
    }

    public boolean isUseHeadersAsParameters() {
        return useHeadersAsParameters;
    }

    /**
     * Set this option to true to use the prepareStatementStrategy with named parameters. This allows to define queries
     * with named placeholders, and use headers with the dynamic values for the query placeholders.
     */
    public void setUseHeadersAsParameters(boolean useHeadersAsParameters) {
        this.useHeadersAsParameters = useHeadersAsParameters;
    }

    public JdbcOutputType getOutputType() {
        return outputType;
    }

    /**
     * Determines the output the producer should use.
     */
    public void setOutputType(JdbcOutputType outputType) {
        this.outputType = outputType;
    }

    public String getOutputClass() {
        return outputClass;
    }

    /**
     * Specify the full package and class name to use as conversion when outputType=SelectOne or SelectList.
     */
    public void setOutputClass(String outputClass) {
        this.outputClass = outputClass;
    }

    public BeanRowMapper getBeanRowMapper() {
        return beanRowMapper;
    }

    /**
     * To use a custom org.apache.camel.component.jdbc.BeanRowMapper when using outputClass. The default implementation
     * will lower case the row names and skip underscores, and dashes. For example "CUST_ID" is mapped as "custId".
     */
    public void setBeanRowMapper(BeanRowMapper beanRowMapper) {
        this.beanRowMapper = beanRowMapper;
    }

    public boolean isUseGetBytesForBlob() {
        return this.useGetBytesForBlob;
    }

    /**
     * To read BLOB columns as bytes instead of string data.
     * <p/>
     * This may be needed for certain databases such as Oracle where you must read BLOB columns as bytes.
     */
    public void setUseGetBytesForBlob(boolean useGetBytesForBlob) {
        this.useGetBytesForBlob = useGetBytesForBlob;
    }

    /**
     * Builds the endpoint URI from the data source name, falling back to the
     * bare scheme when no name is configured.
     */
    @Override
    protected String createEndpointUri() {
        return dataSourceName != null ? "jdbc:" + dataSourceName : "jdbc";
    }
}
| |
/*
* Copyright 2016 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.buffer;
import io.netty.util.ByteProcessor;
import io.netty.util.Recycler;
import io.netty.util.Recycler.Handle;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;
import static io.netty.util.internal.MathUtil.isOutOfBounds;
/**
 * A pooled derived buffer that exposes a fixed-size sub-region (slice) of a
 * wrapped {@link AbstractByteBuf}. Every index is translated into the wrapped
 * buffer's coordinate space by adding {@link #adjustment} (see {@link #idx(int)}).
 * The slice's capacity is fixed, so {@link #capacity(int)} is rejected.
 * Instances are recycled via a {@link Recycler}.
 */
final class PooledSlicedByteBuf extends AbstractPooledDerivedByteBuf<PooledSlicedByteBuf> {

    private static final Recycler<PooledSlicedByteBuf> RECYCLER = new Recycler<PooledSlicedByteBuf>() {
        @Override
        protected PooledSlicedByteBuf newObject(Handle<PooledSlicedByteBuf> handle) {
            return new PooledSlicedByteBuf(handle);
        }
    };

    /**
     * Returns a recycled slice over {@code buffer}, after validating that the
     * requested region lies within the buffer's capacity.
     *
     * @param buffer     the buffer to slice
     * @param index      start of the slice region (used only for bounds checking here)
     * @param length     length of the slice; becomes the slice's capacity
     * @param adjustment offset added to every slice index to reach the wrapped buffer
     */
    static PooledSlicedByteBuf newInstance(AbstractByteBuf buffer, int index, int length, int adjustment) {
        if (isOutOfBounds(index, length, buffer.capacity())) {
            throw new IndexOutOfBoundsException(buffer + ".slice(" + index + ", " + length + ')');
        }

        final PooledSlicedByteBuf slice = RECYCLER.get();
        slice.init(buffer, 0, length, length);
        slice.discardMarks();
        slice.adjustment = adjustment;
        return slice;
    }

    // Offset added to every index to map slice coordinates onto the wrapped buffer.
    private int adjustment;

    private PooledSlicedByteBuf(Handle<PooledSlicedByteBuf> handle) {
        super(handle);
    }

    @Override
    public int capacity() {
        // The slice is fixed-size: capacity and maxCapacity are the same.
        return maxCapacity();
    }

    @Override
    public ByteBuf capacity(int newCapacity) {
        return reject();
    }

    @Override
    public int arrayOffset() {
        return idx(unwrap().arrayOffset());
    }

    @Override
    public long memoryAddress() {
        return unwrap().memoryAddress() + adjustment;
    }

    @Override
    public ByteBuffer nioBuffer(int index, int length) {
        checkIndex0(index, length);
        return unwrap().nioBuffer(idx(index), length);
    }

    @Override
    public ByteBuffer[] nioBuffers(int index, int length) {
        checkIndex0(index, length);
        return unwrap().nioBuffers(idx(index), length);
    }

    @Override
    public ByteBuf copy(int index, int length) {
        checkIndex0(index, length);
        return unwrap().copy(idx(index), length);
    }

    // The public get*/set* methods validate against the slice's own bounds
    // (checkIndex0) and then delegate; the protected _get*/_set* overrides
    // delegate straight to the wrapped buffer's unchecked accessors because
    // the caller has already validated the index.

    @Override
    public byte getByte(int index) {
        checkIndex0(index, 1);
        return unwrap().getByte(idx(index));
    }

    @Override
    protected byte _getByte(int index) {
        return unwrap()._getByte(idx(index));
    }

    @Override
    public short getShort(int index) {
        checkIndex0(index, 2);
        return unwrap().getShort(idx(index));
    }

    @Override
    protected short _getShort(int index) {
        return unwrap()._getShort(idx(index));
    }

    @Override
    public short getShortLE(int index) {
        checkIndex0(index, 2);
        return unwrap().getShortLE(idx(index));
    }

    @Override
    protected short _getShortLE(int index) {
        return unwrap()._getShortLE(idx(index));
    }

    @Override
    public int getUnsignedMedium(int index) {
        checkIndex0(index, 3);
        return unwrap().getUnsignedMedium(idx(index));
    }

    @Override
    protected int _getUnsignedMedium(int index) {
        return unwrap()._getUnsignedMedium(idx(index));
    }

    @Override
    public int getUnsignedMediumLE(int index) {
        checkIndex0(index, 3);
        return unwrap().getUnsignedMediumLE(idx(index));
    }

    @Override
    protected int _getUnsignedMediumLE(int index) {
        return unwrap()._getUnsignedMediumLE(idx(index));
    }

    @Override
    public int getInt(int index) {
        checkIndex0(index, 4);
        return unwrap().getInt(idx(index));
    }

    @Override
    protected int _getInt(int index) {
        return unwrap()._getInt(idx(index));
    }

    @Override
    public int getIntLE(int index) {
        checkIndex0(index, 4);
        return unwrap().getIntLE(idx(index));
    }

    @Override
    protected int _getIntLE(int index) {
        return unwrap()._getIntLE(idx(index));
    }

    @Override
    public long getLong(int index) {
        checkIndex0(index, 8);
        return unwrap().getLong(idx(index));
    }

    @Override
    protected long _getLong(int index) {
        return unwrap()._getLong(idx(index));
    }

    @Override
    public long getLongLE(int index) {
        checkIndex0(index, 8);
        return unwrap().getLongLE(idx(index));
    }

    @Override
    protected long _getLongLE(int index) {
        return unwrap()._getLongLE(idx(index));
    }

    @Override
    public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
        checkIndex0(index, length);
        unwrap().getBytes(idx(index), dst, dstIndex, length);
        return this;
    }

    @Override
    public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
        checkIndex0(index, length);
        unwrap().getBytes(idx(index), dst, dstIndex, length);
        return this;
    }

    @Override
    public ByteBuf getBytes(int index, ByteBuffer dst) {
        checkIndex0(index, dst.remaining());
        unwrap().getBytes(idx(index), dst);
        return this;
    }

    @Override
    public ByteBuf setByte(int index, int value) {
        checkIndex0(index, 1);
        unwrap().setByte(idx(index), value);
        return this;
    }

    @Override
    protected void _setByte(int index, int value) {
        unwrap()._setByte(idx(index), value);
    }

    @Override
    public ByteBuf setShort(int index, int value) {
        checkIndex0(index, 2);
        unwrap().setShort(idx(index), value);
        return this;
    }

    @Override
    protected void _setShort(int index, int value) {
        unwrap()._setShort(idx(index), value);
    }

    @Override
    public ByteBuf setShortLE(int index, int value) {
        checkIndex0(index, 2);
        unwrap().setShortLE(idx(index), value);
        return this;
    }

    @Override
    protected void _setShortLE(int index, int value) {
        unwrap()._setShortLE(idx(index), value);
    }

    @Override
    public ByteBuf setMedium(int index, int value) {
        checkIndex0(index, 3);
        unwrap().setMedium(idx(index), value);
        return this;
    }

    @Override
    protected void _setMedium(int index, int value) {
        unwrap()._setMedium(idx(index), value);
    }

    @Override
    public ByteBuf setMediumLE(int index, int value) {
        checkIndex0(index, 3);
        unwrap().setMediumLE(idx(index), value);
        return this;
    }

    @Override
    protected void _setMediumLE(int index, int value) {
        unwrap()._setMediumLE(idx(index), value);
    }

    @Override
    public ByteBuf setInt(int index, int value) {
        checkIndex0(index, 4);
        unwrap().setInt(idx(index), value);
        return this;
    }

    @Override
    protected void _setInt(int index, int value) {
        unwrap()._setInt(idx(index), value);
    }

    @Override
    public ByteBuf setIntLE(int index, int value) {
        checkIndex0(index, 4);
        unwrap().setIntLE(idx(index), value);
        return this;
    }

    @Override
    protected void _setIntLE(int index, int value) {
        unwrap()._setIntLE(idx(index), value);
    }

    @Override
    public ByteBuf setLong(int index, long value) {
        checkIndex0(index, 8);
        unwrap().setLong(idx(index), value);
        return this;
    }

    @Override
    protected void _setLong(int index, long value) {
        unwrap()._setLong(idx(index), value);
    }

    @Override
    public ByteBuf setLongLE(int index, long value) {
        checkIndex0(index, 8);
        unwrap().setLongLE(idx(index), value);
        return this;
    }

    @Override
    protected void _setLongLE(int index, long value) {
        // BUGFIX: delegate to the unchecked _setLongLE like every other _set*
        // override. Previously this called the public setLongLE(), which
        // re-validated the already-translated index against the wrapped
        // buffer, breaking the unchecked-accessor contract.
        unwrap()._setLongLE(idx(index), value);
    }

    @Override
    public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
        checkIndex0(index, length);
        unwrap().setBytes(idx(index), src, srcIndex, length);
        return this;
    }

    @Override
    public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
        checkIndex0(index, length);
        unwrap().setBytes(idx(index), src, srcIndex, length);
        return this;
    }

    @Override
    public ByteBuf setBytes(int index, ByteBuffer src) {
        checkIndex0(index, src.remaining());
        unwrap().setBytes(idx(index), src);
        return this;
    }

    @Override
    public ByteBuf getBytes(int index, OutputStream out, int length)
            throws IOException {
        checkIndex0(index, length);
        unwrap().getBytes(idx(index), out, length);
        return this;
    }

    @Override
    public int getBytes(int index, GatheringByteChannel out, int length)
            throws IOException {
        checkIndex0(index, length);
        return unwrap().getBytes(idx(index), out, length);
    }

    @Override
    public int getBytes(int index, FileChannel out, long position, int length)
            throws IOException {
        checkIndex0(index, length);
        return unwrap().getBytes(idx(index), out, position, length);
    }

    @Override
    public int setBytes(int index, InputStream in, int length)
            throws IOException {
        checkIndex0(index, length);
        return unwrap().setBytes(idx(index), in, length);
    }

    @Override
    public int setBytes(int index, ScatteringByteChannel in, int length)
            throws IOException {
        checkIndex0(index, length);
        return unwrap().setBytes(idx(index), in, length);
    }

    @Override
    public int setBytes(int index, FileChannel in, long position, int length)
            throws IOException {
        checkIndex0(index, length);
        return unwrap().setBytes(idx(index), in, position, length);
    }

    @Override
    public int forEachByte(int index, int length, ByteProcessor processor) {
        checkIndex0(index, length);
        int ret = unwrap().forEachByte(idx(index), length, processor);
        if (ret < adjustment) {
            // -1 (not found) or an index before the slice: report not found.
            return -1;
        }
        // Translate the wrapped buffer's index back into slice coordinates.
        return ret - adjustment;
    }

    @Override
    public int forEachByteDesc(int index, int length, ByteProcessor processor) {
        checkIndex0(index, length);
        int ret = unwrap().forEachByteDesc(idx(index), length, processor);
        if (ret < adjustment) {
            return -1;
        }
        return ret - adjustment;
    }

    /** Translates a slice index into the wrapped buffer's coordinate space. */
    private int idx(int index) {
        return index + adjustment;
    }

    /** Always throws: the capacity of a slice cannot be changed. */
    private static ByteBuf reject() {
        throw new UnsupportedOperationException("sliced buffer");
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl.cluster;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import org.apache.camel.CamelContext;
import org.apache.camel.cluster.CamelClusterMember;
import org.apache.camel.cluster.CamelClusterService;
import org.apache.camel.cluster.CamelClusterView;
import org.apache.camel.component.file.cluster.FileLockClusterService;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.support.cluster.AbstractCamelClusterService;
import org.apache.camel.support.cluster.AbstractCamelClusterView;
import org.apache.camel.support.cluster.ClusterServiceSelectors;
import org.junit.Assert;
import org.junit.Test;
import static org.apache.camel.support.cluster.ClusterServiceHelper.lookupService;
import static org.apache.camel.support.cluster.ClusterServiceHelper.mandatoryLookupService;
/**
 * Tests for the {@code ClusterServiceSelectors} strategies used by
 * {@code ClusterServiceHelper.lookupService}/{@code mandatoryLookupService}:
 * default, single, first, type-based, attribute-based and order-based
 * selection, plus the mandatory-lookup failure modes.
 */
public class ClusterServiceSelectorTest {

    @Test
    public void testDefaultSelector() throws Exception {
        CamelContext context = null;
        try {
            DummyClusterService1 service1 = new DummyClusterService1();
            context = new DefaultCamelContext();
            context.addService(service1);

            Optional<CamelClusterService> lookup = lookupService(context);
            Assert.assertTrue(lookup.isPresent());
            Assert.assertEquals(service1, lookup.get());
        } finally {
            if (context != null) {
                context.stop();
            }
        }
    }

    @Test
    public void testDefaultSelectorFailure() throws Exception {
        CamelContext context = null;
        try {
            context = new DefaultCamelContext();
            context.addService(new DummyClusterService1());
            context.addService(new DummyClusterService2());

            // Two candidate services and no disambiguating selector: no result.
            Optional<CamelClusterService> lookup = lookupService(context);
            Assert.assertFalse(lookup.isPresent());
        } finally {
            if (context != null) {
                context.stop();
            }
        }
    }

    @Test
    public void testSelectSingle() throws Exception {
        CamelContext context = null;
        try {
            DummyClusterService1 service1 = new DummyClusterService1();
            context = new DefaultCamelContext();
            context.addService(service1);

            CamelClusterService.Selector selector = ClusterServiceSelectors.single();
            Optional<CamelClusterService> lookup = lookupService(context, selector);
            Assert.assertTrue(lookup.isPresent());
            Assert.assertEquals(service1, lookup.get());
        } finally {
            if (context != null) {
                context.stop();
            }
        }
    }

    @Test
    public void testSelectSingleFailure() throws Exception {
        CamelContext context = null;
        try {
            context = new DefaultCamelContext();
            context.addService(new DummyClusterService1());
            context.addService(new DummyClusterService2());

            // single() must fail when more than one service is registered.
            CamelClusterService.Selector selector = ClusterServiceSelectors.single();
            Optional<CamelClusterService> lookup = lookupService(context, selector);
            Assert.assertFalse(lookup.isPresent());
        } finally {
            if (context != null) {
                context.stop();
            }
        }
    }

    @Test
    public void testSelectFirst() throws Exception {
        CamelContext context = null;
        try {
            context = new DefaultCamelContext();
            context.addService(new DummyClusterService1());
            context.addService(new DummyClusterService2());

            CamelClusterService.Selector selector = ClusterServiceSelectors.first();
            Optional<CamelClusterService> lookup = lookupService(context, selector);
            Assert.assertTrue(lookup.isPresent());
        } finally {
            if (context != null) {
                context.stop();
            }
        }
    }

    @Test
    public void testSelectByType() throws Exception {
        CamelContext context = null;
        try {
            context = new DefaultCamelContext();
            context.addService(new DummyClusterService1());
            context.addService(new DummyClusterService2());

            Assert.assertTrue(lookupService(context, ClusterServiceSelectors.type(DummyClusterService1.class)).isPresent());
            Assert.assertTrue(lookupService(context, ClusterServiceSelectors.type(DummyClusterService2.class)).isPresent());
            // A type that was never registered must not be found.
            Assert.assertFalse(lookupService(context, ClusterServiceSelectors.type(FileLockClusterService.class)).isPresent());
        } finally {
            if (context != null) {
                context.stop();
            }
        }
    }

    @Test
    public void testSelectByAttribute() throws Exception {
        CamelContext context = null;
        try {
            DummyClusterService1 service1 = new DummyClusterService1();
            service1.setAttribute("service.type", "zookeeper");
            DummyClusterService2 service2 = new DummyClusterService2();
            service2.setAttribute("service.type", "file");

            context = new DefaultCamelContext();
            context.addService(service1);
            context.addService(service2);

            Optional<CamelClusterService> lookup;

            lookup = lookupService(context, ClusterServiceSelectors.attribute("service.type", "zookeeper"));
            Assert.assertTrue(lookup.isPresent());
            Assert.assertEquals(service1, lookup.get());

            lookup = lookupService(context, ClusterServiceSelectors.attribute("service.type", "file"));
            Assert.assertTrue(lookup.isPresent());
            Assert.assertEquals(service2, lookup.get());

            lookup = lookupService(context, ClusterServiceSelectors.attribute("service.type", "consul"));
            Assert.assertFalse(lookup.isPresent());
        } finally {
            if (context != null) {
                context.stop();
            }
        }
    }

    @Test
    public void testSelectByOrder() throws Exception {
        CamelContext context = null;
        try {
            DummyClusterService1 service1 = new DummyClusterService1();
            service1.setOrder(1);
            DummyClusterService2 service2 = new DummyClusterService2();
            service2.setOrder(0);

            context = new DefaultCamelContext();
            context.addService(service1);
            context.addService(service2);

            // The lowest order wins.
            CamelClusterService.Selector selector = ClusterServiceSelectors.order();
            Optional<CamelClusterService> lookup = lookupService(context, selector);
            Assert.assertTrue(lookup.isPresent());
            Assert.assertEquals(service2, lookup.get());
        } finally {
            if (context != null) {
                context.stop();
            }
        }
    }

    @Test
    public void testSelectByOrderFailure() throws Exception {
        CamelContext context = null;
        try {
            DummyClusterService1 service1 = new DummyClusterService1();
            service1.setOrder(1);
            DummyClusterService2 service2 = new DummyClusterService2();
            service2.setOrder(0);
            DummyClusterService2 service3 = new DummyClusterService2();
            service3.setOrder(0);

            context = new DefaultCamelContext();
            context.addService(service1);
            context.addService(service2);
            context.addService(service3);

            // A tie on the lowest order is ambiguous, so no service is selected.
            CamelClusterService.Selector selector = ClusterServiceSelectors.order();
            Optional<CamelClusterService> lookup = lookupService(context, selector);
            Assert.assertFalse(lookup.isPresent());
        } finally {
            if (context != null) {
                context.stop();
            }
        }
    }

    @Test
    public void testMandatoryLookup() throws Exception {
        CamelContext context = null;
        try {
            DummyClusterService1 service1 = new DummyClusterService1();
            context = new DefaultCamelContext();
            context.addService(service1);

            CamelClusterService.Selector selector = ClusterServiceSelectors.single();
            CamelClusterService lookup = mandatoryLookupService(context, selector);
            Assert.assertNotNull(lookup);
            Assert.assertEquals(service1, lookup);
        } finally {
            if (context != null) {
                context.stop();
            }
        }
    }

    @Test
    public void testMandatoryLookupWithoutSelector() throws Exception {
        CamelContext context = null;
        try {
            DummyClusterService1 service1 = new DummyClusterService1();
            context = new DefaultCamelContext();
            context.addService(service1);

            CamelClusterService lookup = mandatoryLookupService(context);
            Assert.assertNotNull(lookup);
            Assert.assertEquals(service1, lookup);
        } finally {
            if (context != null) {
                context.stop();
            }
        }
    }

    @Test(expected = IllegalStateException.class)
    public void testMandatoryLookupFailure() throws Exception {
        CamelContext context = null;
        try {
            context = new DefaultCamelContext();
            mandatoryLookupService(context, ClusterServiceSelectors.single());
        } finally {
            if (context != null) {
                context.stop();
            }
        }
    }

    @Test(expected = IllegalStateException.class)
    public void testMandatoryLookupFailureWithoutSelector() throws Exception {
        CamelContext context = null;
        try {
            context = new DefaultCamelContext();
            mandatoryLookupService(context);
        } finally {
            if (context != null) {
                context.stop();
            }
        }
    }

    // **************************************
    // Helpers
    // **************************************
    // The dummy services/view never touch the enclosing test instance, so they
    // are declared static to avoid carrying a hidden reference to it.

    /** Minimal cluster service used to exercise the selectors. */
    private static final class DummyClusterService1 extends AbstractCamelClusterService {
        public DummyClusterService1() {
        }

        @Override
        protected CamelClusterView createView(String namespace) throws Exception {
            return new DummyClusterServiceView(this, namespace);
        }
    }

    /** Second service type, used for ambiguity and type-selection tests. */
    private static final class DummyClusterService2 extends AbstractCamelClusterService {
        public DummyClusterService2() {
        }

        @Override
        protected CamelClusterView createView(String namespace) throws Exception {
            return new DummyClusterServiceView(this, namespace);
        }
    }

    /** View with no leader and no members; only the local member is reported. */
    private static final class DummyClusterServiceView extends AbstractCamelClusterView {
        public DummyClusterServiceView(CamelClusterService cluster, String namespace) {
            super(cluster, namespace);
        }

        @Override
        public Optional<CamelClusterMember> getLeader() {
            return Optional.empty();
        }

        @Override
        public CamelClusterMember getLocalMember() {
            return new DummyClusterServiceMember(false, true);
        }

        @Override
        public List<CamelClusterMember> getMembers() {
            return Collections.emptyList();
        }

        @Override
        protected void doStart() throws Exception {
        }

        @Override
        protected void doStop() throws Exception {
        }

        // Stays a (non-static) inner class: getId() needs the enclosing view's
        // getClusterService().
        private final class DummyClusterServiceMember implements CamelClusterMember {
            private final boolean leader;
            private final boolean local;

            public DummyClusterServiceMember(boolean leader, boolean local) {
                this.leader = leader;
                this.local = local;
            }

            @Override
            public boolean isLeader() {
                return leader;
            }

            @Override
            public boolean isLocal() {
                return local;
            }

            @Override
            public String getId() {
                return getClusterService().getId();
            }
        }
    }
}
| |
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2012 Dirk Beyer
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.util.predicates.mathsat5;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.sosy_lab.cpachecker.util.predicates.mathsat5.Mathsat5NativeApi.*;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import org.sosy_lab.common.Appender;
import org.sosy_lab.common.Appenders;
import org.sosy_lab.common.LogManager;
import org.sosy_lab.common.configuration.Configuration;
import org.sosy_lab.common.configuration.FileOption;
import org.sosy_lab.common.configuration.FileOption.Type;
import org.sosy_lab.common.configuration.InvalidConfigurationException;
import org.sosy_lab.common.configuration.Option;
import org.sosy_lab.common.configuration.Options;
import org.sosy_lab.common.io.Path;
import org.sosy_lab.common.io.Paths;
import org.sosy_lab.cpachecker.core.ShutdownNotifier;
import org.sosy_lab.cpachecker.util.predicates.interfaces.BooleanFormula;
import org.sosy_lab.cpachecker.util.predicates.interfaces.Formula;
import org.sosy_lab.cpachecker.util.predicates.interfaces.basicimpl.AbstractBitvectorFormulaManager;
import org.sosy_lab.cpachecker.util.predicates.interfaces.basicimpl.AbstractBooleanFormulaManager;
import org.sosy_lab.cpachecker.util.predicates.interfaces.basicimpl.AbstractFormulaManager;
import org.sosy_lab.cpachecker.util.predicates.interfaces.basicimpl.AbstractFunctionFormulaManager;
import org.sosy_lab.cpachecker.util.predicates.interfaces.basicimpl.AbstractRationalFormulaManager;
import org.sosy_lab.cpachecker.util.predicates.interfaces.basicimpl.AbstractUnsafeFormulaManager;
import org.sosy_lab.cpachecker.util.predicates.interfaces.basicimpl.FormulaCreator;
import org.sosy_lab.cpachecker.util.predicates.mathsat5.Mathsat5NativeApi.TerminationTest;
import com.google.common.base.Splitter;
import com.google.common.base.Splitter.MapSplitter;
import com.google.common.collect.ImmutableMap;
@Options(prefix="cpa.predicate.mathsat5")
public class Mathsat5FormulaManager extends AbstractFormulaManager<Long> implements AutoCloseable {

  /** Configuration options controlling how the MathSAT5 solver is set up. */
  @Options(prefix="cpa.predicate.mathsat5")
  private static class Mathsat5Settings {

    @Option(description = "List of further options which will be passed to Mathsat in addition to the default options. "
        + "Format is 'key1=value1,key2=value2'")
    private String furtherOptions = "";

    @Option(description = "Export solver queries in Smtlib format into a file (for Mathsat5).")
    private boolean logAllQueries = false;

    @Option(description = "Export solver queries in Smtlib format into a file (for Mathsat5).")
    @FileOption(Type.OUTPUT_FILE)
    private Path logfile = Paths.get("mathsat5.%03d.smt2");

    // Parsed key/value view of furtherOptions (populated in the constructor).
    private final ImmutableMap<String, String> furtherOptionsMap;

    private Mathsat5Settings(Configuration config) throws InvalidConfigurationException {
      config.inject(this);

      // "key1=value1,key2=value2" -> map; limit(2) keeps '=' inside values intact.
      MapSplitter optionSplitter = Splitter.on(',').trimResults().omitEmptyStrings()
          .withKeyValueSeparator(Splitter.on('=').limit(2).trimResults());

      try {
        furtherOptionsMap = ImmutableMap.copyOf(optionSplitter.split(furtherOptions));
      } catch (IllegalArgumentException e) {
        throw new InvalidConfigurationException("Invalid Mathsat option in \"" + furtherOptions + "\": " + e.getMessage(), e);
      }
    }
  }

  private final LogManager logger;
  private final Mathsat5FormulaCreator formulaCreator;
  // Handles into the native MathSAT5 library.
  private final long mathsatEnv;
  private final long mathsatConfig;
  private final Mathsat5Settings settings;
  // Global counter so each created environment logs to a distinct file.
  private static final AtomicInteger logfileCounter = new AtomicInteger(0);

  private final ShutdownNotifier shutdownNotifier;
  private final TerminationTest terminationTest;

  private Mathsat5FormulaManager(
      LogManager pLogger,
      long pMathsatConfig,
      AbstractUnsafeFormulaManager<Long> unsafeManager,
      AbstractFunctionFormulaManager<Long> pFunctionManager,
      AbstractBooleanFormulaManager<Long> pBooleanManager,
      AbstractRationalFormulaManager<Long> pNumericManager,
      AbstractBitvectorFormulaManager<Long> pBitpreciseManager,
      Mathsat5Settings pSettings,
      final ShutdownNotifier pShutdownNotifier) {
    super(unsafeManager, pFunctionManager, pBooleanManager, pNumericManager, pBitpreciseManager);
    FormulaCreator<Long> creator = getFormulaCreator();
    if (!(creator instanceof Mathsat5FormulaCreator)) {
      throw new IllegalArgumentException("the formula-creator has to be a Mathsat5FormulaCreator instance!");
    }
    formulaCreator = (Mathsat5FormulaCreator) getFormulaCreator();
    mathsatConfig = pMathsatConfig;
    mathsatEnv = formulaCreator.getEnv();
    settings = pSettings;
    logger = checkNotNull(pLogger);
    shutdownNotifier = checkNotNull(pShutdownNotifier);

    // Polled by MathSAT during solving; lets a shutdown request abort the solver.
    terminationTest = new TerminationTest() {
      @Override
      public boolean shouldTerminate() throws InterruptedException {
        pShutdownNotifier.shutdownIfNecessary();
        return false;
      }
    };
  }

  ShutdownNotifier getShutdownNotifier() {
    return shutdownNotifier;
  }

  /** Unwraps the native MathSAT term handle from a formula object. */
  static long getMsatTerm(Formula pT) {
    return ((Mathsat5Formula)pT).getTerm();
  }

  /**
   * Creates and fully wires a {@link Mathsat5FormulaManager}.
   *
   * @param useIntegers whether the rational theory should use integer semantics
   * @throws InvalidConfigurationException if a user-supplied solver option is rejected
   */
  public static Mathsat5FormulaManager create(LogManager logger,
      Configuration config, ShutdownNotifier pShutdownNotifier, boolean useIntegers) throws InvalidConfigurationException {
    // Init Msat
    Mathsat5Settings settings = new Mathsat5Settings(config);

    long msatConf = msat_create_config();
    msat_set_option_checked(msatConf, "theory.la.split_rat_eq", "false");
    for (Map.Entry<String, String> option : settings.furtherOptionsMap.entrySet()) {
      try {
        msat_set_option_checked(msatConf, option.getKey(), option.getValue());
      } catch (IllegalArgumentException e) {
        // Surface bad user-provided options as a configuration error, keeping the cause.
        throw new InvalidConfigurationException(e.getMessage(), e);
      }
    }

    final long msatEnv = msat_create_env(msatConf);

    // Create Mathsat5FormulaCreator
    Mathsat5FormulaCreator creator = new Mathsat5FormulaCreator(msatEnv, useIntegers);

    // Create managers
    Mathsat5UnsafeFormulaManager unsafeManager = new Mathsat5UnsafeFormulaManager(creator);
    Mathsat5FunctionFormulaManager functionTheory = new Mathsat5FunctionFormulaManager(creator, unsafeManager);
    Mathsat5BooleanFormulaManager booleanTheory = Mathsat5BooleanFormulaManager.create(creator);
    Mathsat5RationalFormulaManager rationalTheory = new Mathsat5RationalFormulaManager(creator, functionTheory, useIntegers);
    Mathsat5BitvectorFormulaManager bitvectorTheory = Mathsat5BitvectorFormulaManager.create(creator);
    return new Mathsat5FormulaManager(logger, msatConf,
        unsafeManager, functionTheory, booleanTheory,
        rationalTheory, bitvectorTheory, settings, pShutdownNotifier);
  }

  BooleanFormula encapsulateBooleanFormula(long t) {
    return formulaCreator.encapsulate(BooleanFormula.class, t);
  }

  /**
   * Parses a formula in SMT-LIB2 syntax.
   *
   * @throws IllegalArgumentException if the input cannot be parsed
   */
  @Override
  public BooleanFormula parse(String pS) throws IllegalArgumentException {
    long f = msat_from_smtlib2(mathsatEnv, pS);
    return encapsulateBooleanFormula(f);
  }

  @Override
  public Appender dumpFormula(final Long f) {
    // Lazy invocation of msat_to_smtlib2 wrapped in an Appender.
    return Appenders.fromToStringMethod(
        new Object() {
          @Override
          public String toString() {
            return msat_to_smtlib2(mathsatEnv, f);
          }
        });
  }

  @Override
  public String getVersion() {
    return msat_get_version();
  }

  /**
   * Creates a new MathSAT environment from the given config.
   *
   * @param shared if true, the new environment shares terms with this manager's environment
   * @param ghostFilter if true, enables MathSAT's DPLL ghost filtering
   */
  long createEnvironment(long cfg, boolean shared, boolean ghostFilter) {
    long env;

    if (ghostFilter) {
      msat_set_option_checked(cfg, "dpll.ghost_filtering", "true");
    }

    msat_set_option_checked(cfg, "theory.la.split_rat_eq", "false");

    for (Map.Entry<String, String> option : settings.furtherOptionsMap.entrySet()) {
      msat_set_option_checked(cfg, option.getKey(), option.getValue());
    }

    if (settings.logAllQueries && settings.logfile != null) {
      // %03d in the logfile pattern is filled with a unique, increasing counter.
      String filename = String.format(settings.logfile.toAbsolutePath().getPath(), logfileCounter.getAndIncrement());
      msat_set_option_checked(cfg, "debug.api_call_trace", "1");
      msat_set_option_checked(cfg, "debug.api_call_trace_filename", filename);
    }

    if (shared) {
      env = msat_create_shared_env(cfg, this.mathsatEnv);
    } else {
      env = msat_create_env(cfg);
    }

    return env;
  }

  /** Registers this manager's shutdown-aware termination test on the given environment. */
  long addTerminationTest(long env) {
    return msat_set_termination_test(env, terminationTest);
  }

  long getMsatEnv() {
    return mathsatEnv;
  }

  /** Releases the native MathSAT environment and configuration. */
  @Override
  public void close() {
    logger.log(Level.FINER, "Freeing Mathsat environment");
    msat_destroy_env(mathsatEnv);
    msat_destroy_config(mathsatConfig);
  }
}
| |
/*
* Copyright 2004-2009 the Seasar Foundation and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.seasar.struts.action;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.apache.struts.Globals;
import org.apache.struts.action.Action;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionServlet;
import org.apache.struts.upload.CommonsMultipartRequestHandler;
import org.apache.struts.upload.MultipartRequestHandler;
import org.apache.struts.upload.MultipartRequestWrapper;
import org.seasar.extension.unit.S2TestCase;
import org.seasar.framework.aop.Aspect;
import org.seasar.framework.aop.Pointcut;
import org.seasar.framework.aop.impl.AspectImpl;
import org.seasar.framework.aop.impl.PointcutImpl;
import org.seasar.framework.aop.interceptors.TraceInterceptor;
import org.seasar.framework.aop.proxy.AopProxy;
import org.seasar.framework.beans.BeanDesc;
import org.seasar.framework.beans.factory.BeanDescFactory;
import org.seasar.struts.action.S2RequestProcessor.IndexParsedResult;
import org.seasar.struts.config.S2ActionMapping;
import org.seasar.struts.config.S2ExecuteConfig;
import org.seasar.struts.config.S2FormBeanConfig;
import org.seasar.struts.config.S2ModuleConfig;
import org.seasar.struts.exception.IndexedPropertyNotListArrayRuntimeException;
import org.seasar.struts.exception.NoParameterizedListRuntimeException;
import org.seasar.struts.util.S2ExecuteConfigUtil;
/**
 * Tests for {@code S2RequestProcessor}: mapping resolution, action/form
 * creation, multipart handling, and the nested/indexed property population
 * logic used to bind request parameters onto action beans.
 *
 * @author higa
 *
 */
public class S2RequestProcessorTest extends S2TestCase {

    @Override
    public void setUp() throws Exception {
        // Register the action and form components the tests below resolve
        // from the container.
        register(BbbAction.class, "aaa_bbbAction");
        register(BbbForm.class, "bbbForm");
    }

    /**
     * processMapping() resolves the configured mapping for a path and stores
     * it in the request under Globals.MAPPING_KEY.
     *
     * @throws Exception
     */
    public void testProcessMapping() throws Exception {
        S2ActionMapping mapping = new S2ActionMapping();
        mapping.setPath("/aaa/bbb");
        mapping.setComponentDef(getComponentDef("aaa_bbbAction"));
        S2ExecuteConfig executeConfig = new S2ExecuteConfig();
        executeConfig.setMethod(getClass().getMethod("getClass"));
        mapping.addExecuteConfig(executeConfig);
        S2RequestProcessor processor = new S2RequestProcessor();
        S2ModuleConfig moduleConfig = new S2ModuleConfig("");
        moduleConfig.addActionConfig(mapping);
        processor.init(new ActionServlet(), moduleConfig);
        ActionMapping am = processor.processMapping(getRequest(),
                getResponse(), "/aaa/bbb");
        assertNotNull(am);
        assertSame(am, mapping);
        assertNotNull(getRequest().getAttribute(Globals.MAPPING_KEY));
    }

    /**
     * processExecuteConfig() selects an execute config and exposes it via
     * S2ExecuteConfigUtil.
     *
     * @throws Exception
     */
    public void testProcessExecuteConfig() throws Exception {
        S2ActionMapping mapping = new S2ActionMapping();
        mapping.setPath("/aaa/bbb");
        mapping.setComponentDef(getComponentDef("aaa_bbbAction"));
        S2ExecuteConfig executeConfig = new S2ExecuteConfig();
        executeConfig.setMethod(getClass().getMethod("getClass"));
        mapping.addExecuteConfig(executeConfig);
        S2RequestProcessor processor = new S2RequestProcessor();
        S2ModuleConfig moduleConfig = new S2ModuleConfig("");
        moduleConfig.addActionConfig(mapping);
        processor.init(new ActionServlet(), moduleConfig);
        processor.processExecuteConfig(getRequest(), getResponse(), mapping);
        assertNotNull(S2ExecuteConfigUtil.getExecuteConfig());
    }

    /**
     * processActionCreate() wraps the container-managed action in an
     * ActionWrapper and injects the servlet.
     *
     * @throws Exception
     */
    public void testProcessActionCreate() throws Exception {
        S2ActionMapping mapping = new S2ActionMapping();
        mapping.setComponentDef(getComponentDef("aaa_bbbAction"));
        S2RequestProcessor processor = new S2RequestProcessor();
        S2ModuleConfig moduleConfig = new S2ModuleConfig("");
        processor.init(new ActionServlet(), moduleConfig);
        Action action = processor.processActionCreate(getRequest(),
                getResponse(), mapping);
        assertNotNull(action);
        assertEquals(ActionWrapper.class, action.getClass());
        assertNotNull(action.getServlet());
    }

    /**
     * processActionForm() creates an ActionFormWrapper and stores it as a
     * request attribute under the form name.
     *
     * @throws Exception
     */
    public void testProcessActionForm() throws Exception {
        S2ActionMapping mapping = new S2ActionMapping();
        mapping.setName("aaa_bbbActionForm");
        mapping.setComponentDef(getComponentDef("aaa_bbbAction"));
        S2RequestProcessor processor = new S2RequestProcessor();
        S2ModuleConfig moduleConfig = new S2ModuleConfig("");
        ActionFormWrapperClass wrapperClass = new ActionFormWrapperClass(
                mapping);
        S2FormBeanConfig formConfig = new S2FormBeanConfig();
        formConfig.setName("aaa_bbbActionForm");
        formConfig.setDynaClass(wrapperClass);
        moduleConfig.addFormBeanConfig(formConfig);
        processor.init(new ActionServlet(), moduleConfig);
        ActionForm actionForm = processor.processActionForm(getRequest(),
                getResponse(), mapping);
        assertNotNull(actionForm);
        assertEquals(ActionFormWrapper.class, actionForm.getClass());
        assertNotNull(getRequest().getAttribute("aaa_bbbActionForm"));
    }

    /**
     * getMultipartHandler() can instantiate a handler from a class name.
     *
     * @throws Exception
     */
    public void testGetMultipartHandler_request() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        assertNotNull(processor
                .getMultipartHandler("org.apache.struts.upload.CommonsMultipartRequestHandler"));
    }

    /**
     * getMultipartHandler() falls back to the module config's handler when
     * no class name is given.
     *
     * @throws Exception
     */
    public void testGetMultipartHandler_moduleConfig() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        S2ModuleConfig moduleConfig = new S2ModuleConfig("");
        processor.init(null, moduleConfig);
        assertNotNull(processor.getMultipartHandler(null));
    }

    /**
     * getAllParameters() merges multipart elements with ordinary request
     * parameters.
     *
     * @throws Exception
     */
    public void testGetAllParameters() throws Exception {
        // Stub handler that contributes one multipart element ("aaa").
        MultipartRequestHandler multipartHandler = new CommonsMultipartRequestHandler() {

            @SuppressWarnings("unchecked")
            @Override
            public Hashtable getAllElements() {
                Hashtable elements = new Hashtable();
                elements.put("aaa", "111");
                return elements;
            }
        };
        getRequest().addParameter("bbb", "222");
        HttpServletRequest request = new MultipartRequestWrapper(getRequest());
        S2RequestProcessor processor = new S2RequestProcessor();
        Map<String, Object> params = processor.getAllParameters(request,
                multipartHandler);
        assertEquals("111", params.get("aaa"));
        assertEquals("222", ((String[]) params.get("bbb"))[0]);
    }

    /**
     * setProperty() assigns a simple top-level property.
     *
     * @throws Exception
     */
    public void testSetProperty_simple() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        processor.setProperty(bean, "hoge", new String[] { "111" });
        assertEquals("111", bean.hoge);
    }

    /**
     * setProperty() with dot notation populates a nested bean, creating it
     * on demand.
     *
     * @throws Exception
     */
    public void testSetProperty_nested_bean() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        processor.setProperty(bean, "myBean.aaa", new String[] { "111" });
        assertEquals("111", bean.myBean.aaa);
    }

    /**
     * setProperty() with dot notation stores an entry into a nested Map.
     *
     * @throws Exception
     */
    public void testSetProperty_nested_map() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        processor.setProperty(bean, "map.aaa", new String[] { "111" });
        assertEquals("111", bean.map.get("aaa"));
    }

    /**
     * setProperty() also supports the parenthesized key syntax "map(key)".
     *
     * @throws Exception
     */
    public void testSetProperty_nested_map2() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        processor.setProperty(bean, "map(aaa)", new String[] { "111" });
        assertEquals("111", bean.map.get("aaa"));
    }

    /**
     * setProperty() with an index expands the array and assigns the element.
     *
     * @throws Exception
     */
    public void testSetProperty_indexed_array() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        processor.setProperty(bean, "hogeArray[1]", new String[] { "111" });
        assertEquals("111", bean.hogeArray[1]);
    }

    /**
     * setProperty() with an index grows the list and assigns the element.
     *
     * @throws Exception
     */
    public void testSetProperty_indexed_list() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        processor.setProperty(bean, "hogeList[1]", new String[] { "111" });
        assertEquals("111", bean.hogeList.get(1));
    }

    /**
     * setProperty() handles a multi-dimensional indexed path ending in a
     * nested bean property.
     *
     * @throws Exception
     */
    public void testSetProperty_indexed_nested_bean() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        processor.setProperty(bean, "myBeanArrayArray[1][1].aaa",
                new String[] { "111" });
        assertEquals("111", bean.myBeanArrayArray[1][1].aaa);
    }

    /**
     * setProperty() handles a multi-dimensional indexed path ending in a
     * nested Map entry.
     *
     * @throws Exception
     */
    public void testSetProperty_indexed_nested_map() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        processor.setProperty(bean, "mapArrayArray[1][1].aaa",
                new String[] { "111" });
        assertEquals("111", bean.mapArrayArray[1][1].get("aaa"));
    }

    /**
     * setProperty() handles an indexed List of Maps.
     *
     * @throws Exception
     */
    public void testSetProperty_indexed_list_map() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        processor.setProperty(bean, "mapList[1].aaa", new String[] { "111" });
        assertEquals("111", bean.mapList.get(1).get("aaa"));
    }

    /**
     * setProperty() rejects an indexed bean path with no trailing property
     * name with IllegalArgumentException.
     *
     * @throws Exception
     */
    public void testSetProperty_illegal() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        try {
            processor.setProperty(bean, "myBeanArrayArray[1][1]",
                    new String[] { "111" });
            fail();
        } catch (IllegalArgumentException e) {
            System.out.println(e);
        }
    }

    /**
     * setSimpleProperty() assigns a scalar String property from the first
     * array element.
     *
     * @throws Exception
     */
    public void testSetSimpleProperty() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        processor.setSimpleProperty(bean, "hoge", new String[] { "111" });
        assertEquals("111", bean.hoge);
    }

    /**
     * setSimpleProperty() silently ignores an unknown property name.
     *
     * @throws Exception
     */
    public void testSetSimpleProperty_ignore() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        processor.setSimpleProperty(bean, "xxx", null);
    }

    /**
     * setSimpleProperty() assigns the whole value array to an array
     * property.
     *
     * @throws Exception
     */
    public void testSetSimpleProperty_array() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        processor.setSimpleProperty(bean, "hogeArray", new String[] { "111" });
        assertEquals(1, bean.hogeArray.length);
        assertEquals("111", bean.hogeArray[0]);
    }

    /**
     * setSimpleProperty() converts the value array into a List property.
     *
     * @throws Exception
     */
    public void testSetSimpleProperty_list() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        processor.setSimpleProperty(bean, "hogeList", new String[] { "111" });
        assertEquals(1, bean.hogeList.size());
        assertEquals("111", bean.hogeList.get(0));
    }

    /**
     * setSimpleProperty() with an empty value array resets a scalar
     * property to null.
     *
     * @throws Exception
     */
    public void testSetSimpleProperty_empty() throws Exception {
        BbbAction bean = new BbbAction();
        bean.hoge = "111";
        S2RequestProcessor processor = new S2RequestProcessor();
        processor.setSimpleProperty(bean, "hoge", new String[0]);
        assertNull(bean.hoge);
    }

    /**
     * getSimpleProperty() returns the existing value when the property is
     * already set.
     *
     * @throws Exception
     */
    public void testGetSimpleProperty_notNull() throws Exception {
        BbbAction bean = new BbbAction();
        MyBean myBean = new MyBean();
        bean.myBean = myBean;
        S2RequestProcessor processor = new S2RequestProcessor();
        assertSame(myBean, processor.getSimpleProperty(bean, "myBean"));
    }

    /**
     * getSimpleProperty() lazily instantiates a null bean property.
     *
     * @throws Exception
     */
    public void testGetSimpleProperty_null() throws Exception {
        BbbAction bean = new BbbAction();
        S2RequestProcessor processor = new S2RequestProcessor();
        assertNotNull(processor.getSimpleProperty(bean, "myBean"));
        assertNotNull(bean.myBean);
    }

    /**
     * parseIndex() reads one index from the remainder after the opening
     * '[' and returns the trailing property name.
     *
     * @throws Exception
     */
    public void testParseIndex() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        // Input starts after the '[': "12].aaa" -> index 12, name "aaa".
        IndexParsedResult result = processor.parseIndex("12].aaa");
        assertEquals(1, result.indexes.length);
        assertEquals(12, result.indexes[0]);
        assertEquals("aaa", result.name);
    }

    /**
     * parseIndex() collects consecutive indexes of a nested expression.
     *
     * @throws Exception
     */
    public void testParseIndex_nest() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        IndexParsedResult result = processor.parseIndex("12][34].aaa");
        assertEquals(2, result.indexes.length);
        assertEquals(12, result.indexes[0]);
        assertEquals(34, result.indexes[1]);
        assertEquals("aaa", result.name);
    }

    /**
     * parseIndex() with no trailing property yields an empty name.
     *
     * @throws Exception
     */
    public void testParseIndex_end() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        IndexParsedResult result = processor.parseIndex("12]");
        assertEquals(1, result.indexes.length);
        assertEquals(12, result.indexes[0]);
        assertEquals("", result.name);
    }

    /**
     * getIndexedProperty() auto-creates the array and the element at the
     * requested index.
     *
     * @throws Exception
     */
    public void testGetIndexedProperty_array() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        BbbAction bean = new BbbAction();
        MyBean result = (MyBean) processor.getIndexedProperty(bean,
                "myBeanArray", new int[] { 0 });
        assertNotNull(result);
        assertEquals(1, bean.myBeanArray.length);
    }

    /**
     * getIndexedProperty() expands a nested array in each dimension while
     * preserving existing elements.
     *
     * @throws Exception
     */
    public void testGetIndexedProperty_array_nest() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        BbbAction bean = new BbbAction();
        MyBean myBean = new MyBean();
        myBean.aaa = "111";
        bean.myBeanArrayArray = new MyBean[][] { new MyBean[] { myBean } };
        MyBean result = (MyBean) processor.getIndexedProperty(bean,
                "myBeanArrayArray", new int[] { 1, 2 });
        assertNotNull(result);
        assertEquals(2, bean.myBeanArrayArray.length);
        assertEquals(1, bean.myBeanArrayArray[0].length);
        assertEquals("111", bean.myBeanArrayArray[0][0].aaa);
        assertEquals(3, bean.myBeanArrayArray[1].length);
    }

    /**
     * getIndexedProperty() auto-creates a List element whose type comes
     * from the List's generic parameter.
     *
     * @throws Exception
     */
    public void testGetIndexedProperty_list_bean() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        BbbAction bean = new BbbAction();
        MyBean result = (MyBean) processor.getIndexedProperty(bean,
                "myBeanList", new int[] { 0 });
        assertNotNull(result);
        assertEquals(1, bean.myBeanList.size());
    }

    /**
     * getIndexedProperty() auto-creates a Map element in a List of Maps.
     *
     * @throws Exception
     */
    @SuppressWarnings("unchecked")
    public void testGetIndexedProperty_list_map() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        BbbAction bean = new BbbAction();
        Map result = (Map) processor.getIndexedProperty(bean, "mapList",
                new int[] { 0 });
        assertNotNull(result);
        assertEquals(1, bean.mapList.size());
    }

    /**
     * getIndexedProperty() expands nested Lists in each dimension while
     * preserving existing elements.
     *
     * @throws Exception
     */
    public void testGetIndexedProperty_list_nest() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        BbbAction bean = new BbbAction();
        MyBean myBean = new MyBean();
        myBean.aaa = "111";
        bean.myBeanListList = new ArrayList<List<MyBean>>();
        bean.myBeanListList.add(Arrays.asList(myBean));
        MyBean result = (MyBean) processor.getIndexedProperty(bean,
                "myBeanListList", new int[] { 1, 2 });
        assertNotNull(result);
        assertEquals(2, bean.myBeanListList.size());
        assertEquals(1, bean.myBeanListList.get(0).size());
        assertEquals("111", bean.myBeanListList.get(0).get(0).aaa);
        assertEquals(3, bean.myBeanListList.get(1).size());
    }

    /**
     * getIndexedProperty() fails when a nested index is used on a List
     * whose element type is not parameterized as a List.
     *
     * @throws Exception
     */
    public void testGetIndexedProperty_list_nest_notParameterizedList()
            throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        BbbAction bean = new BbbAction();
        try {
            processor
                    .getIndexedProperty(bean, "myBeanList", new int[] { 1, 2 });
            fail();
        } catch (NoParameterizedListRuntimeException e) {
            System.out.println(e.getMessage());
        }
    }

    /**
     * getIndexedProperty() rejects indexed access on a property that is
     * neither a List nor an array, reporting class and property name.
     *
     * @throws Exception
     */
    public void testGetIndexedProperty_notListArray() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        BbbAction bean = new BbbAction();
        try {
            processor.getIndexedProperty(bean, "hoge", new int[] { 1, 2 });
            fail();
        } catch (IndexedPropertyNotListArrayRuntimeException e) {
            System.out.println(e.getMessage());
            assertEquals(BbbAction.class, e.getTargetClass());
            assertEquals("hoge", e.getPropertyName());
        }
    }

    /**
     * minIndex() returns the smaller index, treating negatives specially.
     *
     * @throws Exception
     */
    public void testMinIndex() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        assertEquals(1, processor.minIndex(1, 2));
        assertEquals(1, processor.minIndex(1, -1));
        assertEquals(1, processor.minIndex(-1, 1));
        assertEquals(-2, processor.minIndex(-1, -2));
    }

    /**
     * expand() grows a one-dimensional array to cover the requested index,
     * keeping existing values.
     *
     * @throws Exception
     */
    public void testExpand() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        int[] result = (int[]) processor.expand(new int[] { 1 },
                new int[] { 1 }, int.class);
        assertEquals(2, result.length);
        assertEquals(1, result[0]);
    }

    /**
     * expand() grows each dimension of a nested primitive array.
     *
     * @throws Exception
     */
    public void testExpand_nest() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        int[][] result = (int[][]) processor.expand(
                new int[][] { new int[] { 1 } }, new int[] { 1, 2 }, int.class);
        assertEquals(2, result.length);
        assertEquals(1, result[0].length);
        assertEquals(1, result[0][0]);
        assertEquals(3, result[1].length);
    }

    /**
     * expand() grows each dimension of a nested bean array, keeping
     * existing elements.
     *
     * @throws Exception
     */
    public void testExpand_nest_bean() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        MyBean[][] result = (MyBean[][]) processor.expand(
                new MyBean[][] { new MyBean[] { new MyBean() } }, new int[] {
                        1, 2 }, MyBean.class);
        assertEquals(2, result.length);
        assertEquals(1, result[0].length);
        assertNotNull(result[0][0]);
        assertEquals(3, result[1].length);
    }

    /**
     * getArrayElementType() unwraps the component type through the given
     * number of array dimensions.
     *
     * @throws Exception
     */
    public void testGetArrayElementType() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        assertEquals(MyBean.class, processor.getArrayElementType(
                new MyBean[0][0].getClass(), 2));
    }

    /**
     * getArrayValue() reads a nested element, creating a bean element when
     * it is null.
     *
     * @throws Exception
     */
    public void testGetArrayValue() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        assertEquals(1, processor.getArrayValue(
                new int[][] { new int[] { 1 } }, new int[] { 0, 0 }, int.class));
        assertNotNull(processor.getArrayValue(
                new MyBean[][] { new MyBean[] { null } }, new int[] { 0, 0 },
                MyBean.class));
    }

    /**
     * setArrayValue() writes an element of a one-dimensional array.
     *
     * @throws Exception
     */
    public void testSetArrayValue() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        int[] array = new int[] { 0 };
        processor.setArrayValue(array, new int[] { 0 }, 1);
        assertEquals(1, array[0]);
    }

    /**
     * setArrayValue() writes an element of a nested array.
     *
     * @throws Exception
     */
    public void testSetArrayValue_nest() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        int[][] array = new int[][] { new int[] { 0 } };
        processor.setArrayValue(array, new int[] { 0, 0 }, 1);
        assertEquals(1, array[0][0]);
    }

    /**
     * getRealClass() unwraps an AOP-generated proxy class back to the
     * original class.
     *
     * @throws Exception
     */
    public void testGetRealClass() throws Exception {
        Pointcut pointcut = new PointcutImpl(new String[] { "execte" });
        Aspect aspect = new AspectImpl(new TraceInterceptor(), pointcut);
        AopProxy aopProxy = new AopProxy(BbbAction.class,
                new Aspect[] { aspect });
        BbbAction proxy = (BbbAction) aopProxy.create();
        System.out.println(proxy.getClass());
        S2RequestProcessor processor = new S2RequestProcessor();
        assertEquals(BbbAction.class, processor.getRealClass(proxy.getClass()));
    }

    /**
     * convertClass() keeps concrete classes and maps abstract collection
     * types to concrete defaults.
     *
     * @throws Exception
     */
    public void testConvertClass() throws Exception {
        S2RequestProcessor processor = new S2RequestProcessor();
        assertEquals(LinkedHashMap.class, processor
                .convertClass(LinkedHashMap.class));
        assertEquals(HashMap.class, processor.convertClass(AbstractMap.class));
    }

    /**
     * exportPropertiesToRequest() exports public action properties (but not
     * private ones) and wraps form beans in a BeanWrapper.
     *
     * @throws Exception
     */
    public void testExportPropertiesToRequest() throws Exception {
        S2ActionMapping mapping = new S2ActionMapping();
        mapping.setPath("/aaa/bbb");
        mapping.setComponentDef(getComponentDef("aaa_bbbAction"));
        S2ExecuteConfig executeConfig = new S2ExecuteConfig();
        executeConfig.setMethod(getClass().getMethod("getClass"));
        mapping.addExecuteConfig(executeConfig);
        mapping.setActionFormField(BbbAction.class.getDeclaredField("bbbForm"));
        BeanDesc beanDesc = BeanDescFactory.getBeanDesc(BbbForm.class);
        S2DynaProperty property = new S2DynaProperty(beanDesc
                .getPropertyDesc("myBean2"));
        ActionFormWrapperClass wrapperClass = new ActionFormWrapperClass(
                mapping);
        wrapperClass.addDynaProperty(property);
        ActionFormWrapper formWrapper = new ActionFormWrapper(wrapperClass);
        getRequest().setAttribute(mapping.getAttribute(), formWrapper);
        S2RequestProcessor processor = new S2RequestProcessor();
        S2ModuleConfig moduleConfig = new S2ModuleConfig("");
        moduleConfig.addActionConfig(mapping);
        processor.init(new ActionServlet(), moduleConfig);
        BbbAction action = (BbbAction) getComponent(BbbAction.class);
        action.hoge = "111";
        action.foo = "222";
        action.bbbForm.myBean2 = new MyBean();
        processor.exportPropertiesToRequest(getRequest(), mapping,
                executeConfig);
        assertEquals("111", getRequest().getAttribute("hoge"));
        // "foo" is private (setter only), so it must not be exported.
        assertNull(getRequest().getAttribute("foo"));
        assertEquals(BeanWrapper.class, getRequest().getAttribute("myBean2")
                .getClass());
    }

    /**
     * Fixture action bean exercising every property shape the processor
     * supports (scalars, arrays, lists, maps, nested beans, and
     * combinations thereof).
     */
    public static class BbbAction {

        /**
         * Identifier property.
         */
        public String id;

        /**
         * Simple scalar property.
         */
        public String hoge;

        /**
         * Array property.
         */
        public String[] hogeArray;

        /**
         * List property.
         */
        public List<String> hogeList;

        /**
         * Nested bean property.
         */
        public MyBean myBean;

        /**
         * Array-of-beans property.
         */
        public MyBean[] myBeanArray;

        /**
         * Two-dimensional array-of-beans property.
         */
        public MyBean[][] myBeanArrayArray;

        /**
         * List-of-beans property.
         */
        public List<MyBean> myBeanList;

        /**
         * Nested list-of-lists property.
         */
        public List<List<MyBean>> myBeanListList;

        /**
         * Map property.
         */
        public Map<String, Object> map;

        /**
         * Two-dimensional array-of-maps property.
         */
        public Map<String, Object>[][] mapArrayArray;

        /**
         * List-of-maps property.
         */
        public List<Map<String, Object>> mapList;

        /**
         * Nested form property.
         */
        public BbbForm bbbForm;

        // Private on purpose: must NOT be exported to the request.
        @SuppressWarnings("unused")
        private String foo;

        /**
         * @return the logical view name
         */
        public String execute() {
            return "success";
        }

        /**
         * Reset hook invoked by the form lifecycle.
         */
        public void reset() {
            hoge = "aaa";
        }

        /**
         * @param foo
         */
        public void setFoo(String foo) {
            this.foo = foo;
        }
    }

    /**
     * Fixture form bean holding a single nested bean property.
     */
    public static class BbbForm {

        /**
         * Nested bean property exported via the dyna-property.
         */
        public MyBean myBean2;
    }

    /**
     * Minimal fixture bean with one String property.
     */
    public static class MyBean {

        /**
         * Simple String property.
         */
        public String aaa;
    }
}
| |
/*
* Copyright 2002-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.oauth2.client.web.reactive.function.client;
import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
import reactor.util.context.Context;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.security.authentication.AbstractAuthenticationToken;
import org.springframework.security.authentication.AnonymousAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.oauth2.client.ClientAuthorizationException;
import org.springframework.security.oauth2.client.ClientCredentialsOAuth2AuthorizedClientProvider;
import org.springframework.security.oauth2.client.OAuth2AuthorizationFailureHandler;
import org.springframework.security.oauth2.client.OAuth2AuthorizeRequest;
import org.springframework.security.oauth2.client.OAuth2AuthorizedClient;
import org.springframework.security.oauth2.client.OAuth2AuthorizedClientManager;
import org.springframework.security.oauth2.client.OAuth2AuthorizedClientProvider;
import org.springframework.security.oauth2.client.OAuth2AuthorizedClientProviderBuilder;
import org.springframework.security.oauth2.client.RefreshTokenOAuth2AuthorizedClientProvider;
import org.springframework.security.oauth2.client.RemoveAuthorizedClientOAuth2AuthorizationFailureHandler;
import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken;
import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient;
import org.springframework.security.oauth2.client.endpoint.OAuth2ClientCredentialsGrantRequest;
import org.springframework.security.oauth2.client.registration.ClientRegistration;
import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository;
import org.springframework.security.oauth2.client.web.DefaultOAuth2AuthorizedClientManager;
import org.springframework.security.oauth2.client.web.OAuth2AuthorizedClientRepository;
import org.springframework.security.oauth2.core.OAuth2AuthorizationException;
import org.springframework.security.oauth2.core.OAuth2Error;
import org.springframework.security.oauth2.core.OAuth2ErrorCodes;
import org.springframework.security.oauth2.core.endpoint.OAuth2ParameterNames;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import org.springframework.web.reactive.function.client.ClientRequest;
import org.springframework.web.reactive.function.client.ClientResponse;
import org.springframework.web.reactive.function.client.ExchangeFilterFunction;
import org.springframework.web.reactive.function.client.ExchangeFunction;
import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.reactive.function.client.WebClientResponseException;
/**
* Provides an easy mechanism for using an {@link OAuth2AuthorizedClient} to make OAuth
* 2.0 requests by including the {@link OAuth2AuthorizedClient#getAccessToken() access
* token} as a bearer token.
*
* <p>
 * <b>NOTE:</b> This class is intended to be used in a {@code Servlet} environment.
*
* <p>
* Example usage:
*
* <pre>
* ServletOAuth2AuthorizedClientExchangeFilterFunction oauth2 = new ServletOAuth2AuthorizedClientExchangeFilterFunction(authorizedClientManager);
* WebClient webClient = WebClient.builder()
* .apply(oauth2.oauth2Configuration())
* .build();
* Mono<String> response = webClient
* .get()
* .uri(uri)
* .attributes(oauth2AuthorizedClient(authorizedClient))
* // ...
* .retrieve()
* .bodyToMono(String.class);
* </pre>
*
* <h3>Authentication and Authorization Failures</h3>
*
* <p>
* Since 5.3, this filter function has the ability to forward authentication (HTTP 401
* Unauthorized) and authorization (HTTP 403 Forbidden) failures from an OAuth 2.0
* Resource Server to a {@link OAuth2AuthorizationFailureHandler}. A
* {@link RemoveAuthorizedClientOAuth2AuthorizationFailureHandler} can be used to remove
* the cached {@link OAuth2AuthorizedClient}, so that future requests will result in a new
* token being retrieved from an Authorization Server, and sent to the Resource Server.
*
* <p>
* If the
* {@link #ServletOAuth2AuthorizedClientExchangeFilterFunction(ClientRegistrationRepository, OAuth2AuthorizedClientRepository)}
* constructor is used, a {@link RemoveAuthorizedClientOAuth2AuthorizationFailureHandler}
* will be configured automatically.
*
* <p>
* If the
* {@link #ServletOAuth2AuthorizedClientExchangeFilterFunction(OAuth2AuthorizedClientManager)}
* constructor is used, a {@link RemoveAuthorizedClientOAuth2AuthorizationFailureHandler}
* will <em>NOT</em> be configured automatically. It is recommended that you configure one
* via {@link #setAuthorizationFailureHandler(OAuth2AuthorizationFailureHandler)}.
*
* @author Rob Winch
* @author Joe Grandja
* @author Roman Matiushchenko
* @since 5.1
* @see OAuth2AuthorizedClientManager
* @see DefaultOAuth2AuthorizedClientManager
* @see OAuth2AuthorizedClientProvider
* @see OAuth2AuthorizedClientProviderBuilder
*/
public final class ServletOAuth2AuthorizedClientExchangeFilterFunction implements ExchangeFilterFunction {

	// Same key as in
	// SecurityReactorContextConfiguration.SecurityReactorContextSubscriber.SECURITY_CONTEXT_ATTRIBUTES
	static final String SECURITY_REACTOR_CONTEXT_ATTRIBUTES_KEY = "org.springframework.security.SECURITY_CONTEXT_ATTRIBUTES";

	/**
	 * The request attribute name used to locate the {@link OAuth2AuthorizedClient}.
	 */
	private static final String OAUTH2_AUTHORIZED_CLIENT_ATTR_NAME = OAuth2AuthorizedClient.class.getName();

	/**
	 * The request attribute name used to locate the client registration id with which an
	 * {@link OAuth2AuthorizedClient} is resolved.
	 */
	private static final String CLIENT_REGISTRATION_ID_ATTR_NAME = OAuth2AuthorizedClient.class.getName()
			.concat(".CLIENT_REGISTRATION_ID");

	/**
	 * The request attribute name used to locate the {@link Authentication} (the resource
	 * owner) associated with the request.
	 */
	private static final String AUTHENTICATION_ATTR_NAME = Authentication.class.getName();

	/**
	 * The request attribute name used to locate the current {@link HttpServletRequest}.
	 */
	private static final String HTTP_SERVLET_REQUEST_ATTR_NAME = HttpServletRequest.class.getName();

	/**
	 * The request attribute name used to locate the current {@link HttpServletResponse}.
	 */
	private static final String HTTP_SERVLET_RESPONSE_ATTR_NAME = HttpServletResponse.class.getName();

	// Principal used for authorize requests when no Authentication is available.
	private static final Authentication ANONYMOUS_AUTHENTICATION = new AnonymousAuthenticationToken("anonymous",
			"anonymousUser", AuthorityUtils.createAuthorityList("ROLE_ANONYMOUS"));

	// Clock skew used when deciding whether an access token is about to expire.
	// Only honored when this instance built its own authorized client manager.
	@Deprecated
	private Duration accessTokenExpiresSkew = Duration.ofMinutes(1);

	// Optional override of the token response client for the client_credentials grant.
	@Deprecated
	private OAuth2AccessTokenResponseClient<OAuth2ClientCredentialsGrantRequest> clientCredentialsTokenResponseClient;

	// Obtains/refreshes authorized clients; may be null when the no-arg constructor is
	// used, in which case only explicitly supplied authorized clients are honored.
	private OAuth2AuthorizedClientManager authorizedClientManager;

	// True when this instance constructed its own DefaultOAuth2AuthorizedClientManager,
	// which the deprecated setters are allowed to reconfigure.
	private boolean defaultAuthorizedClientManager;

	// If true, fall back to the current OAuth2AuthenticationToken's registration id.
	private boolean defaultOAuth2AuthorizedClient;

	// Registration id used when the request attributes do not supply one.
	private String defaultClientRegistrationId;

	// Post-processes responses (e.g. forwards authorization failures). Initialized to a
	// pass-through handler so that filter() does not NPE when the no-arg constructor is
	// used; the other constructors and setAuthorizationFailureHandler() replace it.
	private ClientResponseHandler clientResponseHandler = (request, responseMono) -> responseMono;

	public ServletOAuth2AuthorizedClientExchangeFilterFunction() {
	}
	/**
	 * Constructs a {@code ServletOAuth2AuthorizedClientExchangeFilterFunction} using the
	 * provided parameters.
	 *
	 * <p>
	 * When this constructor is used, authentication (HTTP 401) and authorization (HTTP
	 * 403) failures returned from an OAuth 2.0 Resource Server will <em>NOT</em> be
	 * forwarded to an {@link OAuth2AuthorizationFailureHandler}. Therefore, future
	 * requests to the Resource Server will most likely use the same (likely invalid)
	 * token, resulting in the same errors returned from the Resource Server. It is
	 * recommended to configure a
	 * {@link RemoveAuthorizedClientOAuth2AuthorizationFailureHandler} via
	 * {@link #setAuthorizationFailureHandler(OAuth2AuthorizationFailureHandler)} so that
	 * authentication and authorization failures returned from a Resource Server will
	 * result in removing the authorized client, so that a new token is retrieved for
	 * future requests.
	 * @param authorizedClientManager the {@link OAuth2AuthorizedClientManager} which
	 * manages the authorized client(s)
	 * @since 5.2
	 */
	public ServletOAuth2AuthorizedClientExchangeFilterFunction(OAuth2AuthorizedClientManager authorizedClientManager) {
		Assert.notNull(authorizedClientManager, "authorizedClientManager cannot be null");
		this.authorizedClientManager = authorizedClientManager;
		// No failure forwarding by default: responses pass through unchanged.
		this.clientResponseHandler = (request, responseMono) -> responseMono;
	}
	/**
	 * Constructs a {@code ServletOAuth2AuthorizedClientExchangeFilterFunction} using the
	 * provided parameters.
	 *
	 * <p>
	 * Since 5.3, when this constructor is used, authentication (HTTP 401) and
	 * authorization (HTTP 403) failures returned from an OAuth 2.0 Resource Server will
	 * be forwarded to a {@link RemoveAuthorizedClientOAuth2AuthorizationFailureHandler},
	 * which will potentially remove the {@link OAuth2AuthorizedClient} from the given
	 * {@link OAuth2AuthorizedClientRepository}, depending on the OAuth 2.0 error code
	 * returned. Authentication failures returned from an OAuth 2.0 Resource Server
	 * typically indicate that the token is invalid, and should not be used in future
	 * requests. Removing the authorized client from the repository will ensure that the
	 * existing token will not be sent for future requests to the Resource Server, and a
	 * new token is retrieved from the Authorization Server and used for future requests
	 * to the Resource Server.
	 * @param clientRegistrationRepository the repository of client registrations
	 * @param authorizedClientRepository the repository of authorized clients
	 */
	public ServletOAuth2AuthorizedClientExchangeFilterFunction(
			ClientRegistrationRepository clientRegistrationRepository,
			OAuth2AuthorizedClientRepository authorizedClientRepository) {
		// On authorization failures, evict the authorized client from the repository so
		// that a fresh token is obtained on the next request.
		OAuth2AuthorizationFailureHandler authorizationFailureHandler = new RemoveAuthorizedClientOAuth2AuthorizationFailureHandler(
				(clientRegistrationId, principal, attributes) -> removeAuthorizedClient(authorizedClientRepository,
						clientRegistrationId, principal, attributes));
		DefaultOAuth2AuthorizedClientManager defaultAuthorizedClientManager = new DefaultOAuth2AuthorizedClientManager(
				clientRegistrationRepository, authorizedClientRepository);
		defaultAuthorizedClientManager.setAuthorizationFailureHandler(authorizationFailureHandler);
		this.authorizedClientManager = defaultAuthorizedClientManager;
		// Record that we own the manager so the deprecated setters may reconfigure it.
		this.defaultAuthorizedClientManager = true;
		// Also forward 401/403 resource-server responses to the failure handler.
		this.clientResponseHandler = new AuthorizationFailureForwarder(authorizationFailureHandler);
	}
private void removeAuthorizedClient(OAuth2AuthorizedClientRepository authorizedClientRepository,
String clientRegistrationId, Authentication principal, Map<String, Object> attributes) {
HttpServletRequest request = getRequest(attributes);
HttpServletResponse response = getResponse(attributes);
authorizedClientRepository.removeAuthorizedClient(clientRegistrationId, principal, request, response);
}
	/**
	 * Sets the {@link OAuth2AccessTokenResponseClient} used for getting an
	 * {@link OAuth2AuthorizedClient} for the client_credentials grant.
	 * @param clientCredentialsTokenResponseClient the client to use
	 * @deprecated Use
	 * {@link #ServletOAuth2AuthorizedClientExchangeFilterFunction(OAuth2AuthorizedClientManager)}
	 * instead. Create an instance of
	 * {@link ClientCredentialsOAuth2AuthorizedClientProvider} configured with a
	 * {@link ClientCredentialsOAuth2AuthorizedClientProvider#setAccessTokenResponseClient(OAuth2AccessTokenResponseClient)
	 * DefaultClientCredentialsTokenResponseClient} (or a custom one) and then supply it
	 * to
	 * {@link DefaultOAuth2AuthorizedClientManager#setAuthorizedClientProvider(OAuth2AuthorizedClientProvider)
	 * DefaultOAuth2AuthorizedClientManager}.
	 */
	@Deprecated
	public void setClientCredentialsTokenResponseClient(
			OAuth2AccessTokenResponseClient<OAuth2ClientCredentialsGrantRequest> clientCredentialsTokenResponseClient) {
		Assert.notNull(clientCredentialsTokenResponseClient, "clientCredentialsTokenResponseClient cannot be null");
		// Only legal when this instance built its own DefaultOAuth2AuthorizedClientManager.
		Assert.state(this.defaultAuthorizedClientManager,
				"The client cannot be set when the constructor used is \"ServletOAuth2AuthorizedClientExchangeFilterFunction(OAuth2AuthorizedClientManager)\". "
						+ "Instead, use the constructor \"ServletOAuth2AuthorizedClientExchangeFilterFunction(ClientRegistrationRepository, OAuth2AuthorizedClientRepository)\".");
		this.clientCredentialsTokenResponseClient = clientCredentialsTokenResponseClient;
		// Rebuild the default manager's provider chain so the override takes effect.
		updateDefaultAuthorizedClientManager();
	}
	/**
	 * Rebuilds the provider chain of the internally-constructed
	 * {@link DefaultOAuth2AuthorizedClientManager}, applying the configured
	 * {@link #accessTokenExpiresSkew} and (for client_credentials) the optional
	 * {@link #clientCredentialsTokenResponseClient} override.
	 * Only called when {@link #defaultAuthorizedClientManager} is {@code true}, so the
	 * cast below is safe.
	 */
	private void updateDefaultAuthorizedClientManager() {
		// @formatter:off
		OAuth2AuthorizedClientProvider authorizedClientProvider = OAuth2AuthorizedClientProviderBuilder.builder()
				.authorizationCode()
				.refreshToken((configurer) -> configurer.clockSkew(this.accessTokenExpiresSkew))
				.clientCredentials(this::updateClientCredentialsProvider)
				.password((configurer) -> configurer.clockSkew(this.accessTokenExpiresSkew))
				.build();
		// @formatter:on
		((DefaultOAuth2AuthorizedClientManager) this.authorizedClientManager)
				.setAuthorizedClientProvider(authorizedClientProvider);
	}
private void updateClientCredentialsProvider(
OAuth2AuthorizedClientProviderBuilder.ClientCredentialsGrantBuilder builder) {
if (this.clientCredentialsTokenResponseClient != null) {
builder.accessTokenResponseClient(this.clientCredentialsTokenResponseClient);
}
builder.clockSkew(this.accessTokenExpiresSkew);
}
	/**
	 * If true, a default {@link OAuth2AuthorizedClient} can be discovered from the
	 * current Authentication. It is recommended to be cautious with this feature since
	 * all HTTP requests will receive the access token if it can be resolved from the
	 * current Authentication. When enabled and no registration id is otherwise
	 * specified, the registration id of the current
	 * {@link OAuth2AuthenticationToken} is used.
	 * @param defaultOAuth2AuthorizedClient true if a default
	 * {@link OAuth2AuthorizedClient} should be used, else false. Default is false.
	 */
	public void setDefaultOAuth2AuthorizedClient(boolean defaultOAuth2AuthorizedClient) {
		this.defaultOAuth2AuthorizedClient = defaultOAuth2AuthorizedClient;
	}
	/**
	 * If set, will be used as the default {@link ClientRegistration#getRegistrationId()}.
	 * It is recommended to be cautious with this feature since all HTTP requests will
	 * receive the access token. Used as a fallback only when the request attributes do
	 * not supply a registration id via {@link #clientRegistrationId(String)}.
	 * @param clientRegistrationId the id to use
	 */
	public void setDefaultClientRegistrationId(String clientRegistrationId) {
		this.defaultClientRegistrationId = clientRegistrationId;
	}
/**
* Configures the builder with {@link #defaultRequest()} and adds this as a
* {@link ExchangeFilterFunction}
* @return the {@link Consumer} to configure the builder
*/
public Consumer<WebClient.Builder> oauth2Configuration() {
return (builder) -> builder.defaultRequest(defaultRequest()).filter(this);
}
/**
* Provides defaults for the {@link HttpServletRequest} and the
* {@link HttpServletResponse} using {@link RequestContextHolder}. It also provides
* defaults for the {@link Authentication} using {@link SecurityContextHolder}. It
* also can default the {@link OAuth2AuthorizedClient} using the
* {@link #clientRegistrationId(String)} or the
* {@link #authentication(Authentication)}.
* @return the {@link Consumer} to populate the attributes
*/
public Consumer<WebClient.RequestHeadersSpec<?>> defaultRequest() {
return (spec) -> spec.attributes((attrs) -> {
populateDefaultRequestResponse(attrs);
populateDefaultAuthentication(attrs);
});
}
/**
* Modifies the {@link ClientRequest#attributes()} to include the
* {@link OAuth2AuthorizedClient} to be used for providing the Bearer Token.
* @param authorizedClient the {@link OAuth2AuthorizedClient} to use.
* @return the {@link Consumer} to populate the attributes
*/
public static Consumer<Map<String, Object>> oauth2AuthorizedClient(OAuth2AuthorizedClient authorizedClient) {
return (attributes) -> {
if (authorizedClient == null) {
attributes.remove(OAUTH2_AUTHORIZED_CLIENT_ATTR_NAME);
}
else {
attributes.put(OAUTH2_AUTHORIZED_CLIENT_ATTR_NAME, authorizedClient);
}
};
}
/**
* Modifies the {@link ClientRequest#attributes()} to include the
* {@link ClientRegistration#getRegistrationId()} to be used to look up the
* {@link OAuth2AuthorizedClient}.
* @param clientRegistrationId the {@link ClientRegistration#getRegistrationId()} to
* be used to look up the {@link OAuth2AuthorizedClient}.
* @return the {@link Consumer} to populate the attributes
*/
public static Consumer<Map<String, Object>> clientRegistrationId(String clientRegistrationId) {
return (attributes) -> attributes.put(CLIENT_REGISTRATION_ID_ATTR_NAME, clientRegistrationId);
}
/**
* Modifies the {@link ClientRequest#attributes()} to include the
* {@link Authentication} used to look up and save the {@link OAuth2AuthorizedClient}.
* The value is defaulted in
* {@link ServletOAuth2AuthorizedClientExchangeFilterFunction#defaultRequest()}
* @param authentication the {@link Authentication} to use.
* @return the {@link Consumer} to populate the attributes
*/
public static Consumer<Map<String, Object>> authentication(Authentication authentication) {
return (attributes) -> attributes.put(AUTHENTICATION_ATTR_NAME, authentication);
}
/**
* Modifies the {@link ClientRequest#attributes()} to include the
* {@link HttpServletRequest} used to look up and save the
* {@link OAuth2AuthorizedClient}. The value is defaulted in
* {@link ServletOAuth2AuthorizedClientExchangeFilterFunction#defaultRequest()}
* @param request the {@link HttpServletRequest} to use.
* @return the {@link Consumer} to populate the attributes
*/
public static Consumer<Map<String, Object>> httpServletRequest(HttpServletRequest request) {
return (attributes) -> attributes.put(HTTP_SERVLET_REQUEST_ATTR_NAME, request);
}
/**
* Modifies the {@link ClientRequest#attributes()} to include the
* {@link HttpServletResponse} used to save the {@link OAuth2AuthorizedClient}. The
* value is defaulted in
* {@link ServletOAuth2AuthorizedClientExchangeFilterFunction#defaultRequest()}
* @param response the {@link HttpServletResponse} to use.
* @return the {@link Consumer} to populate the attributes
*/
public static Consumer<Map<String, Object>> httpServletResponse(HttpServletResponse response) {
return (attributes) -> attributes.put(HTTP_SERVLET_RESPONSE_ATTR_NAME, response);
}
	/**
	 * An access token will be considered expired by comparing its expiration to now +
	 * this skewed Duration. The default is 1 minute.
	 * @param accessTokenExpiresSkew the Duration to use.
	 * @deprecated The {@code accessTokenExpiresSkew} should be configured with the
	 * specific {@link OAuth2AuthorizedClientProvider} implementation, e.g.
	 * {@link ClientCredentialsOAuth2AuthorizedClientProvider#setClockSkew(Duration)
	 * ClientCredentialsOAuth2AuthorizedClientProvider} or
	 * {@link RefreshTokenOAuth2AuthorizedClientProvider#setClockSkew(Duration)
	 * RefreshTokenOAuth2AuthorizedClientProvider}.
	 */
	@Deprecated
	public void setAccessTokenExpiresSkew(Duration accessTokenExpiresSkew) {
		Assert.notNull(accessTokenExpiresSkew, "accessTokenExpiresSkew cannot be null");
		// Only legal when this instance built its own DefaultOAuth2AuthorizedClientManager.
		Assert.state(this.defaultAuthorizedClientManager,
				"The accessTokenExpiresSkew cannot be set when the constructor used is \"ServletOAuth2AuthorizedClientExchangeFilterFunction(OAuth2AuthorizedClientManager)\". "
						+ "Instead, use the constructor \"ServletOAuth2AuthorizedClientExchangeFilterFunction(ClientRegistrationRepository, OAuth2AuthorizedClientRepository)\".");
		this.accessTokenExpiresSkew = accessTokenExpiresSkew;
		// Rebuild the default manager's provider chain so the new skew takes effect.
		updateDefaultAuthorizedClientManager();
	}
	/**
	 * Sets the {@link OAuth2AuthorizationFailureHandler} that handles authentication and
	 * authorization failures when communicating to the OAuth 2.0 Resource Server.
	 *
	 * <p>
	 * For example, a {@link RemoveAuthorizedClientOAuth2AuthorizationFailureHandler} is
	 * typically used to remove the cached {@link OAuth2AuthorizedClient}, so that the
	 * same token is no longer used in future requests to the Resource Server.
	 *
	 * <p>
	 * The failure handler used by default depends on which constructor was used to
	 * construct this {@link ServletOAuth2AuthorizedClientExchangeFilterFunction}. See the
	 * constructors for more details.
	 * @param authorizationFailureHandler the {@link OAuth2AuthorizationFailureHandler}
	 * that handles authentication and authorization failures
	 * @since 5.3
	 */
	public void setAuthorizationFailureHandler(OAuth2AuthorizationFailureHandler authorizationFailureHandler) {
		Assert.notNull(authorizationFailureHandler, "authorizationFailureHandler cannot be null");
		// Replaces any previously configured response handler.
		this.clientResponseHandler = new AuthorizationFailureForwarder(authorizationFailureHandler);
	}
	@Override
	public Mono<ClientResponse> filter(ClientRequest request, ExchangeFunction next) {
		// Resolution order:
		// 1) an OAuth2AuthorizedClient supplied via the request attributes is
		//    re-authorized (e.g. refreshed) if needed;
		// 2) otherwise, a client registration id (attribute, configured default, or
		//    derived from the current OAuth2AuthenticationToken) is used to authorize a
		//    client;
		// 3) otherwise, the request is sent without a bearer token.
		// @formatter:off
		return mergeRequestAttributesIfNecessary(request)
				.filter((req) -> req.attribute(OAUTH2_AUTHORIZED_CLIENT_ATTR_NAME).isPresent())
				.flatMap((req) -> reauthorizeClient(getOAuth2AuthorizedClient(req.attributes()), req))
				.switchIfEmpty(
						Mono.defer(() ->
								mergeRequestAttributesIfNecessary(request)
										.filter((req) -> resolveClientRegistrationId(req) != null)
										.flatMap((req) -> authorizeClient(resolveClientRegistrationId(req), req))
						)
				)
				.map((authorizedClient) -> bearer(request, authorizedClient))
				.flatMap((requestWithBearer) -> exchangeAndHandleResponse(requestWithBearer, next))
				.switchIfEmpty(Mono.defer(() -> exchangeAndHandleResponse(request, next)));
		// @formatter:on
	}
private Mono<ClientResponse> exchangeAndHandleResponse(ClientRequest request, ExchangeFunction next) {
return next.exchange(request)
.transform((responseMono) -> this.clientResponseHandler.handleResponse(request, responseMono));
}
private Mono<ClientRequest> mergeRequestAttributesIfNecessary(ClientRequest request) {
if (!request.attribute(HTTP_SERVLET_REQUEST_ATTR_NAME).isPresent()
|| !request.attribute(HTTP_SERVLET_RESPONSE_ATTR_NAME).isPresent()
|| !request.attribute(AUTHENTICATION_ATTR_NAME).isPresent()) {
return mergeRequestAttributesFromContext(request);
}
return Mono.just(request);
}
	/**
	 * Copies the servlet request/response and {@link Authentication} captured in the
	 * Reactor subscriber {@link Context} into the request's attributes (without
	 * overwriting any already present).
	 */
	private Mono<ClientRequest> mergeRequestAttributesFromContext(ClientRequest request) {
		ClientRequest.Builder builder = ClientRequest.from(request);
		return Mono.subscriberContext()
				.map((ctx) -> builder.attributes((attrs) -> populateRequestAttributes(attrs, ctx)))
				.map(ClientRequest.Builder::build);
	}
private void populateRequestAttributes(Map<String, Object> attrs, Context ctx) {
// NOTE: SecurityReactorContextConfiguration.SecurityReactorContextSubscriber adds
// this key
if (!ctx.hasKey(SECURITY_REACTOR_CONTEXT_ATTRIBUTES_KEY)) {
return;
}
Map<Object, Object> contextAttributes = ctx.get(SECURITY_REACTOR_CONTEXT_ATTRIBUTES_KEY);
HttpServletRequest servletRequest = (HttpServletRequest) contextAttributes.get(HttpServletRequest.class);
if (servletRequest != null) {
attrs.putIfAbsent(HTTP_SERVLET_REQUEST_ATTR_NAME, servletRequest);
}
HttpServletResponse servletResponse = (HttpServletResponse) contextAttributes.get(HttpServletResponse.class);
if (servletResponse != null) {
attrs.putIfAbsent(HTTP_SERVLET_RESPONSE_ATTR_NAME, servletResponse);
}
Authentication authentication = (Authentication) contextAttributes.get(Authentication.class);
if (authentication != null) {
attrs.putIfAbsent(AUTHENTICATION_ATTR_NAME, authentication);
}
}
private void populateDefaultRequestResponse(Map<String, Object> attrs) {
if (attrs.containsKey(HTTP_SERVLET_REQUEST_ATTR_NAME) && attrs.containsKey(HTTP_SERVLET_RESPONSE_ATTR_NAME)) {
return;
}
RequestAttributes context = RequestContextHolder.getRequestAttributes();
if (context instanceof ServletRequestAttributes) {
attrs.putIfAbsent(HTTP_SERVLET_REQUEST_ATTR_NAME, ((ServletRequestAttributes) context).getRequest());
attrs.putIfAbsent(HTTP_SERVLET_RESPONSE_ATTR_NAME, ((ServletRequestAttributes) context).getResponse());
}
}
private void populateDefaultAuthentication(Map<String, Object> attrs) {
if (attrs.containsKey(AUTHENTICATION_ATTR_NAME)) {
return;
}
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
attrs.putIfAbsent(AUTHENTICATION_ATTR_NAME, authentication);
}
private String resolveClientRegistrationId(ClientRequest request) {
Map<String, Object> attrs = request.attributes();
String clientRegistrationId = getClientRegistrationId(attrs);
if (clientRegistrationId == null) {
clientRegistrationId = this.defaultClientRegistrationId;
}
Authentication authentication = getAuthentication(attrs);
if (clientRegistrationId == null && this.defaultOAuth2AuthorizedClient
&& authentication instanceof OAuth2AuthenticationToken) {
clientRegistrationId = ((OAuth2AuthenticationToken) authentication).getAuthorizedClientRegistrationId();
}
return clientRegistrationId;
}
	/**
	 * Obtains an {@link OAuth2AuthorizedClient} for the given client registration id via
	 * the configured {@link OAuth2AuthorizedClientManager}, using an anonymous principal
	 * when no {@link Authentication} is present in the request attributes.
	 * @return an empty {@link Mono} when no manager is configured
	 */
	private Mono<OAuth2AuthorizedClient> authorizeClient(String clientRegistrationId, ClientRequest request) {
		if (this.authorizedClientManager == null) {
			return Mono.empty();
		}
		Map<String, Object> attrs = request.attributes();
		Authentication authentication = getAuthentication(attrs);
		if (authentication == null) {
			authentication = ANONYMOUS_AUTHENTICATION;
		}
		HttpServletRequest servletRequest = getRequest(attrs);
		HttpServletResponse servletResponse = getResponse(attrs);
		OAuth2AuthorizeRequest.Builder builder = OAuth2AuthorizeRequest.withClientRegistrationId(clientRegistrationId)
				.principal(authentication);
		builder.attributes((attributes) -> addToAttributes(attributes, servletRequest, servletResponse));
		OAuth2AuthorizeRequest authorizeRequest = builder.build();
		// NOTE: 'authorizedClientManager.authorize()' needs to be executed on a dedicated
		// thread via subscribeOn(Schedulers.boundedElastic()) since it performs a
		// blocking I/O operation using RestTemplate internally
		return Mono.fromSupplier(() -> this.authorizedClientManager.authorize(authorizeRequest))
				.subscribeOn(Schedulers.boundedElastic());
	}
	/**
	 * Re-authorizes (e.g. refreshes) an existing {@link OAuth2AuthorizedClient} via the
	 * configured {@link OAuth2AuthorizedClientManager}. When no manager is configured,
	 * the client is returned unchanged. When no {@link Authentication} is present in the
	 * request attributes, a placeholder principal carrying only the client's principal
	 * name is used.
	 */
	private Mono<OAuth2AuthorizedClient> reauthorizeClient(OAuth2AuthorizedClient authorizedClient,
			ClientRequest request) {
		if (this.authorizedClientManager == null) {
			return Mono.just(authorizedClient);
		}
		Map<String, Object> attrs = request.attributes();
		Authentication authentication = getAuthentication(attrs);
		if (authentication == null) {
			authentication = createAuthentication(authorizedClient.getPrincipalName());
		}
		HttpServletRequest servletRequest = getRequest(attrs);
		HttpServletResponse servletResponse = getResponse(attrs);
		OAuth2AuthorizeRequest.Builder builder = OAuth2AuthorizeRequest.withAuthorizedClient(authorizedClient)
				.principal(authentication);
		builder.attributes((attributes) -> addToAttributes(attributes, servletRequest, servletResponse));
		OAuth2AuthorizeRequest reauthorizeRequest = builder.build();
		// NOTE: 'authorizedClientManager.authorize()' needs to be executed on a dedicated
		// thread via subscribeOn(Schedulers.boundedElastic()) since it performs a
		// blocking I/O operation using RestTemplate internally
		return Mono.fromSupplier(() -> this.authorizedClientManager.authorize(reauthorizeRequest))
				.subscribeOn(Schedulers.boundedElastic());
	}
	/**
	 * Adds the servlet request/response to the authorize-request attributes, skipping
	 * any that are {@code null}.
	 */
	private void addToAttributes(Map<String, Object> attributes, HttpServletRequest servletRequest,
			HttpServletResponse servletResponse) {
		if (servletRequest != null) {
			attributes.put(HTTP_SERVLET_REQUEST_ATTR_NAME, servletRequest);
		}
		if (servletResponse != null) {
			attributes.put(HTTP_SERVLET_RESPONSE_ATTR_NAME, servletResponse);
		}
	}
private ClientRequest bearer(ClientRequest request, OAuth2AuthorizedClient authorizedClient) {
// @formatter:off
return ClientRequest.from(request)
.headers((headers) -> headers.setBearerAuth(authorizedClient.getAccessToken().getTokenValue()))
.attributes(oauth2AuthorizedClient(authorizedClient))
.build();
// @formatter:on
}
	// Typed accessors for the well-known request attributes. Each returns null when the
	// attribute is absent.
	static OAuth2AuthorizedClient getOAuth2AuthorizedClient(Map<String, Object> attrs) {
		return (OAuth2AuthorizedClient) attrs.get(OAUTH2_AUTHORIZED_CLIENT_ATTR_NAME);
	}
	static String getClientRegistrationId(Map<String, Object> attrs) {
		return (String) attrs.get(CLIENT_REGISTRATION_ID_ATTR_NAME);
	}
	static Authentication getAuthentication(Map<String, Object> attrs) {
		return (Authentication) attrs.get(AUTHENTICATION_ATTR_NAME);
	}
	static HttpServletRequest getRequest(Map<String, Object> attrs) {
		return (HttpServletRequest) attrs.get(HTTP_SERVLET_REQUEST_ATTR_NAME);
	}
	static HttpServletResponse getResponse(Map<String, Object> attrs) {
		return (HttpServletResponse) attrs.get(HTTP_SERVLET_RESPONSE_ATTR_NAME);
	}
	/**
	 * Creates a lightweight {@link Authentication} carrying only the principal name,
	 * used when the original principal is no longer available (e.g. when re-authorizing
	 * a previously authorized client or reporting an authorization failure).
	 * @throws IllegalArgumentException if {@code principalName} is empty
	 */
	private static Authentication createAuthentication(final String principalName) {
		Assert.hasText(principalName, "principalName cannot be empty");
		return new AbstractAuthenticationToken(null) {
			@Override
			public Object getCredentials() {
				// No credentials are available for this synthetic principal.
				return "";
			}
			@Override
			public Object getPrincipal() {
				return principalName;
			}
		};
	}
	/**
	 * Strategy for post-processing the response publisher produced by the downstream
	 * {@link ExchangeFunction}, e.g. to observe failures before propagating them.
	 */
	@FunctionalInterface
	private interface ClientResponseHandler {
		Mono<ClientResponse> handleResponse(ClientRequest request, Mono<ClientResponse> response);
	}
	/**
	 * Forwards authentication and authorization failures to an
	 * {@link OAuth2AuthorizationFailureHandler}.
	 *
	 * @since 5.3
	 */
	private static final class AuthorizationFailureForwarder implements ClientResponseHandler {
		/**
		 * A map of HTTP status code to OAuth 2.0 error code for HTTP status codes that
		 * should be interpreted as authentication or authorization failures.
		 */
		private final Map<Integer, String> httpStatusToOAuth2ErrorCodeMap;
		/**
		 * The {@link OAuth2AuthorizationFailureHandler} to notify when an
		 * authentication/authorization failure occurs.
		 */
		private final OAuth2AuthorizationFailureHandler authorizationFailureHandler;
		private AuthorizationFailureForwarder(OAuth2AuthorizationFailureHandler authorizationFailureHandler) {
			Assert.notNull(authorizationFailureHandler, "authorizationFailureHandler cannot be null");
			this.authorizationFailureHandler = authorizationFailureHandler;
			Map<Integer, String> httpStatusToOAuth2Error = new HashMap<>();
			// Per RFC 6750: 401 -> invalid_token, 403 -> insufficient_scope.
			httpStatusToOAuth2Error.put(HttpStatus.UNAUTHORIZED.value(), OAuth2ErrorCodes.INVALID_TOKEN);
			httpStatusToOAuth2Error.put(HttpStatus.FORBIDDEN.value(), OAuth2ErrorCodes.INSUFFICIENT_SCOPE);
			this.httpStatusToOAuth2ErrorCodeMap = Collections.unmodifiableMap(httpStatusToOAuth2Error);
		}
		@Override
		public Mono<ClientResponse> handleResponse(ClientRequest request, Mono<ClientResponse> responseMono) {
			// Inspect successful responses for failure indicators, and also translate
			// downstream exceptions, notifying the failure handler in each case before
			// propagating the original signal unchanged.
			return responseMono.flatMap((response) -> handleResponse(request, response).thenReturn(response))
					.onErrorResume(WebClientResponseException.class,
							(e) -> handleWebClientResponseException(request, e).then(Mono.error(e)))
					.onErrorResume(OAuth2AuthorizationException.class,
							(e) -> handleAuthorizationException(request, e).then(Mono.error(e)));
		}
		/**
		 * Notifies the failure handler when the response indicates an
		 * authentication/authorization failure (via status code or
		 * {@code WWW-Authenticate} header) and an {@link OAuth2AuthorizedClient} was
		 * used for the request; completes empty otherwise.
		 */
		private Mono<Void> handleResponse(ClientRequest request, ClientResponse response) {
			// @formatter:off
			return Mono.justOrEmpty(resolveErrorIfPossible(response))
					.flatMap((oauth2Error) -> {
						Map<String, Object> attrs = request.attributes();
						OAuth2AuthorizedClient authorizedClient = getOAuth2AuthorizedClient(attrs);
						if (authorizedClient == null) {
							// Nothing to evict/report without an authorized client.
							return Mono.empty();
						}
						ClientAuthorizationException authorizationException = new ClientAuthorizationException(oauth2Error,
								authorizedClient.getClientRegistration().getRegistrationId());
						Authentication principal = createAuthentication(authorizedClient.getPrincipalName());
						HttpServletRequest servletRequest = getRequest(attrs);
						HttpServletResponse servletResponse = getResponse(attrs);
						return handleAuthorizationFailure(authorizationException, principal, servletRequest, servletResponse);
					});
			// @formatter:on
		}
		/**
		 * Attempts to resolve an {@link OAuth2Error} from the response, preferring the
		 * parameters of the {@code WWW-Authenticate} header over the HTTP status code.
		 * @return the resolved error, or {@code null} if none could be determined
		 */
		private OAuth2Error resolveErrorIfPossible(ClientResponse response) {
			// Try to resolve from 'WWW-Authenticate' header
			if (!response.headers().header(HttpHeaders.WWW_AUTHENTICATE).isEmpty()) {
				String wwwAuthenticateHeader = response.headers().header(HttpHeaders.WWW_AUTHENTICATE).get(0);
				Map<String, String> authParameters = parseAuthParameters(wwwAuthenticateHeader);
				if (authParameters.containsKey(OAuth2ParameterNames.ERROR)) {
					return new OAuth2Error(authParameters.get(OAuth2ParameterNames.ERROR),
							authParameters.get(OAuth2ParameterNames.ERROR_DESCRIPTION),
							authParameters.get(OAuth2ParameterNames.ERROR_URI));
				}
			}
			// Fall back to mapping well-known HTTP status codes.
			return resolveErrorIfPossible(response.rawStatusCode());
		}
private OAuth2Error resolveErrorIfPossible(int statusCode) {
if (this.httpStatusToOAuth2ErrorCodeMap.containsKey(statusCode)) {
return new OAuth2Error(this.httpStatusToOAuth2ErrorCodeMap.get(statusCode), null,
"https://tools.ietf.org/html/rfc6750#section-3.1");
}
return null;
}
private Map<String, String> parseAuthParameters(String wwwAuthenticateHeader) {
// @formatter:off
return Stream.of(wwwAuthenticateHeader).filter((header) -> !StringUtils.isEmpty(header))
.filter((header) -> header.toLowerCase().startsWith("bearer"))
.map((header) -> header.substring("bearer".length()))
.map((header) -> header.split(","))
.flatMap(Stream::of)
.map((parameter) -> parameter.split("="))
.filter((parameter) -> parameter.length > 1)
.collect(Collectors.toMap((parameters) -> parameters[0].trim(),
(parameters) -> parameters[1].trim().replace("\"", ""))
);
// @formatter:on
}
		/**
		 * Handles the given http status code returned from a resource server by notifying
		 * the authorization failure handler if the http status code is in the
		 * {@link #httpStatusToOAuth2ErrorCodeMap}.
		 * @param request the request being processed
		 * @param exception The root cause exception for the failure
		 * @return a {@link Mono} that completes empty after the authorization failure
		 * handler completes
		 */
		private Mono<Void> handleWebClientResponseException(ClientRequest request,
				WebClientResponseException exception) {
			return Mono.justOrEmpty(resolveErrorIfPossible(exception.getRawStatusCode())).flatMap((oauth2Error) -> {
				Map<String, Object> attrs = request.attributes();
				OAuth2AuthorizedClient authorizedClient = getOAuth2AuthorizedClient(attrs);
				if (authorizedClient == null) {
					// Nothing to evict/report without an authorized client.
					return Mono.empty();
				}
				ClientAuthorizationException authorizationException = new ClientAuthorizationException(oauth2Error,
						authorizedClient.getClientRegistration().getRegistrationId(), exception);
				Authentication principal = createAuthentication(authorizedClient.getPrincipalName());
				HttpServletRequest servletRequest = getRequest(attrs);
				HttpServletResponse servletResponse = getResponse(attrs);
				return handleAuthorizationFailure(authorizationException, principal, servletRequest, servletResponse);
			});
		}
		/**
		 * Handles the given {@link OAuth2AuthorizationException} that occurred downstream
		 * by notifying the authorization failure handler.
		 * @param request the request being processed
		 * @param authorizationException the authorization exception to include in the
		 * failure event
		 * @return a {@link Mono} that completes empty after the authorization failure
		 * handler completes
		 */
		private Mono<Void> handleAuthorizationException(ClientRequest request,
				OAuth2AuthorizationException authorizationException) {
			return Mono.justOrEmpty(request).flatMap((req) -> {
				Map<String, Object> attrs = req.attributes();
				OAuth2AuthorizedClient authorizedClient = getOAuth2AuthorizedClient(attrs);
				if (authorizedClient == null) {
					// Nothing to evict/report without an authorized client.
					return Mono.empty();
				}
				Authentication principal = createAuthentication(authorizedClient.getPrincipalName());
				HttpServletRequest servletRequest = getRequest(attrs);
				HttpServletResponse servletResponse = getResponse(attrs);
				return handleAuthorizationFailure(authorizationException, principal, servletRequest, servletResponse);
			});
		}
/**
 * Delegates the failed authorization to the
 * {@link OAuth2AuthorizationFailureHandler}, running the (potentially
 * blocking) callback on the bounded-elastic scheduler.
 * @param exception the {@link OAuth2AuthorizationException} to include in the
 * failure event
 * @param principal the principal associated with the failed authorization attempt
 * @param servletRequest the currently active {@code HttpServletRequest}
 * @param servletResponse the currently active {@code HttpServletResponse}
 * @return a {@link Mono} that completes empty after the
 * {@link OAuth2AuthorizationFailureHandler} completes
 */
private Mono<Void> handleAuthorizationFailure(OAuth2AuthorizationException exception, Authentication principal,
		HttpServletRequest servletRequest, HttpServletResponse servletResponse) {
	// @formatter:off
	return Mono
			.fromRunnable(() -> this.authorizationFailureHandler.onAuthorizationFailure(exception, principal,
					createAttributes(servletRequest, servletResponse)))
			.subscribeOn(Schedulers.boundedElastic())
			.then();
	// @formatter:on
}
/**
 * Builds the attribute map handed to the failure handler, keyed by the servlet
 * request/response class names (the handler looks the objects up by those keys).
 */
private static Map<String, Object> createAttributes(HttpServletRequest servletRequest,
		HttpServletResponse servletResponse) {
	Map<String, Object> attrs = new HashMap<>();
	attrs.put(HttpServletRequest.class.getName(), servletRequest);
	attrs.put(HttpServletResponse.class.getName(), servletResponse);
	return attrs;
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import java.io.EOFException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.FSConstants;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocksWithMetaInfo;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.tools.FastCopy;
import org.apache.hadoop.hdfs.tools.FastCopy.FastFileCopyRequest;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.HardLink;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.tools.FastCopy.FastCopyFileStatus;
import org.junit.AfterClass;
import static org.junit.Assert.*;
/**
 * Shared setup and verification harness for unit tests of the HDFS
 * {@link FastCopy} tool. Starts two {@link MiniDFSCluster}s (a "local" and a
 * "remote" one, 6 datanodes each) plus a background read/write thread, and
 * provides copy/verify routines used by the concrete test classes.
 *
 * <p>NOTE(review): all state is static and shared; callers are expected to
 * invoke {@link #setUpClass()} / {@link #tearDownClass()} once per test class
 * and to populate {@link #conf} / {@link #remoteConf} beforehand.
 */
public class FastCopySetupUtil {
  private static final Random random = new Random();
  // Cluster configurations; must be assigned by subclasses before setUpClass().
  protected static Configuration conf;
  protected static Configuration remoteConf;
  private static MiniDFSCluster cluster;
  protected static DistributedFileSystem fs;
  private static MiniDFSCluster remoteCluster;
  private static DistributedFileSystem remoteFs;
  // Global pass/fail flag updated by the background RWThread and by
  // create_verify_file(); asserted at the end of every test method.
  private static boolean pass = true;
  private static RWThread rwThread;
  public static final int FILESIZE = 1024 * 5; // 5 KB
  // Maps a datanode's port to its DataNode instance, across BOTH clusters.
  private static Map<Integer, DataNode> dnMap = new HashMap<Integer, DataNode>();
  private static Log LOG = LogFactory.getLog(FastCopySetupUtil.class);
  public static final int BLOCK_SIZE = 1024;
  // Scratch buffer reused by generateRandomFile (one block's worth of bytes).
  private static final byte[] buffer = new byte[BLOCK_SIZE];
  public static final int TMPFILESIZE = 2048;
  // Fixed random payload written/verified by the background RWThread.
  private static final byte[] fileBuffer = new byte[TMPFILESIZE];
  public static final int BYTES_PER_CHECKSUM = 512;
  public static final int COPIES = 5;
  // Configuration written to disk so the FastCopy shell tool picks it up.
  private static String confFile = "build/test/extraconf/core-site.xml";
  private static final int softLeasePeriod = 3 * 1000; // 3 sec
  private static final int hardLeasePeriod = 5 * 1000; // 5 sec

  /**
   * Starts both mini clusters (spread over racks /r1,/r2 and hosts h1..h3),
   * records every datanode by port in {@link #dnMap}, writes the configuration
   * to {@link #confFile} and launches the background read/write thread.
   */
  public static void setUpClass() throws Exception {
    // Require the complete file to be replicated before we return in unit
    // tests.
    setConf("dfs.replication.min", 3);
    // Lower the pending replication timeout to make sure if any of our blocks
    // timeout the unit test catches it.
    setConf("dfs.replication.pending.timeout.sec", 60);
    // Make sure we get multiple blocks.
    setConf("dfs.block.size", BLOCK_SIZE);
    setConf("io.bytes.per.checksum", BYTES_PER_CHECKSUM);
    // Set low soft and hard lease period.
    setConf(FSConstants.DFS_HARD_LEASE_KEY, hardLeasePeriod);
    setConf(FSConstants.DFS_SOFT_LEASE_KEY, softLeasePeriod);
    System.setProperty("test.build.data", "build/test/data1");
    cluster = new MiniDFSCluster(conf, 6, true, new String[] { "/r1", "/r2",
        "/r1", "/r2", "/r1", "/r2" }, new String[] { "h1", "h2", "h3", "h1",
        "h2", "h3" });
    for (DataNode dn : cluster.getDataNodes()) {
      dnMap.put(dn.getSelfAddr().getPort(), dn);
    }
    // Writing conf to disk so that the FastCopy tool picks it up.
    FileOutputStream out = new FileOutputStream(confFile);
    conf.writeXml(out);
    fs = (DistributedFileSystem) cluster.getFileSystem();
    System.setProperty("test.build.data", "build/test/data2");
    remoteCluster = new MiniDFSCluster(remoteConf, 6, true, new String[] {
        "/r1", "/r2", "/r1", "/r2", "/r1", "/r2" }, new String[] { "h1", "h2",
        "h3", "h1", "h2", "h3" });
    for (DataNode dn : remoteCluster.getDataNodes()) {
      dnMap.put(dn.getSelfAddr().getPort(), dn);
    }
    remoteFs = (DistributedFileSystem) remoteCluster.getFileSystem();
    random.nextBytes(fileBuffer);
    rwThread = new RWThread();
    rwThread.start();
  }

  /** Sets an int property on both cluster configurations. */
  private static void setConf(String name, int value) {
    conf.setInt(name, value);
    remoteConf.setInt(name, value);
  }

  /** Stops the background thread, shuts down both clusters and cleans up. */
  @AfterClass
  public static void tearDownClass() throws Exception {
    rwThread.stopRW();
    rwThread.join();
    remoteFs.close();
    remoteCluster.shutdown();
    fs.close();
    cluster.shutdown();
    // Remove the extra conf file.
    new File(confFile).delete();
  }

  /**
   * Background thread that continuously creates, reads back, verifies and
   * deletes a random file while the tests run, and checks that neither
   * namenode has pending replication work. Any failure clears the shared
   * {@link #pass} flag.
   */
  private static class RWThread extends Thread {
    // Stop flag polled by run(); NOTE(review): not volatile/synchronized -
    // relies on the loop eventually observing the write, TODO confirm this is
    // acceptable for test-only code.
    private boolean flag = true;
    private byte[] tmpBuffer = new byte[TMPFILESIZE];

    public void run() {
      while (flag) {
        try {
          // Make sure we have no pendingReplicationBlocks
          pass = (0 == cluster.getNameNode().namesystem
              .getPendingReplicationBlocks());
          pass = (0 == remoteCluster.getNameNode().namesystem
              .getPendingReplicationBlocks());
          create_verify_file();
          try {
            Thread.sleep(1000);
          } catch (InterruptedException ex) {
          }
        } catch (IOException e) {
          pass = false;
          LOG.warn("Create Verify Failed", e);
        }
      }
    }

    /** Asks the thread to exit after the current loop iteration. */
    public void stopRW() {
      flag = false;
    }

    /**
     * Writes the fixed random payload to a fresh file, reads it back, records
     * content equality in {@link #pass}, then deletes the file.
     */
    public void create_verify_file() throws IOException {
      String filename = "/create_verify_file" + random.nextInt();
      Path filePath = new Path(filename);
      // Write.
      FSDataOutputStream out = fs.create(filePath, true, 4096);
      out.write(fileBuffer);
      out.close();
      // Read.
      FSDataInputStream in = fs.open(filePath, 4096);
      in.readFully(tmpBuffer);
      in.close();
      // Verify and delete.
      pass = Arrays.equals(tmpBuffer, fileBuffer);
      fs.dfs.delete(filePath.toString(), true);
    }
  }

  /**
   * Generates a file with random data.
   *
   * @param fs
   *          the FileSystem on which to generate the file
   *
   * @param filename
   *          the full path name of the file
   *
   * @param filesize
   *          the minimum file size in bytes; the actual size is rounded up to
   *          a multiple of the internal buffer size ({@link #BLOCK_SIZE})
   */
  protected static void generateRandomFile(FileSystem fs, String filename,
      int filesize)
      throws IOException {
    Path filePath = new Path(filename);
    OutputStream out = fs.create(filePath, true, 4096);
    int bytesWritten = 0;
    while (bytesWritten < filesize) {
      random.nextBytes(buffer);
      out.write(buffer);
      bytesWritten += buffer.length;
    }
    out.close();
  }

  /**
   * Copies one source file to {@link #COPIES} destinations on the same
   * filesystem and verifies block placement, contents and FastCopy progress
   * reporting for each copy.
   *
   * @param hardlink whether the copies are expected to be hardlinked
   */
  public void testFastCopy(boolean hardlink) throws Exception {
    // Create a source file.
    String src = "/testFastCopySrc" + hardlink;
    generateRandomFile(fs, src, FILESIZE);
    String destination = "/testFastCopyDestination" + hardlink;
    FastCopy fastCopy = new FastCopy(conf);
    NameNode namenode = cluster.getNameNode();
    try {
      for (int i = 0; i < COPIES; i++) {
        fastCopy.copy(src, destination + i, fs, fs);
        assertTrue(verifyCopiedFile(src, destination + i, namenode, namenode,
            fs, fs, hardlink));
        verifyFileStatus(destination + i, namenode, fastCopy);
      }
    } catch (Exception e) {
      LOG.error("Fast Copy failed with exception : ", e);
      fail("Fast Copy failed");
    } finally {
      fastCopy.shutdown();
    }
    assertTrue(pass);
  }

  /**
   * Same-filesystem variant that submits all {@link #COPIES} copy requests in
   * a single batch via {@link FastCopy#copy(java.util.List)}.
   */
  public void testFastCopyMultiple(boolean hardlink) throws Exception {
    // Create a source file.
    String src = "/testFastCopyMultipleSrc" + hardlink;
    generateRandomFile(fs, src, FILESIZE);
    String destination = "/testFastCopyMultipleDestination" + hardlink;
    FastCopy fastCopy = new FastCopy(conf);
    List<FastFileCopyRequest> requests = new ArrayList<FastFileCopyRequest>();
    for (int i = 0; i < COPIES; i++) {
      requests.add(new FastFileCopyRequest(src, destination + i, fs, fs));
    }
    NameNode namenode = cluster.getNameNode();
    try {
      fastCopy.copy(requests);
      for (FastFileCopyRequest r : requests) {
        assertTrue(verifyCopiedFile(r.getSrc(), r.getDestination(), namenode,
            namenode, fs, fs, hardlink));
        verifyFileStatus(r.getDestination(), namenode, fastCopy);
      }
    } catch (Exception e) {
      LOG.error("Fast Copy failed with exception : ", e);
      fail("Fast Copy failed");
    } finally {
      fastCopy.shutdown();
    }
    assertTrue(pass);
  }

  /**
   * Checks FastCopy's progress bookkeeping for the given file: a status must
   * exist, all blocks must be reported done, and the block count must match
   * both the namenode's view and FILESIZE / BLOCK_SIZE.
   */
  private void verifyFileStatus(String file, NameNode namenode,
      FastCopy fastCopy) throws Exception {
    LOG.info("Verifying for file : " + file);
    FastCopyFileStatus fstat = fastCopy.getFileStatus(file);
    assertNotNull(fstat);
    int totalBlocks = namenode.getBlockLocations(file, 0,
        Long.MAX_VALUE).locatedBlockCount();
    assertEquals(totalBlocks, fstat.getTotalBlocks());
    assertEquals(fstat.getTotalBlocks(), fstat.getBlocksDone());
    assertEquals(FILESIZE / BLOCK_SIZE, totalBlocks);
  }

  /**
   * Cross-cluster variant of {@link #testFastCopy(boolean)}: copies from the
   * local cluster to the remote one, one request at a time.
   */
  public void testInterFileSystemFastCopy(boolean hardlink) throws Exception {
    // Create a source file.
    String src = "/testInterFileSystemFastCopySrc" + hardlink;
    generateRandomFile(fs, src, FILESIZE);
    String destination = "/testInterFileSystemFastCopyDst" + hardlink;
    FastCopy fastCopy = new FastCopy(conf);
    NameNode srcNameNode = cluster.getNameNode();
    NameNode dstNameNode = remoteCluster.getNameNode();
    try {
      for (int i = 0; i < COPIES; i++) {
        fastCopy.copy(src, destination + i, fs, remoteFs);
        assertTrue(verifyCopiedFile(src, destination + i, srcNameNode,
            dstNameNode, fs, remoteFs, hardlink));
        verifyFileStatus(destination + i, dstNameNode, fastCopy);
      }
    } catch (Exception e) {
      LOG.error("Fast Copy failed with exception : ", e);
      fail("Fast Copy failed");
    } finally {
      fastCopy.shutdown();
    }
    assertTrue(pass);
  }

  /**
   * Cross-cluster variant that submits all copy requests in a single batch.
   */
  public void testInterFileSystemFastCopyMultiple(boolean hardlink)
      throws Exception {
    // Create a source file.
    String src = "/testInterFileSystemFastCopyMultipleSrc" + hardlink;
    generateRandomFile(fs, src, FILESIZE);
    String destination = "/testInterFileSystemFastCopyMultipleDestination"
        + hardlink;
    FastCopy fastCopy = new FastCopy(conf);
    List<FastFileCopyRequest> requests = new ArrayList<FastFileCopyRequest>();
    for (int i = 0; i < COPIES; i++) {
      requests.add(new FastFileCopyRequest(src, destination + i, fs, remoteFs));
    }
    NameNode srcNameNode = cluster.getNameNode();
    NameNode dstNameNode = remoteCluster.getNameNode();
    try {
      fastCopy.copy(requests);
      for (FastFileCopyRequest r : requests) {
        assertTrue(verifyCopiedFile(r.getSrc(), r.getDestination(),
            srcNameNode, dstNameNode, fs, remoteFs, hardlink));
        verifyFileStatus(r.getDestination(), dstNameNode, fastCopy);
      }
    } catch (Exception e) {
      LOG.error("Fast Copy failed with exception : ", e);
      fail("Fast Copy failed");
    } finally {
      fastCopy.shutdown();
    }
    assertTrue(pass);
  }

  /**
   * Exercises the FastCopy command-line entry point with several source files
   * and one destination directory on the same filesystem, then verifies every
   * copy. {@code extraargs} are appended to the shell argument list.
   */
  public void testFastCopyShellMultiple(boolean hardlink, String extraargs[])
      throws Exception {
    // Create a source file.
    String src = "/testFastCopyShellMultipleSrc" + hardlink;
    List<String> argsList = new ArrayList<String>();
    int i;
    for (i = 0; i < COPIES; i++) {
      generateRandomFile(fs, src + i, TMPFILESIZE); // Create a file
      argsList.add(src + i);
    }
    String destination = "/testFastCopyShellMultipleDestination" + hardlink;
    fs.mkdirs(new Path(destination));
    NameNode namenode = cluster.getNameNode();
    argsList.add(destination);
    argsList.addAll(Arrays.asList(extraargs));
    String args[] = new String[argsList.size()];
    args = argsList.toArray(args);
    try {
      FastCopy.runTool(args);
      for (i = 0; i < COPIES; i++) {
        String dstPath = destination + src + i;
        assertTrue(fs.exists(new Path(dstPath)));
        assertTrue(verifyCopiedFile(src + i, dstPath, namenode, namenode, fs,
            fs, hardlink));
      }
    } catch (Exception e) {
      LOG.error("Fast Copy failed with exception : ", e);
      fail("Fast Copy failed");
    }
    assertTrue(pass);
  }

  /**
   * Shell-tool variant that copies between the two clusters using fully
   * qualified hdfs:// URIs built from each cluster's fs.default.name.
   */
  public void testInterFileSystemFastCopyShellMultiple(boolean hardlink,
      String extraargs[]) throws Exception {
    // Create a source file.
    String fsname = new URI(conf.get("fs.default.name")).getAuthority();
    String remoteFsname = new URI(remoteConf.get("fs.default.name"))
        .getAuthority();
    String srcFile = "/testInterFileSystemFastCopyShellMultipleSrc" + hardlink;
    String src = "hdfs://" + fsname + srcFile;
    List<String> argsList = new ArrayList<String>();
    int i;
    for (i = 0; i < COPIES; i++) {
      generateRandomFile(fs, src + i, TMPFILESIZE); // Create a file
      argsList.add(src + i);
    }
    String destDir = "/testInterFileSystemFastCopyShellMultipleDestination"
        + hardlink;
    String destination = "hdfs://" + remoteFsname + destDir;
    remoteFs.mkdirs(new Path(destination));
    NameNode srcNamenode = cluster.getNameNode();
    NameNode dstNamenode = remoteCluster.getNameNode();
    argsList.add(destination);
    argsList.addAll(Arrays.asList(extraargs));
    String args[] = new String[argsList.size()];
    args = argsList.toArray(args);
    FastCopy.runTool(args);
    for (i = 0; i < COPIES; i++) {
      String dstPath = destDir + srcFile + i;
      assertTrue(remoteFs.exists(new Path(dstPath)));
      assertTrue(verifyCopiedFile(srcFile + i, dstPath, srcNamenode,
          dstNamenode, fs, remoteFs, hardlink));
    }
    assertTrue(pass);
  }

  /**
   * Runs the shell tool with pre-built (possibly glob) arguments and verifies
   * that every expected file landed under {@code dstPrefix}. The source files
   * are expected to exist already; {@code isDir} controls whether the last
   * argument is created as a destination directory first.
   */
  public void testFastCopyShellGlob(boolean hardlink, String[] files,
      String[] args, String srcPrefix, String dstPrefix, boolean isDir)
      throws Exception {
    int i;
    if (isDir) {
      String destination = args[args.length - 1];
      fs.mkdirs(new Path(destination));
    }
    NameNode namenode = cluster.getNameNode();
    try {
      FastCopy.runTool(args);
      for (i = 0; i < files.length; i++) {
        String dstPath = dstPrefix + files[i];
        String srcPath = srcPrefix + files[i];
        LOG.info("srcPath : " + srcPath + " dstPath : " + dstPath);
        assertTrue(fs.exists(new Path(dstPath)));
        assertTrue(verifyCopiedFile(srcPath, dstPath, namenode, namenode, fs,
            fs, hardlink));
      }
    } catch (Exception e) {
      LOG.error("Fast Copy failed with exception : ", e);
      throw e;
    }
    assertTrue(pass);
  }

  /**
   * Verifies a copy end-to-end: block placement via
   * {@link #verifyBlockLocations} (which asserts internally), then a byte-wise
   * comparison of both files in 4 KB chunks. Returns false when one file is
   * longer than the other or any chunk differs, true when both streams hit EOF
   * in the same round with identical content.
   */
  public boolean verifyCopiedFile(String src, String destination,
      NameNode srcNameNode, NameNode dstNameNode, FileSystem srcFs,
      FileSystem dstFs, boolean hardlink) throws Exception {
    verifyBlockLocations(src, destination, srcNameNode, dstNameNode, hardlink);
    Path srcFilePath = new Path(src);
    Path destFilePath = new Path(destination);
    FSDataInputStream srcStream = srcFs.open(srcFilePath, 4096);
    FSDataInputStream destStream = dstFs.open(destFilePath, 4096);
    int counter = 0;
    byte[] buffer1 = new byte[4096]; // 4KB
    byte[] buffer2 = new byte[4096]; // 4KB
    while (true) {
      try {
        srcStream.readFully(buffer1);
      } catch (EOFException e) {
        System.out.println("Src file EOF reached");
        counter++;
      }
      try {
        destStream.readFully(buffer2);
      } catch (EOFException e) {
        System.out.println("Destination file EOF reached");
        counter++;
      }
      if (counter == 1) {
        System.out.println("One file larger than other");
        return false;
      } else if (counter == 2) {
        return true;
      }
      if (!Arrays.equals(buffer1, buffer2)) {
        System.out.println("Files Mismatch");
        return false;
      }
    }
  }

  /**
   * Verifies block placement of a copy. For a same-namenode copy every
   * destination replica must live on exactly the same datanodes as its source
   * replica (machine local). For a cross-namenode copy every source replica
   * must have a destination replica on the same rack, and when source and
   * destination replicas share a host the on-disk hardlink count is checked
   * via {@link #verifyHardLinks}.
   */
  public boolean verifyBlockLocations(String src, String destination,
      NameNode srcNameNode, NameNode dstNameNode, boolean hardlink)
      throws IOException {
    LocatedBlocksWithMetaInfo srcLocatedBlocks =
        srcNameNode.openAndFetchMetaInfo(src, 0, Long.MAX_VALUE);
    List<LocatedBlock> srcblocks = srcLocatedBlocks.getLocatedBlocks();
    LocatedBlocksWithMetaInfo dstLocatedBlocks =
        dstNameNode.openAndFetchMetaInfo(destination, 0, Long.MAX_VALUE);
    List<LocatedBlock> dstblocks = dstLocatedBlocks.getLocatedBlocks();
    assertEquals(srcblocks.size(), dstblocks.size());
    Iterator<LocatedBlock> srcIt = srcblocks.iterator();
    Iterator<LocatedBlock> dstIt = dstblocks.iterator();
    while (srcIt.hasNext()) {
      LocatedBlock srcBlock = srcIt.next();
      LocatedBlock dstBlock = dstIt.next();
      List<DatanodeInfo> srcLocations = Arrays.asList(srcBlock.getLocations());
      List<DatanodeInfo> dstLocations = Arrays.asList(dstBlock.getLocations());
      System.out.println("Locations for src block : " + srcBlock.getBlock()
          + " file : " + src);
      for (DatanodeInfo info : srcLocations) {
        System.out.println("Datanode : " + info.toString());
      }
      System.out.println("Locations for dst block : " + dstBlock.getBlock()
          + " file : " + destination);
      for (DatanodeInfo info : dstLocations) {
        System.out.println("Datanode : " + info.toString());
      }
      assertEquals(srcLocations.size(), dstLocations.size());
      if (srcNameNode.getNameNodeAddress().equals(
          dstNameNode.getNameNodeAddress())) {
        // Same FS copy, verify blocks are machine local.
        assertTrue(srcLocations.containsAll(dstLocations));
        assertTrue(dstLocations.containsAll(srcLocations));
      } else {
        // Verify blocks are rack local.
        Iterator<DatanodeInfo> sit = srcLocations.iterator();
        while (sit.hasNext()) {
          DatanodeInfo srcInfo = sit.next();
          // Verify location.
          Iterator<DatanodeInfo> dit = dstLocations.iterator();
          boolean flag = false;
          while (dit.hasNext() && !flag) {
            DatanodeInfo dstInfo = dit.next();
            if (dstInfo.getHostName().equals(srcInfo.getHostName())) {
              verifyHardLinks(srcInfo, dstInfo,
                  srcLocatedBlocks.getNamespaceID(), srcBlock.getBlock(),
                  dstLocatedBlocks.getNamespaceID(), dstBlock.getBlock(),
                  hardlink);
            }
            flag = srcInfo.getNetworkLocation().equals(
                dstInfo.getNetworkLocation());
          }
          assertTrue(flag);
        }
      }
    }
    return true;
  }

  /**
   * Asserts the on-disk link count of both block files: greater than 1 when
   * the copy was hardlinked, exactly 1 otherwise.
   */
  private void verifyHardLinks(DatanodeInfo srcInfo, DatanodeInfo dstInfo,
      int srcNamespaceId, Block srcBlock, int dstNamespaceId, Block dstBlock,
      boolean hardlink) throws IOException {
    // Verify hard links.
    DataNode dnSrc = dnMap.get(srcInfo.getPort());
    File blockFileSrc = dnSrc.data.getBlockFile(srcNamespaceId, srcBlock);
    LOG.warn("Link count for : " + blockFileSrc + " is : "
        + HardLink.getLinkCount(blockFileSrc));
    if (hardlink) {
      assertTrue(HardLink.getLinkCount(blockFileSrc) > 1);
    } else {
      assertEquals(1, HardLink.getLinkCount(blockFileSrc));
    }
    DataNode dnDst = dnMap.get(dstInfo.getPort());
    File blockFileDst = dnDst.data.getBlockFile(dstNamespaceId, dstBlock);
    if (hardlink) {
      assertTrue(HardLink.getLinkCount(blockFileDst) > 1);
    } else {
      assertEquals(1, HardLink.getLinkCount(blockFileDst));
    }
  }
}
| |
package com.mikepenz.materialdrawer.model;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.support.annotation.ColorInt;
import android.support.annotation.ColorRes;
import android.support.annotation.LayoutRes;
import android.support.annotation.StringRes;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SwitchCompat;
import android.view.View;
import android.widget.CompoundButton;
import android.widget.ImageView;
import android.widget.TextView;
import com.mikepenz.materialdrawer.R;
import com.mikepenz.materialdrawer.holder.ColorHolder;
import com.mikepenz.materialdrawer.holder.ImageHolder;
import com.mikepenz.materialdrawer.holder.StringHolder;
import com.mikepenz.materialdrawer.interfaces.OnCheckedChangeListener;
import com.mikepenz.materialdrawer.model.utils.ViewHolderFactory;
import com.mikepenz.materialdrawer.util.DrawerUIUtils;
import com.mikepenz.materialize.util.UIUtils;
/**
 * Drawer item with a trailing {@link SwitchCompat} toggle, an optional second
 * description line, and an optional {@link OnCheckedChangeListener} notified
 * on user-driven toggles.
 * <p>
 * Created by mikepenz on 03.02.15.
 */
public class SwitchDrawerItem extends BaseDrawerItem<SwitchDrawerItem> {
    // Optional second text line shown under the name.
    private StringHolder description;
    private ColorHolder descriptionTextColor;
    // Whether the switch widget itself is interactable.
    private boolean switchEnabled = true;
    // Current checked state; kept in sync with the widget by checkedChangeListener.
    private boolean checked = false;
    private OnCheckedChangeListener onCheckedChangeListener = null;

    /** Sets the description text shown under the name. */
    public SwitchDrawerItem withDescription(String description) {
        this.description = new StringHolder(description);
        return this;
    }

    /** Sets the description text from a string resource. */
    public SwitchDrawerItem withDescription(@StringRes int descriptionRes) {
        this.description = new StringHolder(descriptionRes);
        return this;
    }

    /** Sets the description text color from a color int. */
    public SwitchDrawerItem withDescriptionTextColor(@ColorInt int color) {
        this.descriptionTextColor = ColorHolder.fromColor(color);
        return this;
    }

    /** Sets the description text color from a color resource. */
    public SwitchDrawerItem withDescriptionTextColorRes(@ColorRes int colorRes) {
        this.descriptionTextColor = ColorHolder.fromColorRes(colorRes);
        return this;
    }

    /** Sets the initial checked state of the switch. */
    public SwitchDrawerItem withChecked(boolean checked) {
        this.checked = checked;
        return this;
    }

    /** Enables or disables the switch widget. */
    public SwitchDrawerItem withSwitchEnabled(boolean switchEnabled) {
        this.switchEnabled = switchEnabled;
        return this;
    }

    /** Registers a listener notified whenever the checked state changes. */
    public SwitchDrawerItem withOnCheckedChangeListener(OnCheckedChangeListener onCheckedChangeListener) {
        this.onCheckedChangeListener = onCheckedChangeListener;
        return this;
    }

    /** Alias for {@link #withSelectable(boolean)} kept for API compatibility. */
    public SwitchDrawerItem withCheckable(boolean checkable) {
        return withSelectable(checkable);
    }

    public StringHolder getDescription() {
        return description;
    }

    public ColorHolder getDescriptionTextColor() {
        return descriptionTextColor;
    }

    public boolean isChecked() {
        return checked;
    }

    public boolean isSwitchEnabled() {
        return switchEnabled;
    }

    public OnCheckedChangeListener getOnCheckedChangeListener() {
        return onCheckedChangeListener;
    }

    @Override
    public String getType() {
        return "SWITCH_ITEM";
    }

    @Override
    @LayoutRes
    public int getLayoutRes() {
        return R.layout.material_drawer_item_switch;
    }

    /**
     * Binds this item's state to the (possibly recycled) ViewHolder: colors,
     * texts, icon, and the switch state/listener.
     */
    @Override
    public void bindView(RecyclerView.ViewHolder holder) {
        Context ctx = holder.itemView.getContext();
        //get our viewHolder
        final ViewHolder viewHolder = (ViewHolder) holder;
        //set the identifier from the drawerItem here. It can be used to run tests
        viewHolder.itemView.setId(getIdentifier());
        //set the item selected if it is
        viewHolder.itemView.setSelected(isSelected());
        //get the correct color for the background
        int selectedColor = getSelectedColor(ctx);
        //get the correct color for the text
        int color = getColor(ctx);
        int selectedTextColor = getSelectedTextColor(ctx);
        //get the correct color for the icon
        int iconColor = getIconColor(ctx);
        int selectedIconColor = getSelectedIconColor(ctx);
        //set the background for the item
        UIUtils.setBackground(viewHolder.view, DrawerUIUtils.getSelectableBackground(ctx, selectedColor));
        //set the text for the name
        StringHolder.applyTo(this.getName(), viewHolder.name);
        //set the text for the description or hide
        StringHolder.applyToOrHide(this.getDescription(), viewHolder.description);
        if (!isSelectable()) {
            viewHolder.view.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (switchEnabled) {
                        viewHolder.switchView.setChecked(!viewHolder.switchView.isChecked());
                    }
                }
            });
        }
        //BUGFIX: detach any listener left over from a recycled ViewHolder before
        //restoring the checked state; otherwise setChecked() would fire the
        //previous item's listener and corrupt that item's 'checked' field.
        viewHolder.switchView.setOnCheckedChangeListener(null);
        viewHolder.switchView.setChecked(checked);
        viewHolder.switchView.setOnCheckedChangeListener(checkedChangeListener);
        viewHolder.switchView.setEnabled(switchEnabled);
        //set the colors for textViews
        viewHolder.name.setTextColor(getTextColorStateList(color, selectedTextColor));
        //set the description text color
        ColorHolder.applyToOr(getDescriptionTextColor(), viewHolder.description, getTextColorStateList(color, selectedTextColor));
        //define the typeface for our textViews
        if (getTypeface() != null) {
            viewHolder.name.setTypeface(getTypeface());
            viewHolder.description.setTypeface(getTypeface());
        }
        //get the drawables for our icon and set it
        Drawable icon = ImageHolder.decideIcon(getIcon(), ctx, iconColor, isIconTinted(), 1);
        Drawable selectedIcon = ImageHolder.decideIcon(getSelectedIcon(), ctx, selectedIconColor, isIconTinted(), 1);
        ImageHolder.applyMultiIconTo(icon, iconColor, selectedIcon, selectedIconColor, isIconTinted(), viewHolder.icon);
        //for android API 17 --> Padding not applied via xml
        DrawerUIUtils.setDrawerVerticalPadding(viewHolder.view);
        //call the onPostBindView method to trigger post bind view actions (like the listener to modify the item if required)
        onPostBindView(this, holder.itemView);
    }

    @Override
    public ViewHolderFactory getFactory() {
        return new ItemFactory();
    }

    public static class ItemFactory implements ViewHolderFactory<ViewHolder> {
        public ViewHolder factory(View v) {
            return new ViewHolder(v);
        }
    }

    private static class ViewHolder extends RecyclerView.ViewHolder {
        private View view;
        private ImageView icon;
        private TextView name;
        private TextView description;
        private SwitchCompat switchView;

        private ViewHolder(View view) {
            super(view);
            this.view = view;
            this.icon = (ImageView) view.findViewById(R.id.material_drawer_icon);
            this.name = (TextView) view.findViewById(R.id.material_drawer_name);
            this.description = (TextView) view.findViewById(R.id.material_drawer_description);
            this.switchView = (SwitchCompat) view.findViewById(R.id.material_drawer_switch);
        }
    }

    // Mirrors widget toggles back into 'checked' and forwards them to the
    // user-supplied listener, if any.
    private CompoundButton.OnCheckedChangeListener checkedChangeListener = new CompoundButton.OnCheckedChangeListener() {
        @Override
        public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
            checked = isChecked;
            if (getOnCheckedChangeListener() != null) {
                getOnCheckedChangeListener().onCheckedChanged(SwitchDrawerItem.this, buttonView, isChecked);
            }
        }
    };
}
| |
/*
* Copyright 2021 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.fhir.tests.domain.valueset;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.junit.Test;
import com.b2international.snowowl.fhir.core.FhirDates;
import com.b2international.snowowl.fhir.core.model.dt.Code;
import com.b2international.snowowl.fhir.core.model.dt.Uri;
import com.b2international.snowowl.fhir.core.model.valueset.expansion.*;
import com.b2international.snowowl.fhir.tests.FhirTest;
import io.restassured.path.json.JsonPath;
/**
 * Tests for {@link Parameter}
 * @since 8.0.0
 */
public class ExpansionParameterTest extends FhirTest {

	// Serializes the given parameter with the shared mapper and reads it back.
	private <T> T roundTrip(T parameter, Class<T> type) throws Exception {
		return objectMapper.readValue(objectMapper.writeValueAsString(parameter), type);
	}

	@Test
	public void serializeStringParameter() throws Exception {
		StringParameter param = StringParameter.builder().name("paramName").value("paramValue").build();
		JsonPath json = getJsonPath(param);
		assertThat(json.getString("name"), equalTo("paramName"));
		assertThat(json.get("valueString"), equalTo("paramValue"));
	}

	@Test
	public void serializeUriParameter() throws Exception {
		UriParameter param = UriParameter.builder().name("paramName").value(new Uri("paramValue")).build();
		JsonPath json = getJsonPath(param);
		assertThat(json.getString("name"), equalTo("paramName"));
		assertThat(json.get("valueUri"), equalTo("paramValue"));
	}

	@Test
	public void serializeDateTimeParameter() throws Exception {
		Date date = new SimpleDateFormat(FhirDates.DATE_TIME_FORMAT).parse(TEST_DATE_STRING);
		DateTimeParameter param = DateTimeParameter.builder().name("paramName").value(date).build();
		JsonPath json = getJsonPath(param);
		assertThat(json.getString("name"), equalTo("paramName"));
		assertThat(json.get("valueDateTime"), equalTo(TEST_DATE_STRING));
	}

	@Test
	public void uriParameter() throws Exception {
		UriParameter param = UriParameter.builder().name("paramName").value(new Uri("paramValue")).build();
		assertEquals("paramName", param.getName());
		assertEquals("paramValue", param.getValue().getUriValue());
		printPrettyJson(param);
		UriParameter read = roundTrip(param, UriParameter.class);
		assertEquals("paramName", read.getName());
		assertEquals("paramValue", read.getValue().getUriValue());
	}

	@Test
	public void stringParameter() throws Exception {
		StringParameter param = StringParameter.builder().name("paramName").value("paramValue").build();
		assertEquals("paramName", param.getName());
		assertEquals("paramValue", param.getValue());
		printPrettyJson(param);
		StringParameter read = roundTrip(param, StringParameter.class);
		assertEquals("paramName", read.getName());
		assertEquals("paramValue", read.getValue());
	}

	@Test
	public void integerParameter() throws Exception {
		IntegerParameter param = IntegerParameter.builder().name("paramName").value(1).build();
		assertEquals("paramName", param.getName());
		assertEquals(Integer.valueOf(1), param.getValue());
		printPrettyJson(param);
		IntegerParameter read = roundTrip(param, IntegerParameter.class);
		assertEquals("paramName", read.getName());
		assertEquals(Integer.valueOf(1), read.getValue());
	}

	@Test
	public void decimalParameter() throws Exception {
		DecimalParameter param = DecimalParameter.builder().name("paramName").value(1.1d).build();
		assertEquals("paramName", param.getName());
		assertEquals(Double.valueOf(1.1), param.getValue());
		printPrettyJson(param);
		DecimalParameter read = roundTrip(param, DecimalParameter.class);
		assertEquals("paramName", read.getName());
		assertEquals(Double.valueOf(1.1), read.getValue());
	}

	@Test
	public void dateTimeParameter() throws Exception {
		DateTimeParameter param = DateTimeParameter.builder()
				.name("paramName")
				.value(FhirDates.parseDate(TEST_DATE_STRING))
				.build();
		assertEquals("paramName", param.getName());
		assertEquals(FhirDates.parseDate(TEST_DATE_STRING), param.getValue());
		printPrettyJson(param);
		DateTimeParameter read = roundTrip(param, DateTimeParameter.class);
		assertEquals("paramName", read.getName());
		assertEquals(FhirDates.parseDate(TEST_DATE_STRING), read.getValue());
	}

	@Test
	public void codeParameter() throws Exception {
		CodeParameter param = CodeParameter.builder().name("paramName").value(new Code("code")).build();
		assertEquals("paramName", param.getName());
		assertEquals("code", param.getValue().getCodeValue());
		printPrettyJson(param);
		CodeParameter read = roundTrip(param, CodeParameter.class);
		assertEquals("paramName", read.getName());
		assertEquals("code", read.getValue().getCodeValue());
	}

	@Test
	public void booleanParameter() throws Exception {
		BooleanParameter param = BooleanParameter.builder().name("paramName").value(true).build();
		assertEquals("paramName", param.getName());
		assertEquals(true, param.getValue());
		printPrettyJson(param);
		BooleanParameter read = roundTrip(param, BooleanParameter.class);
		assertEquals("paramName", read.getName());
		assertEquals(true, read.getValue());
	}
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.compression;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageDecoder;
import io.netty.util.internal.EmptyArrays;
import java.util.List;
import java.util.zip.Adler32;
import java.util.zip.Checksum;
import static io.netty.handler.codec.compression.FastLz.*;
/**
* Uncompresses a {@link ByteBuf} encoded by {@link FastLzFrameEncoder} using the FastLZ algorithm.
*
* See <a href="https://github.com/netty/netty/issues/2750">FastLZ format</a>.
*/
public class FastLzFrameDecoder extends ByteToMessageDecoder {
    /**
     * Current state of decompression.
     */
    private enum State {
        INIT_BLOCK,
        INIT_BLOCK_PARAMS,
        DECOMPRESS_DATA,
        CORRUPTED
    }

    private State currentState = State.INIT_BLOCK;

    /**
     * Underlying checksum calculator in use.
     * {@code null} when checksum validation is disabled.
     */
    private final Checksum checksum;

    /**
     * Length of current received chunk of data.
     */
    private int chunkLength;

    /**
     * Original length of current received chunk of data.
     * It is equal to {@link #chunkLength} for non compressed chunks.
     */
    private int originalLength;

    /**
     * Indicates is this chunk compressed or not.
     */
    private boolean isCompressed;

    /**
     * Indicates is this chunk has checksum or not.
     */
    private boolean hasChecksum;

    /**
     * Checksum value of current received chunk of data which has checksum.
     */
    private int currentChecksum;

    /**
     * Creates the fastest FastLZ decoder without checksum calculation.
     */
    public FastLzFrameDecoder() {
        this(false);
    }

    /**
     * Creates a FastLZ decoder with calculation of checksums as specified.
     *
     * @param validateChecksums
     *        If true, the checksum field will be validated against the actual
     *        uncompressed data, and if the checksums do not match, a suitable
     *        {@link DecompressionException} will be thrown.
     *        Note, that in this case decoder will use {@link java.util.zip.Adler32}
     *        as a default checksum calculator.
     */
    public FastLzFrameDecoder(boolean validateChecksums) {
        this(validateChecksums ? new Adler32() : null);
    }

    /**
     * Creates a FastLZ decoder with specified checksum calculator.
     *
     * @param checksum
     *        the {@link Checksum} instance to use to check data for integrity.
     *        You may set {@code null} if you do not want to validate checksum of each block.
     */
    public FastLzFrameDecoder(Checksum checksum) {
        this.checksum = checksum;
    }

    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
        try {
            switch (currentState) {
            case INIT_BLOCK:
                // Need the 3-byte magic plus the 1-byte options field.
                if (in.readableBytes() < 4) {
                    break;
                }

                final int magic = in.readUnsignedMedium();
                if (magic != MAGIC_NUMBER) {
                    throw new DecompressionException("unexpected block identifier");
                }

                final byte options = in.readByte();
                isCompressed = (options & 0x01) == BLOCK_TYPE_COMPRESSED;
                hasChecksum = (options & 0x10) == BLOCK_WITH_CHECKSUM;

                currentState = State.INIT_BLOCK_PARAMS;
                // fall through - try to read the block parameters in the same pass
            case INIT_BLOCK_PARAMS:
                if (in.readableBytes() < 2 + (isCompressed ? 2 : 0) + (hasChecksum ? 4 : 0)) {
                    break;
                }
                currentChecksum = hasChecksum ? in.readInt() : 0;
                chunkLength = in.readUnsignedShort();
                originalLength = isCompressed ? in.readUnsignedShort() : chunkLength;

                currentState = State.DECOMPRESS_DATA;
                // fall through - try to decompress the payload in the same pass
            case DECOMPRESS_DATA:
                final int chunkLength = this.chunkLength;
                if (in.readableBytes() < chunkLength) {
                    break;
                }

                final int idx = in.readerIndex();
                final int originalLength = this.originalLength;
                final ByteBuf uncompressed;
                final byte[] output;
                final int outputPtr;

                if (originalLength != 0) {
                    uncompressed = ctx.alloc().heapBuffer(originalLength, originalLength);
                    output = uncompressed.array();
                    outputPtr = uncompressed.arrayOffset() + uncompressed.writerIndex();
                } else {
                    // Zero-length chunk: nothing will be emitted, so no buffer is allocated.
                    uncompressed = null;
                    output = EmptyArrays.EMPTY_BYTES;
                    outputPtr = 0;
                }

                boolean success = false;
                try {
                    if (isCompressed) {
                        final byte[] input;
                        final int inputPtr;
                        if (in.hasArray()) {
                            input = in.array();
                            inputPtr = in.arrayOffset() + idx;
                        } else {
                            // Non-heap buffer: copy the chunk out so FastLz can work on an array.
                            input = new byte[chunkLength];
                            in.getBytes(idx, input);
                            inputPtr = 0;
                        }

                        final int decompressedBytes = decompress(input, inputPtr, chunkLength,
                                output, outputPtr, originalLength);
                        if (originalLength != decompressedBytes) {
                            throw new DecompressionException(String.format(
                                    "stream corrupted: originalLength(%d) and actual length(%d) mismatch",
                                    originalLength, decompressedBytes));
                        }
                    } else {
                        in.getBytes(idx, output, outputPtr, chunkLength);
                    }

                    final Checksum checksum = this.checksum;
                    if (hasChecksum && checksum != null) {
                        checksum.reset();
                        checksum.update(output, outputPtr, originalLength);

                        final int checksumResult = (int) checksum.getValue();
                        if (checksumResult != currentChecksum) {
                            throw new DecompressionException(String.format(
                                    "stream corrupted: mismatching checksum: %d (expected: %d)",
                                    checksumResult, currentChecksum));
                        }
                    }

                    if (uncompressed != null) {
                        uncompressed.writerIndex(uncompressed.writerIndex() + originalLength);
                        out.add(uncompressed);
                    }
                    in.skipBytes(chunkLength);

                    currentState = State.INIT_BLOCK;
                    success = true;
                } finally {
                    // Fix: the null check is required for zero-length chunks, where no buffer
                    // was allocated. Unconditionally calling release() there would throw a
                    // NullPointerException that masks the real decompression/checksum failure.
                    if (!success && uncompressed != null) {
                        uncompressed.release();
                    }
                }
                break;
            case CORRUPTED:
                // Once corrupted, silently discard everything that arrives.
                in.skipBytes(in.readableBytes());
                break;
            default:
                throw new IllegalStateException();
            }
        } catch (Exception e) {
            // Any decoding failure permanently poisons this decoder instance.
            currentState = State.CORRUPTED;
            throw e;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zookeeper.server.quorum;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.NoRouteToHostException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.zookeeper.PortAssignment;
import org.junit.jupiter.api.Test;
public class MultipleAddressesTest {

    public static final int PORTS_AMOUNT = 10;

    @Test
    public void testIsEmpty() {
        MultipleAddresses ma = new MultipleAddresses();
        assertTrue(ma.isEmpty());

        ma.addAddress(new InetSocketAddress(22));
        assertFalse(ma.isEmpty());
    }

    @Test
    public void testGetAllAddresses() {
        List<InetSocketAddress> expected = getAddressList();
        MultipleAddresses ma = new MultipleAddresses(expected);
        assertTrue(CollectionUtils.isEqualCollection(expected, ma.getAllAddresses()));

        // Re-adding an already-known address must not change the set.
        ma.addAddress(expected.get(1));
        assertTrue(CollectionUtils.isEqualCollection(expected, ma.getAllAddresses()));
    }

    @Test
    public void testGetAllHostStrings() {
        List<InetSocketAddress> addressList = getAddressList();
        List<String> expectedHosts = getHostStrings(addressList);
        MultipleAddresses ma = new MultipleAddresses(addressList);
        assertTrue(CollectionUtils.isEqualCollection(expectedHosts, ma.getAllHostStrings()));

        // Duplicates must not change the host-string collection either.
        ma.addAddress(addressList.get(addressList.size() - 1));
        assertTrue(CollectionUtils.isEqualCollection(expectedHosts, ma.getAllHostStrings()));
    }

    @Test
    public void testGetAllPorts() {
        List<Integer> expectedPorts = getPortList();
        MultipleAddresses ma = new MultipleAddresses(getAddressList(expectedPorts));
        assertTrue(CollectionUtils.isEqualCollection(expectedPorts, ma.getAllPorts()));

        // Same port on a different host must not add a new port entry.
        ma.addAddress(new InetSocketAddress("localhost", expectedPorts.get(expectedPorts.size() - 1)));
        assertTrue(CollectionUtils.isEqualCollection(expectedPorts, ma.getAllPorts()));
    }

    @Test
    public void testGetWildcardAddresses() {
        List<Integer> ports = getPortList();
        List<InetSocketAddress> addressList = getAddressList(ports);
        MultipleAddresses ma = new MultipleAddresses(addressList);

        // Expected result: the same ports bound to the wildcard address.
        List<InetSocketAddress> expectedWildcards = new ArrayList<>();
        for (Integer port : ports) {
            expectedWildcards.add(new InetSocketAddress(port));
        }
        assertTrue(CollectionUtils.isEqualCollection(expectedWildcards, ma.getWildcardAddresses()));

        ma.addAddress(new InetSocketAddress("localhost", ports.get(ports.size() - 1)));
        assertTrue(CollectionUtils.isEqualCollection(expectedWildcards, ma.getWildcardAddresses()));
    }

    @Test
    public void testGetValidAddress() throws NoRouteToHostException {
        List<InetSocketAddress> addressList = getAddressList();
        MultipleAddresses ma = new MultipleAddresses(addressList);
        assertTrue(addressList.contains(ma.getReachableAddress()));
    }

    @Test
    public void testGetValidAddressWithNotValid() {
        assertThrows(NoRouteToHostException.class, () -> {
            // IP chosen because it is reserved for documentation/examples and should be unreachable (RFC 5737)
            MultipleAddresses ma = new MultipleAddresses(new InetSocketAddress("203.0.113.1", 22));
            ma.getReachableAddress();
        });
    }

    @Test
    public void testGetReachableOrOneWithSingleReachableAddress() {
        InetSocketAddress reachable = new InetSocketAddress("127.0.0.1", PortAssignment.unique());
        MultipleAddresses ma = new MultipleAddresses(Collections.singletonList(reachable));
        assertEquals(reachable, ma.getReachableOrOne());
    }

    @Test
    public void testGetReachableOrOneWithSingleUnreachableAddress() {
        InetSocketAddress unreachable = new InetSocketAddress("unreachable.address.zookeeper.apache.com", 1234);
        MultipleAddresses ma = new MultipleAddresses(Collections.singletonList(unreachable));
        // With no reachable candidate, the single known address is returned as-is.
        assertEquals(unreachable, ma.getReachableOrOne());
    }

    @Test
    public void testRecreateSocketAddresses() throws UnknownHostException {
        // Resolve a multi-homed public name so re-resolution has several candidates.
        List<InetSocketAddress> resolvedAddresses = new ArrayList<>();
        for (InetAddress resolved : InetAddress.getAllByName("google.com")) {
            resolvedAddresses.add(new InetSocketAddress(resolved, 222));
        }
        MultipleAddresses ma = new MultipleAddresses(resolvedAddresses.get(resolvedAddresses.size() - 1));

        List<InetSocketAddress> current = new ArrayList<>(ma.getAllAddresses());
        assertEquals(1, current.size());
        assertEquals(resolvedAddresses.get(resolvedAddresses.size() - 1), current.get(0));

        ma.recreateSocketAddresses();
        current = new ArrayList<>(ma.getAllAddresses());
        assertEquals(1, current.size());
        assertEquals(resolvedAddresses.get(0), current.get(0));
    }

    @Test
    public void testRecreateSocketAddressesWithWrongAddresses() {
        // "locahost" (sic) cannot resolve, so the address must stay untouched.
        InetSocketAddress unresolvable = new InetSocketAddress("locahost", 222);
        MultipleAddresses ma = new MultipleAddresses(unresolvable);
        ma.recreateSocketAddresses();
        assertEquals(unresolvable, ma.getOne());
    }

    @Test
    public void testAlwaysGetReachableAddress() throws Exception {
        InetSocketAddress reachableHost = new InetSocketAddress("127.0.0.1", 1234);
        InetSocketAddress unreachableHost1 = new InetSocketAddress("unreachable1.address.zookeeper.apache.com", 1234);
        InetSocketAddress unreachableHost2 = new InetSocketAddress("unreachable2.address.zookeeper.apache.com", 1234);
        InetSocketAddress unreachableHost3 = new InetSocketAddress("unreachable3.address.zookeeper.apache.com", 1234);
        MultipleAddresses ma = new MultipleAddresses(
                Arrays.asList(unreachableHost1, unreachableHost2, unreachableHost3, reachableHost));

        // we call the getReachableAddress() function multiple times, to make sure we
        // always got back a reachable address and not just a random one
        for (int i = 0; i < 10; i++) {
            assertEquals(reachableHost, ma.getReachableAddress());
        }
    }

    @Test
    public void testGetAllReachableAddresses() throws Exception {
        InetSocketAddress reachableHost1 = new InetSocketAddress("127.0.0.1", 1234);
        InetSocketAddress reachableHost2 = new InetSocketAddress("127.0.0.1", 2345);
        InetSocketAddress unreachableHost1 = new InetSocketAddress("unreachable1.address.zookeeper.apache.com", 1234);
        InetSocketAddress unreachableHost2 = new InetSocketAddress("unreachable2.address.zookeeper.apache.com", 1234);
        MultipleAddresses ma = new MultipleAddresses(
                Arrays.asList(unreachableHost1, unreachableHost2, reachableHost1, reachableHost2));

        Set<InetSocketAddress> expected = new HashSet<>(Arrays.asList(reachableHost1, reachableHost2));
        assertEquals(expected, ma.getAllReachableAddresses());
    }

    @Test
    public void testGetAllReachableAddressesOrAllWhenSomeReachable() throws Exception {
        InetSocketAddress reachableHost1 = new InetSocketAddress("127.0.0.1", 1234);
        InetSocketAddress reachableHost2 = new InetSocketAddress("127.0.0.1", 2345);
        InetSocketAddress unreachableHost1 = new InetSocketAddress("unreachable1.address.zookeeper.apache.com", 1234);
        InetSocketAddress unreachableHost2 = new InetSocketAddress("unreachable2.address.zookeeper.apache.com", 1234);
        MultipleAddresses ma = new MultipleAddresses(
                Arrays.asList(unreachableHost1, unreachableHost2, reachableHost1, reachableHost2));

        // When at least one address is reachable, only the reachable ones are returned.
        Set<InetSocketAddress> expected = new HashSet<>(Arrays.asList(reachableHost1, reachableHost2));
        assertEquals(expected, ma.getAllReachableAddressesOrAll());
    }

    @Test
    public void testGetAllReachableAddressesOrAllWhenNoneReachable() throws Exception {
        InetSocketAddress unreachableHost1 = new InetSocketAddress("unreachable1.address.zookeeper.apache.com", 1234);
        InetSocketAddress unreachableHost2 = new InetSocketAddress("unreachable2.address.zookeeper.apache.com", 1234);
        InetSocketAddress unreachableHost3 = new InetSocketAddress("unreachable3.address.zookeeper.apache.com", 1234);
        List<InetSocketAddress> allUnreachable = Arrays.asList(unreachableHost1, unreachableHost2, unreachableHost3);
        MultipleAddresses ma = new MultipleAddresses(allUnreachable);

        // When nothing is reachable, the full address set is returned as a fallback.
        assertEquals(new HashSet<>(allUnreachable), ma.getAllReachableAddressesOrAll());
    }

    @Test
    public void testEquals() {
        List<InetSocketAddress> addressList = getAddressList();
        MultipleAddresses first = new MultipleAddresses(addressList);
        MultipleAddresses second = new MultipleAddresses(addressList);
        assertEquals(first, second);

        MultipleAddresses different = new MultipleAddresses(getAddressList());
        assertNotEquals(first, different);
    }

    @Test
    public void testSize() {
        List<Integer> ports = getPortList();
        MultipleAddresses ma = new MultipleAddresses(getAddressList(ports));
        assertEquals(PORTS_AMOUNT, ma.size());
    }

    // Allocates PORTS_AMOUNT unique, currently-free port numbers.
    public List<Integer> getPortList() {
        List<Integer> ports = new ArrayList<>();
        for (int i = 0; i < PORTS_AMOUNT; i++) {
            ports.add(PortAssignment.unique());
        }
        return ports;
    }

    public List<InetSocketAddress> getAddressList() {
        return getAddressList(getPortList());
    }

    // Builds one loopback address per port: 127.0.0.<index>:<port>.
    public List<InetSocketAddress> getAddressList(List<Integer> ports) {
        List<InetSocketAddress> addressList = new ArrayList<>();
        for (int i = 0; i < ports.size(); i++) {
            addressList.add(new InetSocketAddress("127.0.0." + i, ports.get(i)));
        }
        return addressList;
    }

    // Host strings expected for addresses produced by getAddressList(List).
    private List<String> getHostStrings(List<InetSocketAddress> addresses) {
        List<String> hosts = new ArrayList<>();
        for (int i = 0; i < addresses.size(); i++) {
            hosts.add("127.0.0." + i);
        }
        return hosts;
    }
}
| |
/**
* Copyright (c) 2014 Martin Paljak
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.esteid.applet;
import javacard.framework.APDU;
import javacard.framework.Applet;
import javacard.framework.ISOException;
import javacard.framework.JCSystem;
import javacard.framework.Util;
import javacard.security.KeyBuilder;
import javacard.security.RSAPrivateCrtKey;
import javacardx.crypto.Cipher;
import visa.openplatform.OPSystem;
// See https://github.com/martinpaljak/AppletPlayground/wiki/FakeEstEIDApplet
public final class FakeEstEID extends Applet {
// Interesting Data (tm)
// RSA CRT keypair slots; created empty in the constructor and presumably
// loaded later through the proprietary 0x80-class commands — TODO confirm.
private RSAPrivateCrtKey auth;
private RSAPrivateCrtKey sign;
// Shared RSA PKCS#1 cipher instance, allocated once in the constructor.
private Cipher rsa;
// Backing storage for the certificate files AACE (auth) and DDCE (sign);
// served by READ BINARY, zero-filled at install time.
private byte[] authcert;
private byte[] signcert;
// Personal data file records, read via READ RECORD on file 5044.
private PersonalDataFile pd;
// TODO: actually maintain the PIN values?
private byte[] pin1 = new byte[13]; // +1 for length
private byte[] pin2 = new byte[13];
private byte[] puk = new byte[13];
// Less interesting objects
// File identifiers that are used by baastarkvara
private final short FID_3F00 = (short) 0x3F00; // master file (MF)
private final short FID_0013 = (short) 0x0013; // record file under EEEE
private final short FID_0016 = (short) 0x0016; // record file under MF
private final short FID_EEEE = (short) 0xEEEE; // application DF
private final short FID_5044 = (short) 0x5044; // personal data file
private final short FID_AACE = (short) 0xAACE; // authentication certificate
private final short FID_DDCE = (short) 0xDDCE; // signing certificate
private final short FID_0033 = (short) 0x0033; // record file under EEEE
// FCI bytes;
public final byte[] fci_mf = new byte[] { (byte) 0x6F, (byte) 0x26,
(byte) 0x82, (byte) 0x01, (byte) 0x38, (byte) 0x83, (byte) 0x02,
(byte) 0x3F, (byte) 0x00, (byte) 0x84, (byte) 0x02, (byte) 0x4D,
(byte) 0x46, (byte) 0x85, (byte) 0x02, (byte) 0x57, (byte) 0x3E,
(byte) 0x8A, (byte) 0x01, (byte) 0x05, (byte) 0xA1, (byte) 0x03,
(byte) 0x8B, (byte) 0x01, (byte) 0x02, (byte) 0x81, (byte) 0x08,
(byte) 0xD2, (byte) 0x76, (byte) 0x00, (byte) 0x00, (byte) 0x28,
(byte) 0xFF, (byte) 0x05, (byte) 0x2D, (byte) 0x82, (byte) 0x03,
(byte) 0x03, (byte) 0x00, (byte) 0x00 };
public final byte[] fci_eeee = new byte[] { (byte) 0x6F,
(byte) 0x25, (byte) 0x82, (byte) 0x01, (byte) 0x38, (byte) 0x83,
(byte) 0x02, (byte) 0xEE, (byte) 0xEE, (byte) 0x84, (byte) 0x10,
(byte) 0xD2, (byte) 0x33, (byte) 0x00, (byte) 0x00, (byte) 0x01,
(byte) 0x00, (byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x00,
(byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00,
(byte) 0x00, (byte) 0x85, (byte) 0x02, (byte) 0x57, (byte) 0x3E,
(byte) 0x8A, (byte) 0x01, (byte) 0x05, (byte) 0xA1, (byte) 0x03,
(byte) 0x8B, (byte) 0x01, (byte) 0x02 };
public final byte[] fci_aace = new byte[] { (byte) 0x62,
(byte) 0x18, (byte) 0x82, (byte) 0x01, (byte) 0x01, (byte) 0x83,
(byte) 0x02, (byte) 0xAA, (byte) 0xCE, (byte) 0x85, (byte) 0x02,
(byte) 0x06, (byte) 0x00, (byte) 0x8A, (byte) 0x01, (byte) 0x05,
(byte) 0xA1, (byte) 0x08, (byte) 0x8B, (byte) 0x06, (byte) 0x00,
(byte) 0x30, (byte) 0x03, (byte) 0x06, (byte) 0x00, (byte) 0x01 };
public final byte[] fci_ddce = new byte[] { (byte) 0x62,
(byte) 0x18, (byte) 0x82, (byte) 0x01, (byte) 0x01, (byte) 0x83,
(byte) 0x02, (byte) 0xDD, (byte) 0xCE, (byte) 0x85, (byte) 0x02,
(byte) 0x06, (byte) 0x00, (byte) 0x8A, (byte) 0x01, (byte) 0x05,
(byte) 0xA1, (byte) 0x08, (byte) 0x8B, (byte) 0x06, (byte) 0x00,
(byte) 0x30, (byte) 0x03, (byte) 0x06, (byte) 0x00, (byte) 0x01 };
public final byte[] fci_5044 = new byte[] { (byte) 0x62,
(byte) 0x17, (byte) 0x82, (byte) 0x05, (byte) 0x04, (byte) 0x41,
(byte) 0x00, (byte) 0x32, (byte) 0x10, (byte) 0x83, (byte) 0x02,
(byte) 0x50, (byte) 0x44, (byte) 0x85, (byte) 0x02, (byte) 0x01,
(byte) 0x8C, (byte) 0x8A, (byte) 0x01, (byte) 0x05, (byte) 0xA1,
(byte) 0x03, (byte) 0x8B, (byte) 0x01, (byte) 0x01 };
public final byte[] fci_0016 = new byte[] { (byte) 0x62,
(byte) 0x17, (byte) 0x82, (byte) 0x05, (byte) 0x04, (byte) 0x41,
(byte) 0x00, (byte) 0x0C, (byte) 0x03, (byte) 0x83, (byte) 0x02,
(byte) 0x00, (byte) 0x16, (byte) 0x85, (byte) 0x02, (byte) 0x00,
(byte) 0x1A, (byte) 0x8A, (byte) 0x01, (byte) 0x05, (byte) 0xA1,
(byte) 0x03, (byte) 0x8B, (byte) 0x01, (byte) 0x01 };
public final byte[] fci_0013 = new byte[] { (byte) 0x62,
(byte) 0x18, (byte) 0x82, (byte) 0x05, (byte) 0x02, (byte) 0x41,
(byte) 0x00, (byte) 0x4F, (byte) 0x04, (byte) 0x83, (byte) 0x02,
(byte) 0x00, (byte) 0x13, (byte) 0x8A, (byte) 0x01, (byte) 0x05,
(byte) 0xA1, (byte) 0x08, (byte) 0x8B, (byte) 0x06, (byte) 0x00,
(byte) 0x30, (byte) 0x03, (byte) 0x07, (byte) 0x00, (byte) 0x01 };
public final byte[] fci_0033 = new byte[] { (byte) 0x62,
(byte) 0x18, (byte) 0x82, (byte) 0x05, (byte) 0x02, (byte) 0x41,
(byte) 0x00, (byte) 0x15, (byte) 0x01, (byte) 0x83, (byte) 0x02,
(byte) 0x00, (byte) 0x33, (byte) 0x8A, (byte) 0x01, (byte) 0x05,
(byte) 0xA1, (byte) 0x08, (byte) 0x8B, (byte) 0x06, (byte) 0x00,
(byte) 0x30, (byte) 0x03, (byte) 0x07, (byte) 0x00, (byte) 0x01 };
// Records of EEEE/0013
public final byte[] eeee_0013_1 = new byte[] { (byte) 0x83,
(byte) 0x04, (byte) 0x01, (byte) 0x00, (byte) 0x10, (byte) 0x01,
(byte) 0xC0, (byte) 0x02, (byte) 0x81, (byte) 0x80, (byte) 0x91,
(byte) 0x03, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x7B,
(byte) 0x18, (byte) 0x80, (byte) 0x01, (byte) 0x00, (byte) 0xA1,
(byte) 0x0A, (byte) 0x8B, (byte) 0x08, (byte) 0x00, (byte) 0x30,
(byte) 0x01, (byte) 0x03, (byte) 0x02, (byte) 0x04, (byte) 0x03,
(byte) 0x05, (byte) 0xB6, (byte) 0x07, (byte) 0x95, (byte) 0x01,
(byte) 0x40, (byte) 0x89, (byte) 0x02, (byte) 0x13, (byte) 0x10,
(byte) 0x7B, (byte) 0x11, (byte) 0x80, (byte) 0x01, (byte) 0x06,
(byte) 0xA1, (byte) 0x03, (byte) 0x8B, (byte) 0x01, (byte) 0x09,
(byte) 0xB8, (byte) 0x07, (byte) 0x95, (byte) 0x01, (byte) 0x40,
(byte) 0x89, (byte) 0x02, (byte) 0x11, (byte) 0x30, (byte) 0x7B,
(byte) 0x11, (byte) 0x80, (byte) 0x01, (byte) 0x07, (byte) 0xA1,
(byte) 0x03, (byte) 0x8B, (byte) 0x01, (byte) 0x0A, (byte) 0xB8,
(byte) 0x07, (byte) 0x95, (byte) 0x01, (byte) 0x40, (byte) 0x89,
(byte) 0x02, (byte) 0x11, (byte) 0x30 };
public final byte[] eeee_0013_2 = new byte[] { (byte) 0x83,
(byte) 0x04, (byte) 0x02, (byte) 0x00, (byte) 0x10, (byte) 0x02,
(byte) 0xC0, (byte) 0x02, (byte) 0x81, (byte) 0x80, (byte) 0x91,
(byte) 0x03, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x7B,
(byte) 0x18, (byte) 0x80, (byte) 0x01, (byte) 0x00, (byte) 0xA1,
(byte) 0x0A, (byte) 0x8B, (byte) 0x08, (byte) 0x00, (byte) 0x30,
(byte) 0x01, (byte) 0x03, (byte) 0x02, (byte) 0x04, (byte) 0x03,
(byte) 0x05, (byte) 0xF6, (byte) 0x07, (byte) 0x95, (byte) 0x01,
(byte) 0x40, (byte) 0x89, (byte) 0x02, (byte) 0x13, (byte) 0x10,
(byte) 0x7B, (byte) 0x11, (byte) 0x80, (byte) 0x01, (byte) 0x06,
(byte) 0xA1, (byte) 0x03, (byte) 0x8B, (byte) 0x01, (byte) 0x09,
(byte) 0xB8, (byte) 0x07, (byte) 0x95, (byte) 0x01, (byte) 0x40,
(byte) 0x89, (byte) 0x02, (byte) 0x11, (byte) 0x30, (byte) 0x7B,
(byte) 0x11, (byte) 0x80, (byte) 0x01, (byte) 0x07, (byte) 0xA1,
(byte) 0x03, (byte) 0x8B, (byte) 0x01, (byte) 0x0A, (byte) 0xB8,
(byte) 0x07, (byte) 0x95, (byte) 0x01, (byte) 0x40, (byte) 0x89,
(byte) 0x02, (byte) 0x11, (byte) 0x30 };
public final byte[] eeee_0013_3 = new byte[] { (byte) 0x83,
(byte) 0x04, (byte) 0x11, (byte) 0x00, (byte) 0x10, (byte) 0x11,
(byte) 0xC0, (byte) 0x02, (byte) 0x81, (byte) 0x80, (byte) 0x91,
(byte) 0x03, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x7B,
(byte) 0x18, (byte) 0x80, (byte) 0x01, (byte) 0x00, (byte) 0xA1,
(byte) 0x0A, (byte) 0x8B, (byte) 0x08, (byte) 0x00, (byte) 0x30,
(byte) 0x01, (byte) 0x03, (byte) 0x02, (byte) 0x04, (byte) 0x03,
(byte) 0x05, (byte) 0xA4, (byte) 0x07, (byte) 0x95, (byte) 0x01,
(byte) 0x40, (byte) 0x89, (byte) 0x02, (byte) 0x21, (byte) 0x13,
(byte) 0x7B, (byte) 0x11, (byte) 0x80, (byte) 0x01, (byte) 0x06,
(byte) 0xA1, (byte) 0x03, (byte) 0x8B, (byte) 0x01, (byte) 0x0B,
(byte) 0xB8, (byte) 0x07, (byte) 0x95, (byte) 0x01, (byte) 0x40,
(byte) 0x89, (byte) 0x02, (byte) 0x11, (byte) 0x30, (byte) 0x7B,
(byte) 0x11, (byte) 0x80, (byte) 0x01, (byte) 0x07, (byte) 0xA1,
(byte) 0x03, (byte) 0x8B, (byte) 0x01, (byte) 0x0C, (byte) 0xB8,
(byte) 0x07, (byte) 0x95, (byte) 0x01, (byte) 0x40, (byte) 0x89,
(byte) 0x02, (byte) 0x11, (byte) 0x30 };
public final byte[] eeee_0013_4 = new byte[] { (byte) 0x83,
(byte) 0x04, (byte) 0x12, (byte) 0x00, (byte) 0x10, (byte) 0x12,
(byte) 0xC0, (byte) 0x02, (byte) 0x81, (byte) 0x80, (byte) 0x91,
(byte) 0x03, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x7B,
(byte) 0x18, (byte) 0x80, (byte) 0x01, (byte) 0x00, (byte) 0xA1,
(byte) 0x0A, (byte) 0x8B, (byte) 0x08, (byte) 0x00, (byte) 0x30,
(byte) 0x01, (byte) 0x03, (byte) 0x02, (byte) 0x04, (byte) 0x03,
(byte) 0x05, (byte) 0xE4, (byte) 0x07, (byte) 0x95, (byte) 0x01,
(byte) 0x40, (byte) 0x89, (byte) 0x02, (byte) 0x21, (byte) 0x13,
(byte) 0x7B, (byte) 0x11, (byte) 0x80, (byte) 0x01, (byte) 0x06,
(byte) 0xA1, (byte) 0x03, (byte) 0x8B, (byte) 0x01, (byte) 0x0B,
(byte) 0xB8, (byte) 0x07, (byte) 0x95, (byte) 0x01, (byte) 0x40,
(byte) 0x89, (byte) 0x02, (byte) 0x11, (byte) 0x30, (byte) 0x7B,
(byte) 0x11, (byte) 0x80, (byte) 0x01, (byte) 0x07, (byte) 0xA1,
(byte) 0x03, (byte) 0x8B, (byte) 0x01, (byte) 0x0C, (byte) 0xB8,
(byte) 0x07, (byte) 0x95, (byte) 0x01, (byte) 0x40, (byte) 0x89,
(byte) 0x02, (byte) 0x11, (byte) 0x30 };
// Records of MF/0016
public final byte[] mf_0016_1 = new byte[] { (byte) 0x80,
(byte) 0x01, (byte) 0x03, (byte) 0x90, (byte) 0x01, (byte) 0x03,
(byte) 0x83, (byte) 0x02, (byte) 0x00, (byte) 0x00 };
public final byte[] mf_0016_2 = new byte[] { (byte) 0x80,
(byte) 0x01, (byte) 0x03, (byte) 0x90, (byte) 0x01, (byte) 0x03,
(byte) 0x83, (byte) 0x02, (byte) 0x00, (byte) 0x00 };
public final byte[] mf_0016_3 = new byte[] { (byte) 0x80,
(byte) 0x01, (byte) 0x03, (byte) 0x90, (byte) 0x01, (byte) 0x03 };
// Record of EEEE/0033
public final byte[] eeee_0033_1 = new byte[] { (byte) 0x00,
(byte) 0xA4, (byte) 0x08, (byte) 0x95, (byte) 0x01, (byte) 0x40,
(byte) 0x83, (byte) 0x03, (byte) 0x80, (byte) 0x11, (byte) 0x00,
(byte) 0xB6, (byte) 0x08, (byte) 0x95, (byte) 0x01, (byte) 0x40,
(byte) 0x83, (byte) 0x03, (byte) 0x80, (byte) 0x01, (byte) 0x00 };
// Historical bytes
public final byte[] histbytes = new byte[] {(byte) 0x45, (byte) 0x73, (byte) 0x74, (byte) 0x45, (byte) 0x49, (byte) 0x44, (byte) 0x20, (byte) 0x76, (byte) 0x65, (byte) 0x72, (byte) 0x20, (byte) 0x31, (byte) 0x2E, (byte) 0x30};
// AID
public final byte[] aid = new byte[] {(byte)0xD2, (byte)0x33, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x45, (byte)0x73, (byte)0x74, (byte)0x45, (byte)0x49, (byte)0x44, (byte)0x20, (byte)0x76, (byte)0x33, (byte)0x35};
// This could be EEPROM for... fun and readability
// private short selected_file = FID_3F00;
// Transient (RAM) state: runtime_fields[selectedfile] holds the FID of the
// currently selected file; the array is CLEAR_ON_RESET (see constructor).
private short[] runtime_fields;
// Index of the selected-file slot within runtime_fields (always 0).
private short selectedfile = 0;
// 384-byte transient scratch buffer, cleared on card reset (see constructor).
private byte [] ram = null;
/**
 * Allocates all persistent and transient applet state: empty 2048-bit RSA CRT
 * key slots, zero-filled certificate buffers, a personal data file padded with
 * placeholder 'A' characters, the RSA cipher, and the transient RAM buffers.
 * Private: instances are only created through {@code install()}.
 */
private FakeEstEID() {
    auth = (RSAPrivateCrtKey) KeyBuilder.buildKey(KeyBuilder.TYPE_RSA_CRT_PRIVATE, KeyBuilder.LENGTH_RSA_2048, false);
    sign = (RSAPrivateCrtKey) KeyBuilder.buildKey(KeyBuilder.TYPE_RSA_CRT_PRIVATE, KeyBuilder.LENGTH_RSA_2048, false);
    // Mark both key slots uninitialized until key material is loaded.
    auth.clearKey();
    sign.clearKey();
    // 0x600-byte certificate buffers, explicitly zero-filled.
    authcert = new byte[0x600];
    Util.arrayFillNonAtomic(authcert, (short) 0, (short) authcert.length, (byte) 0x00);
    signcert = new byte[0x600];
    Util.arrayFillNonAtomic(signcert, (short) 0, (short) signcert.length, (byte) 0x00);
    pd = new PersonalDataFile();
    // Fill all records of pd with 'A'
    for (byte i = 1; i <= 16; i++) {
        byte[] src = pd.rec2field(i);
        Util.arrayFillNonAtomic(src, (short) 0, (short) src.length, (byte) 'A');
    }
    // Single transient slot tracking the currently selected file; cleared on reset.
    runtime_fields = JCSystem.makeTransientShortArray((short) 1, JCSystem.CLEAR_ON_RESET);
    rsa = Cipher.getInstance(Cipher.ALG_RSA_PKCS1, false);
    // 384-byte RAM scratch buffer; cleared on reset.
    ram = JCSystem.makeTransientByteArray((short) 384, JCSystem.CLEAR_ON_RESET);
}
/**
 * Java Card applet entry point: instantiates the applet and registers it
 * using the instance AID carried in the install parameter array.
 */
public static void install(byte[] bArray, short bOffset, byte bLength) throws ISOException {
    // bArray[bOffset] is the AID length; the AID bytes follow immediately after.
    new FakeEstEID().register(bArray, (short) (bOffset + 1), bArray[bOffset]);
}
/**
 * Called by the runtime when the applet is selected: resets the current file
 * selection to the master file (3F00) and accepts the selection.
 */
public boolean select() {
    runtime_fields[selectedfile] = FID_3F00;
    return true;
}
/**
 * APDU dispatcher. ISO-class commands (CLA 0x00, or 0x10 for chaining) go to
 * the real EstEID command emulation; the proprietary class 0x80 goes to the
 * mock value setters/getters. Anything else is rejected.
 */
public void process(APDU apdu) throws ISOException {
    if (selectingApplet()) {
        return;
    }
    final byte[] buffer = apdu.getBuffer();
    final byte cla = buffer[ISO7816.OFFSET_CLA];
    if (cla == (byte) 0x00 || cla == (byte) 0x10) {
        // ISO as described in specs, including command chaining
        process_real_commands(apdu, buffer);
    } else if (cla == (byte) 0x80) {
        // Proprietary: setting/getting values
        process_mock_commands(apdu, buffer);
    } else {
        ISOException.throwIt(ISO7816.SW_CLA_NOT_SUPPORTED);
    }
}
// Commands that are executed by opensc/qesteidutil
/**
 * Handles the ISO 7816 command set that real-world clients
 * (opensc, qesteidutil) send to an EstEID card: file selection,
 * binary/record reads, PIN verification, and the crypto operations.
 */
private void process_real_commands(APDU apdu, byte[] buffer) {
    short len = 0;
    short len2 = 0;
    byte p1 = buffer[ISO7816.OFFSET_P1];
    byte p2 = buffer[ISO7816.OFFSET_P2];
    switch (buffer[ISO7816.OFFSET_INS]) {
        case ISO7816.INS_SELECT:
            if (p1 == 0x00) {
                // SELECT MF: reset to the root of the file system.
                runtime_fields[selectedfile] = FID_3F00;
            } else if (buffer[ISO7816.OFFSET_LC] == 0x02) {
                len = apdu.setIncomingAndReceive();
                // must be 2 bytes of input.
                short fid = Util.makeShort(buffer[5], buffer[6]);
                // Only the file identifiers present on a real card are selectable.
                switch (fid) {
                    case FID_3F00:
                    case FID_EEEE:
                    case FID_0013:
                    case FID_0016:
                    case FID_5044:
                    case FID_AACE:
                    case FID_DDCE:
                    case FID_0033:
                        runtime_fields[selectedfile] = fid;
                        break;
                    default:
                        Pro.throwIt(ISO7816.SW_FILE_NOT_FOUND);
                        break;
                }
            } else if (p1 == 0x04) {
                // SELECT by AID.
                // NOTE(review): 'len' is still 0 here — setIncomingAndReceive()
                // is only called in the Lc==0x02 branch above, so this compares
                // zero bytes of the AID. Looks like a bug; confirm intent.
                if (!JCSystem.getAID().partialEquals(buffer, ISO7816.OFFSET_CDATA, (byte) len)) {
                    Pro.throwIt(ISO7816.SW_FILE_NOT_FOUND);
                }
            }
            // Send FCI if asked (P2 == 0x04 or 0x00): canned FCI per file.
            if (p2 == 0x04 || p2 == 0x00) {
                switch (runtime_fields[selectedfile]) {
                    case FID_3F00:
                        Pro.send_array(fci_mf);
                        break;
                    case FID_AACE:
                        Pro.send_array(fci_aace);
                        break;
                    case FID_DDCE:
                        Pro.send_array(fci_ddce);
                        break;
                    case FID_0013:
                        Pro.send_array(fci_0013);
                        break;
                    case FID_0016:
                        Pro.send_array(fci_0016);
                        break;
                    case FID_EEEE:
                        Pro.send_array(fci_eeee);
                        break;
                    case FID_5044:
                        Pro.send_array(fci_5044);
                        break;
                    case FID_0033:
                        Pro.send_array(fci_0033);
                        break;
                    default:
                        Pro.throwIt(ISO7816.SW_FILE_NOT_FOUND);
                }
            }
            break;
        case ISO7816.INS_READ_BINARY:
            // READ BINARY: P1/P2 encode the read offset; only the two
            // certificate files are transparent (binary) files.
            short offset = Util.makeShort(p1, p2);
            len = apdu.setOutgoing();
            if (runtime_fields[selectedfile] == FID_AACE) {
                Pro.send_array(authcert, offset, len);
            } else if (runtime_fields[selectedfile] == FID_DDCE) {
                Pro.send_array(signcert, offset, len);
            } else {
                Pro.throwIt(ISO7816.SW_FILE_INVALID);
            }
            break;
        case ISO7816.INS_READ_RECORD:
            // READ RECORD: P1 is the 1-based record number within the
            // currently selected record-structured file.
            byte recno = p1;
            if (runtime_fields[selectedfile] == FID_5044) {
                // Personal data file: records map to fixed-size fields.
                byte[] src = pd.rec2field(recno);
                if (src == null) {
                    Pro.throwIt(ISO7816.SW_RECORD_NOT_FOUND);
                }
                Pro.send_array(src);
            } else if (runtime_fields[selectedfile] == FID_0016) {
                if (recno == (byte) 1) {
                    Pro.send_array(mf_0016_1);
                } else if (recno == (byte) 2) {
                    Pro.send_array(mf_0016_2);
                } else if (recno == (byte) 3) {
                    Pro.send_array(mf_0016_3);
                } else {
                    Pro.throwIt(ISO7816.SW_RECORD_NOT_FOUND);
                }
            } else if (runtime_fields[selectedfile] == FID_0013) {
                if (recno == (byte) 1) {
                    Pro.send_array(eeee_0013_1);
                } else if (recno == (byte) 2) {
                    Pro.send_array(eeee_0013_2);
                } else if (recno == (byte) 3) {
                    Pro.send_array(eeee_0013_3);
                } else if (recno == (byte) 4) {
                    Pro.send_array(eeee_0013_4);
                } else {
                    Pro.throwIt(ISO7816.SW_RECORD_NOT_FOUND);
                }
            } else if (runtime_fields[selectedfile] == FID_0033) {
                if (recno == (byte) 1) {
                    Pro.send_array(eeee_0033_1);
                } else {
                    Pro.throwIt(ISO7816.SW_RECORD_NOT_FOUND);
                }
            } else
                Pro.throwIt(ISO7816.SW_FILE_INVALID);
            break;
        // Above is enough to show a "valid card" in qesteidutil/pkcs15-tool
        case ISO7816.INS_VERIFY:
            // We don't use PIN codes, so anything goes
            // But store it ... just in case
            // P2 selects the reference: 0x00 = PUK, 0x01 = PIN1, 0x02 = PIN2.
            // NOTE(review): for any other P2, 'src' stays null and src[0]
            // below throws NullPointerException (-> 6F00) instead of a
            // proper SW_INCORRECT_P1P2; confirm whether that is acceptable.
            byte [] src = null;
            if (p2 == 0x00) { // puk
                src = puk;
            } else if (p2 == 0x01) {
                src = pin1;
            } else if (p2 == 0x02) {
                src = pin2;
            }
            len = apdu.setIncomingAndReceive();
            // Check for same length :)
            // src[0] holds the stored PIN length; first use stores, later
            // uses only compare lengths.
            if (src[0] > 0) {
                if (src[0] != len) {
                    // 63Cx = verification failed, 2 tries remaining.
                    Pro.throwIt((short) 0x63C2);
                }
            } else {
                // FIXME: this approach requires transactions
                // NOTE(review): source offset is OFFSET_LC (the Lc byte),
                // not OFFSET_CDATA — this copies the length byte and misses
                // the last PIN digit. Presumably unintended; verify.
                Util.arrayCopyNonAtomic(buffer, ISO7816.OFFSET_LC, src, (short) 1, (short) len);
                src[0] = (byte) len;
            }
            Pro.throwIt(ISO7816.SW_NO_ERROR);
            break;
        case ISO7816.INS_CHANGE_REFERENCE_DATA:
            // We don't use PIN codes, so anything goes
            Pro.throwIt(ISO7816.SW_NO_ERROR);
            break;
        case ISO7816.INS_RESET_RETRY_COUNTER:
            // We don't use PIN codes, so anything goes
            Pro.throwIt(ISO7816.SW_NO_ERROR);
            break;
        // The following commands do actual crypto
        case ISO7816.INS_MANAGE_SECURITY_ENVIRONMENT:
            // Internal state is implicitly known
            Pro.throwIt(ISO7816.SW_NO_ERROR);
            break;
        case ISO7816.INS_INTERNAL_AUTHENTICATE:
            // We sign the incoming data with authentication key
            len = apdu.setIncomingAndReceive();
            rsa.init(auth, Cipher.MODE_ENCRYPT);
            len2 = rsa.doFinal(buffer, ISO7816.OFFSET_CDATA, len, ram, (short) 0);
            Pro.send_array(ram, (short) 0, len2);
            break;
        case ISO7816.INS_PERFORM_SECURITY_OPERATION:
            len = apdu.setIncomingAndReceive();
            // Sign and decrypt; P1/P2 together select the operation.
            short op = Util.makeShort(p1, p2);
            if (op == (short) 0x9E9A) { // sign
                rsa.init(sign, Cipher.MODE_ENCRYPT);
                len2 = rsa.doFinal(buffer, ISO7816.OFFSET_CDATA, len, ram, (short) 0);
                Pro.send_array(ram, (short) 0, len2);
            } else if (op == (short) 0x8086) { // decrypt
                // Cryptograms arrive chained: buffer[0] == 0x10 marks a
                // non-final chunk which is buffered in 'ram'.
                if (buffer[0] == 0x10) {
                    // Skip initial 0
                    short len1 = Util.arrayCopyNonAtomic(buffer, (short) (ISO7816.OFFSET_CDATA + 1), ram, (short) 2, (short) (len - 1));
                    // Store offset to pos0
                    Util.setShort(ram, (short) 0, len1);
                } else {
                    // Concatenate
                    len2 = Util.arrayCopyNonAtomic(buffer, ISO7816.OFFSET_CDATA, ram, Util.makeShort(ram[0], ram[1]), len);
                    // Shift back by two
                    len = Util.arrayCopyNonAtomic(ram, (short) 2, ram, (short) 0, (short) (len2 - 2));
                    // Decrypt directly into APDU buffer
                    rsa.init(auth, Cipher.MODE_DECRYPT);
                    len2 = rsa.doFinal(ram, (short) 0, len, buffer, (short) 0);
                    Pro.send((short) 0, len2);
                }
            } else
                Pro.throwIt(ISO7816.SW_INCORRECT_P1P2);
            break;
        default:
            Pro.throwIt(ISO7816.SW_INS_NOT_SUPPORTED);
    }
}
/**
 * Handles the proprietary (CLA 0x80) commands used by the test harness
 * to load state into the fake card: ATR historical bytes, certificates,
 * RSA key components, personal data records and PIN codes.
 */
private void process_mock_commands(APDU apdu, byte[] buffer) {
    byte p1 = buffer[ISO7816.OFFSET_P1];
    byte p2 = buffer[ISO7816.OFFSET_P2];
    short len = 0;
    short offset = 0;
    byte [] src = null;
    RSAPrivateCrtKey privkey = null;
    // set/get the values
    switch (buffer[ISO7816.OFFSET_INS]) {
        case 0x01: // Set historical bytes
            OPSystem.setATRHistBytes(histbytes, (short) 0, (byte) histbytes.length);
            break;
        case 0x02: // Store certificate
            // First two data bytes are the write offset within the
            // certificate buffer; the rest is certificate content.
            len = apdu.setIncomingAndReceive();
            offset = Util.makeShort(buffer[ISO7816.OFFSET_CDATA], buffer[ISO7816.OFFSET_CDATA + 1]);
            if (p1 == 0x01) {
                Util.arrayCopyNonAtomic(buffer, (short) (ISO7816.OFFSET_CDATA + 2), authcert, offset, (short) (len - 2));
            } else if (p1 == 0x02) {
                Util.arrayCopyNonAtomic(buffer, (short) (ISO7816.OFFSET_CDATA + 2), signcert, offset, (short) (len - 2));
            } else
                ISOException.throwIt(ISO7816.SW_INCORRECT_P1P2);
            break;
        case 0x03: // key material
            // Key material select: P1 picks the key, P2 picks the CRT
            // component, Lc == 0 means "get", otherwise "set".
            // NOTE(review): for P1 outside {1, 2} 'privkey' stays null and
            // the calls below throw NullPointerException (-> 6F00) rather
            // than SW_INCORRECT_P1P2; confirm whether that is acceptable.
            if (p1 == 0x01) {
                privkey = auth;
            } else if (p1 == 0x02) { // set
                privkey = sign;
            }
            if (buffer[ISO7816.OFFSET_LC] == 0x00) { // get
                if (p2 == 0x01) {
                    len = privkey.getP(buffer, (short) 0);
                } else if (p2 == 0x02) {
                    len = privkey.getQ(buffer, (short) 0);
                } else if (p2 == 0x03) {
                    len = privkey.getDP1(buffer, (short) 0);
                } else if (p2 == 0x04) {
                    len = privkey.getDQ1(buffer, (short) 0);
                } else if (p2 == 0x05) {
                    len = privkey.getPQ(buffer, (short) 0);
                } else {
                    ISOException.throwIt(ISO7816.SW_INCORRECT_P1P2);
                }
                apdu.setOutgoingAndSend((short) 0, len);
            } else { // set
                len = apdu.setIncomingAndReceive();
                if (p2 == 0x01) {
                    privkey.setP(buffer, ISO7816.OFFSET_CDATA, len);
                } else if (p2 == 0x02) {
                    privkey.setQ(buffer, ISO7816.OFFSET_CDATA, len);
                } else if (p2 == 0x03) {
                    privkey.setDP1(buffer, ISO7816.OFFSET_CDATA, len);
                } else if (p2 == 0x04) {
                    privkey.setDQ1(buffer, ISO7816.OFFSET_CDATA, len);
                } else if (p2 == 0x05) {
                    privkey.setPQ(buffer, ISO7816.OFFSET_CDATA, len);
                } else {
                    ISOException.throwIt(ISO7816.SW_INCORRECT_P1P2);
                }
            }
            break;
        case 0x04: // personal data file
            // P1 is the 1-based record number; Lc == 0 reads, else writes.
            src = pd.rec2field(buffer[ISO7816.OFFSET_P1]);
            if (buffer[ISO7816.OFFSET_LC] == 0x00) { // get
                Pro.send_array(src);
            } else { // set
                len = apdu.setIncomingAndReceive();
                // FIXME: padding with space is a violation in new apps
                Util.arrayFillNonAtomic(src, (short) 0, (short) src.length, (byte) ' ');
                Util.arrayCopyNonAtomic(buffer, ISO7816.OFFSET_CDATA, src, (short) 0, len);
            }
            break;
        case 0x05: // PIN codes
            // P2 selects the reference: 0x00 = PUK, 0x01 = PIN1, 0x02 = PIN2.
            if (p2 == 0x00) { // puk
                src = puk;
            } else if (p2 == 0x01) {
                src = pin1;
            } else if (p2 == 0x02) {
                src = pin2;
            } else
                ISOException.throwIt(ISO7816.SW_INCORRECT_P1P2);
            if (buffer[ISO7816.OFFSET_LC] == 0x00) { // get
                // src[0] is the stored length; return length byte + value.
                Pro.send_array(src, (short) 0, (short) (src[0] + 1));
            } else { // set
                len = apdu.setIncomingAndReceive();
                // NOTE(review): source offset is OFFSET_LC, not OFFSET_CDATA —
                // this copies the Lc byte and drops the last PIN digit.
                // Presumably unintended; verify against the harness.
                Util.arrayCopyNonAtomic(buffer, ISO7816.OFFSET_LC, src, (short) 0, (short) len);
                src[0] = (byte) len;
            }
            break;
        default:
            ISOException.throwIt(ISO7816.SW_INS_NOT_SUPPORTED);
    }
}
private class PersonalDataFile {
byte[] surname;
byte[] name1;
byte[] name2;
byte[] gender;
byte[] nationality;
byte[] dob;
byte[] idcode;
byte[] serial;
byte[] valid;
byte[] pob;
byte[] issued;
byte[] permit;
byte[] notes1;
byte[] notes2;
byte[] notes3;
byte[] notes4;
public PersonalDataFile() {
surname = new byte[28];
name1 = new byte[15];
name2 = new byte[15];
gender = new byte[1];
nationality = new byte[3];
dob = new byte[10];
idcode = new byte[11];
serial = new byte[9];
valid = new byte[10];
pob = new byte[35];
issued = new byte[10];
permit = new byte[50];
notes1 = new byte[50];
notes2 = new byte[50];
notes3 = new byte[50];
notes4 = new byte[50];
}
public byte[] rec2field(byte n) {
switch (n) {
case 1:
return surname;
case 2:
return name1;
case 3:
return name2;
case 4:
return gender;
case 5:
return nationality;
case 6:
return dob;
case 7:
return idcode;
case 8:
return serial;
case 9:
return valid;
case 10:
return pob;
case 11:
return issued;
case 12:
return permit;
case 13:
return notes1;
case 14:
return notes2;
case 15:
return notes3;
case 16:
return notes4;
default:
return null;
}
}
}
}
| |
/**
* Copyright (c) 2010
* Signavio, Sven Wagner-Boysen
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package de.hpi.bpmn2_0.model.choreography;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlIDREF;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.XmlType;
import de.hpi.bpmn2_0.annotations.CallingElement;
import de.hpi.bpmn2_0.annotations.ContainerElement;
import de.hpi.bpmn2_0.model.BaseElement;
import de.hpi.bpmn2_0.model.FlowElement;
import de.hpi.bpmn2_0.model.bpmndi.di.DiagramElement;
import de.hpi.bpmn2_0.model.callable.GlobalChoreographyTask;
import de.hpi.bpmn2_0.model.connector.Edge;
import de.hpi.bpmn2_0.transformation.Visitor;
/**
* <p>Java class for tCallChoreography complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="tCallChoreography">
* <complexContent>
* <extension base="{http://www.omg.org/spec/BPMN/20100524/MODEL}tChoreographyActivity">
* <sequence>
* <element ref="{http://www.omg.org/spec/BPMN/20100524/MODEL}participantAssociation" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* <attribute name="calledChoreographyRef" type="{http://www.w3.org/2001/XMLSchema}QName" />
* <anyAttribute processContents='lax' namespace='##other'/>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "tCallChoreography", propOrder = {
    "participantAssociation"
})
public class CallChoreography
    extends ChoreographyActivity implements ContainerElement, CallingElement
{
    protected List<ParticipantAssociation> participantAssociation;
    @XmlAttribute(name = "calledChoreographyRef")
    @XmlIDREF
    protected Choreography calledChoreographyRef;
    @XmlTransient
    public List<DiagramElement> _diagramElements = new ArrayList<DiagramElement>();

    /*
     * Constructors
     */

    /** No-argument constructor, required by JAXB. */
    public CallChoreography() {
        super();
    }

    /**
     * Builds a call choreography from an existing choreography activity.
     * Start/completion quantities are cleared because they do not apply to
     * a call; a plain choreography task additionally gets a fresh global
     * choreography task as its call target.
     */
    public CallChoreography(ChoreographyActivity choreoAct) {
        super(choreoAct);
        this.setStartQuantity(null);
        this.setCompletionQuantity(null);
        if (choreoAct instanceof ChoreographyTask) {
            this.setCalledChoreographyRef(new GlobalChoreographyTask());
        }
    }

    /**
     * Collects the elements this activity calls: the referenced global
     * task itself, or — when a sub-choreography is referenced — everything
     * called by the calling elements it contains.
     */
    public List<BaseElement> getCalledElements() {
        List<BaseElement> called = new ArrayList<BaseElement>();
        /* A referenced global task is itself the called element. */
        if (calledChoreographyRef instanceof GlobalChoreographyTask) {
            called.add(calledChoreographyRef);
            return called;
        }
        /* A referenced sub-choreography: recurse into its calling elements. */
        if (calledChoreographyRef instanceof Choreography) {
            for (FlowElement child : calledChoreographyRef.getFlowElement()) {
                if (child instanceof CallingElement) {
                    called.addAll(((CallingElement) child).getCalledElements());
                }
            }
        }
        return called;
    }

    /**
     * Returns every edge contained in this activity, descending into
     * nested container elements.
     */
    public List<Edge> getChildEdges() {
        List<Edge> edges = new ArrayList<Edge>();
        for (FlowElement child : this.getFlowElement()) {
            if (child instanceof Edge) {
                edges.add((Edge) child);
            } else if (child instanceof ContainerElement) {
                edges.addAll(((ContainerElement) child).getChildEdges());
            }
        }
        return edges;
    }

    /* Getter & Setter */

    /**
     * Returns the live list backing the participantAssociation property,
     * creating it on first access. Because the list is live, callers
     * mutate the JAXB object directly — hence no setter exists.
     *
     * <p>Objects of type {@link ParticipantAssociation} are allowed
     * in the list.</p>
     */
    public List<ParticipantAssociation> getParticipantAssociation() {
        if (this.participantAssociation == null) {
            this.participantAssociation = new ArrayList<ParticipantAssociation>();
        }
        return this.participantAssociation;
    }

    /**
     * Returns the referenced choreography, possibly {@code null}.
     *
     * @return possible object is {@link Choreography }
     */
    public Choreography getCalledChoreographyRef() {
        return calledChoreographyRef;
    }

    /**
     * Sets the referenced choreography.
     *
     * @param value allowed object is {@link Choreography }
     */
    public void setCalledChoreographyRef(Choreography value) {
        this.calledChoreographyRef = value;
    }

    public void acceptVisitor(Visitor v) {
        v.visitCallChoreography(this);
    }

    public List<DiagramElement> _getDiagramElements() {
        return _diagramElements;
    }

    /**
     * Delegates to the referenced choreography's flow elements; an empty
     * list is returned when no choreography is referenced.
     */
    public List<FlowElement> getFlowElement() {
        Choreography called = this.getCalledChoreographyRef();
        return (called == null) ? new ArrayList<FlowElement>() : called.getFlowElement();
    }
}
| |
package com.bq.corbel.iam.auth;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import net.oauth.jsontoken.JsonToken;
import com.bq.corbel.iam.model.Client;
import com.bq.corbel.iam.model.Domain;
import com.bq.corbel.iam.model.Scope;
import com.bq.corbel.iam.model.User;
import com.bq.corbel.iam.repository.ClientRepository;
import com.bq.corbel.iam.repository.DomainRepository;
import com.bq.corbel.iam.repository.UserRepository;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
/**
 * {@link AuthorizationRequestContext} implementation backed by a JWT
 * ({@link JsonToken}). Claim values are read straight from the token
 * payload; entities (issuer client, domains, principal user) are fetched
 * lazily from their repositories and cached for the lifetime of this
 * context.
 *
 * <p>Not thread-safe: the lazy caches are unsynchronized.</p>
 *
 * @author Alexander De Leon
 *
 */
public class JsonTokenAuthorizationRequestContext implements AuthorizationRequestContext {

    // JWT payload claim names.
    private static final String SCOPE = "scope";
    private static final String OAUTH_SERVICE = "oauth.service";
    private static final String REFRESH_TOKEN = "refresh_token";
    private static final String VERSION = "version";
    private static final String PRINCIPAL = "prn";
    private static final String REQUEST_DOMAIN = "request_domain";
    private static final String DEVICE_ID = "device_id";

    private final JsonToken jsonToken;
    private final ClientRepository clientRepository;
    private final DomainRepository domainRepository;
    private final UserRepository userRepository;

    // Lazily resolved and cached entities / derived values.
    private Client client;
    private Domain domain;
    private Domain requestedDomain;
    private User user;
    private OauthParams oauthParams;
    private BasicParams basicParams;
    private Boolean hasPrincipal;
    private String principalId;
    private Set<String> tokenScopes;
    private Set<Scope> expandedRequestedScopes;

    public JsonTokenAuthorizationRequestContext(ClientRepository clientRepository, DomainRepository domainRepository,
            UserRepository userRepository, JsonToken jsonToken) {
        this.clientRepository = clientRepository;
        this.domainRepository = domainRepository;
        this.userRepository = userRepository;
        this.jsonToken = jsonToken;
    }

    /** The JWT issuer claim doubles as the client identifier. */
    @Override
    public String getIssuerClientId() {
        return jsonToken.getIssuer();
    }

    /** Resolves (and caches) the issuer client from its repository. */
    @Override
    public Client getIssuerClient() {
        if (client == null) {
            client = clientRepository.findOne(jsonToken.getIssuer());
        }
        return client;
    }

    /** Resolves (and caches) the domain of the issuer client. */
    @Override
    public Domain getIssuerClientDomain() {
        if (domain == null) {
            domain = domainRepository.findOne(getIssuerClient().getDomain());
        }
        return domain;
    }

    /** A request is cross-domain when it targets a domain other than the issuer's. */
    @Override
    public boolean isCrossDomain() {
        return !getRequestedDomain().equals(getIssuerClientDomain());
    }

    /** Returns the principal user, or {@code null} when the token carries none. */
    @Override
    public User getPrincipal() {
        return hasPrincipal() ? getPrincipal(getPrincipalId()) : null;
    }

    /**
     * Resolves (and caches) the user with the given username in the
     * issuer client's domain.
     *
     * NOTE(review): once cached, the same user is returned even if a
     * different principalId is passed on a later call — confirm callers
     * always pass the same id.
     */
    @Override
    public User getPrincipal(String principalId) {
        if (user == null) {
            Client issuerClient = getIssuerClient();
            user = userRepository.findByUsernameAndDomain(principalId, issuerClient.getDomain());
        }
        return user;
    }

    /** Lazily extracts the "prn" claim; {@code null} when absent. */
    @Override
    public String getPrincipalId() {
        if (principalId == null) {
            principalId = hasPrincipal() ? jsonToken.getPayloadAsJsonObject().get(PRINCIPAL).getAsString() : null;
        }
        return principalId;
    }

    @Override
    public void setPrincipalId(String principalId) {
        // Keep the cached hasPrincipal flag consistent with the override.
        hasPrincipal = null != principalId;
        this.principalId = principalId;
    }

    /**
     * Parses the space-separated "scope" claim into a set; empty set when
     * the claim is absent or blank.
     */
    @Override
    public Set<String> getRequestedScopes() {
        JsonObject payload = jsonToken.getPayloadAsJsonObject();
        if (payload.has(SCOPE)) {
            String requestedScopes = payload.get(SCOPE).getAsString();
            if (requestedScopes.length() > 0) {
                return new HashSet<>(Arrays.asList(requestedScopes.split(" ")));
            }
        }
        return new HashSet<>();
    }

    /** True when the token carries a primitive "prn" claim (cached). */
    @Override
    public boolean hasPrincipal() {
        if (hasPrincipal == null) {
            JsonObject payload = jsonToken.getPayloadAsJsonObject();
            hasPrincipal = payload.has(PRINCIPAL) && payload.get(PRINCIPAL).isJsonPrimitive();
        }
        return hasPrincipal;
    }

    /** Token expiration, as epoch milliseconds. */
    @Override
    public Long getAuthorizationExpiration() {
        return jsonToken.getExpiration().getMillis();
    }

    @Override
    public boolean isOAuth() {
        return jsonToken.getPayloadAsJsonObject().has(OAUTH_SERVICE);
    }

    @Override
    public String getOAuthService() {
        return isOAuth() ? jsonToken.getPayloadAsJsonObject().get(OAUTH_SERVICE).getAsString() : null;
    }

    @Override
    public OauthParams getOauthParams() {
        return (oauthParams == null) ? (oauthParams = OauthParams.createFromJWT(jsonToken)) : oauthParams;
    }

    @Override
    public BasicParams getBasicParams() {
        return (basicParams == null) ? (basicParams = BasicParams.createFromJWT(jsonToken)) : basicParams;
    }

    @Override
    public Set<String> getTokenScopes() {
        return tokenScopes;
    }

    @Override
    public void setTokenScopes(Set<String> tokenScopes) {
        this.tokenScopes = tokenScopes;
    }

    @Override
    public Set<Scope> getExpandedRequestedScopes() {
        return expandedRequestedScopes;
    }

    @Override
    public void setExpandedRequestedScopes(Set<Scope> expandedRequestedScopes) {
        this.expandedRequestedScopes = expandedRequestedScopes;
    }

    /** The "device_id" claim, or {@code null} when absent. */
    @Override
    public String getDeviceId() {
        // orElse(null) is the idiomatic form for a constant default;
        // the original used orElseGet(() -> null) needlessly.
        return Optional.ofNullable(jsonToken.getPayloadAsJsonObject().get(DEVICE_ID))
                .map(JsonElement::getAsString)
                .orElse(null);
    }

    @Override
    public boolean hasRefreshToken() {
        return jsonToken.getPayloadAsJsonObject().has(REFRESH_TOKEN);
    }

    @Override
    public String getRefreshToken() {
        return hasRefreshToken() ? jsonToken.getPayloadAsJsonObject().get(REFRESH_TOKEN).getAsString() : null;
    }

    @Override
    public boolean hasVersion() {
        return jsonToken.getPayloadAsJsonObject().has(VERSION);
    }

    @Override
    public String getVersion() {
        return hasVersion() ? jsonToken.getPayloadAsJsonObject().get(VERSION).getAsString() : null;
    }

    /** Basic authentication is signalled by username and password claims. */
    @Override
    public boolean isBasic() {
        return jsonToken.getPayloadAsJsonObject().has(BasicParams.BASIC_AUTH_USERNAME)
                && jsonToken.getPayloadAsJsonObject().has(BasicParams.BASIC_AUTH_PASSWORD);
    }

    /**
     * Resolves (and caches) the target domain: the "request_domain" claim
     * when present, otherwise the issuer client's own domain.
     */
    @Override
    public Domain getRequestedDomain() {
        if (requestedDomain == null) {
            requestedDomain = jsonToken.getPayloadAsJsonObject().has(REQUEST_DOMAIN) ? domainRepository.findOne(jsonToken
                    .getPayloadAsJsonObject().get(REQUEST_DOMAIN).getAsString()) : getIssuerClientDomain();
        }
        return requestedDomain;
    }
}
| |
package com.dropbox.core;
import java.io.IOException;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import com.dropbox.core.json.*;
import com.dropbox.core.json.JsonArrayReader;
import com.dropbox.core.json.JsonReader;
import com.dropbox.core.util.Collector;
import com.dropbox.core.util.DumpWriter;
import com.dropbox.core.util.Dumpable;
import com.dropbox.core.util.LangUtil;
import com.fasterxml.jackson.core.JsonLocation;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
/*>>> import checkers.nullness.quals.Nullable; */
/*>>> import checkers.nullness.quals.PolyNull; */
/**
* Holds the metadata for a Dropbox file system entry. Can either be a regular file or a folder.
*/
public abstract class DbxEntry extends Dumpable implements Serializable
{
public static final long serialVersionUID = 0;
/**
* Just the last part of {@link #path}. Derived automatically from {@link #path}.
*
* @see DbxPath#getName
*/
public final String name;
/**
* The path to the file or folder, relative to your application's root.
* The path always starts with a {@code "/"}.
*
* <p>
* For full-Dropbox apps, the path is relative to the root
* of the user's Dropbox. For App Folder apps, the path
* is relative to your application's App Folder within the user's Dropbox.
* </p>
*/
public final String path;
/**
* The name of the icon to use for this file. The set of names returned by this call match up
* with icons in a set of icons provided by Dropbox. Read more about the <em>icon</em>
* field in <a href="https://www.dropbox.com/developers/reference/api#metadata">
* Dropbox's documentation for the {@code /metadata} HTTP endpoint</a>.
*/
public final String iconName;
/**
* Whether this file or folder might have a thumbnail image you can retrieve via
* the {@link DbxClient#getThumbnail DbxClient.getThumbnail} call.
* If this is {@code true}, there might be a thumbnail available. If this is
* {@code false}, there is definitely no thumbnail available.
*/
public final boolean mightHaveThumbnail;
/**
* @param path {@link #path}
* @param iconName {@link #iconName}
* @param mightHaveThumbnail {@link #mightHaveThumbnail}
*/
private DbxEntry(String path, String iconName, boolean mightHaveThumbnail)
{
    this.path = path;
    this.name = DbxPath.getName(path);  // derived: last component of 'path'
    this.iconName = iconName;
    this.mightHaveThumbnail = mightHaveThumbnail;
}
protected void dumpFields(DumpWriter w)
{
    // 'path' is written unlabeled as the entry's primary value;
    // 'name' is omitted because it is derived from 'path'.
    w.v(path);
    w.f("iconName").v(iconName);
    w.f("mightHaveThumbnail").v(mightHaveThumbnail);
}
/**
* Whether this metadata is for a folder, which can be cast to type
* {@link Folder}. (Every metadata object is either for a file or a folder.)
*/
public abstract boolean isFolder();
/**
* Whether this metadata is for a file, which can be cast to type
* {@link File}. (Every metadata object is either for a file or a folder.)
*/
public abstract boolean isFile();
/**
* If this metadata entry is a folder, return it as a {@code DbxEntry.Folder}
* instance. If it's not a folder, return {@code null}.
*/
public abstract Folder asFolder();
/**
* If this metadata entry is a file, return it as a {@code DbxEntry.File}
* instance. If it's not a file, return {@code null}.
*/
public abstract File asFile();
/**
 * Compares the base-class fields only; subclasses layer their own
 * field comparisons on top of this.
 */
protected boolean partialEquals(DbxEntry o)
{
    return name.equals(o.name)
        && path.equals(o.path)
        && iconName.equals(o.iconName)
        && mightHaveThumbnail == o.mightHaveThumbnail;
}
/**
 * Hash of the base-class fields, mirroring {@link #partialEquals}.
 * (The original folded {@code path.hashCode()} in twice — a copy/paste
 * slip; each field participating in equality is now hashed exactly once.
 * This changes hash values but preserves the equals/hashCode contract.)
 */
protected int partialHashCode()
{
    int h = name.hashCode();
    h = h*31 + path.hashCode();
    h = h*31 + iconName.hashCode();
    h = h*31 + (mightHaveThumbnail ? 1 : 0);
    return h;
}
/**
 * The subclass of {@link DbxEntry} used to represent folder metadata. Folders
 * actually only have the same set of fields as {@link DbxEntry}.
 */
public static final class Folder extends DbxEntry
{
    public static final long serialVersionUID = 0;

    /**
     * @param path {@link #path}
     * @param iconName {@link #iconName}
     * @param mightHaveThumbnail {@link #mightHaveThumbnail}
     */
    public Folder(String path, String iconName, boolean mightHaveThumbnail)
    {
        super(path, iconName, mightHaveThumbnail);
    }

    // Type dispatch for the folder variant of DbxEntry.
    protected String getTypeName() { return "Folder"; }
    public boolean isFolder() { return true; }
    public boolean isFile() { return false; }
    public Folder asFolder() { return this; }
    public File asFile() { throw new RuntimeException("not a file"); }

    /**
     * Reads a single metadata entry, insisting that it describes a folder.
     */
    public static final JsonReader<DbxEntry.Folder> Reader = new JsonReader<DbxEntry.Folder>()
    {
        public final DbxEntry.Folder read(JsonParser parser)
            throws IOException, JsonReadException
        {
            JsonLocation top = parser.getCurrentLocation();
            DbxEntry e = DbxEntry.read(parser, null).entry;
            if (!(e instanceof DbxEntry.Folder)) {
                // Fixed message: this reader expects a *folder*; the original
                // text had the two entry kinds swapped.
                throw new JsonReadException("Expecting a folder entry, got a file entry", top);
            }
            return (DbxEntry.Folder) e;
        }
    };

    public boolean equals(/*@Nullable*/Object o)
    {
        return o != null && getClass().equals(o.getClass()) && equals((Folder) o);
    }

    public boolean equals(Folder o)
    {
        // Folders carry no fields beyond the base class.
        if (!partialEquals(o)) return false;
        return true;
    }

    public int hashCode()
    {
        return partialHashCode();
    }
}
/**
* The subclass of {@link DbxEntry} used to represent file metadata (as opposed
* to folder metadata).
*/
public static final class File extends DbxEntry
{
public static final long serialVersionUID = 0;
/**
* The size, in bytes, of the file content.
*/
public final long numBytes;
/**
* A human-readable string version of the file size (ex: "13 kb"). This string will
* be localized based on the {@link java.util.Locale Locale} in {@link DbxRequestConfig#userLocale}
* (passed in to the {@link DbxClient} constructor).
*/
public final String humanSize;
/**
* The time the file was added, moved, or last had it's contents changed on the Dropbox
* server. (This probably won't match the time on the Dropbox user's filesystem. For that
* the {@link #clientMtime} is a better estimate.)
*
*/
public final Date lastModified;
/**
* The modification time sent up by the Dropbox desktop client when the file was added
* or modified. This time is based on the system clock of the particular host that the
* client was running on, as opposed to the system clock of the Dropbox servers.
*
* <p>
* This field <em>should not</em> be used to determine if a file has changed, but only as
* a way to sort files by date (when displaying a list of files to the user).
* </p>
*/
public final Date clientMtime;
/**
* The revision of the file at this path. This can be used with {@link DbxClient#uploadFile}
* and the {@link DbxWriteMode#update} mode to make sure you're overwriting the revision of
* the file you think you're overwriting.
*/
public final String rev;
/**
* If this file is a photo, this may contain additional photo-related information. This field is
* only populated if you use the {@code includeMediaInfo}
*/
public final /*@Nullable*/PhotoInfo photoInfo;
/**
* Contains details about this file if it is a video
*/
public final /*@Nullable*/VideoInfo videoInfo;
/**
* @param path {@link #path}
* @param iconName {@link #iconName}
* @param mightHaveThumbnail {@link #mightHaveThumbnail}
* @param numBytes {@link #numBytes}
* @param humanSize {@link #humanSize}
* @param lastModified {@link #lastModified}
* @param clientMtime {@link #clientMtime}
* @param rev {@link #rev}
* @param photoInfo {@link #photoInfo}
* @param videoInfo {@link #videoInfo}
*/
public File(String path, String iconName, boolean mightHaveThumbnail, long numBytes, String humanSize,
            Date lastModified, Date clientMtime, String rev,
            /*@Nullable*/PhotoInfo photoInfo, /*@Nullable*/VideoInfo videoInfo)
{
    super(path, iconName, mightHaveThumbnail);
    // Size, then timestamps, then revision, then optional media metadata.
    this.numBytes = numBytes;
    this.humanSize = humanSize;
    this.clientMtime = clientMtime;
    this.lastModified = lastModified;
    this.rev = rev;
    this.videoInfo = videoInfo;
    this.photoInfo = photoInfo;
}
/**
 * Same as the other constructor except {@link #photoInfo} and {@link #videoInfo} are set to {@code null}.
 *
 * @param path {@link #path}
 * @param iconName {@link #iconName}
 * @param mightHaveThumbnail {@link #mightHaveThumbnail}
 * @param numBytes {@link #numBytes}
 * @param humanSize {@link #humanSize}
 * @param lastModified {@link #lastModified}
 * @param clientMtime {@link #clientMtime}
 * @param rev {@link #rev}
 */
public File(String path, String iconName, boolean mightHaveThumbnail, long numBytes, String humanSize,
            Date lastModified, Date clientMtime, String rev)
{
    this(path, iconName, mightHaveThumbnail, numBytes, humanSize, lastModified, clientMtime, rev, null, null);
}
protected void dumpFields(DumpWriter w)
{
    // Base-class fields first, then the file-specific ones.
    super.dumpFields(w);
    w.f("numBytes").v(numBytes);
    w.f("humanSize").v(humanSize);
    w.f("lastModified").v(lastModified);
    w.f("clientMtime").v(clientMtime);
    w.f("rev").v(rev);
    // Type arguments are inferred; the explicit witnesses were redundant.
    nullablePendingField(w, "photoInfo", photoInfo, PhotoInfo.PENDING);
    nullablePendingField(w, "videoInfo", videoInfo, VideoInfo.PENDING);
}
/**
 * Writes an optional field: absent values are omitted entirely and the
 * shared "pending" sentinel is rendered as the literal text "pending".
 */
private static <T extends Dumpable> void nullablePendingField(
    DumpWriter w, String fieldName, /*@Nullable*/T value, T pendingValue)
{
    if (value != null) {
        w.f(fieldName);
        if (value == pendingValue) {
            w.verbatim("pending");
        } else {
            w.v(value);
        }
    }
}
// Type dispatch for the file variant of DbxEntry.
protected String getTypeName() { return "File"; }
public boolean isFolder() { return false; }
public boolean isFile() { return true; }
public Folder asFolder() { throw new RuntimeException("not a folder"); }
public File asFile() { return this; }
// Reads a single metadata entry, insisting that it describes a file.
public static final JsonReader<DbxEntry.File> Reader = new JsonReader<DbxEntry.File>()
{
    public final DbxEntry.File read(JsonParser parser)
        throws IOException, JsonReadException
    {
        JsonLocation top = parser.getCurrentLocation();
        DbxEntry e = DbxEntry.read(parser, null).entry;
        if (!(e instanceof DbxEntry.File)) {
            throw new JsonReadException("Expecting a file entry, got a folder entry", top);
        }
        return (DbxEntry.File) e;
    }
};
// Like Reader, but tolerates deleted entries: returns null when the
// entry has been removed instead of failing.
public static final JsonReader<DbxEntry./*@Nullable*/File> ReaderMaybeDeleted = new JsonReader<DbxEntry./*@Nullable*/File>()
{
    public final DbxEntry./*@Nullable*/File read(JsonParser parser)
        throws IOException, JsonReadException
    {
        JsonLocation top = parser.getCurrentLocation();
        WithChildrenC<?> wc = DbxEntry._read(parser, null, true);
        // _read returns null for a deleted entry when allowed (third arg).
        if (wc == null) return null;
        DbxEntry e = wc.entry;
        if (!(e instanceof DbxEntry.File)) {
            throw new JsonReadException("Expecting a file entry, got a folder entry", top);
        }
        return (DbxEntry.File) e;
    }
};
public boolean equals(/*@Nullable*/Object o)
{
    return o != null && getClass().equals(o.getClass()) && equals((File) o);
}

public boolean equals(File o)
{
    // Base-class fields plus every file-specific field must match.
    return partialEquals(o)
        && numBytes == o.numBytes
        && humanSize.equals(o.humanSize)
        && lastModified.equals(o.lastModified)
        && clientMtime.equals(o.clientMtime)
        && rev.equals(o.rev)
        && LangUtil.nullableEquals(photoInfo, o.photoInfo)
        && LangUtil.nullableEquals(videoInfo, o.videoInfo);
}
public int hashCode()
{
    // Not including 'humanSize' since it's mostly derivable from 'numBytes'
    // (equals(File) does compare it, which is still contract-safe: equal
    // objects hash equally; the converse is not required).
    int h = partialHashCode();
    h = h*31 + (int) numBytes;
    h = h*31 + lastModified.hashCode();
    h = h*31 + clientMtime.hashCode();
    h = h*31 + rev.hashCode();
    h = h*31 + LangUtil.nullableHashCode(photoInfo);
    h = h*31 + LangUtil.nullableHashCode(videoInfo);
    return h;
}
/**
* Photo metadata that the Dropbox server extracted from the photo file.
*/
public static final class PhotoInfo extends Dumpable
{
    /**
     * When the photo was taken; {@code null} when unknown.
     */
    public final /*@Nullable*/Date timeTaken;

    /**
     * Where the photo was taken; {@code null} when unknown.
     */
    public final /*@Nullable*/Location location;

    public PhotoInfo(/*@Nullable*/Date timeTaken, /*@Nullable*/Location location) {
        this.timeTaken = timeTaken;
        this.location = location;
    }

    /**
     * Parses the "photo_info" JSON object: "time_taken" and "lat_long"
     * are recognized, any other field is skipped.
     */
    public static JsonReader<PhotoInfo> Reader = new JsonReader<PhotoInfo>()
    {
        @Override
        public PhotoInfo read(JsonParser parser)
            throws IOException, JsonReadException
        {
            JsonReader.expectObjectStart(parser);
            Date takenAt = null;
            File.Location where = null;
            while (parser.getCurrentToken() == JsonToken.FIELD_NAME) {
                String field = parser.getCurrentName();
                JsonReader.nextToken(parser);
                if ("time_taken".equals(field)) {
                    takenAt = JsonDateReader.Dropbox.readOptional(parser);
                } else if ("lat_long".equals(field)) {
                    where = Location.Reader.read(parser);
                } else {
                    JsonReader.skipValue(parser);
                }
            }
            JsonReader.expectObjectEnd(parser);
            return new File.PhotoInfo(takenAt, where);
        }
    };

    // Sentinel meaning "photo info exists but was not requested".
    public static final PhotoInfo PENDING = new PhotoInfo(null, null);

    @Override
    protected void dumpFields(DumpWriter w)
    {
        w.f("timeTaken").v(timeTaken);
        w.f("location").v(location);
    }

    @Override
    public boolean equals(/*@Nullable*/Object o)
    {
        return o != null && getClass().equals(o.getClass()) && equals((PhotoInfo) o);
    }

    public boolean equals(PhotoInfo o)
    {
        // The shared PENDING sentinel is only ever equal to itself.
        if (this == PENDING || o == PENDING) return this == o;
        return LangUtil.nullableEquals(timeTaken, o.timeTaken)
            && LangUtil.nullableEquals(location, o.location);
    }

    @Override
    public int hashCode()
    {
        // Equivalent to folding both fields into a 31-based hash.
        return 31 * LangUtil.nullableHashCode(timeTaken)
            + LangUtil.nullableHashCode(location);
    }
}
/**
 * Video metadata that the Dropbox server extracted from the video file.
 */
public static final class VideoInfo extends Dumpable
{
    /**
     * When the video was recorded.
     */
    public final /*@Nullable*/Date timeTaken;

    /**
     * Where the video was recorded.
     */
    public final /*@Nullable*/Location location;

    /**
     * The duration of the video, in seconds.
     */
    public final /*@Nullable*/Long duration;

    /**
     * @param timeTaken {@link #timeTaken}
     * @param location {@link #location}
     * @param duration {@link #duration}
     */
    public VideoInfo(/*@Nullable*/Date timeTaken, /*@Nullable*/Location location, /*@Nullable*/Long duration)
    {
        this.timeTaken = timeTaken;
        this.location = location;
        this.duration = duration;
    }

    // Streaming JSON reader: consumes one JSON object, mapping the
    // "time_taken", "lat_long" and "duration" fields; others are skipped.
    public static JsonReader<VideoInfo> Reader = new JsonReader<VideoInfo>()
    {
        @Override
        public VideoInfo read(JsonParser parser)
            throws IOException, JsonReadException
        {
            JsonReader.expectObjectStart(parser);
            File.Location location = null;
            Date time_taken = null;
            Long duration = null;
            while (parser.getCurrentToken() == JsonToken.FIELD_NAME) {
                String fieldName = parser.getCurrentName();
                JsonReader.nextToken(parser);
                if (fieldName.equals("lat_long")) {
                    location = Location.Reader.read(parser);
                } else if (fieldName.equals("time_taken")) {
                    // Optional field; may be absent or JSON null.
                    time_taken = JsonDateReader.Dropbox.readOptional(parser);
                } else if (fieldName.equals("duration")) {
                    duration = JsonReader.UnsignedLongReader.readOptional(parser);
                } else {
                    JsonReader.skipValue(parser);  // tolerate unknown fields
                }
            }
            JsonReader.expectObjectEnd(parser);
            return new File.VideoInfo(time_taken, location, duration);
        }
    };

    /**
     * The singleton value used when the Dropbox server returns "pending" for
     * the video info (see PendingReader); compared by identity in
     * {@link #equals(VideoInfo)}.
     */
    public static final VideoInfo PENDING = new VideoInfo(null, null, null);

    @Override
    protected void dumpFields(DumpWriter w)
    {
        w.f("timeTaken").v(timeTaken);
        w.f("location").v(location);
        w.f("duration").v(duration);
    }

    @Override
    public boolean equals(/*@Nullable*/Object o)
    {
        return o != null && getClass().equals(o.getClass()) && equals((VideoInfo) o);
    }

    // Field-by-field comparison; PENDING only ever equals itself.
    public boolean equals(VideoInfo o)
    {
        // For "pending" values, it must be an exact match.
        if (o == PENDING || this == PENDING) return o == this;
        if (!LangUtil.nullableEquals(timeTaken, o.timeTaken)) return false;
        if (!LangUtil.nullableEquals(location, o.location)) return false;
        if (!LangUtil.nullableEquals(duration, o.duration)) return false;
        return true;
    }

    @Override
    public int hashCode()
    {
        int h = 0;
        h = h*31 + LangUtil.nullableHashCode(timeTaken);
        h = h*31 + LangUtil.nullableHashCode(location);
        h = h*31 + LangUtil.nullableHashCode(duration);
        return h;
    }
}
/**
 * A geo location (latitude/longitude pair).
 */
public static class Location extends Dumpable
{
    public final double latitude;
    public final double longitude;

    /**
     * @param latitude {@link #latitude}
     * @param longitude {@link #longitude}
     */
    public Location(double latitude, double longitude) {
        this.latitude = latitude;
        this.longitude = longitude;
    }

    // Reads a JSON two-element array [latitude, longitude]; any other
    // value shape is skipped and yields null.
    public static JsonReader</*@Nullable*/Location> Reader = new JsonReader</*@Nullable*/Location>()
    {
        @Override
        public /*@Nullable*/Location read(JsonParser parser)
            throws IOException, JsonReadException
        {
            Location location = null;
            if (JsonArrayReader.isArrayStart(parser)) {
                JsonReader.expectArrayStart(parser);
                double latitude = JsonReader.readDouble(parser);
                double longitude = JsonReader.readDouble(parser);
                location = new Location(latitude,longitude);
                JsonReader.expectArrayEnd(parser);
            } else {
                JsonReader.skipValue(parser);
            }
            return location;
        }
    };

    @Override
    protected void dumpFields(DumpWriter w)
    {
        w.f("latitude").v(latitude);
        w.f("longitude").v(longitude);
    }

    @Override
    public boolean equals(/*@Nullable*/Object o)
    {
        return o != null && getClass().equals(o.getClass()) && equals((Location) o);
    }

    // Compares with '==' semantics: 0.0 equals -0.0; NaN never equals anything
    // (including itself) -- kept for backward compatibility.
    public boolean equals(Location o)
    {
        if (latitude != o.latitude) return false;
        if (longitude != o.longitude) return false;
        return true;
    }

    /**
     * BUG FIX: this class overrode equals() without hashCode(), violating the
     * Object.hashCode() contract (equal Locations could hash differently,
     * breaking HashSet/HashMap usage).
     *
     * Adding 0.0 canonicalizes -0.0 to +0.0 so that values which compare equal
     * under '==' (as used by {@link #equals(Location)}) hash identically.
     */
    @Override
    public int hashCode()
    {
        long latBits = Double.doubleToLongBits(latitude + 0.0);
        long lonBits = Double.doubleToLongBits(longitude + 0.0);
        int h = (int) (latBits ^ (latBits >>> 32));
        h = h*31 + (int) (lonBits ^ (lonBits >>> 32));
        return h;
    }
}
}
// ------------------------------------------------------
// JSON parsing

/**
 * Reads a single {@link DbxEntry} (no children requested).
 * Deleted entries cause a JsonReadException (see {@link #read}).
 */
public static final JsonReader<DbxEntry> Reader = new JsonReader<DbxEntry>()
{
    public final DbxEntry read(JsonParser parser)
        throws IOException, JsonReadException
    {
        return DbxEntry.read(parser, null).entry;
    }
};

/**
 * Like {@link #Reader}, but yields {@code null} for "is_deleted" entries
 * instead of throwing.
 */
public static final JsonReader</*@Nullable*/DbxEntry> ReaderMaybeDeleted = new JsonReader</*@Nullable*/DbxEntry>()
{
    public final /*@Nullable*/DbxEntry read(JsonParser parser)
        throws IOException, JsonReadException
    {
        WithChildrenC<?> wc = DbxEntry.readMaybeDeleted(parser, null);
        if (wc == null) return null;  // entry was deleted
        return wc.entry;
    }
};
/**
 * Holds the metadata for a file or folder; if it's a folder, we also store the folder's
 * hash and the metadata of its immediate children.
 *
 * @see DbxClient#getMetadataWithChildren
 * @see DbxClient#getMetadataWithChildrenIfChanged
 */
public static final class WithChildren extends Dumpable implements Serializable
{
    public static final long serialVersionUID = 0;

    /**
     * The metadata for the base file or folder.
     */
    public final DbxEntry entry;

    /**
     * If {@link #entry} is a folder, this will contain a hash that identifies the folder's
     * contents. This value can be used with {@link DbxClient#getMetadataWithChildrenIfChanged}
     * to avoid downloading the folder contents if they haven't changed.
     */
    public final /*@PolyNull*/String hash;

    /**
     * If {@link #entry} is a folder, this will contain the metadata of the folder's
     * immediate children. If it's not a folder, this will be {@code null}.
     */
    public final /*@PolyNull*/List<DbxEntry> children;

    /**
     * @param entry {@link #entry}
     * @param hash {@link #hash}
     * @param children {@link #children}
     */
    public WithChildren(DbxEntry entry, /*@PolyNull*/String hash, /*@PolyNull*/List<DbxEntry> children)
    {
        this.entry = entry;
        this.hash = hash;
        this.children = children;
    }

    // Reads an entry with children collected into a List; deleted entries throw.
    public static final JsonReader<WithChildren> Reader = new JsonReader<WithChildren>()
    {
        public final WithChildren read(JsonParser parser)
            throws IOException, JsonReadException
        {
            WithChildrenC<List<DbxEntry>> c = DbxEntry.<List<DbxEntry>>read(parser, new Collector.ArrayListCollector<DbxEntry>());
            return new WithChildren(c.entry, c.hash, c.children);
        }
    };

    // Like Reader, but yields null for "is_deleted" entries instead of throwing.
    public static final JsonReader</*@Nullable*/WithChildren> ReaderMaybeDeleted = new JsonReader</*@Nullable*/WithChildren>()
    {
        public final /*@Nullable*/WithChildren read(JsonParser parser)
            throws IOException, JsonReadException
        {
            WithChildrenC<List<DbxEntry>> c = DbxEntry.<List<DbxEntry>>readMaybeDeleted(parser, new Collector.ArrayListCollector<DbxEntry>());
            if (c == null) return null;
            return new WithChildren(c.entry, c.hash, c.children);
        }
    };

    public boolean equals(/*@Nullable*/Object o)
    {
        return o != null && getClass().equals(o.getClass()) && equals((WithChildren) o);
    }

    // Field-by-field comparison; 'hash' and 'children' may be null.
    public boolean equals(WithChildren o)
    {
        if (children != null ? !children.equals(o.children) : o.children != null)
            return false;
        if (!entry.equals(o.entry)) return false;
        if (hash != null ? !hash.equals(o.hash) : o.hash != null) return false;
        return true;
    }

    @Override
    public int hashCode()
    {
        int result = entry.hashCode();
        result = 31 * result + (hash != null ? hash.hashCode() : 0);
        result = 31 * result + (children != null ? children.hashCode() : 0);
        return result;
    }

    @Override
    protected void dumpFields(DumpWriter w)
    {
        w.v(entry);
        w.f("hash").v(hash);
        w.f("children").v(children);
    }
}
/**
 * The more general case of {@link WithChildren}. It's used in the {@code C}-suffixed
 * variants ({@link DbxClient#getMetadataWithChildrenC} and {@link DbxClient#getMetadataWithChildrenIfChanged}
 * to allow you to process the {@link DbxEntry} values as they come in and aggregate them into
 * your own object (instead of the default {@link List}) using a custom {@link Collector}.
 */
public static final class WithChildrenC<C> extends Dumpable implements Serializable
{
    public static final long serialVersionUID = 0;

    /** The metadata for the base file or folder. */
    public final DbxEntry entry;

    /**
     * If {@link #entry} is a folder, this will contain a hash that identifies the folder's
     * contents. This value can be used with {@link DbxClient#getMetadataWithChildrenIfChanged}
     * to avoid downloading the folder contents if they haven't changed.
     */
    public final /*@PolyNull*/String hash;

    /**
     * If {@link #entry} is a folder, this will contain the metadata of the folder's
     * immediate children. If it's not a folder, this will be {@code null}.
     */
    public final /*@PolyNull*/C children;

    /**
     * @param entry {@link #entry}
     * @param hash {@link #hash}
     * @param children {@link #children}
     */
    public WithChildrenC(DbxEntry entry, /*@PolyNull*/String hash, /*@PolyNull*/C children)
    {
        this.entry = entry;
        this.hash = hash;
        this.children = children;
    }

    // Reads an entry whose children are aggregated by the supplied collector;
    // deleted entries throw.
    public static class Reader<C> extends JsonReader<WithChildrenC<C>>
    {
        private final Collector<DbxEntry,? extends C> collector;
        public Reader(Collector<DbxEntry,? extends C> collector) { this.collector = collector; }
        public final WithChildrenC<C> read(JsonParser parser)
            throws IOException, JsonReadException
        {
            return DbxEntry.read(parser, collector);
        }
    }

    // Like Reader, but yields null for "is_deleted" entries instead of throwing.
    public static class ReaderMaybeDeleted<C> extends JsonReader</*@Nullable*/WithChildrenC<C>>
    {
        private final Collector<DbxEntry,? extends C> collector;
        public ReaderMaybeDeleted(Collector<DbxEntry,? extends C> collector) { this.collector = collector; }
        public final /*@Nullable*/WithChildrenC<C> read(JsonParser parser)
            throws IOException, JsonReadException
        {
            return DbxEntry.readMaybeDeleted(parser, collector);
        }
    }

    @Override
    public boolean equals(/*@Nullable*/Object o)
    {
        return o != null && getClass().equals(o.getClass()) && equals((WithChildrenC) o);
    }

    // Field-by-field comparison; 'hash' and 'children' may be null.
    public boolean equals(WithChildrenC o)
    {
        if (children != null ? !children.equals(o.children) : o.children != null)
            return false;
        if (!entry.equals(o.entry)) return false;
        if (hash != null ? !hash.equals(o.hash) : o.hash != null) return false;
        return true;
    }

    @Override
    public int hashCode()
    {
        int result = entry.hashCode();
        result = 31 * result + (hash != null ? hash.hashCode() : 0);
        result = 31 * result + (children != null ? children.hashCode() : 0);
        return result;
    }

    @Override
    protected void dumpFields(DumpWriter w)
    {
        w.v(entry);
        w.f("hash").v(hash);
        if (children != null) {
            // 'children' is caller-defined (type C), so dump its toString() verbatim.
            w.f("children").verbatim(children.toString());
        }
    }
}
/**
 * Reads one metadata entry (with children aggregated by {@code collector},
 * if non-null). Returns {@code null} when the entry is marked "is_deleted".
 */
public static <C> /*@Nullable*/WithChildrenC<C> readMaybeDeleted(JsonParser parser, /*@Nullable*/Collector<DbxEntry, ? extends C> collector)
    throws IOException, JsonReadException
{
    return _read(parser, collector, true);
}

/**
 * Reads one metadata entry (with children aggregated by {@code collector},
 * if non-null). Throws {@link JsonReadException} for "is_deleted" entries.
 */
public static <C> WithChildrenC<C> read(JsonParser parser, /*@Nullable*/Collector<DbxEntry, ? extends C> collector)
    throws IOException, JsonReadException
{
    WithChildrenC<C> r = _read(parser, collector, false);
    // _read only returns null when allowDeleted is true.
    assert r != null : "@AssumeAssertion(nullness)";
    return r;
}
/**
 * Core parser for one metadata JSON object. Collects raw field values in a
 * single streaming pass, validates which fields are required for the entry
 * kind (file vs folder), and builds the result.
 *
 * @return
 *    {@code null} if the entry is an 'is_deleted' entry.
 */
private static <C> /*@Nullable*/WithChildrenC<C> _read(JsonParser parser, /*@Nullable*/Collector<DbxEntry, ? extends C> collector, boolean allowDeleted)
    throws IOException, JsonReadException
{
    // Remember where the object started so error messages can point at it.
    JsonLocation top = JsonReader.expectObjectStart(parser);

    // Raw field values; null (or -1 for 'bytes') means "not seen yet".
    String size = null;
    long bytes = -1;
    String path = null;
    Boolean is_dir = null;
    Boolean is_deleted = null;
    String rev = null;
    Boolean thumb_exists = null;
    String icon = null;
    Date modified = null;
    Date client_mtime = null;
    String hash = null;
    C contents = null;
    File.PhotoInfo photo_info = null;
    File.VideoInfo video_info = null;

    // Single pass over the object's fields, dispatching on the FM index.
    while (parser.getCurrentToken() == JsonToken.FIELD_NAME) {
        String fieldName = parser.getCurrentName();
        JsonReader.nextToken(parser);
        int fi = FM.get(fieldName);
        try {
            switch (fi) {
                case -1: JsonReader.skipValue(parser); break;  // unknown field: ignore
                case FM_size: size = JsonReader.StringReader.readField(parser, fieldName, size); break;
                case FM_bytes: bytes = JsonReader.readUnsignedLongField(parser, fieldName, bytes); break;
                case FM_path: path = JsonReader.StringReader.readField(parser, fieldName, path); break;
                case FM_is_dir: is_dir = JsonReader.BooleanReader.readField(parser, fieldName, is_dir); break;
                case FM_is_deleted: is_deleted = JsonReader.BooleanReader.readField(parser, fieldName, is_deleted); break;
                case FM_rev: rev = JsonReader.StringReader.readField(parser, fieldName, rev); break;
                case FM_thumb_exists: thumb_exists = JsonReader.BooleanReader.readField(parser, fieldName, thumb_exists); break;
                case FM_icon: icon = JsonReader.StringReader.readField(parser, fieldName, icon); break;
                case FM_modified: modified = JsonDateReader.Dropbox.readField(parser, fieldName, modified); break;
                case FM_client_mtime: client_mtime = JsonDateReader.Dropbox.readField(parser, fieldName, client_mtime); break;
                // "hash" and "contents" are only valid when children were requested
                // (i.e. a collector was supplied).
                case FM_hash:
                    if (collector == null) throw new JsonReadException("not expecting \"hash\" field, since we didn't ask for children", parser.getCurrentLocation());
                    hash = JsonReader.StringReader.readField(parser, fieldName, hash); break;
                case FM_contents:
                    if (collector == null) throw new JsonReadException("not expecting \"contents\" field, since we didn't ask for children", parser.getCurrentLocation());
                    contents = JsonArrayReader.mk(Reader, collector).readField(parser, fieldName, contents); break;
                // photo/video info may be the literal string "pending" instead of an object.
                case FM_photo_info:
                    photo_info = PendingReader.mk(File.PhotoInfo.Reader, File.PhotoInfo.PENDING).readField(parser, fieldName, photo_info);
                    break;
                case FM_video_info:
                    video_info = PendingReader.mk(File.VideoInfo.Reader, File.VideoInfo.PENDING).readField(parser, fieldName, video_info);
                    break;
                default:
                    throw new AssertionError("bad index: " + fi + ", field = \"" + fieldName + "\"");
            }
        }
        catch (JsonReadException ex) {
            // Annotate parse errors with the field they occurred in.
            throw ex.addFieldContext(fieldName);
        }
    }
    JsonReader.expectObjectEnd(parser);

    // Validate fields required for every entry, and apply defaults.
    if (path == null) throw new JsonReadException("missing field \"path\"", top);
    if (icon == null) throw new JsonReadException("missing field \"icon\"", top);
    if (is_deleted == null) is_deleted = Boolean.FALSE;
    if (is_dir == null) is_dir = Boolean.FALSE;
    if (thumb_exists == null) thumb_exists = Boolean.FALSE;

    // For folders where children were requested, "hash" and "contents" must
    // arrive together.
    if (is_dir && (contents != null || hash != null)) {
        if (hash == null) throw new JsonReadException("missing \"hash\", when we asked for children", top);
        if (contents == null) throw new JsonReadException("missing \"contents\", when we asked for children", top);
    }

    DbxEntry e;
    if (is_dir) {
        e = new Folder(path, icon, thumb_exists);
    }
    else {
        // Normal File: these fields are mandatory for file entries.
        if (size == null) throw new JsonReadException("missing \"size\" for a file entry", top);
        if (bytes == -1) throw new JsonReadException("missing \"bytes\" for a file entry", top);
        if (modified == null) throw new JsonReadException("missing \"modified\" for a file entry", top);
        if (client_mtime == null) throw new JsonReadException("missing \"client_mtime\" for a file entry", top);
        if (rev == null) throw new JsonReadException("missing \"rev\" for a file entry", top);
        e = new File(path, icon, thumb_exists, bytes, size, modified, client_mtime, rev, photo_info, video_info);
    }

    // Deleted entries are either reported as null or rejected, per allowDeleted.
    if (is_deleted) {
        if (allowDeleted) {
            return null;
        } else {
            throw new JsonReadException("not expecting \"is_deleted\" entry here", top);
        }
    }
    return new WithChildrenC<C>(e, hash, contents);
}
/**
 * Wraps another {@link JsonReader}, additionally accepting the literal JSON
 * string {@code "pending"} in place of a value and mapping it to a fixed
 * sentinel (e.g. {@code PhotoInfo.PENDING}).
 */
private static final class PendingReader<T> extends JsonReader<T>
{
    private final JsonReader<T> reader;       // delegate for non-"pending" values
    private final T pendingValue;             // sentinel returned for "pending"

    public PendingReader(JsonReader<T> reader, T pendingValue)
    {
        this.reader = reader;
        this.pendingValue = pendingValue;
    }

    /** Convenience factory so callers don't have to spell out the type argument. */
    public static <T> PendingReader<T> mk(JsonReader<T> reader, T pendingValue) { return new PendingReader<T>(reader, pendingValue); }

    @Override
    public T read(JsonParser parser)
        throws IOException, JsonReadException
    {
        if (parser.getCurrentToken() != JsonToken.VALUE_STRING) {
            // Not a string: parse normally with the wrapped reader.
            return reader.read(parser);
        }
        String text = parser.getText();
        if (!text.equals("pending")) {
            throw new JsonReadException("got a string, but the value wasn't \"pending\"", parser.getTokenLocation());
        }
        parser.nextToken();  // consume the "pending" token
        return pendingValue;
    }
}
// Dispatch indices for the JSON field names handled in _read()'s switch.
private static final int FM_size = 0;
private static final int FM_bytes = 1;
private static final int FM_path = 2;
private static final int FM_is_dir = 3;
private static final int FM_is_deleted = 4;
private static final int FM_rev = 5;
private static final int FM_thumb_exists = 6;
private static final int FM_icon = 7;
private static final int FM_modified = 8;
private static final int FM_client_mtime = 9;
private static final int FM_hash = 10;
private static final int FM_contents = 11;
private static final int FM_photo_info = 12;
private static final int FM_video_info = 13;

// Maps field-name strings to the FM_* indices above; _read() treats an
// unmapped name (index -1) as an unknown field and skips it.
private static final JsonReader.FieldMapping FM;
static {
    JsonReader.FieldMapping.Builder b = new JsonReader.FieldMapping.Builder();
    b.add("size", FM_size);
    b.add("bytes", FM_bytes);
    b.add("path", FM_path);
    b.add("is_dir", FM_is_dir);
    b.add("is_deleted", FM_is_deleted);
    b.add("rev", FM_rev);
    b.add("thumb_exists", FM_thumb_exists);
    b.add("icon", FM_icon);
    b.add("modified", FM_modified);
    b.add("client_mtime", FM_client_mtime);
    b.add("hash", FM_hash);
    b.add("contents", FM_contents);
    b.add("photo_info", FM_photo_info);
    b.add("video_info", FM_video_info);
    FM = b.build();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.net.examples.mail;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.net.pop3.POP3Client;
import org.apache.commons.net.pop3.POP3MessageInfo;
import org.apache.commons.net.pop3.POP3SClient;
/**
* This is an example program demonstrating how to use the POP3[S]Client class.
* This program connects to a POP3[S] server and writes the messages
* to an mbox file.
* <p>
* The code currently assumes that POP3Client decodes the POP3 data as iso-8859-1.
* The POP3 standard only allows for ASCII so in theory iso-8859-1 should be OK.
* However it appears that actual POP3 implementations may return 8bit data that is
* outside the ASCII range; this may result in loss of data when the mailbox is created.
* <p>
* See main() method for usage details
*/
public final class POP3ExportMbox
{
    // Matches a (possibly already '>'-escaped) mbox "From " marker at the
    // start of a message line; writeMbox() escapes such lines with '>'.
    private static final Pattern PATFROM = Pattern.compile(">*From "); // unescaped From_

    /**
     * Entry point; see the usage string below for the argument syntax.
     * With no -F option the messages are fetched but not written anywhere.
     */
    public static void main(final String[] args)
    {
        int argIdx;
        String file = null;
        // Consume leading "-F <file/directory>" options (the last one wins).
        for(argIdx = 0; argIdx < args.length; argIdx++) {
            if (!args[argIdx].equals("-F")) {
                break;
            }
            file = args[++argIdx];
        }
        final int argCount = args.length - argIdx;
        if (argCount < 3)
        {
            System.err.println(
                "Usage: POP3Mail [-F file/directory] <server[:port]> <username> <password|-|*|VARNAME> [TLS [true=implicit]]");
            System.exit(1);
        }
        // The server argument may carry an optional ":port" suffix.
        final String arg0[] = args[argIdx++].split(":");
        final String server=arg0[0];
        final String username = args[argIdx++];
        String password = args[argIdx++];
        // prompt for the password if necessary
        try {
            password = Utils.getPassword(username, password);
        } catch (final IOException e1) {
            System.err.println("Could not retrieve password: " + e1.getMessage());
            return;
        }
        // Optional 4th arg selects a secure protocol; optional 5th arg selects
        // implicit (vs explicit) TLS.
        final String proto = argCount > 3 ? args[argIdx++] : null;
        final boolean implicit = argCount > 4 ? Boolean.parseBoolean(args[argIdx++]) : false;
        final POP3Client pop3;
        if (proto != null) {
            System.out.println("Using secure protocol: "+proto);
            pop3 = new POP3SClient(proto, implicit);
        } else {
            pop3 = new POP3Client();
        }
        final int port;
        if (arg0.length == 2) {
            port = Integer.parseInt(arg0[1]);
        } else {
            port = pop3.getDefaultPort();
        }
        System.out.println("Connecting to server "+server+" on "+port);
        // We want to timeout if a response takes longer than 60 seconds
        pop3.setDefaultTimeout(60000);
        try
        {
            pop3.connect(server);
        }
        catch (final IOException e)
        {
            System.err.println("Could not connect to server.");
            e.printStackTrace();
            return;
        }
        try
        {
            if (!pop3.login(username, password))
            {
                System.err.println("Could not login to server. Check password.");
                pop3.disconnect();
                return;
            }
            final POP3MessageInfo status = pop3.status();
            if (status == null) {
                System.err.println("Could not retrieve status.");
                pop3.logout();
                pop3.disconnect();
                return;
            }
            System.out.println("Status: " + status);
            final int count = status.number;
            if (file != null) {
                System.out.println("Getting messages: " + count);
                final File mbox = new File(file);
                if (mbox.isDirectory()) {
                    // Directory target: one "<n>.eml" file per message.
                    System.out.println("Writing dir: " + mbox);
                    // Currently POP3Client uses iso-8859-1
                    for (int i = 1; i <= count; i++) {
                        try (final OutputStreamWriter fw = new OutputStreamWriter(
                                new FileOutputStream(new File(mbox, i + ".eml")), StandardCharsets.ISO_8859_1)) {
                            writeFile(pop3, fw, i);
                        }
                    }
                } else {
                    // File target: all messages appended to one mbox file.
                    System.out.println("Writing file: " + mbox);
                    // Currently POP3Client uses iso-8859-1
                    try (final OutputStreamWriter fw = new OutputStreamWriter(new FileOutputStream(mbox),
                            StandardCharsets.ISO_8859_1)) {
                        for (int i = 1; i <= count; i++) {
                            writeMbox(pop3, fw, i);
                        }
                    }
                }
            }
            pop3.logout();
            pop3.disconnect();
        }
        catch (final IOException e)
        {
            e.printStackTrace();
            return;
        }
    }

    // True if the input begins with the given pattern (anchored at the start).
    private static boolean startsWith(final String input, final Pattern pat) {
        final Matcher m = pat.matcher(input);
        return m.lookingAt();
    }

    // Writes one retrieved message verbatim, line by line, to the writer.
    private static void writeFile(final POP3Client pop3, final OutputStreamWriter fw, final int i) throws IOException {
        try (final BufferedReader r = (BufferedReader) pop3.retrieveMessage(i)) {
            String line;
            while ((line = r.readLine()) != null) {
                fw.write(line);
                fw.write("\n");
            }
        }
    }

    // Writes one retrieved message in mbox format: a "From <who> <date>"
    // separator line first, then the message body with any line matching
    // PATFROM prefixed by '>' (standard mbox From_ escaping).
    private static void writeMbox(final POP3Client pop3, final OutputStreamWriter fw, final int i) throws IOException {
        final SimpleDateFormat DATE_FORMAT // for mbox From_ lines
            = new SimpleDateFormat("EEE MMM dd HH:mm:ss yyyy");
        final String replyTo = "MAILER-DAEMON"; // default
        final Date received = new Date();
        try (final BufferedReader r = (BufferedReader) pop3.retrieveMessage(i)) {
            fw.append("From ");
            fw.append(replyTo);
            fw.append(' ');
            fw.append(DATE_FORMAT.format(received));
            fw.append("\n");
            String line;
            while ((line = r.readLine()) != null) {
                if (startsWith(line, PATFROM)) {
                    fw.write(">");
                }
                fw.write(line);
                fw.write("\n");
            }
            fw.write("\n");  // blank line terminates the message in mbox
        }
    }
}
| |
package utils;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.CellType;
import org.apache.poi.xssf.usermodel.XSSFCell;
import org.apache.poi.xssf.usermodel.XSSFRow;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
/**
* @Author Gladson Antony
* @Date 2021-08-09T00:27:03.862Z
*/
public class ExcelUtils {
private static XSSFSheet xlsxWorkSheet;
private static XSSFWorkbook xlsxWorkBook;
private static XSSFCell xlsxCell;
@SuppressWarnings("unused")
private static XSSFRow xlsxRow;
private static HSSFSheet xlsWorkSheet;
private static HSSFWorkbook xlsWorkBook;
private static HSSFCell xlsCell;
@SuppressWarnings("unused")
private static HSSFRow xlsRow;
/**
* To get the Excel-XLSX File with Path and SheetName
*/
public static void getExcelFile(String Path, String SheetName) throws Exception {
try {
File file = new File(Path);
if (file.getAbsolutePath().endsWith(".xlsx")) {
FileInputStream fis = new FileInputStream(file);
xlsxWorkBook = new XSSFWorkbook(fis);
xlsxWorkSheet = xlsxWorkBook.getSheet(SheetName);
} else if (file.getAbsolutePath().endsWith(".xls")) {
FileInputStream fis = new FileInputStream(file);
xlsWorkBook = new HSSFWorkbook(fis);
xlsWorkSheet = xlsWorkBook.getSheet(SheetName);
}
} catch (Exception e) {
throw (e);
}
}
/**
* To Return the Excel-XLSX Values given Path to the File and Sheet Name
*/
public static Object[][] getTableArray(String FilePath, String SheetName) throws Exception {
Object[][] tabArray = null;
try {
File file = new File(FilePath);
if (file.getAbsolutePath().endsWith(".xlsx")) {
FileInputStream ExcelFile = new FileInputStream(file);
xlsxWorkBook = new XSSFWorkbook(ExcelFile);
xlsxWorkSheet = xlsxWorkBook.getSheet(SheetName);
int startRow = 1;
int startCol = 0;
int ci, cj;
int totalRows = ExcelUtils.xlsxRowCount();
int totalCols = ExcelUtils.xlsxColumnCount();
tabArray = new Object[totalRows - 1][totalCols];
ci = 0;
for (int i = startRow; i < totalRows; i++) {
cj = 0;
for (int j = startCol; j < totalCols; j++) {
tabArray[ci][cj] = getCellData_XLSX(i, j);
cj++;
}
ci++;
}
} else if (file.getAbsolutePath().endsWith(".xls")) {
FileInputStream ExcelFile = new FileInputStream(file);
xlsWorkBook = new HSSFWorkbook(ExcelFile);
xlsWorkSheet = xlsWorkBook.getSheet(SheetName);
int startRow = 1;
int startCol = 0;
int ci, cj;
int totalRows = ExcelUtils.xlsRowCount();
int totalCols = ExcelUtils.xlsColumnCount();
tabArray = new Object[totalRows - 1][totalCols];
ci = 0;
for (int i = startRow; i < totalRows; i++) {
cj = 0;
for (int j = startCol; j < totalCols; j++) {
tabArray[ci][cj] = getCellData_XLS(i, j);
cj++;
}
ci++;
}
}
} catch (FileNotFoundException e) {
throw new Exception("Could not Find the Excel File/Sheet");
} catch (Exception e) {
throw new Exception("Could not Open the Excel File");
}
return (tabArray);
}
/**
* To Return the Excel-XLSX Values given Path to the File
*/
public static Object[][] getTableArray(String FilePath) throws Exception {
Object[][] tabArray = null;
try {
File file = new File(FilePath);
if (file.getAbsolutePath().endsWith(".xlsx")) {
FileInputStream ExcelFile = new FileInputStream(file);
xlsxWorkBook = new XSSFWorkbook(ExcelFile);
xlsxWorkSheet = xlsxWorkBook.getSheetAt(0);
int startRow = 1;
int startCol = 0;
int ci, cj;
int totalRows = ExcelUtils.xlsxRowCount();
int totalCols = ExcelUtils.xlsxColumnCount();
tabArray = new Object[totalRows - 1][totalCols];
ci = 0;
for (int i = startRow; i < totalRows; i++) {
cj = 0;
for (int j = startCol; j < totalCols; j++) {
tabArray[ci][cj] = getCellData_XLSX(i, j);
cj++;
}
ci++;
}
} else if (file.getAbsolutePath().endsWith(".xls")) {
FileInputStream ExcelFile = new FileInputStream(file);
xlsWorkBook = new HSSFWorkbook(ExcelFile);
xlsWorkSheet = xlsWorkBook.getSheetAt(0);
int startRow = 1;
int startCol = 0;
int ci, cj;
int totalRows = ExcelUtils.xlsRowCount();
int totalCols = ExcelUtils.xlsColumnCount();
tabArray = new Object[totalRows - 1][totalCols];
ci = 0;
for (int i = startRow; i < totalRows; i++) {
cj = 0;
for (int j = startCol; j < totalCols; j++) {
tabArray[ci][cj] = getCellData_XLS(i, j);
cj++;
}
ci++;
}
}
} catch (FileNotFoundException e) {
throw new Exception("Could not Find the Excel File/Sheet");
} catch (Exception e) {
throw new Exception("Could not Open the Excel File");
}
return (tabArray);
}
/**
* To get cell data from Excel-XLSX
*/
public static Object getCellData_XLSX(int RowNum, int ColNum) throws Exception {
Object CellData = null;
try {
xlsxCell = xlsxWorkSheet.getRow(RowNum).getCell(ColNum);
if (xlsxCell.getCellType() == CellType.STRING) {
String stringCellData = xlsxCell.getStringCellValue();
CellData = stringCellData;
} else if (xlsxCell.getCellType() == CellType.NUMERIC) {
double numericCellData = xlsxCell.getNumericCellValue();
CellData = numericCellData;
} else if (xlsxCell.getCellType() == CellType.BOOLEAN) {
boolean booleanCellData = xlsxCell.getBooleanCellValue();
CellData = booleanCellData;
} else if (xlsxCell.getCellType() == CellType.FORMULA) {
String formulaCellData = xlsxCell.getCellFormula();
CellData = formulaCellData;
}
return CellData;
} catch (Exception e) {
return "";
}
}
/**
* To get cell data from Excel-XLS
*/
public static Object getCellData_XLS(int RowNum, int ColNum) throws Exception {
Object CellData = null;
try {
xlsCell = xlsWorkSheet.getRow(RowNum).getCell(ColNum);
if (xlsCell.getCellType() == CellType.STRING) {
String stringCellData = xlsCell.getStringCellValue();
CellData = stringCellData;
} else if (xlsCell.getCellType() == CellType.NUMERIC) {
double numericCellData = xlsCell.getNumericCellValue();
CellData = numericCellData;
} else if (xlsCell.getCellType() == CellType.BOOLEAN) {
boolean booleanCellData = xlsCell.getBooleanCellValue();
CellData = booleanCellData;
} else if (xlsxCell.getCellType() == CellType.FORMULA) {
String formulaCellData = xlsxCell.getCellFormula();
CellData = formulaCellData;
}
return CellData;
} catch (Exception e) {
return "";
}
}
/**
* To get Excel-XLSX Row Count
*/
public static int xlsxRowCount() {
int rowNum = xlsxWorkSheet.getLastRowNum() + 1;
return rowNum;
}
/**
* To get Excel-XLS Row Count
*/
public static int xlsRowCount() {
int rowNum = xlsWorkSheet.getLastRowNum() + 1;
return rowNum;
}
/**
* To get Excel-XLSX Column Count
*/
public static int xlsxColumnCount() {
int rowNum = xlsxWorkSheet.getRow(0).getLastCellNum();
return rowNum;
}
/**
* To get Excel-XLS Column Count
*/
public static int xlsColumnCount() {
int rowNum = xlsWorkSheet.getRow(0).getLastCellNum();
return rowNum;
}
}
| |
package cz.metacentrum.perun.core.bl;
import java.util.List;
import java.util.Map;
import cz.metacentrum.perun.core.api.Attribute;
import cz.metacentrum.perun.core.api.AttributeDefinition;
import cz.metacentrum.perun.core.api.ExtSource;
import cz.metacentrum.perun.core.api.Group;
import cz.metacentrum.perun.core.api.Member;
import cz.metacentrum.perun.core.api.Pair;
import cz.metacentrum.perun.core.api.Perun;
import cz.metacentrum.perun.core.api.PerunBean;
import cz.metacentrum.perun.core.api.PerunSession;
import cz.metacentrum.perun.core.api.Resource;
import cz.metacentrum.perun.core.api.RichGroup;
import cz.metacentrum.perun.core.api.RichMember;
import cz.metacentrum.perun.core.api.RichUser;
import cz.metacentrum.perun.core.api.Status;
import cz.metacentrum.perun.core.api.User;
import cz.metacentrum.perun.core.api.Vo;
import cz.metacentrum.perun.core.api.exceptions.AlreadyAdminException;
import cz.metacentrum.perun.core.api.exceptions.AlreadyMemberException;
import cz.metacentrum.perun.core.api.exceptions.AttributeNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.ExtSourceNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.GroupAlreadyRemovedException;
import cz.metacentrum.perun.core.api.exceptions.GroupAlreadyRemovedFromResourceException;
import cz.metacentrum.perun.core.api.exceptions.GroupExistsException;
import cz.metacentrum.perun.core.api.exceptions.GroupNotAdminException;
import cz.metacentrum.perun.core.api.exceptions.GroupNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.GroupSynchronizationAlreadyRunningException;
import cz.metacentrum.perun.core.api.exceptions.InternalErrorException;
import cz.metacentrum.perun.core.api.exceptions.MemberAlreadyRemovedException;
import cz.metacentrum.perun.core.api.exceptions.NotGroupMemberException;
import cz.metacentrum.perun.core.api.exceptions.NotMemberOfParentGroupException;
import cz.metacentrum.perun.core.api.exceptions.ParentGroupNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.RelationExistsException;
import cz.metacentrum.perun.core.api.exceptions.UserNotAdminException;
import cz.metacentrum.perun.core.api.exceptions.UserNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.WrongAttributeAssignmentException;
import cz.metacentrum.perun.core.api.exceptions.WrongAttributeValueException;
import cz.metacentrum.perun.core.api.exceptions.WrongReferenceAttributeValueException;
import java.util.Date;
/**
* <p>Groups manager can do all work about groups in VOs.</p>
*
 * <p>You must get an instance of GroupsManager from an instance of Perun (Perun is a singleton - see how to get its instance on the wiki):</p>
* <pre>
* GroupsManager gm = perun.getGroupsManager();
* </pre>
*
* @author Michal Prochazka
* @author Slavek Licehammer
* @see Perun
*/
public interface GroupsManagerBl {
/**
* Creates a new top-level group and associate it with the VO.
*
* For this method (new group) has always same shortName like Name.
*
* @param perunSession
* @param vo
* @param group with name without ":"
*
* @return newly created top-level group
*
* @throws InternalErrorException if group.name contains ':' or other internal error occur
* @throws GroupExistsException
*/
Group createGroup(PerunSession perunSession, Vo vo, Group group) throws GroupExistsException, InternalErrorException;
/**
* Creates a new subgroup of the existing group.
*
* @param perunSession
* @param parentGroup
* @param group group.name must contain only shortName (without ":"). Hierarchy is defined by parentGroup parameter.
*
* @return newly created sub group with full group.Name with ":"
*
* @throws InternalErrorException if group.name contains ':' or other internal error occur
* @throws GroupExistsException
*/
Group createGroup(PerunSession perunSession, Group parentGroup, Group group) throws GroupExistsException, InternalErrorException;
/**
* Gets all groups which have enabled synchronization.
*
* @param sess
* @return list of groups to synchronize
* @throws InternalErrorException
* @throws GroupNotExistsException
*/
List<Group> getGroupsToSynchronize(PerunSession sess) throws InternalErrorException;
/**
* If forceDelete is false, delete only group and if this group has members or subgroups, throw an exception.
* If forceDelete is true, delete group with all subgroups, members and administrators, then delete this group.
*
* @param perunSession
* @param group group to delete
* @param forceDelete if forceDelete is false, delete group only if is empty and has no subgroups, if is true, delete anyway with all connections
*
* @throws InternalErrorException
* @throws RelationExistsException raise only if group has subgroups or members and forceDelete is false
* @throws GroupAlreadyRemovedException if there are 0 rows affected by deleting from DB
* @throws GroupAlreadyRemovedFromResourceException if there is at least 1 group on resource not affected by removing from DB
*/
void deleteGroup(PerunSession perunSession, Group group, boolean forceDelete) throws InternalErrorException, RelationExistsException, GroupAlreadyRemovedException, GroupAlreadyRemovedFromResourceException;
/**
* Delete all groups in list from perun. (Except members group)
*
* If forceDelete is false, delete groups only if none of them (IN MOMENT OF DELETING) has subgroups and members, in other case throw exception.
* if forceDelete is true, delete groups with all subgroups and members.
*
* Groups are deleted in order: from longest name to the shortest
* - ex: Group A:b:c will be deleted sooner than Group A:b etc.
* - reason for this: with group are deleted its subgroups too
*
* Important: Groups can be from different VOs.
*
* @param perunSession
* @param groups list of groups to deleted
* @param forceDelete if forceDelete is false, delete groups only if all of them have no subgroups and no members, if is true, delete anyway with all connections
*
* @throws InternalErrorException
* @throws GroupAlreadyRemovedException if any groups is already deleted
* @throws RelationExistsException raise if group has subgroups or member (forceDelete is false)
* @throws GroupAlreadyRemovedFromResourceException if any group is already removed from resource
*/
void deleteGroups(PerunSession perunSession, List<Group> groups, boolean forceDelete) throws InternalErrorException, GroupAlreadyRemovedException, RelationExistsException, GroupAlreadyRemovedFromResourceException;
/**
* Deletes built-in members group.
*
* @param sess
* @param vo
* @throws InternalErrorException
* @throws GroupAlreadyRemovedException if there are 0 rows affected by deleting from DB
* @throws GroupAlreadyRemovedFromResourceException if there is at least 1 group on resource not affected by deliting from DB
*/
void deleteMembersGroup(PerunSession sess, Vo vo) throws InternalErrorException, GroupAlreadyRemovedException, GroupAlreadyRemovedFromResourceException;
/**
* Deletes all groups under the VO except built-in groups (members, admins groups).
*
* @param perunSession
* @param vo VO
*
* @throws InternalErrorException
* @throws GroupAlreadyRemovedException if there is at least 1 group not affected by deleting from DB
* @throws GroupAlreadyRemovedFromResourceException if there is at least 1 group on resource not affected by deleting from DB
*
*/
void deleteAllGroups(PerunSession perunSession, Vo vo) throws InternalErrorException, GroupAlreadyRemovedException, GroupAlreadyRemovedFromResourceException;
/**
* Updates group by ID.
*
* Update shortName (use shortName) and description. Group.name is ignored.
* Return Group with correctly set parameters (including group.name)
*
* @param perunSession
* @param group to update (use only ID, shortName and description)
*
* @return updated group with correctly set parameters (including group.name)
*
* @throws InternalErrorException
*/
Group updateGroup(PerunSession perunSession, Group group) throws InternalErrorException;
/**
* Search for the group with specified id in all VOs.
*
* @param id
* @param perunSession
*
* @return group with specified id or throws
*
* @throws InternalErrorException
*/
Group getGroupById(PerunSession perunSession, int id) throws InternalErrorException, GroupNotExistsException;
/**
* Search for the group with specified name in specified VO.
*
* IMPORTANT: need to use full name of group (ex. 'toplevel:a:b', not the shortname which is in this example 'b')
*
* @param perunSession
* @param vo
* @param name
*
* @return group with specified name or throws in specified VO
*
* @throws InternalErrorException
*/
Group getGroupByName(PerunSession perunSession, Vo vo, String name) throws InternalErrorException, GroupNotExistsException;
/**
* Adds member of the VO to the group in the same VO. But not to administrators and members group.
*
* @param perunSession
* @param group
* @param member
* @throws InternalErrorException
* @throws AlreadyMemberException
* @throws WrongAttributeValueException if any member attribute value, required by resource (on which the group is assigned), is wrong
* @throws RelationExistsException
* @throws WrongReferenceAttributeValueException
*/
void addMember(PerunSession perunSession, Group group, Member member) throws InternalErrorException, AlreadyMemberException, WrongAttributeValueException, WrongReferenceAttributeValueException, NotMemberOfParentGroupException;
/**
* Special addMember which is able to add members into the members and administrators group.
*
* @param perunSession
* @param group
* @param member
* @throws InternalErrorException
* @throws AlreadyMemberException
* @throws WrongAttributeValueException
* @throws WrongReferenceAttributeValueException
* @throws NotMemberOfParentGroupException
*/
void addMemberToMembersGroup(PerunSession perunSession, Group group, Member member) throws InternalErrorException, AlreadyMemberException, WrongAttributeValueException, WrongReferenceAttributeValueException, NotMemberOfParentGroupException;
/** Return list of assigned groups on the resource (without subgroups unless they are assigned too)
*
* @param perunSession
* @param resource
*
* @return list of groups, which are assigned on the resource
*
* @throws InternalErrorException
*/
List<Group> getAssignedGroupsToResource(PerunSession perunSession, Resource resource) throws InternalErrorException;
/** Return list of assigned groups on the resource.
*
* @param perunSession
* @param resource
* @param withSubGroups if true returns also all subgroups of assigned groups
*
* @return list of groups, which are assigned on the resource
*
* @throws InternalErrorException
*/
List<Group> getAssignedGroupsToResource(PerunSession perunSession, Resource resource, boolean withSubGroups) throws InternalErrorException;
/**
* Removes member form the group. But not from members or administrators group.
*
* @param perunSession
* @param group
* @param member
*
* @throws InternalErrorException
* @throws NotGroupMemberException
*/
void removeMember(PerunSession perunSession, Group group, Member member) throws InternalErrorException, NotGroupMemberException;
/**
* Removes member from members or administrators group only.
*
* @param perunSession
* @param group
* @param member
* @throws InternalErrorException
* @throws NotGroupMemberException
*/
void removeMemberFromMembersOrAdministratorsGroup(PerunSession perunSession, Group group, Member member) throws InternalErrorException, NotGroupMemberException;
/**
* Return all group members.
*
* @param perunSession
* @param group
* @return list of users or empty list if the group is empty
*
* @throws InternalErrorException
*/
List<Member> getGroupMembers(PerunSession perunSession, Group group) throws InternalErrorException;
/**
* Return only valid, suspended, expired and disabled group members.
*
* @param perunSession
* @param group
*
* @return list members or empty list if there are no such members
*
* @throws InternalErrorException
*/
List<Member> getGroupMembersExceptInvalid(PerunSession perunSession, Group group) throws InternalErrorException;
/**
* Return only valid, suspended and expired group members.
*
* @param perunSession
* @param group
*
* @return list members or empty list if there are no such members
*
* @throws InternalErrorException
*/
List<Member> getGroupMembersExceptInvalidAndDisabled(PerunSession perunSession, Group group) throws InternalErrorException;
/**
* Return group members.
*
* @param perunSession
* @param group
* @param status
*
* @return list users or empty list if there are no users on specified page
*
* @throws InternalErrorException
*/
List<Member> getGroupMembers(PerunSession perunSession, Group group, Status status) throws InternalErrorException;
/**
* Return group users sorted by name.
*
* @param perunSession
* @param group
* @return list users sorted or empty list if there are no users on specified page
*/
List<User> getGroupUsers(PerunSession perunSession, Group group) throws InternalErrorException;
/**
* Returns group members in the RichMember object, which contains Member+User data.
*
* @param sess
* @param group
*
* @return list of RichMembers
* @throws InternalErrorException
*/
List<RichMember> getGroupRichMembers(PerunSession sess, Group group) throws InternalErrorException;
/**
* Returns only valid, suspended and expired group members in the RichMember object, which contains Member+User data.
*
* @param sess
* @param group
*
* @return list of RichMembers
* @throws InternalErrorException
*/
List<RichMember> getGroupRichMembersExceptInvalid(PerunSession sess, Group group) throws InternalErrorException;
/**
* Returns group members in the RichMember object, which contains Member+User data.
*
* @param sess
* @param group
* @param status
*
* @return list of RichMembers
* @throws InternalErrorException
*/
List<RichMember> getGroupRichMembers(PerunSession sess, Group group, Status status) throws InternalErrorException;
/**
* Returns group members in the RichMember object, which contains Member+User data. Also contains user and member attributes.
*
* @param sess
* @param group
*
* @return list of RichMembers
* @throws InternalErrorException
*/
List<RichMember> getGroupRichMembersWithAttributes(PerunSession sess, Group group) throws InternalErrorException;
/**
* Returns only valid, suspended and expired group members in the RichMember object, which contains Member+User data. Also contains user and member attributes.
*
* @param sess
* @param group
*
* @return list of RichMembers
* @throws InternalErrorException
*/
List<RichMember> getGroupRichMembersWithAttributesExceptInvalid(PerunSession sess, Group group) throws InternalErrorException;
/**
* Returns group members in the RichMember object, which contains Member+User data. Also contains user and member attributes.
*
* @param sess
* @param group
* @param status
*
* @return list of RichMembers
* @throws InternalErrorException
*/
List<RichMember> getGroupRichMembersWithAttributes(PerunSession sess, Group group, Status status) throws InternalErrorException;
/**
* @param perunSession
* @param group
*
* @return count of members of specified group
*
* @throws InternalErrorException
*/
int getGroupMembersCount(PerunSession perunSession, Group group) throws InternalErrorException;
/**
* Checks whether the user is member of the group.
*
* @param sess
* @param user
* @param group
* @return true if the user is member of the group
* @throws InternalErrorException
*/
boolean isUserMemberOfGroup(PerunSession sess, User user, Group group) throws InternalErrorException;
/**
* Get all groups of the VO.
*
* @param sess
* @param vo
*
* @return list of groups
*
* @throws InternalErrorException
*/
List<Group> getAllGroups(PerunSession sess, Vo vo) throws InternalErrorException;
/**
* Get all groups of the VO stored in the map reflecting the hierarchy.
*
* @param sess
* @param vo
*
* @return map of the groups hierarchically organized
*
* @throws InternalErrorException
*/
Map<Group, Object> getAllGroupsWithHierarchy(PerunSession sess, Vo vo) throws InternalErrorException;
/**
* Get parent group.
* If group is topLevel group or Members group, return Members group.
*
* @param sess
* @param group
* @return parent group
* @throws InternalErrorException
* @throws ParentGroupNotExistsException
*/
Group getParentGroup(PerunSession sess, Group group) throws InternalErrorException, ParentGroupNotExistsException;
/**
* Get all subgroups of the parent group under the VO.
*
* @param sess
* @param parentGroup parent group
*
* @return list of groups
* @throws InternalErrorException
*/
List<Group> getSubGroups(PerunSession sess, Group parentGroup) throws InternalErrorException;
/**
* Get all subgroups of the parentGroup recursively.
* (parentGroup subgroups, their subgroups etc...)
*
* @param sess
* @param parentGroup parent group
*
* @return list of groups
* @throws InternalErrorException
*/
List<Group> getAllSubGroups(PerunSession sess, Group parentGroup) throws InternalErrorException;
/**
* Adds an administrator of the group.
*
* @param perunSession
* @param group
* @param user
*
* @throws InternalErrorException
* @throws AlreadyAdminException
*/
void addAdmin(PerunSession perunSession, Group group, User user) throws InternalErrorException, AlreadyAdminException;
/**
* Adds a group administrator to the group.
*
* @param perunSession
* @param group - group that will be assigned admins (users) from authorizedGroup
* @param authorizedGroup - group that will be given the privilege
*
* @throws InternalErrorException
* @throws AlreadyAdminException
*/
void addAdmin(PerunSession perunSession, Group group, Group authorizedGroup) throws InternalErrorException, AlreadyAdminException;
/**
* Removes an administrator form the group.
*
* @param perunSession
* @param group
* @param user
*
* @throws InternalErrorException
* @throws UserNotAdminException
*/
void removeAdmin(PerunSession perunSession, Group group, User user) throws InternalErrorException, UserNotAdminException;
/**
* Removes a group administrator of the group.
*
* @param perunSession
* @param group
* @param authorizedGroup group that will be removed the privilege
*
* @throws InternalErrorException
* @throws GroupNotAdminException
*/
void removeAdmin(PerunSession perunSession, Group group, Group authorizedGroup) throws InternalErrorException, GroupNotAdminException;
/**
* Get list of all user administrators for supported role and specific group.
*
* If onlyDirectAdmins is true, return only direct users of the group for supported role.
*
* Supported roles: GroupAdmin
*
* @param perunSession
* @param group
* @param onlyDirectAdmins if true, get only direct user administrators (if false, get both direct and indirect)
*
* @return list of all user administrators of the given group for supported role
*
* @throws InternalErrorException
*/
List<User> getAdmins(PerunSession perunSession, Group group, boolean onlyDirectAdmins) throws InternalErrorException;
/**
* Get list of all richUser administrators for the group and supported role with specific attributes.
*
* Supported roles: GroupAdmin
*
* If "onlyDirectAdmins" is "true", return only direct users of the group for supported role with specific attributes.
* If "allUserAttributes" is "true", do not specify attributes through list and return them all in objects richUser. Ignoring list of specific attributes.
*
* @param perunSession
* @param group
*
* @param specificAttributes list of specified attributes which are needed in object richUser
* @param allUserAttributes if true, get all possible user attributes and ignore list of specificAttributes (if false, get only specific attributes)
* @param onlyDirectAdmins if true, get only direct user administrators (if false, get both direct and indirect)
*
* @return list of RichUser administrators for the group and supported role with attributes
*
* @throws InternalErrorException
* @throws UserNotExistsException
*/
List<RichUser> getRichAdmins(PerunSession perunSession, Group group, List<String> specificAttributes, boolean allUserAttributes, boolean onlyDirectAdmins) throws InternalErrorException, UserNotExistsException;
/**
* Gets list of all user administrators of this group.
* If some group is administrator of the given group, all members are included in the list.
*
* @param perunSession
* @param group
*
* @throws InternalErrorException
*
* @return list of administrators
*/
@Deprecated
List<User> getAdmins(PerunSession perunSession, Group group) throws InternalErrorException;
/**
* Gets list of direct user administrators of this group.
* 'Direct' means, there aren't included users, who are members of group administrators, in the returned list.
*
* @param perunSession
* @param group
*
* @throws InternalErrorException
*
* @return list of direct administrators
*/
@Deprecated
List<User> getDirectAdmins(PerunSession perunSession, Group group) throws InternalErrorException;
/**
* Gets list of all group administrators of this group.
*
* @param perunSession
* @param group
*
* @throws InternalErrorException
*
* @return list of group administrators
*/
List<Group> getAdminGroups(PerunSession perunSession, Group group) throws InternalErrorException;
/**
* Gets list of all administrators of this group like RichUsers without attributes.
*
* @param perunSession
* @param group
*
* @throws InternalErrorException
* @throws UserNotExistsException
*/
@Deprecated
List<RichUser> getRichAdmins(PerunSession perunSession, Group group) throws InternalErrorException, UserNotExistsException;
/**
* Gets list of all administrators of this group, which are assigned directly, like RichUsers without attributes.
*
* @param perunSession
* @param group
*
* @throws InternalErrorException
* @throws UserNotExistsException
*/
@Deprecated
List<RichUser> getDirectRichAdmins(PerunSession perunSession, Group group) throws InternalErrorException, UserNotExistsException;
/**
* Gets list of all administrators of this group like RichUsers with attributes.
*
* @param perunSession
* @param group
*
* @throws InternalErrorException
* @throws UserNotExistsException
*/
@Deprecated
List<RichUser> getRichAdminsWithAttributes(PerunSession perunSession, Group group) throws InternalErrorException, UserNotExistsException;
/**
* Get list of Group administrators with specific attributes.
* From list of specificAttributes get all Users Attributes and find those for every RichAdmin (only, other attributes are not searched)
*
* @param perunSession
* @param group
* @param specificAttributes
* @return list of RichUsers with specific attributes.
* @throws InternalErrorException
* @throws UserNotExistsException
*/
@Deprecated
List<RichUser> getRichAdminsWithSpecificAttributes(PerunSession perunSession, Group group, List<String> specificAttributes) throws InternalErrorException, UserNotExistsException;
/**
* Get list of Group administrators, which are directly assigned (not by group membership) with specific attributes.
* From list of specificAttributes get all Users Attributes and find those for every RichAdmin (only, other attributes are not searched)
*
* @param perunSession
* @param group
* @param specificAttributes
* @return list of RichUsers with specific attributes.
* @throws InternalErrorException
* @throws UserNotExistsException
*/
@Deprecated
List<RichUser> getDirectRichAdminsWithSpecificAttributes(PerunSession perunSession, Group group, List<String> specificAttributes) throws InternalErrorException, UserNotExistsException;
/**
* Get all groups of users under the VO.
*
* @param sess
* @param vo vo
*
* @throws InternalErrorException
*
* @return list of groups
*/
List<Group> getGroups(PerunSession sess, Vo vo) throws InternalErrorException;
/**
* Get groups by theirs Id.
*
* @param sess
* @param groupsIds
* @return list of groups
* @throws InternalErrorException
*/
List<Group> getGroupsByIds(PerunSession sess, List<Integer> groupsIds) throws InternalErrorException;
/**
* @param sess
* @param vo
*
* @return count of VO's groups
*
* @throws InternalErrorException
*/
int getGroupsCount(PerunSession sess, Vo vo) throws InternalErrorException;
/**
* Get count of all groups.
*
* @param perunSession
*
* @return count of all groups
*
* @throws InternalErrorException
*/
int getGroupsCount(PerunSession perunSession) throws InternalErrorException;
/**
* Returns number of immediate subgroups of the parent group.
*
* @param sess
* @param parentGroup
*
* @return count of parent group immediate subgroups
*
* @throws InternalErrorException
*/
int getSubGroupsCount(PerunSession sess, Group parentGroup) throws InternalErrorException;
/**
* Gets the Vo which is owner of the group.
*
* @param sess
* @param group
*
* @return Vo which is owner of the group.
*
* @throws InternalErrorException
*/
Vo getVo(PerunSession sess, Group group) throws InternalErrorException;
/**
* Get members from parent group. If the parent group doesn't exist (this is top level group) return all VO (from which the group is) members instead.
*
* @param sess
* @param group
* @return
*
* @throws InternalErrorException
*/
List<Member> getParentGroupMembers(PerunSession sess, Group group) throws InternalErrorException;
/**
* Get members form the parent group in RichMember format.
* @param sess
* @param group
* @return list of parent group rich members
* @throws InternalErrorException
*/
List<RichMember> getParentGroupRichMembers(PerunSession sess, Group group) throws InternalErrorException;
/**
* Get members form the parent group in RichMember format including user/member attributes.
* @param sess
* @param group
* @return list of parent group rich members
* @throws InternalErrorException
*/
List<RichMember> getParentGroupRichMembersWithAttributes(PerunSession sess, Group group) throws InternalErrorException;
/**
* Synchronizes the group with the external group without checking if the synchronization is already in progress.
* If some members from extSource of this group were skipped, return info about them.
* if not, return empty string instead, which means all members was successfully load from extSource.
*
* @param sess
* @param group
* @return List of strings with skipped users with reasons why were skipped
* @throws InternalErrorException
* @throws WrongAttributeValueException
* @throws WrongReferenceAttributeValueException
* @throws WrongAttributeAssignmentException
* @throws MemberAlreadyRemovedException if there is at least one member who need to be deleted, but DB returns 0 affected rows
*/
List<String> synchronizeGroup(PerunSession sess, Group group) throws InternalErrorException, MemberAlreadyRemovedException, AttributeNotExistsException, WrongAttributeAssignmentException, ExtSourceNotExistsException, WrongAttributeValueException, WrongReferenceAttributeValueException;
/**
* Synchronize the group with external group. It checks if the synchronization of the same group is already in progress.
*
* @param sess
* @param group
* @throws InternalErrorException
* @throws GroupSynchronizationAlreadyRunningException
*/
void forceGroupSynchronization(PerunSession sess, Group group) throws InternalErrorException, GroupSynchronizationAlreadyRunningException;
/**
* Synchronize all groups which have enabled synchronization. This method is run by the scheduler every 5 minutes.
*
* @throws InternalErrorException
*/
void synchronizeGroups(PerunSession sess) throws InternalErrorException;
/**
* Returns all members groups. Except 'members' group.
*
* @param sess
* @param member
* @return
* @throws InternalErrorException
*/
List<Group> getMemberGroups(PerunSession sess, Member member) throws InternalErrorException;
/**
* Get all groups (except member groups) where member has direct membership.
*
* @param sess
* @param member to get information about
* @return list of groups where member is direct member (not members group), empty list if there is no such group
* @throws InternalErrorException
*/
List<Group> getMemberDirectGroups(PerunSession sess, Member member) throws InternalErrorException;
/**
* Method return list of groups for selected member which (groups) has set specific attribute.
* Attribute can be only from namespace "GROUP"
*
* @param sess sess
* @param member member
* @param attribute attribute from "GROUP" namespace
*
* @return list of groups which contain member and have attribute with same value
*
* @throws InternalErrorException
* @throws WrongAttributeAssignmentException
*/
List<Group> getMemberGroupsByAttribute(PerunSession sess, Member member, Attribute attribute) throws WrongAttributeAssignmentException, InternalErrorException;
/**
* Return all member's groups. Included members and administrators groups.
*
* @param sess
* @param member
* @return
* @throws InternalErrorException
*/
List<Group> getAllMemberGroups(PerunSession sess, Member member) throws InternalErrorException;
/**
* Returns all groups which have set the attribute with the value. Searching only def and opt attributes.
*
* @param sess
* @param attribute
* @return list of groups
* @throws InternalErrorException
* @throws WrongAttributeAssignmentException
*/
List<Group> getGroupsByAttribute(PerunSession sess, Attribute attribute) throws InternalErrorException, WrongAttributeAssignmentException;
/**
* Returns all group-resource which have set the attribute with the value. Searching only def and opt attributes.
*
* @param sess
* @param attribute
* @return
* @throws InternalErrorException
* @throws WrongAttributeAssignmentException
*/
List<Pair<Group, Resource>> getGroupResourcePairsByAttribute(PerunSession sess, Attribute attribute) throws InternalErrorException, WrongAttributeAssignmentException;
/**
* Return true if Member is member of the Group
*
*
* @param sess
* @param group
* @param member
* @return true if Member is member of the Group
*
* @throws InternalErrorException
*/
boolean isGroupMember(PerunSession sess, Group group, Member member) throws InternalErrorException;
/**
* !!! Not Complete yet, need to implement all perunBeans !!!
*
* Get perunBean and try to find all connected Groups
*
* @param sess
* @param perunBean
* @return list of groups connected with perunBeans
* @throws InternalErrorException
*/
List<Group> getGroupsByPerunBean(PerunSession sess, PerunBean perunBean) throws InternalErrorException;
void checkGroupExists(PerunSession sess, Group group) throws InternalErrorException, GroupNotExistsException;
/**
* This method take list of members (also with duplicit) and:
* 1] add all members with direct membership to target list
* 2] add all members with indirect membership who are not already in target list to the target list
*
* @param members list of members to filtering
* @return filteredMembers list of members without duplicit after filtering
*/
List<Member> filterMembersByMembershipTypeInGroup(List<Member> members) throws InternalErrorException;
/**
* For richGroup filter all his group attributes and remove all which principal has no access to.
*
* @param sess
* @param richGroup
* @return richGroup with only allowed attributes
* @throws InternalErrorException
*/
RichGroup filterOnlyAllowedAttributes(PerunSession sess, RichGroup richGroup) throws InternalErrorException;
/**
* For list of richGroups filter all their group attributes and remove all which principal has no access to.
*
* @param sess
* @param richGroups
* @return list of RichGroups with only allowed attributes
* @throws InternalErrorException
*/
List<RichGroup> filterOnlyAllowedAttributes(PerunSession sess, List<RichGroup> richGroups) throws InternalErrorException;
/**
* This method takes group and creates RichGroup containing all attributes
*
* @param sess
* @param group
* @return RichGroup
* @throws InternalErrorException
*/
RichGroup convertGroupToRichGroupWithAttributes(PerunSession sess, Group group) throws InternalErrorException;
/**
* This method takes group and creates RichGroup containing selected attributes
*
* @param sess
* @param group
* @param attrNames list of selected attributes
* @return RichGroup
* @throws InternalErrorException
*/
RichGroup convertGroupToRichGroupWithAttributesByName(PerunSession sess, Group group, List<String> attrNames) throws InternalErrorException;
/**
* This method takes list of groups and creates list of RichGroups containing all attributes
*
* @param sess
* @param groups list of groups
* @return RichGroup
* @throws InternalErrorException
*/
List<RichGroup> convertGroupsToRichGroupsWithAttributes(PerunSession sess, List<Group> groups) throws InternalErrorException;
/**
* This method takes list of groups and creates list of RichGroups containing selected attributes
*
* @param sess
* @param groups list of groups
* @param attrNames list of selected attributes
* @return RichGroup
* @throws InternalErrorException
*/
List<RichGroup> convertGroupsToRichGroupsWithAttributes(PerunSession sess, List<Group> groups, List<String> attrNames) throws InternalErrorException;
/**
* Returns all RichGroups containing selected attributes
*
* @param sess
* @param vo
* @param attrNames if attrNames is null method will return RichGroups containing all attributes
* @return List of RichGroups
* @throws InternalErrorException
*/
List<RichGroup> getAllRichGroupsWithAttributesByNames(PerunSession sess, Vo vo, List<String> attrNames) throws InternalErrorException;
/**
* Returns all RichSubGroups from parentGroup containing selected attributes
*
* @param sess
* @param parentGroup
* @param attrNames if attrNames is null method will return RichGroups containing all attributes
* @return List of RichGroups
* @throws InternalErrorException
*/
List<RichGroup> getRichSubGroupsWithAttributesByNames(PerunSession sess, Group parentGroup, List<String> attrNames) throws InternalErrorException;
/**
* Returns RichGroup selected by id containing selected attributes
*
* @param sess
* @param groupId
* @param attrNames if attrNames is null method will return RichGroup containing all attributes
* @return RichGroup
* @throws InternalErrorException
* @throws GroupNotExistsException
*/
RichGroup getRichGroupByIdWithAttributesByNames(PerunSession sess, int groupId, List<String> attrNames) throws InternalErrorException, GroupNotExistsException;
/**
 * This method will set timestamp and exceptionMessage to group attributes for the group.
 * Also log information about failed synchronization to auditer_log.
 *
 * IMPORTANT: This method runs in a new transaction (because it is used in synchronization of groups).
 *
 * Sets the timestamp into attribute "group_def_lastSynchronizationTimestamp".
 * Sets the exception message into attribute "group_def_lastSynchronizationState".
 *
 * failedDueToException == true means the group synchronization failed completely.
 * failedDueToException == false means the group synchronization is ok or finished with some errors (some members were not synchronized).
 *
 * @param sess perun session
 * @param group the group that was synchronized
 * @param failedDueToException whether the exception failed the whole synchronization of this group, or only caused problems with some data
 * @param exceptionMessage message of an exception; "ok" if everything is ok
 * @throws AttributeNotExistsException when one of the timestamp/state attributes is not defined
 * @throws InternalErrorException
 * @throws WrongReferenceAttributeValueException
 * @throws WrongAttributeAssignmentException
 * @throws WrongAttributeValueException
 */
void saveInformationAboutGroupSynchronization(PerunSession sess, Group group, boolean failedDueToException, String exceptionMessage) throws AttributeNotExistsException, InternalErrorException, WrongReferenceAttributeValueException, WrongAttributeAssignmentException, WrongAttributeValueException;
/**
 * Get all groups in the specific vo that have the given extSource assigned.
 *
 * @param sess perun session
 * @param source the external source to match
 * @param vo the vo to search in
 * @return list of groups in the vo with the assigned extSource
 * @throws InternalErrorException
 */
List<Group> getGroupsWithAssignedExtSourceInVo(PerunSession sess, ExtSource source, Vo vo) throws InternalErrorException;
}
| |
/*
* The MIT License (MIT)
Copyright (c) 2014
Hayda Almeida
Marie-Jean Meurs
Concordia University
Tsang Lab
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package triage.analyse;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import triage.configure.ConfigConstants;
/**
* Implements common tools to feature extractors
* classes used on XML doc instances
*
* @author Hayda Almeida
* @since 2014
*
*/
public class Extractor {
//relevant paper text fields
String openAbst;
String abstractLabel;
String openEC;
String classTag;
String openTitle;
String openJournal;
String copyR;
String closeCopyR;
String id;
String endId;
String openFile;
String endFile;
//String closeAbst;
//String closeJournal;
//String closeTitle;
//String closeEC;
//String pathFile;
/**
* Removes special characters for
* tokenization process
*
* @param str text to be cleaned
* @return cleaned string
*/
public String removeSpecialChar(String str){
if(!str.contains("'s")){
str = str.replace(".", "");
str = str.replace("'", "");
}
if(str.contains(",")){
if(!(str.startsWith(" ", str.indexOf(",")+1)))
str = str.replace(",", " ");
else str = str.replace(",", "");
}
str = str.replace("}", "");
str = str.replace("{", "");
str = str.replace("]", "");
str = str.replace("[", "");
str = str.replace("\"", "");
str = str.replace("<", "");
str = str.replace(">", "");
str = str.replace("/", " ");
str = str.replace("\\", " ");
str = str.replace("#", "");
str = str.replace("*", "");
str = str.replace(">", "");
str = str.replace("&apos", "");
str = str.replace("%", "");
str = str.replace(""", "");
str = str.replace("&", "");
str = str.replace("=", "");
str = str.replace("?", "");
str = str.replace("!", "");
str = str.replace(";", "");
str = str.replace(":", "");
str = str.replace(")", "");
str = str.replace("(", "");
str = str.replace("\t\t", "\t");
str = str.replace("+", "");
//losing ngrams because of hifen between names
str = str.replace("-", " ");
str = str.replace(" ", " ");
return str;
}
/**
* Handles external tags (and multiple abstract
* text tags) present in a single paper
* @param str abstract content
* @return string without external tags
*/
public String processAbstract(String str){
str = str.replace(" ", "");
if(str.contains("Copyright") && !(str.contains(".</"))) str = str.replace("</", ".</");
String[] remove = str.split("");
StringBuilder sb = new StringBuilder();
String temp = "";
String abstrac = "";
for(int i = 0; i < remove.length; i++){
temp = temp + remove[i];
//Handling inner/multiple abstract tags in the abstract text
if(temp.contains("<AbstractText ")){
temp = "";
do{
i++;
} while(!(remove[i].equalsIgnoreCase(">")));
}
//Handling the word "Copyright" before the end of abstract
if(temp.contains("Copyright ")){
temp = "";
do{
i++;
//an exception here can mean that a copyright information
//tag content did not ended with a period
}while(!(remove[i]).equalsIgnoreCase("."));
}
else sb.append(remove[i]);
}
abstrac = sb.toString();
abstrac = removeAbstractTags(abstrac);
return abstrac;
}
/**
* Removes specific tags encountered on Abstract texts.
* This is used to clean the abstract text before
* processing the feature count on the model.
* @param str
* @return
*/
public String removeAbstractTags(String str){
//this order of removing tags matters to
//exclude the first tag from the abstracts.
str = str.replace("<AbstractText>", "");
str = str.replace("<abstracttext>", "");
str = str.replace("</abstracttext>", "");
str = str.replace("<AbstractText", "");
str = str.replace("<CopyrightInformation>", "");
str = str.replace("</CopyrightInformation>", "");
str = str.replace("Copyright", "");
str = str.replace("</AbstractText>", "");
str = str.replace("<Abstract>", "");
str = str.replace("</Abstract>", "");
str = str.replace("<AbstractText.*?>", "");
return str;
}
/**
* Removes markup annotations of a
* text field, keeping its content
*
* @param str text containing markups
* @return string with cleaned text
*/
public String removeTags(String str) {
String[] remove = str.split("");
StringBuilder sb = new StringBuilder();
for(int i = 0; i < remove.length; i++){
//iterating over the text until finding opening tag
if(remove[i].equalsIgnoreCase("<")){
do{
i++;
}
//skipping the content until finding closing tag
while(!(remove[i].equalsIgnoreCase(">")));
}
else sb.append(remove[i]);
}
return sb.toString();
}
/**
* Displays the keys and values of the
* maps created.
*
* @param hash HashMap containing list,
* values, counts
*/
public void displayList(HashMap hash){
Iterator<Object> itr = hash.keySet().iterator();
int sum = 0;
while(itr.hasNext()){
Object str = itr.next();
System.out.println("key: "+str+"\t value: "+hash.get(str));
}
}
/**
* Exports hashmap of values extracted
* from dataset to an external file
*
* @param location folder, file name and file extension
* @param list values to be exported
*/
public void exportFile(String location, HashMap list){
String SEPARATOR = "\t";
StringBuffer line = new StringBuffer();
Iterator<Object> itr = list.keySet().iterator();
try{
BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(location), "UTF-8"));
while(itr.hasNext()){
Object str = itr.next();
if(str != null){
line.append(str).append(SEPARATOR).append(list.get(str));
if(line.toString().contains("="))
line.replace(line.indexOf("="), line.indexOf("=")+1,SEPARATOR);
//handling specificities from title content extraction
if(line.toString().contains(","))
line.replace(line.indexOf(","), line.indexOf(",")+1,SEPARATOR);
}
if(itr.hasNext()){
line.append(System.getProperty("line.separator"));
}
writer.write(removeSpecialChar(line.toString()));
line.replace(0, line.length(), "");
}
writer.flush();
writer.close();
}
catch(UnsupportedEncodingException e){
e.printStackTrace();
}
catch(FileNotFoundException e){
e.printStackTrace();
}
catch(IOException e){
e.printStackTrace();
}
}
/**
* Exports list of values extracted
* from dataset to a string variable
*
* @param list list of values to be exported
* @return string containing values on list
* @deprecated
*/
public String exportContent(HashMap list){
String SEPARATOR = "\t";
Iterator<String> itr = list.keySet().iterator();
StringBuffer export = new StringBuffer();
//try{
while(itr.hasNext()){
String str = itr.next();
if(str != null){
export.append(str).append(SEPARATOR).append(list.get(str));
if(export.toString().contains("="))
export.replace(export.indexOf("="), export.indexOf("=")+1,SEPARATOR);
}
if(itr.hasNext()){
export.append("\n");
}
}
return removeSpecialChar(export.toString());
}
/**
* Exports list of values extracted
* from dataset to external file
*
* @param location folder, file name and file extension
* @param list list of values to be exported
*
*/
public void exportList(String location, ArrayList<String> list){
String SEPARATOR = "\n";
StringBuffer line = new StringBuffer();
try{
BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(location), "UTF-8"));
for(int i = 0; i < list.size(); i++){
String str = list.get(i);
if(str != null){
line.append(str).append(SEPARATOR);
}
}
writer.write(removeSpecialChar(line.toString()));
writer.flush();
writer.close();
}
catch(UnsupportedEncodingException e){
e.printStackTrace();
}
catch(FileNotFoundException e){
e.printStackTrace();
}
catch(IOException e){
e.printStackTrace();
}
}
public void initialize(File featureDir, ConfigConstants pathVars){
try{
featureDir.mkdir();
}catch(Exception e){
System.out.println("Error creating" + featureDir + "folder.");
System.exit(0);
}
}
/**
* Accessors and mutators methods
* for Extractor class variables.
* @return
*/
public String getId() {
return id;
}
public String getEndId() {
return endId;
}
public String getOpenFile() {
return openFile;
}
public String getEndFile() {
return endFile;
}
public String getopenAbst() {
return openAbst;
}
public void setopenAbst(String openAbst) {
this.openAbst = openAbst;
}
public String getOpenEC() {
return openEC;
}
public void setOpenEC(String openEC) {
this.openEC = openEC;
}
public String getAbstractLabel() {
return abstractLabel;
}
public void setAbstractLabel(String abstractLabel) {
this.abstractLabel = abstractLabel;
}
public String getClassTag() {
return classTag;
}
public void setClassTag(String classTag) {
this.classTag = classTag;
}
public String getOpenTitle() {
return openTitle;
}
public void setOpenTitle(String titleTag) {
this.openTitle = titleTag;
}
public String getOpenJournal() {
return openJournal;
}
public void setOpenJournal(String openJournal) {
this.openJournal = openJournal;
}
}
| |
package cn.soon.imageviewtouch;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Rect;
import android.graphics.RectF;
import android.os.Build;
import android.os.SystemClock;
import android.util.AttributeSet;
import android.util.Log;
import android.view.GestureDetector;
import android.view.GestureDetector.OnGestureListener;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.ScaleGestureDetector.OnScaleGestureListener;
import android.view.ViewConfiguration;
/**
 * ImageView subclass that adds pinch-to-zoom, double-tap zoom, scroll and
 * fling gesture handling on top of {@code ImageViewTouchBase}.
 */
public class ImageViewTouch extends ImageViewTouchBase {

    static final float SCROLL_DELTA_THRESHOLD = 1.0f;

    /**
     * minimum time between a scale event and a valid fling event
     */
    public static final long MIN_FLING_DELTA_TIME = 150;

    private float mScaleFactor;                 // zoom step applied per double tap
    protected ScaleGestureDetector mScaleDetector;
    protected GestureDetector mGestureDetector;
    protected int mTouchSlop;
    protected int mDoubleTapDirection;
    protected OnGestureListener mGestureListener;
    protected OnScaleGestureListener mScaleListener;
    protected boolean mDoubleTapEnabled = true;
    protected boolean mScaleEnabled = true;
    protected boolean mScrollEnabled = true;
    private OnImageViewTouchDoubleTapListener mDoubleTapListener;
    private OnImageViewTouchSingleTapListener mSingleTapListener;

    public ImageViewTouch(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public ImageViewTouch(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    @Override
    protected void init(Context context, AttributeSet attrs, int defStyle) {
        super.init(context, attrs, defStyle);
        mTouchSlop = ViewConfiguration.get(getContext()).getScaledTouchSlop();
        mGestureListener = getGestureListener();
        mScaleListener = getScaleListener();
        mScaleDetector = new ScaleGestureDetector(getContext(), mScaleListener);
        // ignoreMultitouch=true so the gesture detector leaves pinches to the scale detector
        mGestureDetector = new GestureDetector(getContext(), mGestureListener, null, true);
        mDoubleTapDirection = 1;
        setQuickScaleEnabled(false);
    }

    @TargetApi (19)
    public void setQuickScaleEnabled(boolean value) {
        if (Build.VERSION.SDK_INT >= 19) {
            mScaleDetector.setQuickScaleEnabled(value);
        }
    }

    @TargetApi (19)
    @SuppressWarnings ("unused")
    public boolean getQuickScaleEnabled() {
        if (Build.VERSION.SDK_INT >= 19) {
            return mScaleDetector.isQuickScaleEnabled();
        }
        return false;
    }

    @SuppressWarnings ("unused")
    public float getScaleFactor() {
        return mScaleFactor;
    }

    public void setDoubleTapListener(OnImageViewTouchDoubleTapListener listener) {
        mDoubleTapListener = listener;
    }

    public void setSingleTapListener(OnImageViewTouchSingleTapListener listener) {
        mSingleTapListener = listener;
    }

    public void setDoubleTapEnabled(boolean value) {
        mDoubleTapEnabled = value;
    }

    public void setScaleEnabled(boolean value) {
        mScaleEnabled = value;
    }

    public void setScrollEnabled(boolean value) {
        mScrollEnabled = value;
    }

    public boolean getDoubleTapEnabled() {
        return mDoubleTapEnabled;
    }

    protected OnGestureListener getGestureListener() {
        return new GestureListener();
    }

    protected OnScaleGestureListener getScaleListener() {
        return new ScaleListener();
    }

    @Override
    protected void onLayoutChanged(final int left, final int top, final int right, final int bottom) {
        super.onLayoutChanged(left, top, right, bottom);
        Log.v(TAG, "min: " + getMinScale() + ", max: " + getMaxScale() + ", result: " + (getMaxScale() - getMinScale()) / 2f);
        // recompute the double-tap zoom step from the current zoom bounds
        mScaleFactor = ((getMaxScale() - getMinScale()) / 2f) + 0.5f;
    }

    // time of the last pointer-up, used to reject flings right after a pinch
    long mPointerUpTime;

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (getBitmapChanged()) {
            return false;
        }
        final int action = event.getActionMasked();
        if (action == MotionEvent.ACTION_POINTER_UP) {
            mPointerUpTime = event.getEventTime();
        }
        // scale detector gets first pick; gestures only while no pinch in progress
        mScaleDetector.onTouchEvent(event);
        if (!mScaleDetector.isInProgress()) {
            mGestureDetector.onTouchEvent(event);
        }
        switch (action) {
            case MotionEvent.ACTION_UP:
                return onUp(event);
            default:
                break;
        }
        return true;
    }

    @Override
    protected void onZoomAnimationCompleted(float scale) {
        if (DEBUG) {
            Log.d(TAG, "onZoomAnimationCompleted. scale: " + scale + ", minZoom: " + getMinScale());
        }
        // snap back if the animation ended below the minimum zoom
        if (scale < getMinScale()) {
            zoomTo(getMinScale(), 50);
        }
    }

    /**
     * Computes the next zoom level after a double tap: one step up,
     * or back to minScale once the maximum would be exceeded.
     */
    protected float onDoubleTapPost(float scale, final float maxZoom, final float minScale) {
        if ((scale + mScaleFactor) <= maxZoom) {
            return scale + mScaleFactor;
        } else {
            return minScale;
        }
    }

    public boolean onSingleTapConfirmed(MotionEvent e) {
        return true;
    }

    public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
        if (!canScroll()) {
            return false;
        }
        mUserScaled = true;
        scrollBy(-distanceX, -distanceY);
        invalidate();
        return true;
    }

    public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
        if (!canScroll()) {
            return false;
        }
        if (DEBUG) {
            Log.i(TAG, "onFling");
        }
        if (Math.abs(velocityX) > (mMinFlingVelocity * 4) || Math.abs(velocityY) > (mMinFlingVelocity * 4)) {
            if (DEBUG) {
                Log.v(TAG, "velocity: " + velocityY);
                Log.v(TAG, "diff: " + (e2.getY() - e1.getY()));
            }
            final float scale = Math.min(Math.max(2f, getScale() / 2), 3.f);
            // scale fling distance by velocity relative to the system maximum
            float scaledDistanceX = ((velocityX) / mMaxFlingVelocity) * (getWidth() * scale);
            float scaledDistanceY = ((velocityY) / mMaxFlingVelocity) * (getHeight() * scale);
            if (DEBUG) {
                Log.v(TAG, "scale: " + getScale() + ", scale_final: " + scale);
                Log.v(TAG, "scaledDistanceX: " + scaledDistanceX);
                Log.v(TAG, "scaledDistanceY: " + scaledDistanceY);
            }
            mUserScaled = true;
            // animation duration proportional to fling distance, clamped to [300, 800] ms
            double total = Math.sqrt(Math.pow(scaledDistanceX, 2) + Math.pow(scaledDistanceY, 2));
            scrollBy(scaledDistanceX, scaledDistanceY, (long) Math.min(Math.max(300, total / 5), 800));
            postInvalidate();
            return true;
        }
        return false;
    }

    public boolean onDown(MotionEvent e) {
        if (getBitmapChanged()) {
            return false;
        }
        return true;
    }

    public boolean onUp(MotionEvent e) {
        if (getBitmapChanged()) {
            return false;
        }
        if (getScale() < getMinScale()) {
            zoomTo(getMinScale(), 50);
        }
        return true;
    }

    public boolean onSingleTapUp(MotionEvent e) {
        if (getBitmapChanged()) {
            return false;
        }
        return true;
    }

    public boolean canScroll() {
        if (getScale() > 1) {
            return true;
        }
        RectF bitmapRect = getBitmapRect();
        // FIX: guard against a null bitmap rect (no drawable set yet); the
        // overload below already treated null as "cannot scroll"
        if (bitmapRect == null) {
            return false;
        }
        return !mViewPort.contains(bitmapRect);
    }

    /**
     * Determines whether this ImageViewTouch can be scrolled.
     *
     * @param direction - positive direction value means scroll from right to left,
     * negative value means scroll from left to right
     * @return true if there is some more place to scroll, false - otherwise.
     */
    @SuppressWarnings ("unused")
    public boolean canScroll(int direction) {
        RectF bitmapRect = getBitmapRect();
        // FIX: the null check must run before bitmapRect is used; the original
        // called updateRect(bitmapRect, ...) first and only then tested for null
        if (null == bitmapRect) {
            return false;
        }
        updateRect(bitmapRect, mScrollPoint);
        Rect imageViewRect = new Rect();
        getGlobalVisibleRect(imageViewRect);
        if (bitmapRect.right >= imageViewRect.right) {
            if (direction < 0) {
                return Math.abs(bitmapRect.right - imageViewRect.right) > SCROLL_DELTA_THRESHOLD;
            }
        }
        double bitmapScrollRectDelta = Math.abs(bitmapRect.left - mScrollPoint.x);
        return bitmapScrollRectDelta > SCROLL_DELTA_THRESHOLD;
    }

    public class GestureListener extends GestureDetector.SimpleOnGestureListener {

        @Override
        public boolean onSingleTapConfirmed(MotionEvent e) {
            if (null != mSingleTapListener) {
                mSingleTapListener.onSingleTapConfirmed();
            }
            return ImageViewTouch.this.onSingleTapConfirmed(e);
        }

        @Override
        public boolean onDoubleTap(MotionEvent e) {
            if (DEBUG) {
                Log.i(TAG, "onDoubleTap. double tap enabled? " + mDoubleTapEnabled);
            }
            if (mDoubleTapEnabled) {
                // when quick-scale is on, the framework handles double-tap zoom
                if (Build.VERSION.SDK_INT >= 19) {
                    if (mScaleDetector.isQuickScaleEnabled()) {
                        return true;
                    }
                }
                mUserScaled = true;
                float scale = getScale();
                float targetScale;
                targetScale = onDoubleTapPost(scale, getMaxScale(), getMinScale());
                targetScale = Math.min(getMaxScale(), Math.max(targetScale, getMinScale()));
                zoomTo(targetScale, e.getX(), e.getY(), mDefaultAnimationDuration);
            }
            if (null != mDoubleTapListener) {
                mDoubleTapListener.onDoubleTap();
            }
            return super.onDoubleTap(e);
        }

        @Override
        public void onLongPress(MotionEvent e) {
            if (isLongClickable()) {
                if (!mScaleDetector.isInProgress()) {
                    setPressed(true);
                    performLongClick();
                }
            }
        }

        @Override
        public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
            if (!mScrollEnabled) {
                return false;
            }
            if (e1 == null || e2 == null) {
                return false;
            }
            // ignore multi-finger scrolls and scrolls during a pinch
            if (e1.getPointerCount() > 1 || e2.getPointerCount() > 1) {
                return false;
            }
            if (mScaleDetector.isInProgress()) {
                return false;
            }
            return ImageViewTouch.this.onScroll(e1, e2, distanceX, distanceY);
        }

        @Override
        public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
            if (!mScrollEnabled) {
                return false;
            }
            if (e1 == null || e2 == null) {
                return false;
            }
            if (e1.getPointerCount() > 1 || e2.getPointerCount() > 1) {
                return false;
            }
            if (mScaleDetector.isInProgress()) {
                return false;
            }
            final long delta = (SystemClock.uptimeMillis() - mPointerUpTime);
            // prevent fling happening just
            // after a quick pinch to zoom
            if (delta > MIN_FLING_DELTA_TIME) {
                return ImageViewTouch.this.onFling(e1, e2, velocityX, velocityY);
            } else {
                return false;
            }
        }

        @Override
        public boolean onSingleTapUp(MotionEvent e) {
            return ImageViewTouch.this.onSingleTapUp(e);
        }

        @Override
        public boolean onDown(MotionEvent e) {
            if (DEBUG) {
                Log.i(TAG, "onDown");
            }
            stopAllAnimations();
            return ImageViewTouch.this.onDown(e);
        }
    }

    public class ScaleListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {

        protected boolean mScaled = false;

        @Override
        public boolean onScale(ScaleGestureDetector detector) {
            float span = detector.getCurrentSpan() - detector.getPreviousSpan();
            float targetScale = getScale() * detector.getScaleFactor();
            if (mScaleEnabled) {
                if (mScaled && span != 0) {
                    mUserScaled = true;
                    targetScale = Math.min(getMaxScale(), Math.max(targetScale, getMinScale() - MIN_SCALE_DIFF));
                    zoomTo(targetScale, detector.getFocusX(), detector.getFocusY());
                    mDoubleTapDirection = 1;
                    invalidate();
                    return true;
                }
                // This is to prevent a glitch the first time
                // image is scaled.
                if (!mScaled) {
                    mScaled = true;
                }
            }
            return true;
        }
    }

    public interface OnImageViewTouchDoubleTapListener {

        void onDoubleTap();
    }

    public interface OnImageViewTouchSingleTapListener {

        void onSingleTapConfirmed();
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.tpch;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ConnectorNodePartitioning;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.ConnectorTableHandle;
import com.facebook.presto.spi.ConnectorTableLayout;
import com.facebook.presto.spi.ConnectorTableLayoutHandle;
import com.facebook.presto.spi.ConnectorTableLayoutResult;
import com.facebook.presto.spi.ConnectorTableMetadata;
import com.facebook.presto.spi.Constraint;
import com.facebook.presto.spi.LocalProperty;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.SchemaTablePrefix;
import com.facebook.presto.spi.SortingProperty;
import com.facebook.presto.spi.block.SortOrder;
import com.facebook.presto.spi.connector.ConnectorMetadata;
import com.facebook.presto.spi.predicate.Domain;
import com.facebook.presto.spi.predicate.NullableValue;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.statistics.Estimate;
import com.facebook.presto.spi.statistics.TableStatistics;
import com.facebook.presto.spi.type.BigintType;
import com.facebook.presto.spi.type.DateType;
import com.facebook.presto.spi.type.DoubleType;
import com.facebook.presto.spi.type.IntegerType;
import com.facebook.presto.spi.type.Type;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import io.airlift.tpch.LineItemColumn;
import io.airlift.tpch.OrderColumn;
import io.airlift.tpch.OrderGenerator;
import io.airlift.tpch.TpchColumn;
import io.airlift.tpch.TpchColumnType;
import io.airlift.tpch.TpchEntity;
import io.airlift.tpch.TpchTable;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.VarcharType.createVarcharType;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toSet;
public class TpchMetadata
implements ConnectorMetadata
{
// Name and scale factor of the smallest bundled schema.
public static final String TINY_SCHEMA_NAME = "tiny";
public static final double TINY_SCALE_FACTOR = 0.01;
// Every schema this connector exposes; names after "tiny" encode their scale factor.
public static final List<String> SCHEMA_NAMES = ImmutableList.of(
TINY_SCHEMA_NAME, "sf1", "sf100", "sf300", "sf1000", "sf3000", "sf10000", "sf30000", "sf100000");
// Name of the hidden row-number column added to every table's metadata.
public static final String ROW_NUMBER_COLUMN_NAME = "row_number";
// The three order-status codes appearing in the ORDERS table.
private static final Set<String> ORDER_STATUS_VALUES = ImmutableSet.of("F", "O", "P");
// The same codes wrapped as NullableValues of the ORDER_STATUS column type,
// used when intersecting pushed-down predicates against known values.
private static final Set<NullableValue> ORDER_STATUS_NULLABLE_VALUES = ORDER_STATUS_VALUES.stream()
.map(value -> new NullableValue(getPrestoType(OrderColumn.ORDER_STATUS.getType()), Slices.utf8Slice(value)))
.collect(toSet());
private final String connectorId; // id of the owning connector instance
private final Set<String> tableNames; // names of all TPC-H tables
private final boolean predicatePushdownEnabled; // allow order-status predicate pushdown
private final ColumnNaming columnNaming; // simplified vs. standard TPC-H column names
/**
 * Creates the metadata service with the default predicate-pushdown
 * setting and simplified column naming.
 *
 * @param connectorId id of the owning connector instance
 */
public TpchMetadata(String connectorId)
{
this(connectorId, TpchConnectorFactory.DEFAULT_PREDICATE_PUSHDOWN_ENABLED, ColumnNaming.SIMPLIFIED);
}
public TpchMetadata(String connectorId, boolean predicatePushdownEnabled, ColumnNaming columnNaming)
{
ImmutableSet.Builder<String> tableNames = ImmutableSet.builder();
for (TpchTable<?> tpchTable : TpchTable.getTables()) {
tableNames.add(tpchTable.getTableName());
}
this.tableNames = tableNames.build();
this.connectorId = connectorId;
this.predicatePushdownEnabled = predicatePushdownEnabled;
this.columnNaming = columnNaming;
}
@Override
public List<String> listSchemaNames(ConnectorSession session)
{
// every session sees the same fixed set of scale-factor schemas
return SCHEMA_NAMES;
}
/**
 * Resolves a schema-qualified name to a table handle, or null when the
 * table is unknown or the schema does not encode a valid scale factor.
 */
@Override
public TpchTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName)
{
    requireNonNull(tableName, "tableName is null");
    // unknown table name -> no handle
    if (!tableNames.contains(tableName.getTableName())) {
        return null;
    }
    // the schema name encodes the scale factor ("sf100" -> 100.0);
    // a negative result signals an unrecognized schema
    double sf = schemaNameToScaleFactor(tableName.getSchemaName());
    return (sf < 0) ? null : new TpchTableHandle(connectorId, tableName.getTableName(), sf);
}
// Builds the single layout for a TPC-H table. ORDERS and LINEITEM are both
// bucketed on orderkey (so joins between them can be co-located) and sorted
// by orderkey; for ORDERS, the order-status predicate may additionally be
// folded into the layout when predicate pushdown is enabled.
@Override
public List<ConnectorTableLayoutResult> getTableLayouts(
ConnectorSession session,
ConnectorTableHandle table,
Constraint<ColumnHandle> constraint,
Optional<Set<ColumnHandle>> desiredColumns)
{
TpchTableHandle tableHandle = (TpchTableHandle) table;
// defaults: no partitioning, no ordering, nothing enforced by the layout
Optional<ConnectorNodePartitioning> nodePartition = Optional.empty();
Optional<Set<ColumnHandle>> partitioningColumns = Optional.empty();
List<LocalProperty<ColumnHandle>> localProperties = ImmutableList.of();
Optional<TupleDomain<ColumnHandle>> predicate = Optional.empty();
TupleDomain<ColumnHandle> unenforcedConstraint = constraint.getSummary();
Map<String, ColumnHandle> columns = getColumnHandles(session, tableHandle);
if (tableHandle.getTableName().equals(TpchTable.ORDERS.getTableName())) {
ColumnHandle orderKeyColumn = columns.get(columnNaming.getName(OrderColumn.ORDER_KEY));
// ORDERS is bucketed and sorted on orderkey
nodePartition = Optional.of(new ConnectorNodePartitioning(
new TpchPartitioningHandle(
TpchTable.ORDERS.getTableName(),
calculateTotalRows(OrderGenerator.SCALE_BASE, tableHandle.getScaleFactor())),
ImmutableList.of(orderKeyColumn)));
partitioningColumns = Optional.of(ImmutableSet.of(orderKeyColumn));
localProperties = ImmutableList.of(new SortingProperty<>(orderKeyColumn, SortOrder.ASC_NULLS_FIRST));
if (predicatePushdownEnabled) {
// keep only the order-status values permitted by the constraint and
// enforce them in the layout; everything else stays unenforced
predicate = Optional.of(toTupleDomain(ImmutableMap.of(
toColumnHandle(OrderColumn.ORDER_STATUS),
ORDER_STATUS_NULLABLE_VALUES.stream()
.filter(convertToPredicate(constraint.getSummary(), OrderColumn.ORDER_STATUS))
.collect(toSet()))));
unenforcedConstraint = filterOutColumnFromPredicate(constraint.getSummary(), OrderColumn.ORDER_STATUS);
}
}
else if (tableHandle.getTableName().equals(TpchTable.LINE_ITEM.getTableName())) {
ColumnHandle orderKeyColumn = columns.get(columnNaming.getName(LineItemColumn.ORDER_KEY));
// LINEITEM shares the ORDERS partitioning (same orderkey buckets) and is
// sorted by (orderkey, linenumber)
nodePartition = Optional.of(new ConnectorNodePartitioning(
new TpchPartitioningHandle(
TpchTable.ORDERS.getTableName(),
calculateTotalRows(OrderGenerator.SCALE_BASE, tableHandle.getScaleFactor())),
ImmutableList.of(orderKeyColumn)));
partitioningColumns = Optional.of(ImmutableSet.of(orderKeyColumn));
localProperties = ImmutableList.of(
new SortingProperty<>(orderKeyColumn, SortOrder.ASC_NULLS_FIRST),
new SortingProperty<>(columns.get(columnNaming.getName(LineItemColumn.LINE_NUMBER)), SortOrder.ASC_NULLS_FIRST));
}
ConnectorTableLayout layout = new ConnectorTableLayout(
new TpchTableLayoutHandle(tableHandle, predicate),
Optional.empty(),
predicate.orElse(TupleDomain.all()), // TODO: return well-known properties (e.g., orderkey > 0, etc)
nodePartition,
partitioningColumns,
Optional.empty(),
localProperties);
return ImmutableList.of(new ConnectorTableLayoutResult(layout, unenforcedConstraint));
}
/**
 * Reconstructs the layout for a layout handle. Tables in this connector
 * have a single layout, so an unconstrained getTableLayouts call always
 * yields exactly the layout the handle refers to.
 */
@Override
public ConnectorTableLayout getTableLayout(ConnectorSession session, ConnectorTableLayoutHandle handle)
{
    TpchTableLayoutHandle tpchLayout = (TpchTableLayoutHandle) handle;
    List<ConnectorTableLayoutResult> results =
            getTableLayouts(session, tpchLayout.getTable(), Constraint.alwaysTrue(), Optional.empty());
    return results.get(0).getTableLayout();
}
/**
 * Returns the metadata of the referenced table, rebuilding the schema
 * name from the handle's scale factor.
 */
@Override
public ConnectorTableMetadata getTableMetadata(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    TpchTableHandle handle = (TpchTableHandle) tableHandle;
    return getTableMetadata(
            scaleFactorSchemaName(handle.getScaleFactor()),
            TpchTable.getTable(handle.getTableName()),
            columnNaming);
}
/**
 * Builds table metadata for a TPC-H table: one column entry per TPC-H
 * column (named per the configured scheme) plus the hidden row_number column.
 */
private static ConnectorTableMetadata getTableMetadata(String schemaName, TpchTable<?> tpchTable, ColumnNaming columnNaming)
{
    ImmutableList.Builder<ColumnMetadata> columnList = ImmutableList.builder();
    for (TpchColumn<? extends TpchEntity> tpchColumn : tpchTable.getColumns()) {
        columnList.add(new ColumnMetadata(columnNaming.getName(tpchColumn), getPrestoType(tpchColumn.getType())));
    }
    // synthetic hidden column exposing the generator row number
    columnList.add(new ColumnMetadata(ROW_NUMBER_COLUMN_NAME, BIGINT, null, true));
    return new ConnectorTableMetadata(new SchemaTableName(schemaName, tpchTable.getTableName()), columnList.build());
}
/**
 * Derives a column handle for every column of the table, keyed by
 * column name.
 */
@Override
public Map<String, ColumnHandle> getColumnHandles(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    ImmutableMap.Builder<String, ColumnHandle> handles = ImmutableMap.builder();
    for (ColumnMetadata column : getTableMetadata(session, tableHandle).getColumns()) {
        handles.put(column.getName(), new TpchColumnHandle(column.getName(), column.getType()));
    }
    return handles.build();
}
/**
 * Lists column metadata for every table matching the prefix, across all
 * schemas the prefix selects.
 */
@Override
public Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(ConnectorSession session, SchemaTablePrefix prefix)
{
    ImmutableMap.Builder<SchemaTableName, List<ColumnMetadata>> result = ImmutableMap.builder();
    for (String schema : getSchemaNames(session, prefix.getSchemaName())) {
        for (TpchTable<?> tpchTable : TpchTable.getTables()) {
            // a prefix without a table name matches every table
            boolean matches = prefix.getTableName() == null || tpchTable.getTableName().equals(prefix.getTableName());
            if (matches) {
                ConnectorTableMetadata metadata = getTableMetadata(schema, tpchTable, columnNaming);
                result.put(new SchemaTableName(schema, tpchTable.getTableName()), metadata.getColumns());
            }
        }
    }
    return result.build();
}
/**
 * Returns a row-count estimate for the table under the given constraint;
 * no per-column statistics are provided.
 */
@Override
public TableStatistics getTableStatistics(ConnectorSession session, ConnectorTableHandle tableHandle, Constraint<ColumnHandle> constraint)
{
    TpchTableHandle tpchTable = (TpchTableHandle) tableHandle;
    long rowCount = getRowCount(tpchTable, Optional.of(constraint.getSummary()));
    return new TableStatistics(new Estimate(rowCount), ImmutableMap.of());
}
// Hardcoded TPC-H base cardinalities (at scale factor 1), scaled by the
// handle's scale factor. For ORDERS, the count is refined by which
// order-status values survive the given predicate.
private long getRowCount(TpchTableHandle tpchTableHandle, Optional<TupleDomain<ColumnHandle>> predicate)
{
// todo expose row counts from airlift-tpch instead of hardcoding it here
// todo add stats for columns
String tableName = tpchTableHandle.getTableName();
double scaleFactor = tpchTableHandle.getScaleFactor();
switch (tableName.toLowerCase()) {
case "customer":
return (long) (150_000 * scaleFactor);
case "orders":
// keep only the status codes the predicate allows; with no predicate,
// all three ("F", "O", "P") contribute
Set<String> orderStatusValues = predicate.map(tupleDomain ->
ORDER_STATUS_NULLABLE_VALUES.stream()
.filter(convertToPredicate(tupleDomain, OrderColumn.ORDER_STATUS))
.map(nullableValue -> ((Slice) nullableValue.getValue()).toStringUtf8())
.collect(toSet()))
.orElse(ORDER_STATUS_VALUES);
// per-status row counts at scale factor 1
long totalRows = 0L;
if (orderStatusValues.contains("F")) {
totalRows = 729_413;
}
if (orderStatusValues.contains("O")) {
totalRows += 732_044;
}
if (orderStatusValues.contains("P")) {
totalRows += 38_543;
}
return (long) (totalRows * scaleFactor);
case "lineitem":
return (long) (6_000_000 * scaleFactor);
case "part":
return (long) (200_000 * scaleFactor);
case "partsupp":
return (long) (800_000 * scaleFactor);
case "supplier":
return (long) (10_000 * scaleFactor);
// NATION and REGION have fixed sizes regardless of scale factor
case "nation":
return 25;
case "region":
return 5;
default:
throw new IllegalArgumentException("unknown tpch table name '" + tableName + "'");
}
}
// Builds the connector-facing handle for a TPCH column, applying the
// configured column-naming scheme and the TPCH-to-Presto type mapping.
private TpchColumnHandle toColumnHandle(TpchColumn column)
{
    String prestoName = columnNaming.getName(column);
    Type prestoType = getPrestoType(column.getType());
    return new TpchColumnHandle(prestoName, prestoType);
}
@Override
public ColumnMetadata getColumnMetadata(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle columnHandle)
{
    // Look up the column by name within the table's metadata.
    ConnectorTableMetadata tableMetadata = getTableMetadata(session, tableHandle);
    String columnName = ((TpchColumnHandle) columnHandle).getColumnName();
    return tableMetadata.getColumns().stream()
            .filter(column -> column.getName().equals(columnName))
            .findFirst()
            .orElseThrow(() -> new IllegalArgumentException(
                    String.format("Table %s does not have column %s", tableMetadata.getTable(), columnName)));
}
@Override
public List<SchemaTableName> listTables(ConnectorSession session, String schemaNameOrNull)
{
    // Enumerate every TPCH table under each schema that matches the filter
    // (all known schemas when schemaNameOrNull is null).
    ImmutableList.Builder<SchemaTableName> tables = ImmutableList.builder();
    for (String schemaName : getSchemaNames(session, schemaNameOrNull)) {
        TpchTable.getTables().forEach(table ->
                tables.add(new SchemaTableName(schemaName, table.getTableName())));
    }
    return tables.build();
}
// Converts a per-column set of discrete values into a TupleDomain, where each
// column's domain is the union of single-value domains (or "none" for an
// empty value set).
private TupleDomain<ColumnHandle> toTupleDomain(Map<TpchColumnHandle, Set<NullableValue>> predicate)
{
    ImmutableMap.Builder<ColumnHandle, Domain> domains = ImmutableMap.builder();
    for (Map.Entry<TpchColumnHandle, Set<NullableValue>> entry : predicate.entrySet()) {
        Type type = entry.getKey().getType();
        Domain columnDomain = Domain.none(type);
        for (NullableValue value : entry.getValue()) {
            columnDomain = columnDomain.union(Domain.singleValue(type, value.getValue()));
        }
        domains.put(entry.getKey(), columnDomain);
    }
    return TupleDomain.withColumnDomains(domains.build());
}
// Returns a predicate that accepts a value iff the given TupleDomain admits
// that single fixed value for the given TPCH column.
private Predicate<NullableValue> convertToPredicate(TupleDomain<ColumnHandle> predicate, TpchColumn column)
{
    TpchColumnHandle handle = toColumnHandle(column);
    return value -> {
        TupleDomain<ColumnHandle> candidate = TupleDomain.fromFixedValues(ImmutableMap.of(handle, value));
        return predicate.contains(candidate);
    };
}
// Drops the given TPCH column from the predicate, keeping all other columns.
private TupleDomain<ColumnHandle> filterOutColumnFromPredicate(TupleDomain<ColumnHandle> predicate, TpchColumn column)
{
    TpchColumnHandle excluded = toColumnHandle(column);
    return filterColumns(predicate, handle -> !handle.equals(excluded));
}
// Keeps only the predicate columns accepted by filterPredicate; returning
// null from the transform removes a column from the resulting TupleDomain.
private TupleDomain<ColumnHandle> filterColumns(TupleDomain<ColumnHandle> predicate, Predicate<TpchColumnHandle> filterPredicate)
{
    return predicate.transform(columnHandle -> {
        TpchColumnHandle handle = (TpchColumnHandle) columnHandle;
        return filterPredicate.test(handle) ? handle : null;
    });
}
// Resolves the set of schemas to operate on: all known schemas when no name
// is given, the named schema when it encodes a valid scale factor, otherwise
// an empty list.
private List<String> getSchemaNames(ConnectorSession session, String schemaNameOrNull)
{
    if (schemaNameOrNull == null) {
        return listSchemaNames(session);
    }
    if (schemaNameToScaleFactor(schemaNameOrNull) > 0) {
        return ImmutableList.of(schemaNameOrNull);
    }
    return ImmutableList.of();
}
// Schema names encode the scale factor, e.g. 1.0 -> "sf1.0".
private static String scaleFactorSchemaName(double scaleFactor)
{
    return String.format("sf%s", scaleFactor);
}
/**
 * Parses the scale factor out of a schema name ("sf1.0" -> 1.0). The tiny
 * schema is an alias for a fixed small scale factor. Returns -1 to signal an
 * unrecognized schema name to callers (which treat values <= 0 as invalid).
 */
private static double schemaNameToScaleFactor(String schemaName)
{
    if (TINY_SCHEMA_NAME.equals(schemaName)) {
        return TINY_SCALE_FACTOR;
    }
    if (!schemaName.startsWith("sf")) {
        return -1;
    }
    try {
        return Double.parseDouble(schemaName.substring(2));
    }
    // Narrowed from catch (Exception): Double.parseDouble only throws
    // NumberFormatException here (the substring argument is never null).
    catch (NumberFormatException ignored) {
        return -1;
    }
}
/**
 * Maps a TPCH column type onto the corresponding Presto SQL type.
 *
 * @throws IllegalArgumentException if the base type is not supported
 */
public static Type getPrestoType(TpchColumnType tpchType)
{
    switch (tpchType.getBase()) {
        case IDENTIFIER:
            return BigintType.BIGINT;
        case INTEGER:
            return IntegerType.INTEGER;
        case DATE:
            return DateType.DATE;
        case DOUBLE:
            return DoubleType.DOUBLE;
        case VARCHAR:
            // Precision is assumed present for VARCHAR (get() would throw otherwise).
            int length = (int) (long) tpchType.getPrecision().get();
            return createVarcharType(length);
        default:
            throw new IllegalArgumentException("Unsupported type " + tpchType);
    }
}
/**
 * Scales a base row count by the scale factor, rejecting results that do not
 * fit in a signed 64-bit long.
 *
 * @throws IllegalArgumentException if the scaled count overflows a long
 */
private long calculateTotalRows(int scaleBase, double scaleFactor)
{
    double totalRows = scaleBase * scaleFactor;
    // (double) Long.MAX_VALUE rounds up to exactly 2^63, so a strict '>'
    // comparison would let totalRows == 2^63 through and silently saturate in
    // the narrowing cast below; use >= against 2^63 (0x1p63) instead.
    if (totalRows >= 0x1p63) {
        throw new IllegalArgumentException("Total rows is larger than 2^63 - 1");
    }
    return (long) totalRows;
}
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2010.10.30 at 09:30:35 AM EDT
//
package jaxb;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{http://www.collada.org/2005/11/COLLADASchema}instance_force_field" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://www.collada.org/2005/11/COLLADASchema}instance_rigid_body" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://www.collada.org/2005/11/COLLADASchema}instance_rigid_constraint" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://www.collada.org/2005/11/COLLADASchema}extra" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* <attribute name="url" use="required" type="{http://www.w3.org/2001/XMLSchema}anyURI" />
* <attribute name="sid" type="{http://www.w3.org/2001/XMLSchema}NCName" />
* <attribute name="name" type="{http://www.w3.org/2001/XMLSchema}NCName" />
* <attribute name="parent" type="{http://www.w3.org/2001/XMLSchema}anyURI" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "instanceForceFields",
    "instanceRigidBodies",
    "instanceRigidConstraints",
    "extras"
})
@XmlRootElement(name = "instance_physics_model")
public class InstancePhysicsModel {

    @XmlElement(name = "instance_force_field")
    protected List<InstanceWithExtra> instanceForceFields;
    @XmlElement(name = "instance_rigid_body")
    protected List<InstanceRigidBody> instanceRigidBodies;
    @XmlElement(name = "instance_rigid_constraint")
    protected List<InstanceRigidConstraint> instanceRigidConstraints;
    @XmlElement(name = "extra")
    protected List<Extra> extras;
    @XmlAttribute(required = true)
    @XmlSchemaType(name = "anyURI")
    protected String url;
    @XmlAttribute
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NCName")
    protected String sid;
    @XmlAttribute
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NCName")
    protected String name;
    @XmlAttribute
    @XmlSchemaType(name = "anyURI")
    protected String parent;

    /**
     * Returns the live, lazily created list of instance_force_field elements.
     * Mutations on the returned list are reflected in this JAXB object, which
     * is why no setter exists for this property.
     *
     * @return never-null modifiable list of {@link InstanceWithExtra}
     */
    public List<InstanceWithExtra> getInstanceForceFields() {
        List<InstanceWithExtra> list = instanceForceFields;
        if (list == null) {
            list = new ArrayList<InstanceWithExtra>();
            instanceForceFields = list;
        }
        return list;
    }

    /**
     * Returns the live, lazily created list of instance_rigid_body elements.
     * Mutations on the returned list are reflected in this JAXB object.
     *
     * @return never-null modifiable list of {@link InstanceRigidBody}
     */
    public List<InstanceRigidBody> getInstanceRigidBodies() {
        List<InstanceRigidBody> list = instanceRigidBodies;
        if (list == null) {
            list = new ArrayList<InstanceRigidBody>();
            instanceRigidBodies = list;
        }
        return list;
    }

    /**
     * Returns the live, lazily created list of instance_rigid_constraint
     * elements. Mutations on the returned list are reflected in this JAXB
     * object.
     *
     * @return never-null modifiable list of {@link InstanceRigidConstraint}
     */
    public List<InstanceRigidConstraint> getInstanceRigidConstraints() {
        List<InstanceRigidConstraint> list = instanceRigidConstraints;
        if (list == null) {
            list = new ArrayList<InstanceRigidConstraint>();
            instanceRigidConstraints = list;
        }
        return list;
    }

    /**
     * Returns the live, lazily created list of extra elements. Mutations on
     * the returned list are reflected in this JAXB object.
     *
     * @return never-null modifiable list of {@link Extra}
     */
    public List<Extra> getExtras() {
        List<Extra> list = extras;
        if (list == null) {
            list = new ArrayList<Extra>();
            extras = list;
        }
        return list;
    }

    /**
     * Returns the required {@code url} attribute, or null if unset.
     */
    public String getUrl() {
        return url;
    }

    /**
     * Sets the required {@code url} attribute.
     */
    public void setUrl(String value) {
        url = value;
    }

    /**
     * Returns the optional {@code sid} attribute, or null if unset.
     */
    public String getSid() {
        return sid;
    }

    /**
     * Sets the optional {@code sid} attribute.
     */
    public void setSid(String value) {
        sid = value;
    }

    /**
     * Returns the optional {@code name} attribute, or null if unset.
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the optional {@code name} attribute.
     */
    public void setName(String value) {
        name = value;
    }

    /**
     * Returns the optional {@code parent} attribute, or null if unset.
     */
    public String getParent() {
        return parent;
    }

    /**
     * Sets the optional {@code parent} attribute.
     */
    public void setParent(String value) {
        parent = value;
    }
}
| |
package com.github.fakemongo;
import com.github.fakemongo.junit.FongoRule;
import com.google.common.collect.ImmutableList;
import static com.google.common.collect.ImmutableList.copyOf;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.MapReduceCommand;
import com.mongodb.MapReduceOutput;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
/**
* @author Vladimir Shakhov <bogdad@gmail.com>
*/
public class FongoMapReduceOutputModesTest {
@Rule
public FongoRule fongoRule = new FongoRule(false);
private DBCollection users;
private DBCollection typeHeights;
private DBCollection userLogins;
private DBCollection joinUsersLogins;
@Before
public void setUp() {
DB db = fongoRule.getDB();
users = db.getCollection("users");
userLogins = db.getCollection("userLogins");
typeHeights = db.getCollection("typeHeights");
joinUsersLogins = db.getCollection("joinUsersLogins");
}
@Test
public void inline() {
BasicDBObject user1 = new BasicDBObject("_id", "idUser1")
.append("type", "neutral").append("height", "100");
BasicDBObject user2 = new BasicDBObject("_id", "idUser2")
.append("type", "neutral").append("height", "150");
BasicDBObject user3 = new BasicDBObject("_id", "idUser3")
.append("type", "human").append("height", "200");
BasicDBObject user4 = new BasicDBObject("_id", "idUser4")
.append("type", "human").append("height", "400");
users.insert(user1, user2, user3, user4);
String map = "function () {" +
"emit(this.type, this);" +
"};";
String reduce = "function (key, values) {" +
" var sum = '';" +
" for (var i in values) {" +
" sum += values[i].height;" +
" }" +
" return {sum : sum};" +
"}";
MapReduceOutput result = users.mapReduce(map, reduce, typeHeights.getName(),
MapReduceCommand.OutputType.INLINE, new BasicDBObject());
ImmutableList<DBObject> actual = copyOf(result.results());
assertThat(actual).containsOnly(new BasicDBObject()
.append("_id", "neutral")
.append("value", new BasicDBObject("sum", "100150")),
new BasicDBObject()
.append("_id", "human")
.append("value", new BasicDBObject("sum", "200400"))
);
}
@Test
public void replace() {
BasicDBObject existingCat = new BasicDBObject()
.append("_id", "cat")
.append("value", new BasicDBObject().append("sum", "YY"));
BasicDBObject existingNeutral = new BasicDBObject()
.append("_id", "neutral")
.append("value", new BasicDBObject().append("sum", "XX"));
typeHeights.insert(existingNeutral, existingCat);
BasicDBObject user1 = new BasicDBObject().append("_id", "idUser1")
.append("type", "neutral").append("height", "100");
BasicDBObject user2 = new BasicDBObject().append("_id", "idUser2")
.append("type", "neutral").append("height", "150");
BasicDBObject user3 = new BasicDBObject().append("_id", "idUser3")
.append("type", "human").append("height", "200");
BasicDBObject user4 = new BasicDBObject("_id", "idUser4")
.append("type", "human").append("height", "400");
users.insert(user1, user2, user3, user4);
String map = "function () {" +
"emit(this.type, this);" +
"};";
String reduce = "function (key, values) {" +
" var sum = '';" +
" for (var i in values) {" +
" sum += values[i].height;" +
" }" +
" return {sum : sum};" +
"}";
final MapReduceOutput output = users.mapReduce(map, reduce, typeHeights.getName(),
MapReduceCommand.OutputType.REPLACE, new BasicDBObject());
List<DBObject> actual = typeHeights.find().toArray();
assertThat(actual).containsOnly(new BasicDBObject()
.append("_id", "neutral")
.append("value", new BasicDBObject().append("sum", "100150")),
new BasicDBObject()
.append("_id", "human")
.append("value", new BasicDBObject().append("sum", "200400"))
);
assertThat(actual).doesNotContain(existingCat);
assertThat(actual).doesNotContain(existingNeutral);
assertThat(output.results()).containsOnly(new BasicDBObject()
.append("_id", "neutral")
.append("value", new BasicDBObject().append("sum", "100150")),
new BasicDBObject()
.append("_id", "human")
.append("value", new BasicDBObject().append("sum", "200400"))
);
}
@Test
public void merge() {
BasicDBObject existingCat = new BasicDBObject()
.append("_id", "cat")
.append("value", new BasicDBObject().append("sum", "YY"));
BasicDBObject existingNeutral = new BasicDBObject()
.append("_id", "neutral")
.append("value", new BasicDBObject().append("sum", "XX"));
typeHeights.insert(existingNeutral, existingCat);
BasicDBObject user1 = new BasicDBObject().append("_id", "idUser1")
.append("type", "neutral").append("height", "100");
BasicDBObject user2 = new BasicDBObject().append("_id", "idUser2")
.append("type", "neutral").append("height", "150");
BasicDBObject user3 = new BasicDBObject().append("_id", "idUser3")
.append("type", "human").append("height", "200");
BasicDBObject user4 = new BasicDBObject("_id", "idUser4")
.append("type", "human").append("height", "400");
users.insert(user1, user2, user3, user4);
String map = "function () {" +
"emit(this.type, this);" +
"};";
String reduce = "function (key, values) {" +
" var sum = '';" +
" for (var i in values) {" +
" sum += values[i].height;" +
" }" +
" return {sum : sum};" +
"}";
final MapReduceOutput output = users.mapReduce(map, reduce, typeHeights.getName(),
MapReduceCommand.OutputType.MERGE, new BasicDBObject());
Iterable<DBObject> actual = typeHeights.find().toArray();
assertThat(actual).containsOnly(new BasicDBObject()
.append("_id", "neutral")
.append("value", new BasicDBObject().append("sum", "100150")),
new BasicDBObject()
.append("_id", "human")
.append("value", new BasicDBObject().append("sum", "200400")),
existingCat
);
assertThat(actual).doesNotContain(existingNeutral);
assertThat(output.results()).containsOnly(new BasicDBObject()
.append("_id", "neutral")
.append("value", new BasicDBObject().append("sum", "100150")),
new BasicDBObject()
.append("_id", "human")
.append("value", new BasicDBObject().append("sum", "200400")),
existingCat
);
}
@Test
public void reduceForJoinDataAllreadyThere() {
joinUsersLogins.insert(new BasicDBObject()
.append("_id", "idUser1")
.append("somekey", "somevalue"));
BasicDBObject user1Login = new BasicDBObject()
.append("_id", "idUser1")
.append("login", "bloble");
BasicDBObject user2Login = new BasicDBObject()
.append("_id", "idUser2")
.append("login", "wwww");
BasicDBObject user3Login = new BasicDBObject()
.append("_id", "idUser3")
.append("login", "wordpress");
userLogins.insert(user1Login, user2Login, user3Login);
BasicDBObject user1 = new BasicDBObject().append("_id", "idUser1")
.append("type", "neutral").append("height", "100");
BasicDBObject user2 = new BasicDBObject().append("_id", "idUser2")
.append("type", "neutral").append("height", "150");
BasicDBObject user3 = new BasicDBObject().append("_id", "idUser3")
.append("type", "human").append("height", "200");
users.insert(user1, user2, user3);
String mapUsers = "function () {" +
"emit(this._id, this);" +
"};";
String mapUserLogins = "function () {" +
"emit(this._id, this);" +
"};";
String reduce = "function (id, values) {" +
"function ifnull(r, v, key) {\n" +
" if (v[key] != undefined) r[key] = v[key];\n" +
" return r;\n" +
" }\n" +
" function ifnulls(r, v, keys) {\n" +
" for(var i in keys) r = ifnull(r, v, keys[i]);\n" +
" return r;\n" +
" }\n" +
" res = {};\n" +
" for (var i in values) {\n" +
" res = ifnulls(res, values[i], ['_id', 'login', 'type', 'height']);\n" +
" }\n" +
" return res;\n" +
"}";
users.mapReduce(mapUsers, reduce, joinUsersLogins.getName(),
MapReduceCommand.OutputType.REDUCE, new BasicDBObject());
final MapReduceOutput output = userLogins.mapReduce(mapUserLogins, reduce, joinUsersLogins.getName(),
MapReduceCommand.OutputType.REDUCE, new BasicDBObject());
Iterable<DBObject> actual = joinUsersLogins.find();
assertThat(actual).containsOnly(new BasicDBObject()
.append("_id", user1.get("_id"))
.append("value", user1.append("login", user1Login.get("login"))),
new BasicDBObject()
.append("_id", user2.get("_id"))
.append("value", user2.append("login", user2Login.get("login"))),
new BasicDBObject()
.append("_id", user3.get("_id"))
.append("value", user3.append("login", user3Login.get("login"))));
assertThat(output.results()).containsOnly(new BasicDBObject()
.append("_id", user1.get("_id"))
.append("value", user1.append("login", user1Login.get("login"))),
new BasicDBObject()
.append("_id", user2.get("_id"))
.append("value", user2.append("login", user2Login.get("login"))),
new BasicDBObject()
.append("_id", user3.get("_id"))
.append("value", user3.append("login", user3Login.get("login"))));
}
@Test
public void reduceForJoin() {
BasicDBObject user1Login = new BasicDBObject()
.append("_id", "idUser1")
.append("login", "bloble");
BasicDBObject user2Login = new BasicDBObject()
.append("_id", "idUser2")
.append("login", "wwww");
BasicDBObject user3Login = new BasicDBObject()
.append("_id", "idUser3")
.append("login", "wordpress");
userLogins.insert(user1Login, user2Login, user3Login);
BasicDBObject user1 = new BasicDBObject().append("_id", "idUser1")
.append("type", "neutral").append("height", "100");
BasicDBObject user2 = new BasicDBObject().append("_id", "idUser2")
.append("type", "neutral").append("height", "150");
BasicDBObject user3 = new BasicDBObject().append("_id", "idUser3")
.append("type", "human").append("height", "200");
users.insert(user1, user2, user3);
String mapUsers = "function () {" +
"emit(this._id, this);" +
"};";
String mapUserLogins = "function () {" +
"emit(this._id, this);" +
"};";
String reduce = "function (id, values) {" +
"function ifnull(r, v, key) {\n" +
" if (key in v && v[key] !=null) r[key] = v[key];\n" +
" return r;\n" +
" }\n" +
" function ifnulls(r, v, keys) {\n" +
" for(var i in keys) r = ifnull(r, v, keys[i]);\n" +
" return r;\n" +
" }\n" +
" res = {};\n" +
" for (var i in values) {\n" +
" res = ifnulls(res, values[i], ['_id', 'login', 'type', 'height']);\n" +
" }\n" +
" return res;\n" +
"}";
users.mapReduce(mapUsers, reduce, joinUsersLogins.getName(),
MapReduceCommand.OutputType.REDUCE, new BasicDBObject());
userLogins.mapReduce(mapUserLogins, reduce, joinUsersLogins.getName(),
MapReduceCommand.OutputType.REDUCE, new BasicDBObject());
Iterable<DBObject> actual = joinUsersLogins.find();
assertThat(actual).containsOnly(new BasicDBObject()
.append("_id", user1.get("_id"))
.append("value", user1.append("login", user1Login.get("login"))),
new BasicDBObject()
.append("_id", user2.get("_id"))
.append("value", user2.append("login", user2Login.get("login"))),
new BasicDBObject()
.append("_id", user3.get("_id"))
.append("value", user3.append("login", user3Login.get("login"))));
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bunjlabs.fuga.foundation;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.util.Map;
import java.util.TreeMap;
public final class Result {
private int status;
private InputStream stream;
private String contentType;
private long contentLength;
private Map<String, String> headers = new TreeMap<>();
/**
*
*/
public Result() {
this.status = -1;
this.contentType = "application/octet-stream";
this.contentLength = -1;
}
/**
*
* @param is content input stream
*/
public Result(InputStream is) {
this.status = -1;
this.stream = is;
this.contentType = "application/octet-stream";
this.contentLength = -1;
}
/**
*
* @param bytes content bytes
*/
public Result(byte[] bytes) {
this.status = -1;
this.stream = new ByteArrayInputStream(bytes);
this.contentType = "application/octet-stream";
this.contentLength = bytes.length;
}
/**
*
* @param f content file
* @throws IOException if file read error
*/
public Result(File f) throws IOException {
this.status = -1;
this.stream = new FileInputStream(f);
this.contentType = Files.probeContentType(f.toPath());
this.contentLength = f.length();
}
/**
*
* @param s content string
*/
public Result(String s) {
this(s.getBytes());
this.contentType = "text/html";
}
/**
*
* @return result status
*/
public int status() {
return status;
}
/**
* Set status of current result
*
* @param status status
* @return self
*/
public Result status(int status) {
this.status = status;
return this;
}
/**
*
* @return crrent headers map of this result.
*/
public Map<String, String> headers() {
return headers;
}
/**
* Add or change response http header.
*
* @param name Header name
* @param value Header value
* @return self
*/
public Result header(String name, String value) {
headers.put(name, value);
return this;
}
/**
*
* @return result content length
*/
public long length() {
return contentLength;
}
/**
*
* @param contentLength content length of this result
* @return self
*/
public Result length(long contentLength) {
this.contentLength = contentLength;
return this;
}
/**
*
* @param stream content stream
* @return self
*/
public Result stream(InputStream stream) {
this.stream = stream;
return this;
}
/**
*
* @return content stream of this result
*/
public InputStream stream() {
return stream;
}
/**
*
* @return content type of this result
*/
public String contentType() {
return contentType;
}
/**
* Set content type of current result to text/plain
*
* @return self
*/
public Result asText() {
this.contentType = "text/plain";
return this;
}
/**
* Set content type of current result to text/html
*
* @return self
*/
public Result asHtml() {
this.contentType = "text/html";
return this;
}
/**
* Set content type of current result to application/json
*
* @return self
*/
public Result asJson() {
this.contentType = "application/json";
return this;
}
/**
* Set content type of current result to text/xml
*
* @return self
*/
public Result asXml() {
this.contentType = "text/xml";
return this;
}
/**
* Set content type of current result to application/javascript
*
* @return self
*/
public Result asJavascript() {
this.contentType = "application/javascript";
return this;
}
/**
* Set content type of current result to text/css
*
* @return self
*/
public Result asCss() {
this.contentType = "text/css";
return this;
}
/**
* Set content type of current result
*
* @param contentType result content type
* @return self
*/
public Result as(String contentType) {
this.contentType = contentType;
return this;
}
/**
* Check that content stream of this result is null.
*
* @return content stream is null
*/
public boolean isEmpty() {
return stream == null;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.parser;
import java.util.ArrayList;
import java.util.HashMap;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.hops.Hop;
import org.apache.sysml.hops.recompile.Recompiler;
import org.apache.sysml.lops.Lop;
public class WhileStatementBlock extends StatementBlock
{
private Hop _predicateHops;
private Lop _predicateLops = null;
private boolean _requiresPredicateRecompile = false;
@Override
public VariableSet validate(DMLProgram dmlProg, VariableSet ids, HashMap<String,ConstIdentifier> constVars, boolean conditional)
{
if (_statements.size() > 1){
raiseValidateError("WhileStatementBlock should have only 1 statement (while statement)", conditional);
}
WhileStatement wstmt = (WhileStatement) _statements.get(0);
ConditionalPredicate predicate = wstmt.getConditionalPredicate();
// Record original size information before loop for ALL variables
// Will compare size / type info for these after loop completes
// Replace variables with changed size with unknown value
VariableSet origVarsBeforeBody = new VariableSet();
for (String key : ids.getVariableNames()){
DataIdentifier origId = ids.getVariable(key);
DataIdentifier copyId = new DataIdentifier(origId);
origVarsBeforeBody.addVariable(key, copyId);
}
//////////////////////////////////////////////////////////////////////////////
// FIRST PASS: process the predicate / statement blocks in the body of the for statement
///////////////////////////////////////////////////////////////////////////////
//remove updated vars from constants
for( String var : _updated.getVariableNames() )
if( constVars.containsKey( var ) )
constVars.remove( var );
// process the statement blocks in the body of the while statement
predicate.getPredicate().validateExpression(ids.getVariables(), constVars, conditional);
ArrayList<StatementBlock> body = wstmt.getBody();
_dmlProg = dmlProg;
for(StatementBlock sb : body)
{
//always conditional
ids = sb.validate(dmlProg, ids, constVars, true);
constVars = sb.getConstOut();
}
if (!body.isEmpty()) {
_constVarsIn.putAll(body.get(0).getConstIn());
_constVarsOut.putAll(body.get(body.size()-1).getConstOut());
}
// for each updated variable
boolean revalidationRequired = false;
for (String key : _updated.getVariableNames())
{
DataIdentifier startVersion = origVarsBeforeBody.getVariable(key);
DataIdentifier endVersion = ids.getVariable(key);
if (startVersion != null && endVersion != null)
{
//handle data type change (reject)
if (!startVersion.getOutput().getDataType().equals(endVersion.getOutput().getDataType())){
raiseValidateError("WhileStatementBlock has unsupported conditional data type change of variable '"+key+"' in loop body.", conditional);
}
//handle size change
long startVersionDim1 = (startVersion instanceof IndexedIdentifier) ? ((IndexedIdentifier)startVersion).getOrigDim1() : startVersion.getDim1();
long endVersionDim1 = (endVersion instanceof IndexedIdentifier) ? ((IndexedIdentifier)endVersion).getOrigDim1() : endVersion.getDim1();
long startVersionDim2 = (startVersion instanceof IndexedIdentifier) ? ((IndexedIdentifier)startVersion).getOrigDim2() : startVersion.getDim2();
long endVersionDim2 = (endVersion instanceof IndexedIdentifier) ? ((IndexedIdentifier)endVersion).getOrigDim2() : endVersion.getDim2();
boolean sizeUnchanged = ((startVersionDim1 == endVersionDim1) &&
(startVersionDim2 == endVersionDim2) );
//handle sparsity change
//NOTE: nnz not propagated via validate, and hence, we conservatively assume that nnz have been changed.
//long startVersionNNZ = startVersion.getNnz();
//long endVersionNNZ = endVersion.getNnz();
//boolean nnzUnchanged = (startVersionNNZ == endVersionNNZ);
boolean nnzUnchanged = false;
// IF size has changed --
if (!sizeUnchanged || !nnzUnchanged){
revalidationRequired = true;
DataIdentifier recVersion = new DataIdentifier(endVersion);
if(!sizeUnchanged)
recVersion.setDimensions(-1, -1);
if(!nnzUnchanged)
recVersion.setNnz(-1);
origVarsBeforeBody.addVariable(key, recVersion);
}
}
}
// revalidation is required -- size was updated for at least 1 variable
if (revalidationRequired)
{
// update ids to the reconciled values
ids = origVarsBeforeBody;
//////////////////////////////////////////////////////////////////////////////
// SECOND PASS: process the predicate / statement blocks in the body of the for statement
///////////////////////////////////////////////////////////////////////////////
//remove updated vars from constants
for( String var : _updated.getVariableNames() )
if( constVars.containsKey( var ) )
constVars.remove( var );
// process the statement blocks in the body of the while statement
predicate.getPredicate().validateExpression(ids.getVariables(), constVars, conditional);
body = wstmt.getBody();
_dmlProg = dmlProg;
for(StatementBlock sb : body)
{
//always conditional
ids = sb.validate(dmlProg, ids, constVars, true);
constVars = sb.getConstOut();
}
if (!body.isEmpty()) {
_constVarsIn.putAll(body.get(0).getConstIn());
_constVarsOut.putAll(body.get(body.size()-1).getConstOut());
}
}
return ids;
}
@Override
public VariableSet initializeforwardLV(VariableSet activeInPassed) {
    WhileStatement wstmt = (WhileStatement)_statements.get(0);
    // A WhileStatementBlock must wrap exactly one while statement.
    if (_statements.size() > 1){
        LOG.error(_statements.get(0).printErrorLocation() + "WhileStatementBlock should have only 1 statement (while statement)");
        throw new LanguageException(_statements.get(0).printErrorLocation() + "WhileStatementBlock should have only 1 statement (while statement)");
    }
    // The loop predicate both reads and (possibly) updates variables.
    _read = new VariableSet();
    _read.addVariables(wstmt.getConditionalPredicate().variablesRead());
    _updated.addVariables(wstmt.getConditionalPredicate().variablesUpdated());
    // Variables read by the predicate are "generated": they must be live on loop entry.
    _gen = new VariableSet();
    _gen.addVariables(wstmt.getConditionalPredicate().variablesRead());
    VariableSet current = new VariableSet();
    current.addVariables(activeInPassed);
    // Propagate the live set forward through each statement block of the loop body.
    for( StatementBlock sb : wstmt.getBody() )
    {
        current = sb.initializeforwardLV(current);
        // for each generated variable in this block, check variable not killed
        // in prior statement block in while stmt body
        for (String varName : sb._gen.getVariableNames()){
            // IF the variable is NOT set in the while loop PRIOR to this stmt block,
            // THEN needs to be generated
            if (!_kill.getVariableNames().contains(varName)){
                _gen.addVariable(varName, sb._gen.getVariable(varName));
            }
        }
        _read.addVariables(sb._read);
        _updated.addVariables(sb._updated);
        // only add kill variables for statement blocks guaranteed to execute
        // (nested loops may execute zero times, so their kills are not certain)
        if (!(sb instanceof WhileStatementBlock) && !(sb instanceof ForStatementBlock) ){
            _kill.addVariables(sb._kill);
        }
    }
    // set preliminary "warn" set -- variables updated in the loop but not live on entry;
    // using them later may cause a runtime error if the loop body never executes
    for (String varName : _updated.getVariableNames()){
        if (!activeInPassed.containsVariable(varName)) {
            _warnSet.addVariable(varName, _updated.getVariable(varName));
        }
    }
    // activeOut includes variables from passed live in and updated in the while body
    _liveOut = new VariableSet();
    _liveOut.addVariables(current);
    _liveOut.addVariables(_updated);
    return _liveOut;
}
@Override
public VariableSet initializebackwardLV(VariableSet loPassed) {
    WhileStatement whileStmt = (WhileStatement) _statements.get(0);
    // Seed the live-out working set with a copy of what the caller passed in.
    VariableSet live = new VariableSet();
    live.addVariables(loPassed);
    // Thread the live set backward through the loop body, last block first.
    for (int idx = whileStmt.getBody().size() - 1; idx >= 0; idx--) {
        live = whileStmt.getBody().get(idx).analyze(live);
    }
    // Hand back a copy so callers do not alias our working set.
    VariableSet result = new VariableSet();
    result.addVariables(live);
    return result;
}
/** Sets the hops DAG compiled for the loop predicate. */
public void setPredicateHops(Hop hops) {
    _predicateHops = hops;
}

/** @return the hops DAG of the loop predicate (may be null if not yet constructed) */
public Hop getPredicateHops(){
    return _predicateHops;
}

/** @return the low-level operators compiled from the loop predicate */
public Lop get_predicateLops() {
    return _predicateLops;
}

/** Sets the low-level operators compiled from the loop predicate. */
public void set_predicateLops(Lop predicateLops) {
    _predicateLops = predicateLops;
}
@Override
public VariableSet analyze(VariableSet loPassed) {
    // Variables touched (read or updated) by the loop predicate.
    VariableSet predVars = new VariableSet();
    predVars.addVariables(((WhileStatement)_statements.get(0)).getConditionalPredicate().variablesRead());
    predVars.addVariables(((WhileStatement)_statements.get(0)).getConditionalPredicate().variablesUpdated());
    // Candidate live-out: what the caller needs plus generated and predicate variables.
    VariableSet candidateLO = new VariableSet();
    candidateLO.addVariables(loPassed);
    candidateLO.addVariables(_gen);
    candidateLO.addVariables(predVars);
    VariableSet origLiveOut = new VariableSet();
    origLiveOut.addVariables(_liveOut);
    origLiveOut.addVariables(predVars);
    origLiveOut.addVariables(_gen);
    // New live-out = candidates that were already in the original live-out set.
    _liveOut = new VariableSet();
    for (String name : candidateLO.getVariableNames()){
        if (origLiveOut.containsVariable(name)){
            _liveOut.addVariable(name, candidateLO.getVariable(name));
        }
    }
    // Re-run backward liveness over the body with the refined live-out set.
    initializebackwardLV(_liveOut);
    // set final warnSet: remove variables NOT in live out
    VariableSet finalWarnSet = new VariableSet();
    for (String varName : _warnSet.getVariableNames()){
        if (_liveOut.containsVariable(varName)){
            finalWarnSet.addVariable(varName,_warnSet.getVariable(varName));
        }
    }
    _warnSet = finalWarnSet;
    // for now just print the warn set
    for (String varName : _warnSet.getVariableNames()){
        LOG.warn(_warnSet.getVariable(varName).printWarningLocation() + "Initialization of " + varName + " depends on while execution");
    }
    // Live-in = live-out plus everything the loop generates (kills cannot be removed,
    // because the loop body may execute zero times).
    _liveIn = new VariableSet();
    _liveIn.addVariables(_liveOut);
    _liveIn.addVariables(_gen);
    VariableSet liveInReturn = new VariableSet();
    liveInReturn.addVariables(_liveIn);
    return liveInReturn;
}
/////////
// materialized hops recompilation flags
////

/**
 * Recomputes and caches whether the loop predicate requires runtime recompilation
 * (dynamic recompilation enabled AND the predicate hops DAG marked as such).
 * @return the updated flag value
 */
public boolean updatePredicateRecompilationFlag() {
    return (_requiresPredicateRecompile =
        ConfigurationManager.isDynamicRecompilation()
        && Recompiler.requiresRecompilation(getPredicateHops()));
}

/** @return the flag last computed by {@link #updatePredicateRecompilationFlag()} */
public boolean requiresPredicateRecompilation() {
    return _requiresPredicateRecompile;
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vcs.impl;
import com.intellij.execution.filters.TextConsoleBuilderFactory;
import com.intellij.execution.ui.ConsoleView;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.ActionToolbar;
import com.intellij.openapi.actionSystem.DefaultActionGroup;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.State;
import com.intellij.openapi.components.Storage;
import com.intellij.openapi.components.StoragePathMacros;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.util.BackgroundTaskUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ProjectManagerListener;
import com.intellij.openapi.roots.FileIndexFacade;
import com.intellij.openapi.startup.StartupActivity;
import com.intellij.openapi.ui.SimpleToolWindowPanel;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.changes.ChangesUtil;
import com.intellij.openapi.vcs.changes.VcsAnnotationLocalChangesListener;
import com.intellij.openapi.vcs.checkout.CompositeCheckoutListener;
import com.intellij.openapi.vcs.ex.ProjectLevelVcsManagerEx;
import com.intellij.openapi.vcs.history.VcsHistoryCache;
import com.intellij.openapi.vcs.impl.projectlevelman.*;
import com.intellij.openapi.vcs.roots.VcsRootScanner;
import com.intellij.openapi.vcs.update.ActionInfo;
import com.intellij.openapi.vcs.update.UpdateInfoTree;
import com.intellij.openapi.vcs.update.UpdatedFiles;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowId;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.project.ProjectKt;
import com.intellij.ui.content.Content;
import com.intellij.ui.content.ContentFactory;
import com.intellij.ui.content.ContentManager;
import com.intellij.util.ContentUtilEx;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.text.DateFormatUtil;
import com.intellij.vcs.ViewUpdateInfoNotification;
import org.jdom.Attribute;
import org.jdom.DataConversionException;
import org.jdom.Element;
import org.jetbrains.annotations.*;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
@State(name = "ProjectLevelVcsManager", storages = @Storage(StoragePathMacros.WORKSPACE_FILE))
public final class ProjectLevelVcsManagerImpl extends ProjectLevelVcsManagerEx implements PersistentStateComponent<Element>, Disposable {
private static final Logger LOG = Logger.getInstance(ProjectLevelVcsManagerImpl.class);
// Persisted-state attribute marking that the user edited VCS mappings by hand.
@NonNls private static final String SETTINGS_EDITED_MANUALLY = "settingsEditedManually";

private final ProjectLevelVcsManagerSerialization mySerialization;
private final OptionsAndConfirmations myOptionsAndConfirmations;

// Directory -> VCS mappings; single source of truth for root/VCS resolution.
private final NewMappings myMappings;
private final Project myProject;

// Lazily created VCS console view; null until the first message is printed (see getOrCreateConsoleContent).
private ConsoleView myConsole;
// Null for the default project (no background VCS initialization there — see constructor).
@Nullable private final VcsInitialization myInitialization;

// XML element/attribute names used by read/writeDirectoryMappings.
@NonNls private static final String ELEMENT_MAPPING = "mapping";
@NonNls private static final String ATTRIBUTE_DIRECTORY = "directory";
@NonNls private static final String ATTRIBUTE_VCS = "vcs";
@NonNls private static final String ATTRIBUTE_DEFAULT_PROJECT = "defaultProject";
@NonNls private static final String ELEMENT_ROOT_SETTINGS = "rootSettings";
@NonNls private static final String ATTRIBUTE_CLASS = "class";

// Set when mappings came from stored project configuration (see readDirectoryMappings).
private boolean myMappingsLoaded;
// Set when the user configured mappings manually; suppresses auto-detection (see needAutodetectMappings).
private boolean myHaveLegacyVcsConfiguration;

// Number of currently running background VCS operations (start/stopBackgroundVcsOperation).
@NotNull private final AtomicInteger myBackgroundOperationCounter = new AtomicInteger();

// Keys of running background tasks; accessed on the dispatch thread only (asserted in start/stopBackgroundTask).
private final Set<ActionKey> myBackgroundRunningTasks = new HashSet<>();

// Console messages queued until the tool-window content manager becomes available.
private final List<Pair<String, ConsoleViewContentType>> myPendingOutput = new ArrayList<>();

private final FileIndexFacade myExcludedIndex;
public ProjectLevelVcsManagerImpl(@NotNull Project project) {
    myProject = project;
    myExcludedIndex = FileIndexFacade.getInstance(project);
    mySerialization = new ProjectLevelVcsManagerSerialization();
    myOptionsAndConfirmations = new OptionsAndConfirmations();
    // The default project never runs background VCS initialization.
    if (project.isDefault()) {
        myInitialization = null;
    }
    else {
        myInitialization = new VcsInitialization(myProject);
    }
    myMappings = new NewMappings(myProject, this);
    // Tie the mappings' lifetime to this manager's disposal.
    Disposer.register(this, myMappings);
}
/** Startup hook: begins background VCS initialization and cancels it on project close. */
static final class MyStartUpActivity implements StartupActivity.DumbAware {
    MyStartUpActivity() {
        // Cancel the background initialization thread when any project starts closing.
        ApplicationManager.getApplication().getMessageBus().connect().subscribe(ProjectManager.TOPIC, new ProjectManagerListener() {
            @Override
            public void projectClosing(@NotNull Project project) {
                ProjectLevelVcsManagerImpl manager = (ProjectLevelVcsManagerImpl)project.getServiceIfCreated(ProjectLevelVcsManager.class);
                if (manager != null && manager.myInitialization != null) {
                    // wait for the thread spawned in VcsInitialization to terminate
                    manager.myInitialization.cancelBackgroundInitialization();
                }
            }
        });
    }

    @Override
    public void runActivity(@NotNull Project project) {
        // Kick off background VCS initialization once the project is opened.
        ProjectLevelVcsManagerImpl manager = getInstanceImpl(project);
        if (manager.myInitialization != null) {
            manager.myInitialization.startInitialization();
        }
    }
}
/** @return the project's manager instance, downcast to the implementation type */
public static ProjectLevelVcsManagerImpl getInstanceImpl(@NotNull Project project) {
    return (ProjectLevelVcsManagerImpl)getInstance(project);
}

/** Registers a VCS outside the normal extension-point mechanism (e.g. tests). */
public void registerVcs(AbstractVcs vcs) {
    AllVcses.getInstance(myProject).registerManually(vcs);
}

@Override
@Nullable
public AbstractVcs findVcsByName(String name) {
    AbstractVcs result = myProject.isDisposed() ? null : AllVcses.getInstance(myProject).getByName(name);
    // Honor cancellation of a surrounding progress task after the lookup.
    ProgressManager.checkCanceled();
    return result;
}

@Override
@Nullable
public VcsDescriptor getDescriptor(final String name) {
    if (name == null) return null;
    if (myProject.isDisposed()) return null;
    return AllVcses.getInstance(myProject).getDescriptor(name);
}

@Override
public void iterateVfUnderVcsRoot(VirtualFile file, Processor<? super VirtualFile> processor) {
    VcsRootIterator.iterateVfUnderVcsRoot(myProject, file, processor);
}

@Override
public VcsDescriptor[] getAllVcss() {
    return AllVcses.getInstance(myProject).getAll();
}

/** @return true if at least one VCS implementation is available in this project */
public boolean haveVcses() {
    return !AllVcses.getInstance(myProject).isEmpty();
}
@Override
public void dispose() {
    // Only the console needs explicit release; other resources are Disposer-registered.
    releaseConsole();
}

@NotNull
@Override
public VcsAnnotationLocalChangesListener getAnnotationLocalChangesListener() {
    return myProject.getService(VcsAnnotationLocalChangesListener.class);
}
@Override
public boolean checkAllFilesAreUnder(AbstractVcs abstractVcs, VirtualFile[] files) {
    // A null array trivially fails the check.
    if (files == null) {
        return false;
    }
    // Every file must resolve to exactly the given VCS instance.
    for (int i = 0; i < files.length; i++) {
        if (getVcsFor(files[i]) != abstractVcs) {
            return false;
        }
    }
    return true;
}
@Override
@Nullable
public AbstractVcs getVcsFor(@NotNull VirtualFile file) {
    // All lookups below delegate to the mappings table; null when unmapped or project disposed.
    if (myProject.isDisposed()) return null;
    NewMappings.MappedRoot root = myMappings.getMappedRootFor(file);
    return root != null ? root.vcs : null;
}

@Override
@Nullable
public AbstractVcs getVcsFor(@NotNull FilePath file) {
    if (myProject.isDisposed()) return null;
    NewMappings.MappedRoot root = myMappings.getMappedRootFor(file);
    return root != null ? root.vcs : null;
}

@Override
@Nullable
public VirtualFile getVcsRootFor(@Nullable VirtualFile file) {
    if (file == null || myProject.isDisposed()) return null;
    NewMappings.MappedRoot root = myMappings.getMappedRootFor(file);
    return root != null ? root.root : null;
}

@Override
@Nullable
public VcsRoot getVcsRootObjectFor(@Nullable VirtualFile file) {
    if (file == null || myProject.isDisposed()) return null;
    NewMappings.MappedRoot root = myMappings.getMappedRootFor(file);
    return root != null ? new VcsRoot(root.vcs, root.root) : null;
}

@Override
@Nullable
public VirtualFile getVcsRootFor(@Nullable FilePath file) {
    if (file == null || myProject.isDisposed()) return null;
    NewMappings.MappedRoot root = myMappings.getMappedRootFor(file);
    return root != null ? root.root : null;
}

@Override
public VcsRoot getVcsRootObjectFor(@Nullable FilePath file) {
    if (file == null || myProject.isDisposed()) return null;
    NewMappings.MappedRoot root = myMappings.getMappedRootFor(file);
    return root != null ? new VcsRoot(root.vcs, root.root) : null;
}
/** Unregisters a manually-registered VCS; warns if it is still mapped to a root. */
public void unregisterVcs(@NotNull AbstractVcs vcs) {
    if (!ApplicationManager.getApplication().isUnitTestMode() && myMappings.haveActiveVcs(vcs.getName())) {
        // unlikely
        LOG.warn("Active vcs '" + vcs.getName() + "' is being unregistered. Remove from mappings first.");
    }
    myMappings.beingUnregistered(vcs.getName());
    AllVcses.getInstance(myProject).unregisterManually(vcs);
}

@Nullable
@Override
public ContentManager getContentManager() {
    // Null until the VCS tool window exists (e.g. early startup or headless mode).
    ToolWindow changes = ToolWindowManager.getInstance(myProject).getToolWindow(ToolWindowId.VCS);
    return changes == null ? null : changes.getContentManager();
}
@Override
public boolean checkVcsIsActive(AbstractVcs vcs) {
    return checkVcsIsActive(vcs.getName());
}

@Override
public boolean checkVcsIsActive(final String vcsName) {
    // "Active" means the VCS participates in at least one directory mapping.
    return myMappings.haveActiveVcs(vcsName);
}

@Override
public AbstractVcs @NotNull [] getAllActiveVcss() {
    return myMappings.getActiveVcses();
}

@Override
public boolean hasActiveVcss() {
    return myMappings.hasActiveVcss();
}

@Override
public boolean hasAnyMappings() {
    return !myMappings.isEmpty();
}
/** @deprecated use {@link #addMessageToConsoleWindow(String, ConsoleViewContentType)} */
@Deprecated
@Override
public void addMessageToConsoleWindow(final String message, final TextAttributes attributes) {
    addMessageToConsoleWindow(message, new ConsoleViewContentType("", attributes));
}

@Override
public void addMessageToConsoleWindow(@Nullable final String message, @NotNull final ConsoleViewContentType contentType) {
    // Console output can be disabled entirely via the registry.
    if (!Registry.is("vcs.showConsole")) {
        return;
    }
    if (StringUtil.isEmptyOrSpaces(message)) {
        return;
    }
    ApplicationManager.getApplication().invokeLater(() -> {
        // for default and disposed projects the ContentManager is not available.
        if (myProject.isDisposed() || myProject.isDefault()) return;
        final ContentManager contentManager = getContentManager();
        if (contentManager == null) {
            // Tool window not ready yet: queue the message for later replay.
            myPendingOutput.add(Pair.create(message, contentType));
        }
        else {
            getOrCreateConsoleContent(contentManager);
            printToConsole(message, contentType);
        }
    }, ModalityState.defaultModalityState());
}
/** Ensures the "Console" tab exists in the VCS tool window, creating it (and replaying pending output) on first use. */
private void getOrCreateConsoleContent(final ContentManager contentManager) {
    final String displayName = VcsBundle.message("vcs.console.toolwindow.display.name");
    Content content = contentManager.findContent(displayName);
    if (content == null) {
        // Drop any stale console before building a fresh one.
        releaseConsole();

        ConsoleView console = TextConsoleBuilderFactory.getInstance().createBuilder(myProject).getConsole();
        myConsole = console;

        SimpleToolWindowPanel panel = new SimpleToolWindowPanel(false, true);
        panel.setContent(console.getComponent());

        ActionToolbar toolbar = ActionManager.getInstance()
            .createActionToolbar("VcsManager", new DefaultActionGroup(console.createConsoleActions()), false);
        panel.setToolbar(toolbar.getComponent());

        content = ContentFactory.SERVICE.getInstance().createContent(panel, displayName, true);
        content.setDisposer(() -> releaseConsole());
        content.setPreferredFocusedComponent(() -> console.getPreferredFocusableComponent());
        contentManager.addContent(content);

        // Replay messages that arrived before the tool window was available.
        for (Pair<String, ConsoleViewContentType> pair : myPendingOutput) {
            printToConsole(pair.first, pair.second);
        }
        myPendingOutput.clear();
    }
}

private void printToConsole(@NotNull String message, @NotNull ConsoleViewContentType contentType) {
    myConsole.print(message + "\n", contentType);
}

/** Disposes the console view, if any; safe to call repeatedly. */
private void releaseConsole() {
    if (myConsole != null) {
        Disposer.dispose(myConsole);
        myConsole = null;
    }
}
@Override
@NotNull
public VcsShowSettingOption getOptions(VcsConfiguration.StandardOption option) {
    return myOptionsAndConfirmations.getOptions(option);
}

@Override
public List<VcsShowOptionsSettingImpl> getAllOptions() {
    return myOptionsAndConfirmations.getAllOptions();
}

@Override
@NotNull
public VcsShowSettingOption getStandardOption(@NotNull VcsConfiguration.StandardOption option, @NotNull AbstractVcs vcs) {
    // Marks the option as applicable to the given VCS before returning it.
    final VcsShowOptionsSettingImpl options = (VcsShowOptionsSettingImpl)getOptions(option);
    options.addApplicableVcs(vcs);
    return options;
}

@Override
@NotNull
public VcsShowSettingOption getOrCreateCustomOption(@NotNull String vcsActionName, @NotNull AbstractVcs vcs) {
    return myOptionsAndConfirmations.getOrCreateCustomOption(vcsActionName, vcs);
}
@CalledInAwt
@Override
public void showProjectOperationInfo(final UpdatedFiles updatedFiles, String displayActionName) {
    UpdateInfoTree tree = showUpdateProjectInfo(updatedFiles, displayActionName, ActionInfo.STATUS, false);
    if (tree != null) ViewUpdateInfoNotification.focusUpdateInfoTree(myProject, tree);
}

/** Creates an "Update Info" tab in the VCS tool window for the given updated files; null if the project/UI is unavailable. */
@CalledInAwt
@Nullable
@Override
public UpdateInfoTree showUpdateProjectInfo(UpdatedFiles updatedFiles, String displayActionName, ActionInfo actionInfo, boolean canceled) {
    if (!myProject.isOpen() || myProject.isDisposed()) return null;
    ContentManager contentManager = getContentManager();
    if (contentManager == null) {
        return null; // content manager is made null during dispose; flag is set later
    }
    final UpdateInfoTree updateInfoTree = new UpdateInfoTree(contentManager, myProject, updatedFiles, displayActionName, actionInfo);
    // Tab title carries the current timestamp so successive updates stay distinguishable.
    ContentUtilEx.addTabbedContent(contentManager, updateInfoTree, "Update Info", DateFormatUtil.formatDateTime(System.currentTimeMillis()), false, updateInfoTree);
    updateInfoTree.expandRootChildren();
    return updateInfoTree;
}
@Override
public List<VcsDirectoryMapping> getDirectoryMappings() {
    return myMappings.getDirectoryMappings();
}

@Override
public List<VcsDirectoryMapping> getDirectoryMappings(final AbstractVcs vcs) {
    return myMappings.getDirectoryMappings(vcs.getName());
}

@Override
@Nullable
public VcsDirectoryMapping getDirectoryMappingFor(@Nullable FilePath file) {
    if (file == null || myProject.isDisposed()) return null;
    NewMappings.MappedRoot root = myMappings.getMappedRootFor(file);
    return root != null ? root.mapping : null;
}

@Nullable
private VcsDirectoryMapping getDirectoryMappingFor(@Nullable VirtualFile file) {
    if (file == null || myProject.isDisposed()) return null;
    NewMappings.MappedRoot root = myMappings.getMappedRootFor(file);
    return root != null ? root.mapping : null;
}

@Override
public void setDirectoryMapping(@NotNull String path, @Nullable String activeVcsName) {
    if (myMappingsLoaded) return; // ignore per-module VCS settings if the mapping table was loaded from .ipr
    // Manual configuration disables later auto-detection (see needAutodetectMappings).
    myHaveLegacyVcsConfiguration = true;
    myMappings.setMapping(FileUtil.toSystemIndependentName(path), activeVcsName);
}

/**
 * @deprecated use {@link #setAutoDirectoryMappings(List)}
 */
@Deprecated
public void setAutoDirectoryMapping(@NotNull String path, @Nullable String activeVcsName) {
    setAutoDirectoryMappings(ContainerUtil.append(myMappings.getDirectoryMappings(), new VcsDirectoryMapping(path, activeVcsName)));
}

/** Replaces the mappings as a result of auto-detection; does NOT mark the config as user-edited. */
public void setAutoDirectoryMappings(@NotNull List<? extends VcsDirectoryMapping> mappings) {
    myMappings.setDirectoryMappings(mappings);
    myMappings.cleanupMappings();
}

public void removeDirectoryMapping(@NotNull VcsDirectoryMapping mapping) {
    myMappings.removeDirectoryMapping(mapping);
}

@Override
public void setDirectoryMappings(@NotNull List<VcsDirectoryMapping> items) {
    // Explicit (user-driven) replacement of mappings.
    myHaveLegacyVcsConfiguration = true;
    myMappings.setDirectoryMappings(items);
}

@Override
public void scheduleMappedRootsUpdate() {
    myMappings.scheduleMappedRootsUpdate();
}
@Override
public void iterateVcsRoot(final VirtualFile root, final Processor<? super FilePath> iterator) {
    VcsRootIterator.iterateVcsRoot(myProject, root, iterator);
}

@Override
public void iterateVcsRoot(VirtualFile root,
                           Processor<? super FilePath> iterator,
                           @Nullable VirtualFileFilter directoryFilter) {
    // Overload with an optional directory filter; both delegate to VcsRootIterator.
    VcsRootIterator.iterateVcsRoot(myProject, root, iterator, directoryFilter);
}
@NotNull
@Override
public Element getState() {
    Element element = new Element("state");
    mySerialization.writeExternalUtil(element, myOptionsAndConfirmations);
    // Persist the "user edited mappings" marker so auto-detection stays suppressed across restarts.
    if (myHaveLegacyVcsConfiguration) {
        element.setAttribute(SETTINGS_EDITED_MANUALLY, "true");
    }
    return element;
}

@Override
public void loadState(@NotNull Element state) {
    mySerialization.readExternalUtil(state, myOptionsAndConfirmations);
    final Attribute attribute = state.getAttribute(SETTINGS_EDITED_MANUALLY);
    if (attribute != null) {
        try {
            myHaveLegacyVcsConfiguration = attribute.getBooleanValue();
        }
        catch (DataConversionException ignored) {
            // Malformed boolean in stored state: keep the default (false).
        }
    }
}
@Override
@NotNull
public VcsShowConfirmationOption getStandardConfirmation(@NotNull VcsConfiguration.StandardConfirmation option,
                                                         AbstractVcs vcs) {
    // Marks the confirmation option as applicable to the given VCS (when provided).
    final VcsShowConfirmationOptionImpl result = getConfirmation(option);
    if (vcs != null) {
        result.addApplicableVcs(vcs);
    }
    return result;
}

@Override
public List<VcsShowConfirmationOptionImpl> getAllConfirmations() {
    return myOptionsAndConfirmations.getAllConfirmations();
}

@Override
@NotNull
public VcsShowConfirmationOptionImpl getConfirmation(VcsConfiguration.StandardConfirmation option) {
    return myOptionsAndConfirmations.getConfirmation(option);
}
// Bridges the listener API onto the message bus: one connection per registered listener.
private final Map<VcsListener, MessageBusConnection> myAdapters = new HashMap<>();

@Override
public void addVcsListener(VcsListener listener) {
    MessageBusConnection connection = myProject.getMessageBus().connect();
    connection.subscribe(VCS_CONFIGURATION_CHANGED, listener);
    myAdapters.put(listener, connection);
}

@Override
public void removeVcsListener(VcsListener listener) {
    // Disconnecting the stored connection also unsubscribes the listener.
    final MessageBusConnection connection = myAdapters.remove(listener);
    if (connection != null) {
        connection.disconnect();
    }
}
@Override
public void startBackgroundVcsOperation() {
    myBackgroundOperationCounter.incrementAndGet();
}

@Override
public void stopBackgroundVcsOperation() {
    // in fact, the condition is "should not be called under ApplicationManager.invokeLater() and similar"
    assert !ApplicationManager.getApplication().isDispatchThread() || ApplicationManager.getApplication().isUnitTestMode();
    // Check the pre-decrement value to detect unbalanced stop calls.
    int counter = myBackgroundOperationCounter.getAndDecrement();
    LOG.assertTrue(counter > 0, "myBackgroundOperationCounter was " + counter + " while should have been > 0");
}

@Override
public boolean isBackgroundVcsOperationRunning() {
    return myBackgroundOperationCounter.get() > 0;
}
@Override
public List<VirtualFile> getRootsUnderVcsWithoutFiltering(final AbstractVcs vcs) {
    // Raw mapping roots, without the filtering applied by getRootsUnderVcs.
    return myMappings.getMappingsAsFilesUnderVcs(vcs);
}

@Override
public VirtualFile @NotNull [] getRootsUnderVcs(@NotNull AbstractVcs vcs) {
    return MappingsToRoots.getRootsUnderVcs(myProject, myMappings, vcs);
}

@Override
public List<VirtualFile> getDetailedVcsMappings(@NotNull AbstractVcs vcs) {
    return MappingsToRoots.getDetailedVcsMappings(myProject, myMappings, vcs);
}
@Override
public VirtualFile[] getAllVersionedRoots() {
    // Flatten the roots of every active VCS into a single array.
    final List<VirtualFile> allRoots = new ArrayList<>();
    for (AbstractVcs activeVcs : myMappings.getActiveVcses()) {
        for (VirtualFile root : getRootsUnderVcs(activeVcs)) {
            allRoots.add(root);
        }
    }
    return VfsUtilCore.toVirtualFileArray(allRoots);
}
@Override
public VcsRoot @NotNull [] getAllVcsRoots() {
    // Pair each root of each active VCS with its VCS as a VcsRoot descriptor.
    final List<VcsRoot> result = new ArrayList<>();
    for (AbstractVcs activeVcs : myMappings.getActiveVcses()) {
        for (VirtualFile root : getRootsUnderVcs(activeVcs)) {
            result.add(new VcsRoot(activeVcs, root));
        }
    }
    return result.toArray(new VcsRoot[0]);
}
@Override
public void notifyDirectoryMappingChanged() {
    BackgroundTaskUtil.syncPublisher(myProject, VCS_CONFIGURATION_CHANGED).directoryMappingChanged();
}

/** Restores directory mappings (including per-root settings) from the given XML element. */
void readDirectoryMappings(final Element element) {
    final List<VcsDirectoryMapping> mappingsList = new ArrayList<>();
    boolean haveNonEmptyMappings = false;
    for (Element child : element.getChildren(ELEMENT_MAPPING)) {
        String vcs = child.getAttributeValue(ATTRIBUTE_VCS);
        String directory = child.getAttributeValue(ATTRIBUTE_DIRECTORY);
        // A mapping without a directory is malformed; skip it.
        if (directory == null) continue;

        VcsRootSettings rootSettings = null;
        Element rootSettingsElement = child.getChild(ELEMENT_ROOT_SETTINGS);
        if (rootSettingsElement != null) {
            String className = rootSettingsElement.getAttributeValue(ATTRIBUTE_CLASS);
            AbstractVcs vcsInstance = findVcsByName(vcs);
            if (vcsInstance != null && className != null) {
                rootSettings = vcsInstance.createEmptyVcsRootSettings();
                if (rootSettings != null) {
                    try {
                        rootSettings.readExternal(rootSettingsElement);
                    }
                    catch (InvalidDataException e) {
                        LOG.error("Failed to load VCS root settings class " + className + " for VCS " + vcsInstance.getClass().getName(), e);
                    }
                }
            }
        }

        VcsDirectoryMapping mapping = new VcsDirectoryMapping(directory, vcs, rootSettings);
        mappingsList.add(mapping);
        haveNonEmptyMappings |= !mapping.isDefaultMapping();
    }
    boolean defaultProject = Boolean.TRUE.toString().equals(element.getAttributeValue(ATTRIBUTE_DEFAULT_PROJECT));
    // run autodetection if there's no VCS in default project and
    if (haveNonEmptyMappings || !defaultProject) {
        myMappingsLoaded = true;
    }
    myMappings.setDirectoryMappings(mappingsList);
}
/** Serializes the current directory mappings (and per-root settings) into the given XML element. */
void writeDirectoryMappings(@NotNull Element element) {
    if (myProject.isDefault()) {
        element.setAttribute(ATTRIBUTE_DEFAULT_PROJECT, Boolean.TRUE.toString());
    }
    for (VcsDirectoryMapping mapping : getDirectoryMappings()) {
        VcsRootSettings rootSettings = mapping.getRootSettings();
        // Skip fully-empty mappings: nothing worth persisting.
        if (rootSettings == null && StringUtil.isEmpty(mapping.getDirectory()) && StringUtil.isEmpty(mapping.getVcs())) {
            continue;
        }
        Element child = new Element(ELEMENT_MAPPING);
        child.setAttribute(ATTRIBUTE_DIRECTORY, mapping.getDirectory());
        child.setAttribute(ATTRIBUTE_VCS, mapping.getVcs());
        if (rootSettings != null) {
            Element rootSettingsElement = new Element(ELEMENT_ROOT_SETTINGS);
            rootSettingsElement.setAttribute(ATTRIBUTE_CLASS, rootSettings.getClass().getName());
            try {
                rootSettings.writeExternal(rootSettingsElement);
                child.addContent(rootSettingsElement);
            }
            catch (WriteExternalException e) {
                // don't add element
            }
        }
        element.addContent(child);
    }
}
/** @return true if mappings were neither configured manually nor loaded from project settings */
public boolean needAutodetectMappings() {
    return !myHaveLegacyVcsConfiguration && !myMappingsLoaded;
}

/**
 * Used to guess VCS for automatic mapping through a look into a working copy
 */
@Override
@Nullable
public AbstractVcs findVersioningVcs(VirtualFile file) {
    final VcsDescriptor[] vcsDescriptors = getAllVcss();
    VcsDescriptor probableVcs = null;
    for (VcsDescriptor vcsDescriptor : vcsDescriptors) {
        if (vcsDescriptor.probablyUnderVcs(file)) {
            // Ambiguous: more than one VCS claims this directory -> give up.
            if (probableVcs != null) {
                return null;
            }
            probableVcs = vcsDescriptor;
        }
    }
    return probableVcs == null ? null : findVcsByName(probableVcs.getName());
}
@NotNull
@Override
public VcsRootChecker getRootChecker(@NotNull AbstractVcs vcs) {
    // Prefer a checker contributed for this VCS; otherwise fall back to the default one.
    for (VcsRootChecker checker : VcsRootChecker.EXTENSION_POINT_NAME.getExtensionList()) {
        if (checker.getSupportedVcs().equals(vcs.getKeyInstanceMethod())) return checker;
    }
    return new DefaultVcsRootChecker(vcs);
}

@Override
public CheckoutProvider.Listener getCompositeCheckoutListener() {
    return new CompositeCheckoutListener(myProject);
}
@Override
public void fireDirectoryMappingsChanged() {
    // Only meaningful for an open, live project.
    if (myProject.isOpen() && !myProject.isDisposed()) {
        myMappings.mappingsChanged();
    }
}

/**
 * @return VCS name for default mapping, if any
 */
@Nullable
@Override
public String haveDefaultMapping() {
    return myMappings.haveDefaultMapping();
}

/**
 * @deprecated use {@link BackgroundableActionLock}
 */
@Deprecated
public BackgroundableActionEnabledHandler getBackgroundableActionHandler(final VcsBackgroundableActions action) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    return new BackgroundableActionEnabledHandler(myProject, action);
}
// The three methods below are EDT-confined bookkeeping for named background tasks.
@CalledInAwt
boolean isBackgroundTaskRunning(Object @NotNull ... keys) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    return myBackgroundRunningTasks.contains(new ActionKey(keys));
}

@CalledInAwt
void startBackgroundTask(Object @NotNull ... keys) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    // Asserting the add/remove result catches double-start / double-stop bugs.
    LOG.assertTrue(myBackgroundRunningTasks.add(new ActionKey(keys)));
}

@CalledInAwt
void stopBackgroundTask(Object @NotNull ... keys) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    LOG.assertTrue(myBackgroundRunningTasks.remove(new ActionKey(keys)));
}

/** Queues a runnable for the given VCS initialization phase; ignored for the default project. */
public void addInitializationRequest(@NotNull VcsInitObject vcsInitObject, @NotNull Runnable runnable) {
    if (myInitialization != null) {
        myInitialization.add(vcsInitObject, runnable);
    }
}
@Override
public boolean isFileInContent(@Nullable final VirtualFile vf) {
    if (vf == null) return false;
    return ReadAction.compute(() -> {
        // A file belongs to the project if any of these structural checks passes.
        boolean isUnderProject = isFileInBaseDir(vf) ||
                                 isInDirectoryBasedRoot(vf) ||
                                 hasExplicitMapping(vf) ||
                                 myExcludedIndex.isInContent(vf) ||
                                 !Registry.is("ide.hide.excluded.files") && myExcludedIndex.isExcludedFile(vf);
        return isUnderProject && !isIgnored(vf);
    });
}

@Override
public boolean isIgnored(@NotNull VirtualFile vf) {
    return ReadAction.compute(() -> {
        if (myProject.isDisposed() || myProject.isDefault()) return false;
        // The registry flag switches between "excluded" and "under ignored" semantics.
        if (Registry.is("ide.hide.excluded.files")) {
            return myExcludedIndex.isExcludedFile(vf);
        }
        else {
            return myExcludedIndex.isUnderIgnored(vf);
        }
    });
}

@Override
public boolean isIgnored(@NotNull FilePath filePath) {
    return ReadAction.compute(() -> {
        if (myProject.isDisposed() || myProject.isDefault()) return false;
        if (Registry.is("ide.hide.excluded.files")) {
            // FilePath may not exist on disk: check the closest existing ancestor.
            VirtualFile vf = ChangesUtil.findValidParentAccurately(filePath);
            return vf != null && myExcludedIndex.isExcludedFile(vf);
        }
        else {
            // Fall back to name-based ignore patterns applied to every path component.
            FileTypeManager fileTypeManager = FileTypeManager.getInstance();
            for (String name : StringUtil.tokenize(filePath.getPath(), "/")) {
                if (fileTypeManager.isFileIgnored(name)) {
                    return true;
                }
            }
            return false;
        }
    });
}
/** @return true if the file is part of the .idea store of a directory-based project */
private boolean isInDirectoryBasedRoot(@NotNull VirtualFile file) {
    if (ProjectKt.isDirectoryBased(myProject)) {
        return ProjectKt.getStateStore(myProject).isProjectFile(file);
    }
    return false;
}

/** @return true if the file IS the project base dir, or lies directly inside it */
private boolean isFileInBaseDir(@NotNull VirtualFile file) {
    VirtualFile baseDir = myProject.getBaseDir();
    if (baseDir == null) return false;
    if (file.isDirectory()) {
        return baseDir.equals(file);
    }
    else {
        return baseDir.equals(file.getParent());
    }
}

/** @return true if the file is covered by a non-default (explicit) directory mapping */
private boolean hasExplicitMapping(@NotNull VirtualFile vFile) {
    final VcsDirectoryMapping mapping = getDirectoryMappingFor(vFile);
    return mapping != null && !mapping.isDefaultMapping();
}
@Override
public VcsHistoryCache getVcsHistoryCache() {
    return VcsCacheManager.getInstance(myProject).getVcsHistoryCache();
}

@Override
public ContentRevisionCache getContentRevisionCache() {
    return VcsCacheManager.getInstance(myProject).getContentRevisionCache();
}

/** Blocks until background VCS initialization finishes; no-op for the default project. */
@TestOnly
public void waitForInitialized() {
    if (myInitialization != null) {
        myInitialization.waitFinished();
    }
}
private static class ActionKey {
private final Object[] myObjects;
ActionKey(Object @NotNull ... objects) {
myObjects = objects;
}
@Override
public final boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
return Arrays.equals(myObjects, ((ActionKey)o).myObjects);
}
@Override
public final int hashCode() {
return Arrays.hashCode(myObjects);
}
@Override
public String toString() {
return getClass() + " - " + Arrays.toString(myObjects);
}
}
static final class MyProjectManagerListener implements ProjectManagerListener {
@Override
public void projectOpened(@NotNull Project project) {
if (ApplicationManager.getApplication().isUnitTestMode()) {
return;
}
getInstanceImpl(project).addInitializationRequest(VcsInitObject.AFTER_COMMON, () -> {
List<VcsRootChecker> checkers = VcsRootChecker.EXTENSION_POINT_NAME.getExtensionList();
if (checkers.size() != 0) {
VcsRootScanner.start(project, checkers);
}
});
}
@Override
public void projectClosed(@NotNull Project project) {
ProjectLevelVcsManagerImpl manager = (ProjectLevelVcsManagerImpl)project.getServiceIfCreated(ProjectLevelVcsManager.class);
if (manager != null) {
manager.releaseConsole();
}
}
}
}
| |
package ca.ualberta.cs.poker;
/***************************************************************************
Copyright (c) 2000:
University of Alberta,
Deptartment of Computing Science
Computer Poker Research Group
See "Liscence.txt"
***************************************************************************/
import java.util.Random;
/**
* A Deck of 52 Cards which can be dealt and shuffled
* @author Aaron Davidson
*/
/**
 * A Deck of 52 Cards which can be dealt and shuffled.
 * Cards in [0, position) have been dealt/extracted; [position, NUM_CARDS) remain.
 * All public operations are synchronized on the deck instance.
 * @author Aaron Davidson
 */
public class Deck {
    /** Number of cards in a standard deck. */
    public static final int NUM_CARDS = 52;

    private Card[] gCards = new Card[NUM_CARDS];

    /** Index of the next undealt card (top of deck). Was declared as char; int is the natural index type. */
    private int position;

    private Random r = new Random();

    /**
     * Constructor. Builds an ordered deck with all cards undealt.
     */
    public Deck() {
        position = 0;
        for (int i = 0; i < NUM_CARDS; i++) {
            gCards[i] = new Card(i);
        }
    }

    /**
     * Constructor w/ shuffle seed.
     * @param seed the seed to use in randomly shuffling the deck;
     *             0 selects a time-based seed.
     */
    public Deck(long seed) {
        this();
        if (seed == 0) {
            seed = System.currentTimeMillis();
        }
        r.setSeed(seed);
    }

    /**
     * Places all cards back into the deck.
     * Note: Does not sort the deck.
     */
    public synchronized void reset() { position = 0; }

    /**
     * Shuffles the cards in the deck (Fisher-Yates) and marks all cards undealt.
     */
    public synchronized void shuffle() {
        for (int i = 0; i < NUM_CARDS; i++) {
            int j = i + randInt(NUM_CARDS - i);
            Card tempCard = gCards[j];
            gCards[j] = gCards[i];
            gCards[i] = tempCard;
        }
        position = 0;
    }

    /**
     * Obtain the next card in the deck.
     * If no cards remain, a null card is returned.
     * @return the card dealt, or null when the deck is exhausted
     */
    public synchronized Card deal() {
        return (position < NUM_CARDS ? gCards[position++] : null);
    }

    /**
     * Obtain a randomly selected card from the remaining deck.
     * @return the card dealt
     */
    public synchronized Card dealCard() {
        return extractRandomCard();
    }

    /**
     * Find position of Card in the undealt portion of the Deck.
     * @return the index of the card, or -1 if it has already been dealt
     */
    public synchronized int findCard(Card c) {
        int n = c.getIndex();
        for (int i = position; i < NUM_CARDS; i++) {
            if (gCards[i].getIndex() == n) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Find position of Card among the already-dealt (discarded) cards.
     * BUG FIX: the original fell through to reading gCards[position] when the
     * card was not among the discards, which could return a false match (the
     * next undealt card) or throw ArrayIndexOutOfBoundsException when the
     * entire deck had been dealt (position == NUM_CARDS).
     * @return the index in [0, position), or -1 if not found
     */
    private synchronized int findDiscard(Card c) {
        int n = c.getIndex();
        for (int i = 0; i < position; i++) {
            if (gCards[i].getIndex() == n) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Remove all cards in the given hand from the Deck.
     * Note: Hand indices are 1-based.
     */
    public synchronized void extractHand(Hand h) {
        for (int i = 1; i <= h.size(); i++) {
            this.extractCard(h.getCard(i));
        }
    }

    /**
     * Remove a card from within the deck.
     * Swaps the card into the dealt region and advances the top-of-deck marker.
     * @param c the card to remove.
     */
    public synchronized void extractCard(Card c) {
        int i = findCard(c);
        if (i != -1) {
            Card t = gCards[i];
            gCards[i] = gCards[position];
            gCards[position] = t;
            position++;
        } else {
            System.err.println("*** ERROR: could not find card " + c);
            Thread.dumpStack(); // static method; was called on currentThread()
        }
    }

    /**
     * Remove and return a randomly selected card from within the deck.
     */
    public synchronized Card extractRandomCard() {
        int pos = position + randInt(NUM_CARDS - position);
        Card c = gCards[pos];
        gCards[pos] = gCards[position];
        gCards[position] = c;
        position++;
        return c;
    }

    /**
     * Return a randomly selected card from within the deck without removing it.
     */
    public synchronized Card pickRandomCard() {
        return gCards[position + randInt(NUM_CARDS - position)];
    }

    /**
     * Place a card back into the deck (undo a deal/extract).
     * No-op when the card is not among the dealt cards.
     * @param c the card to insert.
     */
    public synchronized void replaceCard(Card c) {
        int i = findDiscard(c);
        if (i != -1) {
            position--;
            Card t = gCards[i];
            gCards[i] = gCards[position];
            gCards[position] = t;
        }
    }

    /**
     * Obtain the position of the top card.
     * (the number of cards dealt from the deck)
     * @return the top card index
     */
    public synchronized int getTopCardIndex() {
        return position;
    }

    /**
     * Obtain the number of cards left in the deck.
     */
    public synchronized int cardsLeft() {
        return NUM_CARDS - position;
    }

    /**
     * Obtain the card at a specific index in the deck.
     * Does not matter if card has been dealt or not.
     * If i &lt; topCardIndex it has been dealt.
     * @param i the index into the deck (0..51)
     * @return the card at position i
     */
    public synchronized Card getCard(int i) {
        return gCards[i];
    }

    /** Dealt cards on the first line, remaining cards on the second. */
    public String toString() {
        StringBuilder s = new StringBuilder(); // StringBuilder: no sync needed on a local
        s.append("* ");
        for (int i = 0; i < position; i++) {
            s.append(gCards[i].toString() + " ");
        }
        s.append("\n* ");
        for (int i = position; i < NUM_CARDS; i++) {
            s.append(gCards[i].toString() + " ");
        }
        return s.toString();
    }

    // Uniform-ish int in [0, range). Kept as nextDouble()*range (rather than
    // r.nextInt(range)) so that seeded decks reproduce the same historical
    // shuffle sequences.
    private int randInt(int range) {
        return (int)(r.nextDouble() * range);
    }
}
| |
// Copyright (C) 2015 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.client.change;
import com.google.gerrit.client.changes.CommentApi;
import com.google.gerrit.client.changes.CommentInfo;
import com.google.gerrit.client.diff.CommentRange;
import com.google.gerrit.client.rpc.GerritCallback;
import com.google.gerrit.client.rpc.RestApi;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.PageLinks;
import com.google.gerrit.extensions.client.Side;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gwt.storage.client.Storage;
import com.google.gwt.user.client.Cookies;
import java.util.ArrayList;
import java.util.Collection;
/**
 * Client-side persistence for unsaved review comments (the big reply box and
 * inline draft comments). Values are kept in HTML5 web storage when available,
 * with cookies as a last-resort fallback, keyed by strings that encode the
 * change/patch-set/path the comment belongs to. The key format is parsed back
 * in {@link #getInlineComment(String)}, so it is effectively a persistence
 * contract — do not change it without a migration.
 */
public class LocalComments {
  @Nullable private final Project.NameKey project;
  private final Change.Id changeId;
  // Only set by the PatchSet.Id constructor; inline-comment APIs need it.
  private final PatchSet.Id psId;
  private final StorageBackend storage;

  /** A draft inline comment together with the patch set it belongs to. */
  private static class InlineComment {
    @Nullable final Project.NameKey project;
    final PatchSet.Id psId;
    final CommentInfo commentInfo;

    InlineComment(@Nullable Project.NameKey project, PatchSet.Id psId, CommentInfo commentInfo) {
      this.project = project;
      this.psId = psId;
      this.commentInfo = commentInfo;
    }
  }

  /**
   * Key/value store that prefers localStorage, then sessionStorage, and falls
   * back to cookies when neither web storage flavor is available.
   */
  private static class StorageBackend {
    private final Storage storageBackend;

    StorageBackend() {
      // Prefer persistent local storage; otherwise try session storage.
      // A null result in either branch routes all calls to the cookie fallback.
      storageBackend =
          (Storage.isLocalStorageSupported())
              ? Storage.getLocalStorageIfSupported()
              : Storage.getSessionStorageIfSupported();
    }

    String getItem(String key) {
      if (storageBackend == null) {
        return Cookies.getCookie(key);
      }
      return storageBackend.getItem(key);
    }

    void setItem(String key, String value) {
      if (storageBackend == null) {
        Cookies.setCookie(key, value);
        return;
      }
      storageBackend.setItem(key, value);
    }

    void removeItem(String key) {
      if (storageBackend == null) {
        Cookies.removeCookie(key);
        return;
      }
      storageBackend.removeItem(key);
    }

    /** All keys currently present in whichever backend is active. */
    Collection<String> getKeys() {
      if (storageBackend == null) {
        return Cookies.getCookieNames();
      }
      ArrayList<String> result = new ArrayList<>(storageBackend.getLength());
      for (int i = 0; i < storageBackend.getLength(); i++) {
        result.add(storageBackend.key(i));
      }
      return result;
    }
  }

  /** For the change-level reply comment; inline-comment APIs are unavailable (psId == null). */
  public LocalComments(@Nullable Project.NameKey project, Change.Id changeId) {
    this.project = project;
    this.changeId = changeId;
    this.psId = null;
    this.storage = new StorageBackend();
  }

  /** For inline comments on a specific patch set. */
  public LocalComments(@Nullable Project.NameKey project, PatchSet.Id psId) {
    this.project = project;
    this.changeId = psId.getParentKey();
    this.psId = psId;
    this.storage = new StorageBackend();
  }

  /** Returns and atomically removes the saved reply-box text (read-once semantics). */
  public String getReplyComment() {
    String comment = storage.getItem(getReplyCommentName());
    storage.removeItem(getReplyCommentName());
    return comment;
  }

  public void setReplyComment(String comment) {
    storage.setItem(getReplyCommentName(), comment.trim());
  }

  public boolean hasReplyComment() {
    return storage.getKeys().contains(getReplyCommentName());
  }

  public void removeReplyComment() {
    if (hasReplyComment()) {
      storage.removeItem(getReplyCommentName());
    }
  }

  private String getReplyCommentName() {
    return "savedReplyComment~" + PageLinks.toChangeId(project, changeId);
  }

  /**
   * Flushes every locally stored inline comment to the server as a draft
   * (create or update), removing each entry from storage on success.
   */
  public static void saveInlineComments() {
    final StorageBackend storage = new StorageBackend();
    for (String cookie : storage.getKeys()) {
      if (isInlineComment(cookie)) {
        InlineComment input = getInlineComment(cookie);
        if (input.commentInfo.id() == null) {
          // No server id yet: create a new draft.
          CommentApi.createDraft(
              Project.NameKey.asStringOrNull(input.project),
              input.psId,
              input.commentInfo,
              new GerritCallback<CommentInfo>() {
                @Override
                public void onSuccess(CommentInfo result) {
                  storage.removeItem(cookie);
                }
              });
        } else {
          // Editing an existing draft: update it in place.
          CommentApi.updateDraft(
              Project.NameKey.asStringOrNull(input.project),
              input.psId,
              input.commentInfo.id(),
              input.commentInfo,
              new GerritCallback<CommentInfo>() {
                @Override
                public void onSuccess(CommentInfo result) {
                  storage.removeItem(cookie);
                }

                @Override
                public void onFailure(Throwable caught) {
                  if (RestApi.isNotFound(caught)) {
                    // the draft comment, that was supposed to be updated,
                    // was deleted in the meantime
                    storage.removeItem(cookie);
                  } else {
                    super.onFailure(caught);
                  }
                }
              });
        }
      }
    }
  }

  public void setInlineComment(CommentInfo comment) {
    String name = getInlineCommentName(comment);
    if (name == null) {
      // Failed to get the store key -- so we can't continue.
      return;
    }
    storage.setItem(name, comment.message().trim());
  }

  public boolean hasInlineComments() {
    for (String cookie : storage.getKeys()) {
      if (isInlineComment(cookie)) {
        return true;
      }
    }
    return false;
  }

  private static boolean isInlineComment(String key) {
    return key.startsWith("patchCommentEdit~")
        || key.startsWith("patchReply~")
        || key.startsWith("patchComment~");
  }

  /**
   * Parses a storage key back into an InlineComment. Key layout (segments
   * separated by '~'):
   *   patchComment~{change}~{psNum}~{btoa(path)}~{side}~{line | R{sl},{sc}~{el},{ec}}
   * For patchReply~/patchCommentEdit~ an extra segment at index 1 carries the
   * in-reply-to id / draft id, shifting the remaining fields by one (offset).
   */
  private static InlineComment getInlineComment(String key) {
    String path;
    Side side = Side.PARENT;
    int line = 0;
    CommentRange range;
    StorageBackend storage = new StorageBackend();
    String[] elements = key.split("~");
    int offset = 1;
    if (key.startsWith("patchReply~") || key.startsWith("patchCommentEdit~")) {
      offset = 2;
    }
    ProjectChangeId id = ProjectChangeId.create(elements[offset + 0]);
    PatchSet.Id psId = new PatchSet.Id(id.getChangeId(), Integer.parseInt(elements[offset + 1]));
    path = atob(elements[offset + 2]);
    side = (Side.PARENT.toString().equals(elements[offset + 3])) ? Side.PARENT : Side.REVISION;
    range = null;
    if (elements[offset + 4].startsWith("R")) {
      // Range comment: "R{startLine},{startChar}" followed by "{endLine},{endChar}".
      String rangeStart = elements[offset + 4].substring(1);
      String rangeEnd = elements[offset + 5];
      String[] split = rangeStart.split(",");
      int sl = Integer.parseInt(split[0]);
      int sc = Integer.parseInt(split[1]);
      split = rangeEnd.split(",");
      int el = Integer.parseInt(split[0]);
      int ec = Integer.parseInt(split[1]);
      range = CommentRange.create(sl, sc, el, ec);
      line = sl;
    } else {
      line = Integer.parseInt(elements[offset + 4]);
    }
    CommentInfo info = CommentInfo.create(path, side, line, range, false);
    info.message(storage.getItem(key));
    if (key.startsWith("patchReply~")) {
      info.inReplyTo(elements[1]);
    } else if (key.startsWith("patchCommentEdit~")) {
      info.id(elements[1]);
    }
    InlineComment inlineComment = new InlineComment(id.getProject(), psId, info);
    return inlineComment;
  }

  /**
   * Builds the storage key for a comment; see getInlineComment for the format.
   * Returns null when this instance was created without a patch set id.
   */
  private String getInlineCommentName(CommentInfo comment) {
    if (psId == null) {
      return null;
    }
    String result = "patchComment~";
    if (comment.id() != null) {
      result = "patchCommentEdit~" + comment.id() + "~";
    } else if (comment.inReplyTo() != null) {
      result = "patchReply~" + comment.inReplyTo() + "~";
    }
    result += PageLinks.toChangeId(project, changeId);
    result += "~" + psId.getId() + "~" + btoa(comment.path()) + "~" + comment.side() + "~";
    if (comment.hasRange()) {
      result +=
          "R"
              + comment.range().startLine()
              + ","
              + comment.range().startCharacter()
              + "~"
              + comment.range().endLine()
              + ","
              + comment.range().endCharacter();
    } else {
      result += comment.line();
    }
    return result;
  }

  // JSNI bridges to the browser's base64 helpers (path segments may contain '~').
  private static native String btoa(String a) /*-{ return btoa(a); }-*/;

  private static native String atob(String b) /*-{ return atob(b); }-*/;
}
| |
/****************************************************************************
Copyright (c) 2010-2011 cocos2d-x.org
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
package org.cocos2dx.lib;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.os.Handler;
import android.os.Message;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.AttributeSet;
import android.util.Log;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.inputmethod.InputMethodManager;
import android.widget.TextView;
import android.widget.TextView.OnEditorActionListener;
/**
 * Bridges the Android soft-keyboard text field to the native engine: diffs
 * text changes into insertText/deleteBackward calls on the GL surface view.
 * Attached as both a TextWatcher and an editor-action listener.
 */
class TextInputWraper implements TextWatcher, OnEditorActionListener {
    // Primitive boolean: the original boxed Boolean constant added autoboxing for nothing.
    private static final boolean debug = false;

    private void LogD(String msg) {
        if (debug) Log.d("TextInputFilter", msg);
    }

    private final Cocos2dxGLSurfaceView mMainView;
    // Snapshot of the field's text before the current edit (see beforeTextChanged).
    private String mText;
    // Text present when the keyboard was opened; used to wipe the field in full-screen mode.
    private String mOriginText;

    // Primitive return type (was Boolean); the IME is full-screen in landscape on some devices.
    private boolean isFullScreenEdit() {
        InputMethodManager imm = (InputMethodManager)mMainView.getTextField().getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
        return imm.isFullscreenMode();
    }

    public TextInputWraper(Cocos2dxGLSurfaceView view) {
        mMainView = view;
    }

    public void setOriginText(String text) {
        mOriginText = text;
    }

    /**
     * Diffs the new text against the pre-edit snapshot: appended characters are
     * forwarded as an insert, removed characters as repeated delete-backwards.
     * Skipped in full-screen edit mode, where onEditorAction handles commits.
     */
    @Override
    public void afterTextChanged(Editable s) {
        if (isFullScreenEdit()) {
            return;
        }

        LogD("afterTextChanged: " + s);
        int nModified = s.length() - mText.length();
        if (nModified > 0) {
            final String insertText = s.subSequence(mText.length(), s.length()).toString();
            mMainView.insertText(insertText);
            LogD("insertText(" + insertText + ")");
        }
        else {
            for (; nModified < 0; ++nModified) {
                mMainView.deleteBackward();
                LogD("deleteBackward");
            }
        }
        mText = s.toString();
    }

    @Override
    public void beforeTextChanged(CharSequence s, int start, int count,
            int after) {
        LogD("beforeTextChanged(" + s + ")start: " + start + ",count: " + count + ",after: " + after);
        mText = s.toString();
    }

    @Override
    public void onTextChanged(CharSequence s, int start, int before, int count) {
    }

    /**
     * In full-screen edit mode the whole field is committed at once: erase the
     * original text, then send the new content (guaranteed to end in '\n').
     * Always returns false so the IME performs its default action too.
     */
    @Override
    public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
        if (mMainView.getTextField() == v && isFullScreenEdit()) {
            // user press the action button, delete all old text and insert new text
            for (int i = mOriginText.length(); i > 0; --i) {
                mMainView.deleteBackward();
                LogD("deleteBackward");
            }
            String text = v.getText().toString();

            /*
             * If user input nothing, translate "\n" to engine.
             */
            if (text.compareTo("") == 0){
                text = "\n";
            }

            if ('\n' != text.charAt(text.length() - 1)) {
                text += '\n';
            }

            final String insertText = text;
            mMainView.insertText(insertText);
            LogD("insertText(" + insertText + ")");
        }
        return false;
    }
}
/**
 * GLSurfaceView hosting the cocos2d-x renderer. Forwards touch/key events and
 * text input onto the GL thread via queueEvent, and drives the soft keyboard
 * through a UI-thread Handler.
 *
 * NOTE(review): mainView, handler and textInputWraper are static and hold the
 * last-created view (and therefore its Context) — this assumes a single live
 * instance and may leak the Activity if views are recreated; confirm lifecycle.
 */
public class Cocos2dxGLSurfaceView extends GLSurfaceView {
    // Last-created instance; used by the static keyboard entry points below.
    static private Cocos2dxGLSurfaceView mainView;

    private static final String TAG = Cocos2dxGLSurfaceView.class.getCanonicalName();
    private Cocos2dxRenderer mRenderer;

    private static final boolean debug = false;

    ///////////////////////////////////////////////////////////////////////////
    // for initialize
    ///////////////////////////////////////////////////////////////////////////
    public Cocos2dxGLSurfaceView(Context context) {
        super(context);
        initView();
    }

    public Cocos2dxGLSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        initView();
    }

    // Must be called before the surface is created; starts the render thread.
    public void setCocos2dxRenderer(Cocos2dxRenderer renderer){
        mRenderer = renderer;
        setRenderer(mRenderer);
    }

    // Sets up the text-input bridge and the UI-thread handler for IME messages,
    // and registers this instance as the static mainView.
    protected void initView() {
        setFocusableInTouchMode(true);
        textInputWraper = new TextInputWraper(this);

        handler = new Handler(){
            public void handleMessage(Message msg){
                switch(msg.what){
                    case HANDLER_OPEN_IME_KEYBOARD:
                        // Reset the field to the engine's current text without
                        // triggering the TextWatcher, then show the keyboard.
                        if (null != mTextField && mTextField.requestFocus()) {
                            mTextField.removeTextChangedListener(textInputWraper);
                            mTextField.setText("");
                            String text = (String)msg.obj;
                            mTextField.append(text);
                            textInputWraper.setOriginText(text);
                            mTextField.addTextChangedListener(textInputWraper);
                            InputMethodManager imm = (InputMethodManager)mainView.getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
                            imm.showSoftInput(mTextField, 0);
                            Log.d("GLSurfaceView", "showSoftInput");
                        }
                        break;

                    case HANDLER_CLOSE_IME_KEYBOARD:
                        if (null != mTextField) {
                            mTextField.removeTextChangedListener(textInputWraper);
                            InputMethodManager imm = (InputMethodManager)mainView.getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
                            imm.hideSoftInputFromWindow(mTextField.getWindowToken(), 0);
                            Log.d("GLSurfaceView", "HideSoftInput");
                        }
                        break;
                }
            }
        };

        mainView = this;
    }

    // Pause notification is queued so the renderer sees it on the GL thread.
    public void onPause(){
        queueEvent(new Runnable() {
            @Override
            public void run() {
                mRenderer.handleOnPause();
            }
        });

        super.onPause();
    }

    public void onResume(){
        super.onResume();

        queueEvent(new Runnable() {
            @Override
            public void run() {
                mRenderer.handleOnResume();
            }
        });
    }

    ///////////////////////////////////////////////////////////////////////////
    // for text input
    ///////////////////////////////////////////////////////////////////////////
    private final static int HANDLER_OPEN_IME_KEYBOARD = 2;
    private final static int HANDLER_CLOSE_IME_KEYBOARD = 3;

    private static Handler handler;
    private static TextInputWraper textInputWraper;
    private Cocos2dxEditText mTextField;

    public TextView getTextField() {
        return mTextField;
    }

    // Wires the edit text to the input bridge and keeps focus on the GL view.
    public void setTextField(Cocos2dxEditText view) {
        mTextField = view;
        if (null != mTextField && null != textInputWraper) {
            mTextField.setOnEditorActionListener(textInputWraper);
            mTextField.setMainView(this);
            this.requestFocus();
        }
    }

    // Called from native code; marshals onto the UI thread via the handler.
    public static void openIMEKeyboard() {
        Message msg = new Message();
        msg.what = HANDLER_OPEN_IME_KEYBOARD;
        msg.obj = mainView.getContentText();
        handler.sendMessage(msg);
    }

    private String getContentText() {
        return mRenderer.getContentText();
    }

    // Called from native code; marshals onto the UI thread via the handler.
    public static void closeIMEKeyboard() {
        Message msg = new Message();
        msg.what = HANDLER_CLOSE_IME_KEYBOARD;
        handler.sendMessage(msg);
    }

    // Forwards typed text to the engine on the GL thread.
    public void insertText(final String text) {
        queueEvent(new Runnable() {
            @Override
            public void run() {
                mRenderer.handleInsertText(text);
            }
        });
    }

    // Forwards a single backspace to the engine on the GL thread.
    public void deleteBackward() {
        queueEvent(new Runnable() {
            @Override
            public void run() {
                mRenderer.handleDeleteBackward();
            }
        });
    }

    ///////////////////////////////////////////////////////////////////////////
    // for touch event
    ///////////////////////////////////////////////////////////////////////////
    // Snapshots pointer data on the UI thread, then dispatches to the renderer
    // on the GL thread. Returns true: this view consumes all touch events.
    public boolean onTouchEvent(final MotionEvent event) {
        // these data are used in ACTION_MOVE and ACTION_CANCEL
        final int pointerNumber = event.getPointerCount();
        final int[] ids = new int[pointerNumber];
        final float[] xs = new float[pointerNumber];
        final float[] ys = new float[pointerNumber];

        for (int i = 0; i < pointerNumber; i++) {
            ids[i] = event.getPointerId(i);
            xs[i] = event.getX(i);
            ys[i] = event.getY(i);
        }

        switch (event.getAction() & MotionEvent.ACTION_MASK) {
            case MotionEvent.ACTION_POINTER_DOWN:
                // NOTE(review): ACTION_POINTER_ID_SHIFT-based index extraction is the
                // legacy idiom; getActionIndex() is the modern equivalent — confirm
                // before changing, older cocos2d-x targets pre-date it.
                final int indexPointerDown = event.getAction() >> MotionEvent.ACTION_POINTER_ID_SHIFT;
                final int idPointerDown = event.getPointerId(indexPointerDown);
                final float xPointerDown = event.getX(indexPointerDown);
                final float yPointerDown = event.getY(indexPointerDown);

                queueEvent(new Runnable() {
                    @Override
                    public void run() {
                        mRenderer.handleActionDown(idPointerDown, xPointerDown, yPointerDown);
                    }
                });
                break;

            case MotionEvent.ACTION_DOWN:
                // there are only one finger on the screen
                final int idDown = event.getPointerId(0);
                final float xDown = xs[0];
                final float yDown = ys[0];

                queueEvent(new Runnable() {
                    @Override
                    public void run() {
                        mRenderer.handleActionDown(idDown, xDown, yDown);
                    }
                });
                break;

            case MotionEvent.ACTION_MOVE:
                queueEvent(new Runnable() {
                    @Override
                    public void run() {
                        mRenderer.handleActionMove(ids, xs, ys);
                    }
                });
                break;

            case MotionEvent.ACTION_POINTER_UP:
                final int indexPointUp = event.getAction() >> MotionEvent.ACTION_POINTER_ID_SHIFT;
                final int idPointerUp = event.getPointerId(indexPointUp);
                final float xPointerUp = event.getX(indexPointUp);
                final float yPointerUp = event.getY(indexPointUp);

                queueEvent(new Runnable() {
                    @Override
                    public void run() {
                        mRenderer.handleActionUp(idPointerUp, xPointerUp, yPointerUp);
                    }
                });
                break;

            case MotionEvent.ACTION_UP:
                // there are only one finger on the screen
                final int idUp = event.getPointerId(0);
                final float xUp = xs[0];
                final float yUp = ys[0];

                queueEvent(new Runnable() {
                    @Override
                    public void run() {
                        mRenderer.handleActionUp(idUp, xUp, yUp);
                    }
                });
                break;

            case MotionEvent.ACTION_CANCEL:
                queueEvent(new Runnable() {
                    @Override
                    public void run() {
                        mRenderer.handleActionCancel(ids, xs, ys);
                    }
                });
                break;
        }

        if (debug){
            dumpEvent(event);
        }
        return true;
    }

    /*
     * This function is called before Cocos2dxRenderer.nativeInit(), so the width and height is correct.
     */
    protected void onSizeChanged(int w, int h, int oldw, int oldh){
        this.mRenderer.setScreenWidthAndHeight(w, h);
    }

    // Back/Menu are consumed and forwarded to the engine; everything else
    // falls through to the platform default.
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        final int kc = keyCode;
        if (keyCode == KeyEvent.KEYCODE_BACK || keyCode == KeyEvent.KEYCODE_MENU) {
            queueEvent(new Runnable() {
                @Override
                public void run() {
                    mRenderer.handleKeyDown(kc);
                }
            });
            return true;
        }
        return super.onKeyDown(keyCode, event);
    }

    // Show an event in the LogCat view, for debugging
    private void dumpEvent(MotionEvent event) {
        String names[] = { "DOWN" , "UP" , "MOVE" , "CANCEL" , "OUTSIDE" ,
                "POINTER_DOWN" , "POINTER_UP" , "7?" , "8?" , "9?" };
        StringBuilder sb = new StringBuilder();
        int action = event.getAction();
        int actionCode = action & MotionEvent.ACTION_MASK;
        sb.append("event ACTION_" ).append(names[actionCode]);
        if (actionCode == MotionEvent.ACTION_POINTER_DOWN
                || actionCode == MotionEvent.ACTION_POINTER_UP) {
            sb.append("(pid " ).append(
                    action >> MotionEvent.ACTION_POINTER_ID_SHIFT);
            sb.append(")" );
        }
        sb.append("[" );
        for (int i = 0; i < event.getPointerCount(); i++) {
            sb.append("#" ).append(i);
            sb.append("(pid " ).append(event.getPointerId(i));
            sb.append(")=" ).append((int) event.getX(i));
            sb.append("," ).append((int) event.getY(i));
            if (i + 1 < event.getPointerCount())
                sb.append(";" );
        }
        sb.append("]" );
        Log.d(TAG, sb.toString());
    }
}
| |
package uk.gov.eastlothian.gowalk.ui;
import android.app.ActionBar;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.support.v4.widget.SimpleCursorAdapter;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.CursorAdapter;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;

import uk.gov.eastlothian.gowalk.R;
import uk.gov.eastlothian.gowalk.data.WalksContract;
import uk.gov.eastlothian.gowalk.model.Wildlife;
/**
 * Shows the sighting log for a single wildlife species: a header image plus a
 * list of logged sightings. The species is passed via the "wildlife_id" intent
 * extra; tapping a row opens LogBookSightingDetailActivity.
 */
public class LogEntryActivity extends MainMenuActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_log_entry);
        if (savedInstanceState == null) {
            getSupportFragmentManager().beginTransaction()
                    .add(R.id.container, new LogEntryFragment())
                    .commit();
        }
    }

    /**
     * Fragment displaying the species image and its list of log entries,
     * loaded asynchronously via two cursor loaders.
     */
    public static class LogEntryFragment extends Fragment implements LoaderManager.LoaderCallbacks<Cursor> {
        static final int WILDLIFE_ID = 0;
        static final int WILDLIFE_LOG_QUERY_ID = 1;

        long wildlifeId;
        ImageView imageView;
        ListView listView;
        LogListAdapter adapter;

        public LogEntryFragment() {
        }

        @Override
        public View onCreateView(LayoutInflater inflater, ViewGroup container,
                                 Bundle savedInstanceState) {
            View rootView = inflater.inflate(R.layout.fragment_log_entry, container, false);

            // set the wildlife id (-1 when the extra is missing)
            wildlifeId = getActivity().getIntent().getLongExtra("wildlife_id", -1);

            // bind the views
            imageView = (ImageView) rootView.findViewById(R.id.log_entry_imageview);
            listView = (ListView) rootView.findViewById(R.id.log_entry_listview);
            adapter = new LogListAdapter(getActivity(), null, false);
            listView.setAdapter(adapter);
            listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
                @Override
                public void onItemClick(AdapterView<?> adapterView, View view, int position, long id) {
                    // The row's log-entry id is stashed in the view tag by bindView.
                    Long entryId = (Long) view.getTag();
                    Intent intent = new Intent(getActivity(), LogBookSightingDetailActivity.class);
                    intent.putExtra("log_entry_id", entryId);
                    startActivity(intent);
                }
            });

            // size the header image to a fixed aspect ratio of the screen width
            int width = this.getResources().getDisplayMetrics().widthPixels;
            imageView.setScaleType(ImageView.ScaleType.CENTER_CROP);
            int height = (int) (0.74 * width); // TODO: It's magic!
            imageView.setLayoutParams(new LinearLayout.LayoutParams(width, height));

            // start the queries
            getLoaderManager().initLoader(WILDLIFE_ID, null, this);
            getLoaderManager().initLoader(WILDLIFE_LOG_QUERY_ID, null, this);

            return rootView;
        }

        @Override
        public Loader<Cursor> onCreateLoader(int id, Bundle args) {
            Loader<Cursor> rtnCursor;
            switch(id) {
                case WILDLIFE_ID: {
                    Uri uri = WalksContract.WildlifeEntry.buildWildLifeUri(wildlifeId);
                    rtnCursor = new CursorLoader(getActivity(), uri, null, null, null, null);
                    break;
                }
                case WILDLIFE_LOG_QUERY_ID: {
                    Uri uri = WalksContract.LogEntry.buildLogsForWildlifeUri(wildlifeId);
                    rtnCursor = new CursorLoader(getActivity(), uri, null, null, null, null);
                    break;
                }
                default:
                    throw new UnsupportedOperationException("Unknown loader id.");
            }
            return rtnCursor;
        }

        @Override
        public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
            switch(loader.getId()) {
                case WILDLIFE_ID:
                    Wildlife wildlife = Wildlife.fromCursor(data).get(0);
                    imageView.setImageResource(wildlife.getImageResourceId(getActivity()));
                    getActivity().getActionBar().setTitle(wildlife.getCapitalisedName());
                    break;
                case WILDLIFE_LOG_QUERY_ID:
                    adapter.swapCursor(data);
                    break;
                default:
                    throw new UnsupportedOperationException("Unknown loader id.");
            }
        }

        @Override
        public void onLoaderReset(Loader<Cursor> loader) {
        }

        /** Renders one sighting row: formatted timestamp plus weather/location line. */
        public static class LogListAdapter extends CursorAdapter {
            LayoutInflater inflater;

            public LogListAdapter(Context context, Cursor cursor, boolean autoRequery) {
                super(context, cursor, autoRequery);
                inflater = LayoutInflater.from(context);
            }

            @Override
            public View newView(Context context, Cursor cursor, ViewGroup parent) {
                View element = inflater.inflate(R.layout.logbook_entry_list_element, parent, false);
                return element;
            }

            @Override
            public void bindView(View view, Context context, Cursor cursor) {
                // TODO: some of this stuff could be cached.
                int idIdx = cursor.getColumnIndex(WalksContract.LogEntry._ID);
                int latIdx = cursor.getColumnIndex(WalksContract.LogEntry.COLUMN_LAT);
                int lngIdx = cursor.getColumnIndex(WalksContract.LogEntry.COLUMN_LNG);
                int dateTimeIdx = cursor.getColumnIndex(WalksContract.LogEntry.COLUMN_DATATIME);
                int weatherIdx = cursor.getColumnIndex(WalksContract.LogEntry.COLUMN_WEATHER);

                long id = cursor.getLong(idIdx);
                String lat = cursor.getString(latIdx);
                String lng = cursor.getString(lngIdx);
                String dateTime = cursor.getString(dateTimeIdx);
                String weather = cursor.getString(weatherIdx);

                TextView locationText = (TextView) view.findViewById(R.id.log_entry_place);
                TextView datetimeText = (TextView) view.findViewById(R.id.log_entry_datetime);

                // FIX: construct SimpleDateFormat with an explicit Locale — the
                // no-Locale constructor uses the device default, which can break
                // parsing/formatting of this fixed machine-generated pattern.
                SimpleDateFormat iso8601Format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US);
                try {
                    iso8601Format.parse(dateTime);
                    final Calendar calendar = iso8601Format.getCalendar();
                    SimpleDateFormat prettyFormat = new SimpleDateFormat("HH:mm dd/MM/yyyy", Locale.US);
                    dateTime = prettyFormat.format(calendar.getTime());
                    // "HH:mm dd/MM/yyyy" -> [0] is the time, [1] is the date.
                    datetimeText.setText("Sighting at " + dateTime.split(" ")[0]
                            + " on " + dateTime.split(" ")[1]);
                } catch (ParseException e) {
                    e.printStackTrace();
                }
                locationText.setText("" + weather + " at " + lat + ", " + lng + "");
                view.setTag(id);
            }
        }
    }
}
| |
package io.github.droidkaigi.confsched2017.viewmodel;
import android.content.Context;
import android.databinding.BaseObservable;
import android.support.annotation.ColorRes;
import android.support.annotation.NonNull;
import android.support.annotation.StringRes;
import android.support.annotation.StyleRes;
import android.text.TextUtils;
import android.view.View;
import java.util.Date;
import java.util.Locale;
import javax.inject.Inject;
import io.github.droidkaigi.confsched2017.R;
import io.github.droidkaigi.confsched2017.model.Session;
import io.github.droidkaigi.confsched2017.repository.sessions.MySessionsRepository;
import io.github.droidkaigi.confsched2017.repository.sessions.SessionsRepository;
import io.github.droidkaigi.confsched2017.util.AlarmUtil;
import io.github.droidkaigi.confsched2017.util.DateUtil;
import io.github.droidkaigi.confsched2017.util.LocaleUtil;
import io.github.droidkaigi.confsched2017.view.helper.Navigator;
import io.reactivex.Completable;
import timber.log.Timber;
public class SessionDetailViewModel extends BaseObservable implements ViewModel {
private static final String TAG = SessionDetailViewModel.class.getSimpleName();
private final Context context;
private final Navigator navigator;
private final SessionsRepository sessionsRepository;
private final MySessionsRepository mySessionsRepository;
private String sessionTitle;
@ColorRes
private int sessionVividColorResId = R.color.white;
@ColorRes
private int sessionPaleColorResId = R.color.white;
@StyleRes
private int sessionThemeResId = R.color.white;
@StringRes
private int languageResId = R.string.lang_en;
private String sessionTimeRange;
public Session session;
private boolean isMySession;
private int slideIconVisibility;
private int dashVideoIconVisibility;
private int roomVisibility;
private int topicVisibility;
private Callback callback;
    // Dependencies are supplied by Dagger; the session itself is loaded later
    // via loadSession(int).
    @Inject
    public SessionDetailViewModel(Context context, Navigator navigator, SessionsRepository sessionsRepository,
            MySessionsRepository mySessionsRepository) {
        this.context = context;
        this.navigator = navigator;
        this.sessionsRepository = sessionsRepository;
        this.mySessionsRepository = mySessionsRepository;
    }
private void setSession(@NonNull Session session) {
this.session = session;
this.sessionTitle = session.title;
TopicColor topicColor = TopicColor.from(session.topic);
this.sessionVividColorResId = topicColor.vividColorResId;
this.sessionPaleColorResId = topicColor.paleColorResId;
this.sessionThemeResId = topicColor.themeId;
this.sessionTimeRange = decideSessionTimeRange(context, session);
this.isMySession = mySessionsRepository.isExist(session.id);
this.slideIconVisibility = session.slideUrl != null ? View.VISIBLE : View.GONE;
this.dashVideoIconVisibility = session.movieUrl != null && session.movieDashUrl != null ? View.VISIBLE : View.GONE;
this.roomVisibility = session.room != null ? View.VISIBLE : View.GONE;
this.topicVisibility = session.topic != null ? View.VISIBLE : View.GONE;
this.languageResId = session.lang != null ? decideLanguageResId(new Locale(session.lang.toLowerCase()))
: R.string.lang_en;
}
public Completable loadSession(int sessionId) {
return sessionsRepository.find(sessionId, Locale.getDefault())
.flatMapCompletable(session -> {
setSession(session);
return Completable.complete();
});
}
private int decideLanguageResId(@NonNull Locale locale) {
if (locale.equals(Locale.JAPANESE)) {
return R.string.lang_ja;
} else {
return R.string.lang_en;
}
}
@Override
public void destroy() {
// Do nothing
}
public boolean shouldShowShareMenuItem() {
return !TextUtils.isEmpty(session.shareUrl);
}
public void onClickShareMenuItem() {
//
}
public void onClickFeedbackButton(@SuppressWarnings("unused") View view) {
navigator.navigateToFeedbackPage(session);
}
public void onClickSlideIcon(@SuppressWarnings("unused") View view) {
// if (session.hasSlide()) {
// }
}
public void onClickMovieIcon(@SuppressWarnings("unused") View view) {
// if (session.hasDashVideo()) {
// }
}
public void onClickFab(@SuppressWarnings("unused") View view) {
boolean selected = true;
if (mySessionsRepository.isExist(session.id)) {
selected = false;
mySessionsRepository.delete(session)
.subscribe((result) -> Timber.tag(TAG).d("Deleted my session"),
throwable -> Timber.tag(TAG).e(throwable, "Failed to delete my session"));
AlarmUtil.unregisterAlarm(context, session);
} else {
selected = true;
mySessionsRepository.save(session)
.subscribe(() -> Timber.tag(TAG).d("Saved my session"),
throwable -> Timber.tag(TAG).e(throwable, "Failed to save my session"));
AlarmUtil.registerAlarm(context, session);
}
if (callback != null) {
callback.onClickFab(selected);
}
}
public void onOverScroll() {
if (callback != null) {
callback.onOverScroll();
}
}
private String decideSessionTimeRange(Context context, Session session) {
Date displaySTime = LocaleUtil.getDisplayDate(session.stime, context);
Date displayETime = LocaleUtil.getDisplayDate(session.etime, context);
return context.getString(R.string.session_time_range,
DateUtil.getLongFormatDate(displaySTime),
DateUtil.getHourMinute(displayETime),
DateUtil.getMinutes(displaySTime, displayETime));
}
public String getSessionTitle() {
return sessionTitle;
}
public int getSessionVividColorResId() {
return sessionVividColorResId;
}
public int getSessionPaleColorResId() {
return sessionPaleColorResId;
}
public int getTopicThemeResId() {
return sessionThemeResId;
}
public int getLanguageResId() {
return languageResId;
}
public String getSessionTimeRange() {
return sessionTimeRange;
}
public boolean isMySession() {
return isMySession;
}
public int getSlideIconVisibility() {
return slideIconVisibility;
}
public int getDashVideoIconVisibility() {
return dashVideoIconVisibility;
}
public int getTopicVisibility() {
return topicVisibility;
}
public int getRoomVisibility() {
return roomVisibility;
}
public void setCallback(Callback callback) {
this.callback = callback;
}
public interface Callback {
void onClickFab(boolean selected);
void onOverScroll();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.mongomk.impl.model;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import org.apache.jackrabbit.mongomk.api.instruction.Instruction;
import org.apache.jackrabbit.mongomk.api.model.Commit;
import com.mongodb.BasicDBObject;
/**
* The {@code MongoDB} representation of a commit.
*/
/**
 * The {@code MongoDB} representation of a commit.
 *
 * <p>Persisted properties live directly in the underlying
 * {@link BasicDBObject} under the {@code KEY_*} field names; the instruction
 * list and the inflated affected-path cache are in-memory only. Affected
 * paths and the diff are stored GZIP-compressed (see {@link #compress} /
 * {@link #inflate}).</p>
 */
public class MongoCommit extends BasicDBObject implements Commit {

    public static final String KEY_AFFECTED_PATH = "affPaths";
    public static final String KEY_BASE_REVISION_ID = "baseRevId";
    public static final String KEY_BRANCH_ID = "branchId";
    public static final String KEY_DIFF = "diff";
    public static final String KEY_FAILED = "failed";
    public static final String KEY_MESSAGE = "msg";
    public static final String KEY_PATH = "path";
    public static final String KEY_REVISION_ID = "revId";
    public static final String KEY_TIMESTAMP = "ts";

    // In-memory only: instructions attached to this commit (not persisted).
    private final List<Instruction> instructions;

    // Lazily inflated cache of the compressed KEY_AFFECTED_PATH value.
    private SortedSet<String> affectedPaths;

    private static final long serialVersionUID = 6656294757102309827L;

    /**
     * Default constructor. Needed for MongoDB serialization.
     */
    public MongoCommit() {
        instructions = new LinkedList<Instruction>();
        setTimestamp(new Date().getTime());
    }

    //--------------------------------------------------------------------------
    //
    // These properties are persisted to MongoDB
    //
    //--------------------------------------------------------------------------

    /**
     * Returns the (read-only) set of paths affected by this commit, inflating
     * and caching it on first access. An absent field yields an empty set
     * rather than a {@code NullPointerException}.
     */
    @Override
    public SortedSet<String> getAffectedPaths() {
        if (affectedPaths == null) {
            byte[] raw = (byte[]) get(KEY_AFFECTED_PATH);
            affectedPaths = raw == null
                    ? new TreeSet<String>()
                    : new TreeSet<String>(inflate(raw));
        }
        return Collections.unmodifiableSortedSet(affectedPaths);
    }

    public void setAffectedPaths(Set<String> affectedPaths) {
        this.affectedPaths = new TreeSet<String>(affectedPaths);
        put(KEY_AFFECTED_PATH, compress(this.affectedPaths));
    }

    @Override
    public Long getBaseRevisionId() {
        return containsField(KEY_BASE_REVISION_ID)? getLong(KEY_BASE_REVISION_ID) : null;
    }

    public void setBaseRevisionId(Long baseRevisionId) {
        // null clears the field entirely so getBaseRevisionId() returns null.
        if (baseRevisionId == null) {
            removeField(KEY_BASE_REVISION_ID);
        } else {
            put(KEY_BASE_REVISION_ID, baseRevisionId);
        }
    }

    @Override
    public String getBranchId() {
        return getString(KEY_BRANCH_ID);
    }

    public void setBranchId(String branchId) {
        put(KEY_BRANCH_ID, branchId);
    }

    /**
     * Returns the diff of this commit, or {@code null} when none was stored.
     */
    @Override
    public String getDiff() {
        List<String> data = inflate((byte[]) get(KEY_DIFF));
        if (data == null || data.isEmpty()) {
            return null;
        } else {
            return data.get(0);
        }
    }

    public void setDiff(String diff) {
        put(KEY_DIFF, compress(Collections.singleton(diff)));
    }

    public boolean isFailed() {
        return getBoolean(KEY_FAILED);
    }

    public void setFailed() {
        put(KEY_FAILED, Boolean.TRUE);
    }

    @Override
    public String getMessage() {
        return getString(KEY_MESSAGE);
    }

    public void setMessage(String message) {
        put(KEY_MESSAGE, message);
    }

    @Override
    public String getPath() {
        return getString(KEY_PATH);
    }

    public void setPath(String path) {
        put(KEY_PATH, path);
    }

    @Override
    public Long getRevisionId() {
        return containsField(KEY_REVISION_ID)? getLong(KEY_REVISION_ID) : null;
    }

    @Override
    public void setRevisionId(Long revisionId) {
        put(KEY_REVISION_ID, revisionId);
    }

    @Override
    public Long getTimestamp() {
        return getLong(KEY_TIMESTAMP);
    }

    public void setTimestamp(Long timestamp) {
        put(KEY_TIMESTAMP, timestamp);
    }

    //--------------------------------------------------------------------------
    //
    // These properties are used to keep track but not persisted to MongoDB
    //
    //--------------------------------------------------------------------------

    /**
     * Adds the given {@link Instruction}.
     *
     * @param instruction The {@code Instruction}.
     */
    public void addInstruction(Instruction instruction) {
        instructions.add(instruction);
    }

    @Override
    public List<Instruction> getInstructions() {
        return Collections.unmodifiableList(instructions);
    }

    //-----------------------------< internal >---------------------------------

    /**
     * Inflates a GZIP-compressed string list written by {@link #compress}.
     *
     * @param bytes The compressed bytes, may be {@code null}.
     * @return The decoded strings, or {@code null} iff {@code bytes} is
     *         {@code null}.
     */
    private List<String> inflate(byte[] bytes) {
        if (bytes == null) {
            return null;
        }
        List<String> data = new ArrayList<String>();
        DataInputStream in = null;
        try {
            in = new DataInputStream(new GZIPInputStream(
                    new ByteArrayInputStream(bytes)));
            int num = in.readInt();
            for (int i = 0; i < num; i++) {
                int len = in.readInt();
                byte[] buffer = new byte[len];
                in.readFully(buffer);
                data.add(new String(buffer, "UTF-8"));
            }
        } catch (IOException e) {
            // FIXME: shouldn't actually happen with byte[] and UTF-8
            throw new RuntimeException(e);
        } finally {
            // Always close: GZIPInputStream holds a native Inflater that would
            // otherwise only be released on finalization.
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignored) {
                    // best effort; in-memory stream
                }
            }
        }
        return data;
    }

    /**
     * Compresses the given strings into a length-prefixed, GZIP-compressed
     * UTF-8 representation (the inverse of {@link #inflate}).
     */
    private byte[] compress(Collection<String> strings) {
        ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
        DataOutputStream dataOut = null;
        try {
            dataOut = new DataOutputStream(new GZIPOutputStream(byteOut));
            dataOut.writeInt(strings.size());
            for (String s : strings) {
                byte[] buffer = s.getBytes("UTF-8");
                dataOut.writeInt(buffer.length);
                dataOut.write(buffer);
            }
            // Closing flushes the GZIP trailer; must happen before toByteArray.
            dataOut.close();
            dataOut = null;
            return byteOut.toByteArray();
        } catch (IOException e) {
            // FIXME: shouldn't actually happen with byte[] and UTF-8
            throw new RuntimeException(e);
        } finally {
            // Releases the native Deflater if an error occurred mid-write.
            if (dataOut != null) {
                try {
                    dataOut.close();
                } catch (IOException ignored) {
                    // best effort; in-memory stream
                }
            }
        }
    }
}
| |
/*
* Copyright 2015 Loic Merckel
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package lh.api.showcase.client;
import com.google.gwt.core.client.GWT;
public interface Messages extends com.google.gwt.i18n.client.Messages {
public static enum Util {
INSTANCE;
private Messages instance;
Util() {
instance = (Messages) GWT.create(Messages.class);
}
public Messages get() {
return instance;
}
}
@DefaultMessage("Home")
String home();
@DefaultMessage("About")
String about();
@DefaultMessage("Reference Data")
String referenceData();
@DefaultMessage("Operations")
String operations();
@DefaultMessage("Offers")
String offers();
@DefaultMessage("Seat Maps")
String seatMaps();
@DefaultMessage("Go")
String go();
@DefaultMessage("Countries")
String countries();
@DefaultMessage("Cities")
String cities();
@DefaultMessage("Airports")
String airports();
@DefaultMessage("Nearest Airports")
String nearestAirports();
@DefaultMessage("Airlines")
String airlines();
@DefaultMessage("Aircraft")
String aircraft();
@DefaultMessage("Schedule")
String schedule();
@DefaultMessage("Flight Status")
String flightStatus();
@DefaultMessage("Flight Status By Route")
String flightStatusByRoute();
@DefaultMessage("Arrivals Status")
String arrivalsStatus();
@DefaultMessage("Departures Status")
String departureStatus();
@DefaultMessage("Flight Number")
String flightNumber();
@DefaultMessage("Airport Code Origin")
String airportCodeOrigin();
@DefaultMessage("Airport Code Destination")
String airportCodeDestination();
@DefaultMessage("Departure Date")
String departureDate();
@DefaultMessage("Cabin Class")
String cabinClass();
@DefaultMessage("Results")
String results();
@DefaultMessage("Raw Results")
String rawResults();
@DefaultMessage("Some parameters are not valid")
String invalidParametersError();
@DefaultMessage("Name")
String name () ;
@DefaultMessage("Code")
String code () ;
@DefaultMessage("Zone")
String zone () ;
@DefaultMessage("Country Code (optional)")
String countryCodeOptional () ;
@DefaultMessage("Lang (optional)")
String langOptional () ;
@DefaultMessage("Aircraft Code (optional)")
String aircraftCodeOptional () ;
@DefaultMessage("Airline Code (optional)")
String airlineCodeOptional () ;
@DefaultMessage("Airport Code (optional)")
String airportCodeOptional () ;
@DefaultMessage("City Code (optional)")
String cityCodeOptional () ;
@DefaultMessage("Latitude")
String latitude () ;
@DefaultMessage("Longiture")
String longiture () ;
@DefaultMessage("Direct Flight?")
String isDirectFlight () ;
@DefaultMessage("Time Range")
String timeRange () ;
@DefaultMessage("Date")
String date () ;
@DefaultMessage("Airport Code Departure")
String airportCodeDeparture();
@DefaultMessage("Airport Code Arrival")
String airportCodeArrival();
@DefaultMessage("Airport Code")
String airportCode();
@DefaultMessage("City Code")
String cityCode();
@DefaultMessage("Country Code")
String countryCode();
@DefaultMessage("Location Type")
String locationType();
@DefaultMessage("Coordinate")
String coordinate();
@DefaultMessage("Distance")
String distance();
@DefaultMessage("Type")
String type();
@DefaultMessage("Unknown")
String unknown();
@DefaultMessage("ID")
String id();
@DefaultMessage("ID ICAO")
String idIcao();
@DefaultMessage("Other IDs")
String otherIds();
@DefaultMessage("Aircraft Code")
String aircraftCode();
@DefaultMessage("Airline Equip Code")
String airlineEquipCode();
@DefaultMessage("Duration")
String duration();
@DefaultMessage("Flight")
String flight();
@DefaultMessage("Back")
String back();
@DefaultMessage("Departure")
String departure();
@DefaultMessage("Arrival")
String arrival();
@DefaultMessage("Status")
String status();
@DefaultMessage("Some parameters seem not to be valid.")
String badRequest();
@DefaultMessage("The server is experiencing some problems. Try again later.")
String serverProblem();
@DefaultMessage("No information currently available.")
String resourceNotFound();
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.tools;
import java.io.File;
import java.net.*;
import java.util.*;
import com.google.common.base.Joiner;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import org.apache.commons.cli.*;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TTransport;
import org.apache.cassandra.auth.IAuthenticator;
import org.apache.cassandra.config.*;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.db.SystemKeyspace;
import org.apache.cassandra.db.marshal.UTF8Type;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.SSTableLoader;
import org.apache.cassandra.streaming.*;
import org.apache.cassandra.thrift.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.JVMStabilityInspector;
import org.apache.cassandra.utils.OutputHandler;
public class BulkLoader
{
private static final String TOOL_NAME = "sstableloader";
private static final String VERBOSE_OPTION = "verbose";
private static final String HELP_OPTION = "help";
private static final String NOPROGRESS_OPTION = "no-progress";
private static final String IGNORE_NODES_OPTION = "ignore";
private static final String INITIAL_HOST_ADDRESS_OPTION = "nodes";
private static final String RPC_PORT_OPTION = "port";
private static final String USER_OPTION = "username";
private static final String PASSWD_OPTION = "password";
private static final String THROTTLE_MBITS = "throttle";
private static final String TRANSPORT_FACTORY = "transport-factory";
/* client encryption options */
private static final String SSL_TRUSTSTORE = "truststore";
private static final String SSL_TRUSTSTORE_PW = "truststore-password";
private static final String SSL_KEYSTORE = "keystore";
private static final String SSL_KEYSTORE_PW = "keystore-password";
private static final String SSL_PROTOCOL = "ssl-protocol";
private static final String SSL_ALGORITHM = "ssl-alg";
private static final String SSL_STORE_TYPE = "store-type";
private static final String SSL_CIPHER_SUITES = "ssl-ciphers";
private static final String CONNECTIONS_PER_HOST = "connections-per-host";
private static final String CONFIG_PATH = "conf-path";
    /**
     * Entry point of the sstableloader tool: parses the command line, builds
     * an {@link SSTableLoader} backed by an {@link ExternalClient}, streams the
     * sstables and exits with 0 on success or 1 on failure.
     *
     * <p>Note: this method terminates the JVM via {@code System.exit} on every
     * path; it never returns normally to its caller.</p>
     */
    public static void main(String args[])
    {
        Config.setClientMode(true);
        // parseArgs exits the process itself on invalid input.
        LoaderOptions options = LoaderOptions.parseArgs(args);
        OutputHandler handler = new OutputHandler.SystemOutput(options.verbose, options.debug);
        SSTableLoader loader = new SSTableLoader(
                options.directory,
                new ExternalClient(
                        options.hosts,
                        options.rpcPort,
                        options.user,
                        options.passwd,
                        options.transportFactory,
                        options.storagePort,
                        options.sslStoragePort,
                        options.serverEncOptions),
                handler,
                options.connectionsPerHost);
        DatabaseDescriptor.setStreamThroughputOutboundMegabitsPerSec(options.throttle);
        StreamResultFuture future = null;
        // Progress listener is only attached when progress output is wanted.
        ProgressIndicator indicator = new ProgressIndicator();
        try
        {
            if (options.noProgress)
            {
                future = loader.stream(options.ignores);
            }
            else
            {
                future = loader.stream(options.ignores, indicator);
            }
        }
        catch (Exception e)
        {
            JVMStabilityInspector.inspectThrowable(e);
            System.err.println(e.getMessage());
            if (e.getCause() != null)
                System.err.println(e.getCause());
            e.printStackTrace(System.err);
            System.exit(1);
        }
        try
        {
            // Block until the streaming session finishes (or fails).
            future.get();
            if (!options.noProgress)
                indicator.printSummary(options.connectionsPerHost);
            // Give sockets time to gracefully close
            Thread.sleep(1000);
            System.exit(0); // We need that to stop non daemonized threads
        }
        catch (Exception e)
        {
            System.err.println("Streaming to the following hosts failed:");
            System.err.println(loader.getFailedHosts());
            e.printStackTrace(System.err);
            System.exit(1);
        }
    }
// Return true when everything is at 100%
static class ProgressIndicator implements StreamEventHandler
{
private long start;
private long lastProgress;
private long lastTime;
private int peak = 0;
private int totalFiles = 0;
private final Multimap<InetAddress, SessionInfo> sessionsByHost = HashMultimap.create();
public ProgressIndicator()
{
start = lastTime = System.nanoTime();
}
public void onSuccess(StreamState finalState) {}
public void onFailure(Throwable t) {}
public synchronized void handleStreamEvent(StreamEvent event)
{
if (event.eventType == StreamEvent.Type.STREAM_PREPARED)
{
SessionInfo session = ((StreamEvent.SessionPreparedEvent) event).session;
sessionsByHost.put(session.peer, session);
}
else if (event.eventType == StreamEvent.Type.FILE_PROGRESS || event.eventType == StreamEvent.Type.STREAM_COMPLETE)
{
ProgressInfo progressInfo = null;
if (event.eventType == StreamEvent.Type.FILE_PROGRESS)
{
progressInfo = ((StreamEvent.ProgressEvent) event).progress;
}
long time = System.nanoTime();
long deltaTime = time - lastTime;
StringBuilder sb = new StringBuilder();
sb.append("\rprogress: ");
long totalProgress = 0;
long totalSize = 0;
boolean updateTotalFiles = totalFiles == 0;
// recalculate progress across all sessions in all hosts and display
for (InetAddress peer : sessionsByHost.keySet())
{
sb.append("[").append(peer.toString()).append("]");
for (SessionInfo session : sessionsByHost.get(peer))
{
long size = session.getTotalSizeToSend();
long current = 0;
int completed = 0;
if (progressInfo != null && session.peer.equals(progressInfo.peer) && (session.sessionIndex == progressInfo.sessionIndex))
{
session.updateProgress(progressInfo);
}
for (ProgressInfo progress : session.getSendingFiles())
{
if (progress.isCompleted())
completed++;
current += progress.currentBytes;
}
totalProgress += current;
totalSize += size;
sb.append(session.sessionIndex).append(":");
sb.append(completed).append("/").append(session.getTotalFilesToSend());
sb.append(" ").append(String.format("%-3d", size == 0 ? 100L : current * 100L / size)).append("% ");
if (updateTotalFiles)
totalFiles += session.getTotalFilesToSend();
}
}
lastTime = time;
long deltaProgress = totalProgress - lastProgress;
lastProgress = totalProgress;
sb.append("total: ").append(totalSize == 0 ? 100L : totalProgress * 100L / totalSize).append("% ");
sb.append(String.format("%-3d", mbPerSec(deltaProgress, deltaTime))).append("MB/s");
int average = mbPerSec(totalProgress, (time - start));
if (average > peak)
peak = average;
sb.append("(avg: ").append(average).append(" MB/s)");
System.out.print(sb.toString());
}
}
private int mbPerSec(long bytes, long timeInNano)
{
double bytesPerNano = ((double)bytes) / timeInNano;
return (int)((bytesPerNano * 1000 * 1000 * 1000) / (1024 * 1024));
}
private void printSummary(int connectionsPerHost)
{
long end = System.nanoTime();
long durationMS = ((end - start) / (1000000));
int average = mbPerSec(lastProgress, (end - start));
StringBuilder sb = new StringBuilder();
sb.append("\nSummary statistics: \n");
sb.append(String.format(" %-30s: %-10d%n", "Connections per host: ", connectionsPerHost));
sb.append(String.format(" %-30s: %-10d%n", "Total files transferred: ", totalFiles));
sb.append(String.format(" %-30s: %-10d%n", "Total bytes transferred: ", lastProgress));
sb.append(String.format(" %-30s: %-10d%n", "Total duration (ms): ", durationMS));
sb.append(String.format(" %-30s: %-10d%n", "Average transfer rate (MB/s): ", + average));
sb.append(String.format(" %-30s: %-10d%n", "Peak transfer rate (MB/s): ", + peak));
System.out.println(sb.toString());
}
}
    /**
     * {@link SSTableLoader.Client} implementation that bootstraps ring and
     * schema information over Thrift from one of the supplied initial hosts.
     */
    public static class ExternalClient extends SSTableLoader.Client
    {
        // Schema cache populated in init(): column family name -> metadata.
        private final Map<String, CFMetaData> knownCfs = new HashMap<>();
        private final Set<InetAddress> hosts;
        private final int rpcPort;
        private final String user;
        private final String passwd;
        private final ITransportFactory transportFactory;
        private final int storagePort;
        private final int sslStoragePort;
        private final EncryptionOptions.ServerEncryptionOptions serverEncOptions;

        public ExternalClient(Set<InetAddress> hosts,
                              int port,
                              String user,
                              String passwd,
                              ITransportFactory transportFactory,
                              int storagePort,
                              int sslStoragePort,
                              EncryptionOptions.ServerEncryptionOptions serverEncryptionOptions)
        {
            super();
            this.hosts = hosts;
            this.rpcPort = port;
            this.user = user;
            this.passwd = passwd;
            this.transportFactory = transportFactory;
            this.storagePort = storagePort;
            this.sslStoragePort = sslStoragePort;
            this.serverEncOptions = serverEncryptionOptions;
        }

        /**
         * Tries each initial host in turn: fetches the partitioner, the
         * token-range-to-endpoint map and the CQL3 schema for {@code keyspace}.
         * Stops at the first host that answers; only throws if ALL hosts fail.
         */
        @Override
        public void init(String keyspace)
        {
            Iterator<InetAddress> hostiter = hosts.iterator();
            while (hostiter.hasNext())
            {
                try
                {
                    // Query endpoint to ranges map and schemas from thrift
                    InetAddress host = hostiter.next();
                    Cassandra.Client client = createThriftClient(host.getHostAddress(), rpcPort, this.user, this.passwd, this.transportFactory);
                    setPartitioner(client.describe_partitioner());
                    Token.TokenFactory tkFactory = getPartitioner().getTokenFactory();
                    for (TokenRange tr : client.describe_ring(keyspace))
                    {
                        Range<Token> range = new Range<>(tkFactory.fromString(tr.start_token), tkFactory.fromString(tr.end_token), getPartitioner());
                        for (String ep : tr.endpoints)
                        {
                            addRangeForEndpoint(range, InetAddress.getByName(ep));
                        }
                    }
                    String cfQuery = String.format("SELECT * FROM %s.%s WHERE keyspace_name = '%s'",
                                                   Keyspace.SYSTEM_KS,
                                                   SystemKeyspace.SCHEMA_COLUMNFAMILIES_CF,
                                                   keyspace);
                    CqlResult cfRes = client.execute_cql3_query(ByteBufferUtil.bytes(cfQuery), Compression.NONE, ConsistencyLevel.ONE);
                    for (CqlRow row : cfRes.rows)
                    {
                        String columnFamily = UTF8Type.instance.getString(row.columns.get(1).bufferForName());
                        String columnsQuery = String.format("SELECT * FROM %s.%s WHERE keyspace_name = '%s' AND columnfamily_name = '%s'",
                                                            Keyspace.SYSTEM_KS,
                                                            SystemKeyspace.SCHEMA_COLUMNS_CF,
                                                            keyspace,
                                                            columnFamily);
                        CqlResult columnsRes = client.execute_cql3_query(ByteBufferUtil.bytes(columnsQuery), Compression.NONE, ConsistencyLevel.ONE);
                        CFMetaData metadata = CFMetaData.fromThriftCqlRow(row, columnsRes);
                        knownCfs.put(metadata.cfName, metadata);
                    }
                    break;
                }
                catch (Exception e)
                {
                    // NOTE(review): failures for all but the last host are
                    // silently discarded; only the final host's exception is
                    // surfaced to the caller.
                    if (!hostiter.hasNext())
                        throw new RuntimeException("Could not retrieve endpoint ranges: ", e);
                }
            }
        }

        @Override
        public StreamConnectionFactory getConnectionFactory()
        {
            return new BulkLoadConnectionFactory(storagePort, sslStoragePort, serverEncOptions, false);
        }

        // Lookup is by cfName only; the keyspace argument is unused because
        // init() was already scoped to a single keyspace.
        @Override
        public CFMetaData getCFMetaData(String keyspace, String cfName)
        {
            return knownCfs.get(cfName);
        }

        /** Opens a Thrift connection and logs in when credentials are given. */
        private static Cassandra.Client createThriftClient(String host, int port, String user, String passwd, ITransportFactory transportFactory) throws Exception
        {
            TTransport trans = transportFactory.openTransport(host, port);
            TProtocol protocol = new TBinaryProtocol(trans);
            Cassandra.Client client = new Cassandra.Client(protocol);
            if (user != null && passwd != null)
            {
                Map<String, String> credentials = new HashMap<>();
                credentials.put(IAuthenticator.USERNAME_KEY, user);
                credentials.put(IAuthenticator.PASSWORD_KEY, passwd);
                AuthenticationRequest authenticationRequest = new AuthenticationRequest(credentials);
                client.login(authenticationRequest);
            }
            return client;
        }
    }
    /**
     * Parsed command-line configuration for the bulk loader. Defaults below
     * are overridden by cassandra.yaml (when -conf-path is given) and then by
     * individual command-line options; see {@code parseArgs}.
     */
    static class LoaderOptions
    {
        // The sstable directory to stream from (required positional argument).
        public final File directory;

        public boolean debug;
        public boolean verbose;
        public boolean noProgress;
        public int rpcPort = 9160;
        public String user;
        public String passwd;
        public int throttle = 0;
        public int storagePort;
        public int sslStoragePort;
        public ITransportFactory transportFactory = new TFramedTransportFactory();
        public EncryptionOptions encOptions = new EncryptionOptions.ClientEncryptionOptions();
        public int connectionsPerHost = 1;
        public EncryptionOptions.ServerEncryptionOptions serverEncOptions = new EncryptionOptions.ServerEncryptionOptions();

        // Initial contact hosts (-d, required) and hosts to skip (-i).
        public final Set<InetAddress> hosts = new HashSet<>();
        public final Set<InetAddress> ignores = new HashSet<>();

        LoaderOptions(File directory)
        {
            this.directory = directory;
        }
        /**
         * Parses the command line into a {@code LoaderOptions}.
         *
         * <p>Precedence: built-in defaults, then the YAML config file (when
         * {@code -conf-path} is given), then explicit command-line options.
         * On any validation error this prints usage and calls
         * {@code System.exit}, so the {@code return null} below is unreachable
         * in practice.</p>
         */
        public static LoaderOptions parseArgs(String cmdArgs[])
        {
            CommandLineParser parser = new GnuParser();
            CmdLineOptions options = getCmdLineOptions();
            try
            {
                CommandLine cmd = parser.parse(options, cmdArgs, false);
                if (cmd.hasOption(HELP_OPTION))
                {
                    printUsage(options);
                    System.exit(0);
                }
                // Exactly one positional argument: the sstable directory.
                String[] args = cmd.getArgs();
                if (args.length == 0)
                {
                    System.err.println("Missing sstable directory argument");
                    printUsage(options);
                    System.exit(1);
                }
                if (args.length > 1)
                {
                    System.err.println("Too many arguments");
                    printUsage(options);
                    System.exit(1);
                }
                String dirname = args[0];
                File dir = new File(dirname);
                if (!dir.exists())
                    errorMsg("Unknown directory: " + dirname, options);
                if (!dir.isDirectory())
                    errorMsg(dirname + " is not a directory", options);
                LoaderOptions opts = new LoaderOptions(dir);
                opts.verbose = cmd.hasOption(VERBOSE_OPTION);
                opts.noProgress = cmd.hasOption(NOPROGRESS_OPTION);
                if (cmd.hasOption(RPC_PORT_OPTION))
                    opts.rpcPort = Integer.parseInt(cmd.getOptionValue(RPC_PORT_OPTION));
                if (cmd.hasOption(USER_OPTION))
                    opts.user = cmd.getOptionValue(USER_OPTION);
                if (cmd.hasOption(PASSWD_OPTION))
                    opts.passwd = cmd.getOptionValue(PASSWD_OPTION);
                // -d: comma-separated list of initial contact hosts (required).
                if (cmd.hasOption(INITIAL_HOST_ADDRESS_OPTION))
                {
                    String[] nodes = cmd.getOptionValue(INITIAL_HOST_ADDRESS_OPTION).split(",");
                    try
                    {
                        for (String node : nodes)
                        {
                            opts.hosts.add(InetAddress.getByName(node.trim()));
                        }
                    }
                    catch (UnknownHostException e)
                    {
                        errorMsg("Unknown host: " + e.getMessage(), options);
                    }
                }
                else
                {
                    System.err.println("Initial hosts must be specified (-d)");
                    printUsage(options);
                    System.exit(1);
                }
                // -i: comma-separated list of hosts to exclude from streaming.
                if (cmd.hasOption(IGNORE_NODES_OPTION))
                {
                    String[] nodes = cmd.getOptionValue(IGNORE_NODES_OPTION).split(",");
                    try
                    {
                        for (String node : nodes)
                        {
                            opts.ignores.add(InetAddress.getByName(node.trim()));
                        }
                    }
                    catch (UnknownHostException e)
                    {
                        errorMsg("Unknown host: " + e.getMessage(), options);
                    }
                }
                if (cmd.hasOption(CONNECTIONS_PER_HOST))
                    opts.connectionsPerHost = Integer.parseInt(cmd.getOptionValue(CONNECTIONS_PER_HOST));
                // try to load config file first, so that values can be rewritten with other option values.
                // otherwise use default config.
                Config config;
                if (cmd.hasOption(CONFIG_PATH))
                {
                    File configFile = new File(cmd.getOptionValue(CONFIG_PATH));
                    if (!configFile.exists())
                    {
                        errorMsg("Config file not found", options);
                    }
                    config = new YamlConfigurationLoader().loadConfig(configFile.toURI().toURL());
                }
                else
                {
                    config = new Config();
                }
                opts.storagePort = config.storage_port;
                opts.sslStoragePort = config.ssl_storage_port;
                opts.throttle = config.stream_throughput_outbound_megabits_per_sec;
                opts.encOptions = config.client_encryption_options;
                opts.serverEncOptions = config.server_encryption_options;
                // Command-line options override the values taken from config.
                if (cmd.hasOption(THROTTLE_MBITS))
                {
                    opts.throttle = Integer.parseInt(cmd.getOptionValue(THROTTLE_MBITS));
                }
                if (cmd.hasOption(SSL_TRUSTSTORE))
                {
                    opts.encOptions.truststore = cmd.getOptionValue(SSL_TRUSTSTORE);
                }
                if (cmd.hasOption(SSL_TRUSTSTORE_PW))
                {
                    opts.encOptions.truststore_password = cmd.getOptionValue(SSL_TRUSTSTORE_PW);
                }
                if (cmd.hasOption(SSL_KEYSTORE))
                {
                    opts.encOptions.keystore = cmd.getOptionValue(SSL_KEYSTORE);
                    // if a keystore was provided, lets assume we'll need to use it
                    opts.encOptions.require_client_auth = true;
                }
                if (cmd.hasOption(SSL_KEYSTORE_PW))
                {
                    opts.encOptions.keystore_password = cmd.getOptionValue(SSL_KEYSTORE_PW);
                }
                if (cmd.hasOption(SSL_PROTOCOL))
                {
                    opts.encOptions.protocol = cmd.getOptionValue(SSL_PROTOCOL);
                }
                if (cmd.hasOption(SSL_ALGORITHM))
                {
                    opts.encOptions.algorithm = cmd.getOptionValue(SSL_ALGORITHM);
                }
                if (cmd.hasOption(SSL_STORE_TYPE))
                {
                    opts.encOptions.store_type = cmd.getOptionValue(SSL_STORE_TYPE);
                }
                if (cmd.hasOption(SSL_CIPHER_SUITES))
                {
                    opts.encOptions.cipher_suites = cmd.getOptionValue(SSL_CIPHER_SUITES).split(",");
                }
                if (cmd.hasOption(TRANSPORT_FACTORY))
                {
                    ITransportFactory transportFactory = getTransportFactory(cmd.getOptionValue(TRANSPORT_FACTORY));
                    configureTransportFactory(transportFactory, opts);
                    opts.transportFactory = transportFactory;
                }
                return opts;
            }
            catch (ParseException | ConfigurationException | MalformedURLException e)
            {
                // errorMsg exits the JVM; the return is only to satisfy javac.
                errorMsg(e.getMessage(), options);
                return null;
            }
        }
private static ITransportFactory getTransportFactory(String transportFactory)
{
try
{
Class<?> factory = Class.forName(transportFactory);
if (!ITransportFactory.class.isAssignableFrom(factory))
throw new IllegalArgumentException(String.format("transport factory '%s' " +
"not derived from ITransportFactory", transportFactory));
return (ITransportFactory) factory.newInstance();
}
catch (Exception e)
{
throw new IllegalArgumentException(String.format("Cannot create a transport factory '%s'.", transportFactory), e);
}
}
        /**
         * Builds the option map for a custom transport factory: copies over
         * the SSL settings the factory declares support for, then lets any
         * matching system properties override them, and applies the result.
         */
        private static void configureTransportFactory(ITransportFactory transportFactory, LoaderOptions opts)
        {
            Map<String, String> options = new HashMap<>();
            // If the supplied factory supports the same set of options as our SSL impl, set those
            if (transportFactory.supportedOptions().contains(SSLTransportFactory.TRUSTSTORE))
                options.put(SSLTransportFactory.TRUSTSTORE, opts.encOptions.truststore);
            if (transportFactory.supportedOptions().contains(SSLTransportFactory.TRUSTSTORE_PASSWORD))
                options.put(SSLTransportFactory.TRUSTSTORE_PASSWORD, opts.encOptions.truststore_password);
            if (transportFactory.supportedOptions().contains(SSLTransportFactory.PROTOCOL))
                options.put(SSLTransportFactory.PROTOCOL, opts.encOptions.protocol);
            if (transportFactory.supportedOptions().contains(SSLTransportFactory.CIPHER_SUITES))
                options.put(SSLTransportFactory.CIPHER_SUITES, Joiner.on(',').join(opts.encOptions.cipher_suites));
            // Keystore settings only apply when client auth is required.
            if (transportFactory.supportedOptions().contains(SSLTransportFactory.KEYSTORE)
                    && opts.encOptions.require_client_auth)
                options.put(SSLTransportFactory.KEYSTORE, opts.encOptions.keystore);
            if (transportFactory.supportedOptions().contains(SSLTransportFactory.KEYSTORE_PASSWORD)
                    && opts.encOptions.require_client_auth)
                options.put(SSLTransportFactory.KEYSTORE_PASSWORD, opts.encOptions.keystore_password);
            // Now check if any of the factory's supported options are set as system properties
            for (String optionKey : transportFactory.supportedOptions())
                if (System.getProperty(optionKey) != null)
                    options.put(optionKey, System.getProperty(optionKey));
            transportFactory.setOptions(options);
        }
/**
 * Prints the given error message followed by the tool usage text, then terminates
 * the JVM with a non-zero exit code. Never returns.
 *
 * @param msg the error text shown to the user
 * @param options the command line options, used to render the usage help
 */
private static void errorMsg(String msg, CmdLineOptions options)
{
    // Order matters: show the specific error before the generic usage text.
    System.err.println(msg);
    printUsage(options);
    System.exit(1);
}
/**
 * Builds the full set of command line options understood by the bulk loader.
 * Options are registered on a {@link CmdLineOptions} so each argument-taking
 * option can carry an argument name for the help output.
 *
 * @return the populated options object
 */
private static CmdLineOptions getCmdLineOptions()
{
    CmdLineOptions options = new CmdLineOptions();
    // flags without an argument
    options.addOption("v", VERBOSE_OPTION, "verbose output");
    options.addOption("h", HELP_OPTION, "display this help message");
    options.addOption(null, NOPROGRESS_OPTION, "don't display progress");
    // connection, authentication and throughput options (argument-taking)
    options.addOption("i", IGNORE_NODES_OPTION, "NODES", "don't stream to this (comma separated) list of nodes");
    options.addOption("d", INITIAL_HOST_ADDRESS_OPTION, "initial hosts", "Required. try to connect to these hosts (comma separated) initially for ring information");
    options.addOption("p", RPC_PORT_OPTION, "rpc port", "port used for rpc (default 9160)");
    options.addOption("t", THROTTLE_MBITS, "throttle", "throttle speed in Mbits (default unlimited)");
    options.addOption("u", USER_OPTION, "username", "username for cassandra authentication");
    options.addOption("pw", PASSWD_OPTION, "password", "password for cassandra authentication");
    options.addOption("tf", TRANSPORT_FACTORY, "transport factory", "Fully-qualified ITransportFactory class name for creating a connection to cassandra");
    options.addOption("cph", CONNECTIONS_PER_HOST, "connectionsPerHost", "number of concurrent connections-per-host.");
    // ssl connection-related options
    options.addOption("ts", SSL_TRUSTSTORE, "TRUSTSTORE", "Client SSL: full path to truststore");
    options.addOption("tspw", SSL_TRUSTSTORE_PW, "TRUSTSTORE-PASSWORD", "Client SSL: password of the truststore");
    options.addOption("ks", SSL_KEYSTORE, "KEYSTORE", "Client SSL: full path to keystore");
    options.addOption("kspw", SSL_KEYSTORE_PW, "KEYSTORE-PASSWORD", "Client SSL: password of the keystore");
    options.addOption("prtcl", SSL_PROTOCOL, "PROTOCOL", "Client SSL: connections protocol to use (default: TLS)");
    options.addOption("alg", SSL_ALGORITHM, "ALGORITHM", "Client SSL: algorithm (default: SunX509)");
    options.addOption("st", SSL_STORE_TYPE, "STORE-TYPE", "Client SSL: type of store");
    options.addOption("ciphers", SSL_CIPHER_SUITES, "CIPHER-SUITES", "Client SSL: comma-separated list of encryption suites to use");
    // yaml-based configuration override
    options.addOption("f", CONFIG_PATH, "path to config file", "cassandra.yaml file path for streaming throughput and client/server SSL.");
    return options;
}
/**
 * Renders the usage banner, option list and footer for the bulk loader to stdout.
 *
 * @param options the command line options to describe
 */
public static void printUsage(Options options)
{
    final String newline = System.lineSeparator();
    final String usage = String.format("%s [options] <dir_path>", TOOL_NAME);
    final String header = newline
            + "Bulk load the sstables found in the directory <dir_path> to the configured cluster."
            + "The parent directories of <dir_path> are used as the target keyspace/table name. "
            + "So for instance, to load an sstable named Standard1-g-1-Data.db into Keyspace1/Standard1, "
            + "you will need to have the files Standard1-g-1-Data.db and Standard1-g-1-Index.db into a directory /path/to/Keyspace1/Standard1/.";
    final String footer = newline
            + "You can provide cassandra.yaml file with -f command line option to set up streaming throughput, client and server encryption options. "
            + "Only stream_throughput_outbound_megabits_per_sec, server_encryption_options and client_encryption_options are read from yaml. "
            + "You can override options read from cassandra.yaml with corresponding command line options.";
    new HelpFormatter().printHelp(usage, header, options, footer);
}
}
/**
 * A thin extension of commons-cli {@link Options} adding convenience overloads
 * for registering options with and without arguments.
 */
public static class CmdLineOptions extends Options
{
    /**
     * Add option with argument and argument name.
     *
     * @param opt shortcut for option name
     * @param longOpt complete option name
     * @param argName argument name shown in help output
     * @param description description of the option
     * @return updated Options object
     */
    public Options addOption(String opt, String longOpt, String argName, String description)
    {
        Option withArg = new Option(opt, longOpt, true, description);
        withArg.setArgName(argName);
        return addOption(withArg);
    }

    /**
     * Add option without argument.
     *
     * @param opt shortcut for option name
     * @param longOpt complete option name
     * @param description description of the option
     * @return updated Options object
     */
    public Options addOption(String opt, String longOpt, String description)
    {
        Option flag = new Option(opt, longOpt, false, description);
        return addOption(flag);
    }
}
}
| |
/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package network.bitmesh.channels;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import com.google.protobuf.ByteString;
import org.bitcoin.paymentchannel.Protos;
import org.bitcoinj.core.*;
import org.bitcoinj.net.NioClient;
import org.bitcoinj.net.ProtobufParser;
import org.spongycastle.crypto.params.KeyParameter;
import javax.annotation.Nullable;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
/**
* A simple utility class that runs the micropayment protocol over a raw TCP socket using NIO, standalone.
*/
public class PaymentChannelClientConnection {
    // Completed when the channel reaches the open state, or failed with the causing exception.
    private final SettableFuture<PaymentChannelClientConnection> channelOpenFuture = SettableFuture.create();
    private final PaymentChannelClient channelClient;
    private final ProtobufParser<Protos.TwoWayChannelMessage> wireParser;

    /**
     * Attempts to open a new connection to and open a payment channel with the given host and port, blocking until the
     * connection is open. The server is requested to keep the channel open for
     * {@link PaymentChannelClient#DEFAULT_TIME_WINDOW}
     * seconds. If the server proposes a longer time the channel will be closed.
     *
     * @param server The host/port pair where the server is listening.
     * @param timeoutSeconds The connection timeout and read timeout during initialization. This should be large enough
     *                       to accommodate ECDSA signature operations and network latency.
     * @param wallet The wallet which will be paid from, and where completed transactions will be committed.
     *               Must be unencrypted. Must already have a {@link org.bitcoinj.protocols.channels.StoredPaymentChannelClientStates} object in its extensions set.
     * @param myKey A freshly generated keypair used for the multisig contract and refund output.
     * @param maxValue The maximum value this channel is allowed to request
     * @param serverId A unique ID which is used to attempt reopening of an existing channel.
     *                 This must be unique to the server, and, if your application is exposing payment channels to some
     *                 API, this should also probably encompass some caller UID to avoid applications opening channels
     *                 which were created by others.
     *
     * @throws IOException if there's an issue using the network.
     * @throws ValueOutOfRangeException if the balance of wallet is lower than maxValue.
     */
    public PaymentChannelClientConnection(InetSocketAddress server, int timeoutSeconds, Wallet wallet, ECKey myKey,
                                          Coin maxValue, String serverId) throws IOException, ValueOutOfRangeException {
        this(server, timeoutSeconds, wallet, myKey, maxValue, serverId,
                PaymentChannelClient.DEFAULT_TIME_WINDOW, null);
    }

    /**
     * Attempts to open a new connection to and open a payment channel with the given host and port, blocking until the
     * connection is open. The server is requested to keep the channel open for {@code timeWindow}
     * seconds. If the server proposes a longer time the channel will be closed.
     *
     * @param server The host/port pair where the server is listening.
     * @param timeoutSeconds The connection timeout and read timeout during initialization. This should be large enough
     *                       to accommodate ECDSA signature operations and network latency.
     * @param wallet The wallet which will be paid from, and where completed transactions will be committed.
     *               Can be encrypted if user key is supplied when needed. Must already have a
     *               {@link org.bitcoinj.protocols.channels.StoredPaymentChannelClientStates} object in its extensions set.
     * @param myKey A freshly generated keypair used for the multisig contract and refund output.
     * @param maxValue The maximum value this channel is allowed to request
     * @param serverId A unique ID which is used to attempt reopening of an existing channel.
     *                 This must be unique to the server, and, if your application is exposing payment channels to some
     *                 API, this should also probably encompass some caller UID to avoid applications opening channels
     *                 which were created by others.
     * @param timeWindow The time in seconds, relative to now, on how long this channel should be kept open.
     * @param userKeySetup Key derived from a user password, used to decrypt myKey, if it is encrypted, during setup.
     *
     * @throws IOException if there's an issue using the network.
     * @throws ValueOutOfRangeException if the balance of wallet is lower than maxValue.
     */
    public PaymentChannelClientConnection(InetSocketAddress server, int timeoutSeconds, Wallet wallet, ECKey myKey,
                                          Coin maxValue, String serverId, final long timeWindow,
                                          @Nullable KeyParameter userKeySetup)
            throws IOException, ValueOutOfRangeException {
        // Glue the object which vends/ingests protobuf messages in order to manage state to the network object which
        // reads/writes them to the wire in length prefixed form.
        //
        // The server id is hashed with an explicit charset: the no-arg String.getBytes() uses the
        // platform default encoding and would derive different channel ids for the same serverId
        // on differently configured hosts.
        channelClient = new PaymentChannelClient(wallet, myKey, maxValue,
                Sha256Hash.of(serverId.getBytes(StandardCharsets.UTF_8)), timeWindow,
                userKeySetup, new PaymentChannelClient.ClientConnection() {
            @Override
            public void sendToServer(Protos.TwoWayChannelMessage msg) {
                wireParser.write(msg);
            }

            @Override
            public void destroyConnection(PaymentChannelCloseException.CloseReason reason) {
                channelOpenFuture.setException(new PaymentChannelCloseException("Payment channel client requested that the connection be closed: " + reason, reason));
                wireParser.closeConnection();
            }

            @Override
            public boolean acceptExpireTime(long expireTime) {
                // One extra minute to compensate for time skew and latency.
                return expireTime <= (timeWindow + Utils.currentTimeSeconds() + 60);
            }

            @Override
            public void channelOpen(boolean wasInitiated) {
                // Initialization finished: disable the read timeout for the channel's lifetime.
                wireParser.setSocketTimeout(0);
                // Inform the API user that we're done and ready to roll.
                channelOpenFuture.set(PaymentChannelClientConnection.this);
            }
        });

        // And glue back in the opposite direction - network to the channelClient.
        wireParser = new ProtobufParser<Protos.TwoWayChannelMessage>(new ProtobufParser.Listener<Protos.TwoWayChannelMessage>() {
            @Override
            public void messageReceived(ProtobufParser<Protos.TwoWayChannelMessage> handler, Protos.TwoWayChannelMessage msg) {
                try {
                    channelClient.receiveMessage(msg);
                } catch (InsufficientMoneyException e) {
                    // We should only get this exception during INITIATE, so channelOpen wasn't called yet.
                    channelOpenFuture.setException(e);
                }
            }

            @Override
            public void connectionOpen(ProtobufParser<Protos.TwoWayChannelMessage> handler) {
                channelClient.connectionOpen();
            }

            @Override
            public void connectionClosed(ProtobufParser<Protos.TwoWayChannelMessage> handler) {
                channelClient.connectionClosed();
                channelOpenFuture.setException(new PaymentChannelCloseException("The TCP socket died",
                        PaymentChannelCloseException.CloseReason.CONNECTION_CLOSED));
            }
        }, Protos.TwoWayChannelMessage.getDefaultInstance(), Short.MAX_VALUE, timeoutSeconds * 1000);

        // Initiate the outbound network connection. We don't need to keep this around. The wireParser object will handle
        // things from here on out.
        new NioClient(server, wireParser, timeoutSeconds * 1000);
    }

    /**
     * <p>Gets a future which returns this when the channel is successfully opened, or throws an exception if there is
     * an error before the channel has reached the open state.</p>
     *
     * <p>After this future completes successfully, you may call
     * {@link #incrementPayment(Coin)} or {@link #incrementPayment(Coin, ByteString, KeyParameter)}
     * to begin paying the server.</p>
     */
    public ListenableFuture<PaymentChannelClientConnection> getChannelOpenFuture() {
        return channelOpenFuture;
    }

    /**
     * Increments the total value which we pay the server.
     *
     * @param size How many satoshis to increment the payment by (note: not the new total).
     * @throws ValueOutOfRangeException If the size is negative or would pay more than this channel's total value
     *                                  ({@link #state()}.getTotalValue())
     * @throws IllegalStateException If the channel has been closed or is not yet open
     *                               (see {@link #getChannelOpenFuture()} for the second)
     */
    public ListenableFuture<PaymentIncrementAck> incrementPayment(Coin size) throws ValueOutOfRangeException, IllegalStateException {
        return channelClient.incrementPayment(size, null, null);
    }

    /**
     * Increments the total value which we pay the server.
     *
     * @param size How many satoshis to increment the payment by (note: not the new total).
     * @param info Information about this payment increment, used to extend this protocol.
     * @param userKey Key derived from a user password, needed for any signing when the wallet is encrypted.
     *                The wallet KeyCrypter is assumed.
     * @throws ValueOutOfRangeException If the size is negative or would pay more than this channel's total value
     *                                  ({@link #state()}.getTotalValue())
     * @throws IllegalStateException If the channel has been closed or is not yet open
     *                               (see {@link #getChannelOpenFuture()} for the second)
     */
    public ListenableFuture<PaymentIncrementAck> incrementPayment(Coin size,
                                                                  @Nullable ByteString info,
                                                                  @Nullable KeyParameter userKey)
            throws ValueOutOfRangeException, IllegalStateException {
        return channelClient.incrementPayment(size, info, userKey);
    }

    /**
     * <p>Gets the {@link PaymentChannelClientState} object which stores the current state of the connection with the
     * server.</p>
     *
     * <p>Note that if you call any methods which update state directly the server will not be notified and channel
     * initialization logic in the connection may fail unexpectedly.</p>
     */
    public PaymentChannelClientState state() {
        return channelClient.state();
    }

    /**
     * Closes the connection, notifying the server it should settle the channel by broadcasting the most recent payment
     * transaction.
     */
    public void settle() {
        // Shutdown is a little complicated.
        //
        // This call will cause the CLOSE message to be written to the wire, and then the destroyConnection() method that
        // we defined above will be called, which in turn will call wireParser.closeConnection(), which in turn will invoke
        // NioClient.closeConnection(), which will then close the socket triggering interruption of the network
        // thread it had created. That causes the background thread to die, which on its way out calls
        // ProtobufParser.connectionClosed which invokes the connectionClosed method we defined above which in turn
        // then configures the open-future correctly and closes the state object. Phew!
        try {
            channelClient.settle();
        } catch (IllegalStateException e) {
            // Already closed...oh well; settle() is best-effort by design.
        }
    }

    /**
     * Disconnects the network connection but doesn't request the server to settle the channel first (literally just
     * unplugs the network socket and marks the stored channel state as inactive).
     */
    public void disconnectWithoutSettlement() {
        wireParser.closeConnection();
    }
}
| |
/**
* Copyright (C) 2012-2013 Selventa, Inc.
*
* This file is part of the OpenBEL Framework.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The OpenBEL Framework is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the OpenBEL Framework. If not, see <http://www.gnu.org/licenses/>.
*
* Additional Terms under LGPL v3:
*
* This license does not authorize you and you are prohibited from using the
* name, trademarks, service marks, logos or similar indicia of Selventa, Inc.,
* or, in the discretion of other licensors or authors of the program, the
* name, trademarks, service marks, logos or similar indicia of such authors or
* licensors, in any marketing or advertising materials relating to your
* distribution of the program or any covered product. This restriction does
* not waive or limit your obligation to keep intact all copyright notices set
* forth in the program as delivered to you.
*
* If you distribute the program in whole or in part, or any modified version
* of the program, and you assume contractual liability to the recipient with
* respect to the program or modified version, then you will indemnify the
* authors and licensors of the program for any liabilities that these
* contractual assumptions directly impose on those licensors and authors.
*/
package org.openbel.framework.api.internal;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.openbel.framework.common.BELUtilities;
import org.openbel.framework.common.InvalidArgument;
import org.openbel.framework.common.cfg.SystemConfiguration;
import org.openbel.framework.core.df.AbstractJdbcDAO;
import org.openbel.framework.core.df.DBConnection;
import org.openbel.framework.api.AnnotationFilterCriteria;
import org.openbel.framework.api.BelDocumentFilterCriteria;
import org.openbel.framework.api.CitationFilterCriteria;
import org.openbel.framework.api.Filter;
import org.openbel.framework.api.FilterCriteria;
import org.openbel.framework.api.KamStoreObjectImpl;
import org.openbel.framework.api.NamespaceFilterCriteria;
import org.openbel.framework.api.RelationshipTypeFilterCriteria;
/**
* KAMCatalogDao provides a JDBC-driven DAO for accessing the KAM catalog
* of the KAMStore.
*
* @author Julian Ray {@code jray@selventa.com}
*/
public final class KAMCatalogDao extends AbstractJdbcDAO {
/**
 * Selects every KAM catalog entry, ordered by KAM name. The {@code @}
 * placeholder is substituted with the catalog schema name at statement
 * preparation time.
 */
private static final String SELECT_KAM_CATALOG_SQL =
        "SELECT kam_id, name, description, last_compiled, schema_name "
                + "FROM @.kam ORDER BY name";
/**
 * Selects a single KAM catalog entry by KAM name.
 */
private static final String SELECT_KAM_BY_NAME_SQL =
        "SELECT kam_id, name, description, last_compiled, schema_name "
                + "FROM @.kam WHERE name = ?";
/**
 * Selects a single KAM catalog entry by KAM id.
 */
private static final String SELECT_KAM_BY_ID_SQL =
        "SELECT kam_id, name, description, last_compiled, schema_name "
                + "FROM @.kam WHERE kam_id = ?";
/**
 * Inserts a new KAM into the KAM catalog.
 */
private static final String INSERT_KAM_SQL =
        "INSERT INTO @.kam(name, description, last_compiled, schema_name) "
                + "VALUES(?, ?, ?, ?)";
/**
 * Updates an existing KAM catalog row, matched by KAM id.
 */
private static final String UPDATE_KAM_SQL =
        "UPDATE @.kam SET name = ?, description = ?, last_compiled = ?, schema_name = ? "
                + "WHERE kam_id = ?";
/**
 * Deletes a KAM catalog row, matched by KAM id.
 */
private static final String DELETE_KAM_SQL =
        "DELETE FROM @.kam WHERE kam_id = ?";
// Prefix used when generating schema names for newly-cataloged KAMs.
private final String kamSchemaPrefix;
/**
 * Creates a KAMCatalogDao from the Jdbc {@link Connection} that will
 * be used to load the KAM.
 *
 * @param dbc {@link DBConnection}, the database connection which must be
 * non-null and already open for sql execution
 * @param kamCatalogSchema {@link String}, the kam catalog schema, which
 * must be non-blank
 * @param kamSchemaPrefix {@link String}, the prefix used when generating
 * KAM schema names, which must be non-blank
 * @throws InvalidArgument Thrown if {@code dbc} is null or closed, or if
 * {@code kamCatalogSchema} or {@code kamSchemaPrefix} is blank
 * @throws SQLException Thrown if a sql error occurred while checking the
 * connection state
 */
public KAMCatalogDao(DBConnection dbc, String kamCatalogSchema,
        String kamSchemaPrefix) throws SQLException {
    // The null check is performed before the superclass constructor runs;
    // previously it only ran afterwards, when super() had already used dbc.
    super(requireConnection(dbc), kamCatalogSchema);
    if (StringUtils.isBlank(kamCatalogSchema)) {
        throw new InvalidArgument("kamCatalogSchema is not set");
    }
    if (StringUtils.isBlank(kamSchemaPrefix)) {
        throw new InvalidArgument("kamSchemaPrefix is not set");
    }
    if (dbc.getConnection().isClosed()) {
        throw new InvalidArgument("dbc is closed and cannot be used");
    }
    this.kamSchemaPrefix = kamSchemaPrefix;
}

/**
 * Validates that the supplied connection is non-null so it can be passed
 * safely to the superclass constructor.
 *
 * @param dbc the connection to validate
 * @return {@code dbc}, unchanged
 * @throws InvalidArgument Thrown if {@code dbc} is null
 */
private static DBConnection requireConnection(DBConnection dbc) {
    if (dbc == null) {
        throw new InvalidArgument("dbc is null");
    }
    return dbc;
}
/**
 * Retrieves all {@link KamInfo} entries from the KAM catalog database,
 * ordered by KAM name.
 *
 * @return the {@link KamInfo} objects in the catalog; an empty list if
 * the catalog holds no KAMs
 * @throws SQLException Thrown if a SQL error occurred while retrieving
 * the {@link KamInfo} objects from the kam catalog.
 */
public List<KamInfo> getCatalog() throws SQLException {
    List<KamInfo> list = new ArrayList<KamInfo>();
    ResultSet rset = null;
    try {
        PreparedStatement ps = getPreparedStatement(SELECT_KAM_CATALOG_SQL);
        rset = ps.executeQuery();
        while (rset.next()) {
            list.add(getKamInfo(rset));
        }
    } finally {
        // SQLExceptions propagate unchanged; the previous catch-and-rethrow
        // was a no-op and has been removed.
        close(rset);
    }
    return list;
}
/**
 * Retrieves a {@link KamInfo} object, from the KAM catalog database,
 * using the KAM's name.
 *
 * @param kamName {@link String}, the KAM name
 * @return {@link KamInfo}, the queried KAM or null if no KAM is found by
 * the <tt>kamName</tt>
 * @throws SQLException Thrown if a SQL error occurred while retrieving
 * the {@link KamInfo} object by kam name.
 */
public KamInfo getKamInfoByName(String kamName) throws SQLException {
    KamInfo kamInfo = null;
    ResultSet rset = null;
    try {
        PreparedStatement ps = getPreparedStatement(SELECT_KAM_BY_NAME_SQL);
        ps.setString(1, kamName);
        rset = ps.executeQuery();
        if (rset.next()) {
            kamInfo = getKamInfo(rset);
        }
    } finally {
        // SQLExceptions propagate unchanged; the previous catch-and-rethrow
        // was a no-op and has been removed.
        close(rset);
    }
    return kamInfo;
}
/**
 * Retrieves a {@link KamInfo} object, from the KAM catalog database,
 * using the KAM's id.
 *
 * @param id the KAM id
 * @return {@link KamInfo}, the queried KAM or null if no KAM is found by
 * that <tt>id</tt>
 * @throws SQLException Thrown if a SQL error occurred while retrieving
 * the {@link KamInfo} object by kam id.
 */
public KamInfo getKamInfoById(final int id) throws SQLException {
    KamInfo kamInfo = null;
    ResultSet rset = null;
    try {
        PreparedStatement ps = getPreparedStatement(SELECT_KAM_BY_ID_SQL);
        ps.setInt(1, id);
        rset = ps.executeQuery();
        if (rset.next()) {
            kamInfo = getKamInfo(rset);
        }
    } finally {
        // SQLExceptions propagate unchanged; the previous catch-and-rethrow
        // was a no-op and has been removed.
        close(rset);
    }
    return kamInfo;
}
/**
 * Saves the {@link KamDbObject} object to the KAM catalog database.
 *
 * <p>
 * If the KAM doesn't exist then create it, otherwise update the record's
 * information. This method looks for an existing entry first by id, if
 * that is not <tt>null</tt>, then by name. It can be used to update the
 * name of the record, but it cannot be used to update the id.
 * </p>
 *
 * @param updated {@link KamDbObject}, the kam data to save to the kam
 * catalog, which cannot be null and must contain a non-null name and
 * description
 * @throws SQLException Thrown if a SQL error occurred saving the kam info
 * to the kam catalog
 * @throws InvalidArgument Thrown if <tt>updated</tt> is null or contains
 * a null name or description.
 */
public void saveToCatalog(KamDbObject updated) throws SQLException {
    if (updated == null) {
        throw new InvalidArgument("kamInfo", updated);
    }
    if (updated.getName() == null) {
        throw new InvalidArgument("kamInfo contains a null name");
    }
    if (updated.getDescription() == null) {
        throw new InvalidArgument("kamInfo contains a null description");
    }

    // First check to see if the KAM already exists in the Catalog. The
    // existence of KAMs is checked first by id then by name.
    KamInfo originalInfo = null;
    final Integer updatedId = updated.getId();
    if (updatedId != null) {
        originalInfo = getKamInfoById(updatedId.intValue());
    }
    if (originalInfo == null) {
        originalInfo = getKamInfoByName(updated.getName());
    }

    // The previous try/catch around the statements only re-threw the
    // SQLException and has been removed; exceptions propagate unchanged.
    if (null != originalInfo) {
        // Existing KAM: keep its schema name and update the catalog row
        // in place (the id never changes).
        KamDbObject original = originalInfo.getKamDbObject();
        updated.setSchemaName(original.getSchemaName());
        PreparedStatement update = getPreparedStatement(UPDATE_KAM_SQL);
        update.setString(1, updated.getName());
        update.setString(2, updated.getDescription());
        update.setTimestamp(3, new Timestamp(updated.getLastCompiled()
                .getTime()));
        update.setString(4, original.getSchemaName());
        update.setInt(5, original.getId());
        update.execute();
    } else {
        // New KAM: generate the next available schema name and insert.
        String schemaName = findNextSchemaName();
        updated.setSchemaName(schemaName);
        PreparedStatement insert = getPreparedStatement(INSERT_KAM_SQL,
                Statement.RETURN_GENERATED_KEYS);
        insert.setString(1, updated.getName());
        insert.setString(2, updated.getDescription());
        insert.setTimestamp(3, new Timestamp(updated.getLastCompiled()
                .getTime()));
        insert.setString(4, updated.getSchemaName());
        insert.execute();
    }
}
/**
 * Deletes the {@link KamInfo} object with a provided name from the KAM
 * catalog database. A null name, or a name not present in the catalog,
 * is silently ignored.
 *
 * @param kamName the name of a kam info object to delete from the kam
 * catalog; may be null (no-op)
 * @throws SQLException Thrown if a SQL error occurred deleting the kam
 * info object from the kam catalog
 */
public void deleteFromCatalog(final String kamName) throws SQLException {
    if (kamName == null) {
        // nothing to delete
        return;
    }
    KamInfo kamInfo = getKamInfoByName(kamName);
    if (kamInfo != null) {
        // The previous catch-and-rethrow of SQLException was a no-op and
        // has been removed; exceptions propagate unchanged.
        PreparedStatement ps = getPreparedStatement(DELETE_KAM_SQL);
        ps.setInt(1, kamInfo.getId());
        ps.execute();
    }
}
/**
 * Find the next available schema name in the KAM catalog based on the
 * {@link SystemConfiguration#getKamSchemaPrefix() KAM schema prefix}:
 * the prefix followed by one plus the highest numeric suffix currently
 * in use.
 *
 * @return the next available schema name
 * @throws SQLException Thrown if the SQL query to the KAM catalog failed
 */
private String findNextSchemaName() throws SQLException {
    ResultSet rset = null;
    try {
        PreparedStatement ps = getPreparedStatement(SELECT_KAM_CATALOG_SQL);
        rset = ps.executeQuery();
        int maxId = 0;
        while (rset.next()) {
            // read schemaName value (column 5), skip if it does not look
            // like a schema prefix
            String schema = rset.getString(5);
            if (schema == null || !schema.startsWith(kamSchemaPrefix)) {
                continue;
            }
            // extract the schema number; startsWith above guarantees the
            // prefix is at index 0, so no indexOf scan is needed
            String suffix = schema.substring(kamSchemaPrefix.length());
            // guard against an empty suffix: commons-lang 2.x
            // StringUtils.isNumeric("") returns true, which would make
            // Integer.parseInt throw NumberFormatException
            if (!suffix.isEmpty() && StringUtils.isNumeric(suffix)) {
                maxId = Math.max(Integer.parseInt(suffix), maxId);
            }
        }
        return kamSchemaPrefix + (++maxId);
    } finally {
        // SQLExceptions propagate unchanged; the previous catch-and-rethrow
        // was a no-op and has been removed.
        close(rset);
    }
}
/**
 * Builds a {@link KamInfo} from the current row of a KAM catalog result
 * set. Columns are read positionally: kam_id, name, description,
 * last_compiled, schema_name.
 *
 * @param rset the result set, already positioned on a catalog row
 * @return the {@link KamInfo} for the current row
 * @throws SQLException Thrown if reading a column fails
 */
private KamInfo getKamInfo(ResultSet rset) throws SQLException {
    final Integer kamId = rset.getInt(1);
    final String name = rset.getString(2);
    final String description = rset.getString(3);
    // last_compiled may be SQL NULL; map that to a null Date rather than
    // the epoch
    final Timestamp compiledTs = rset.getTimestamp(4);
    final Date lastCompiled = (compiledTs == null) ? null : new Date(compiledTs.getTime());
    final String schemaName = rset.getString(5);
    return new KamInfo(new KamDbObject(kamId, name, description,
            lastCompiled, schemaName));
}
/**
 * An immutable, in-memory representation of a single row in the KAM
 * catalog. Equality and hashing are derived solely from the KAM id.
 *
 * @author julianjray
 */
public static class KamInfo extends KamStoreObjectImpl {
    // Backing catalog row; never mutated after construction.
    private final KamDbObject kamDb;
    /**
     * Precalculated hash code; safe to compute once because the object is
     * immutable.
     */
    private final int hashCode;

    public KamInfo(KamDbObject kamDb) {
        super(kamDb.getId()); // kamDb must not be null
        this.kamDb = kamDb;
        if (kamDb.getId() == null) {
            throw new InvalidArgument(
                    "KamDbObject and KamDbObject Id cannot be null.");
        }
        this.hashCode = generateHashCode();
    }

    /** @return the underlying catalog row data */
    public KamDbObject getKamDbObject() {
        return kamDb;
    }

    private int generateHashCode() {
        return 31 * getId().hashCode();
    }

    /**
     * {@inheritDoc} Two {@code KamInfo} instances are equal when they
     * carry the same KAM id.
     */
    @Override
    public boolean equals(Object o) {
        if (!(o instanceof KamInfo)) {
            return false;
        }
        return getId().equals(((KamInfo) o).getId());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int hashCode() {
        return hashCode;
    }

    /** @return the database schema holding this KAM's tables */
    public String getSchemaName() {
        return kamDb.getSchemaName();
    }

    /** @return when this KAM was last compiled, or null if not recorded */
    public Date getLastCompiled() {
        return kamDb.getLastCompiled();
    }

    /** @return the KAM's free-text description */
    public String getDescription() {
        return kamDb.getDescription();
    }

    /** @return the KAM's name */
    public String getName() {
        return kamDb.getName();
    }

    /**
     * @return a new, empty {@link AnnotationFilter} bound to this KAM
     * @throws InvalidArgument
     */
    public AnnotationFilter createAnnotationFilter() throws InvalidArgument {
        return new AnnotationFilter(this);
    }

    /**
     * @return a new, empty {@link NamespaceFilter} bound to this KAM
     * @throws InvalidArgument
     */
    public NamespaceFilter createNamespaceFilter() throws InvalidArgument {
        return new NamespaceFilter(this);
    }

    /**
     * @return a new, empty {@link KamFilter} bound to this KAM
     * @throws InvalidArgument
     */
    public KamFilter createKamFilter() throws InvalidArgument {
        return new KamFilter(this);
    }
}
/**
 * A {@link Filter} restricted to annotation-based criteria. Instances are
 * obtained via {@link KamInfo#createAnnotationFilter()}.
 *
 * @author julianjray
 */
public static class AnnotationFilter extends Filter {
    private AnnotationFilter(KamInfo kamInfo) {
        super(kamInfo);
    }

    /**
     * Adds an {@link AnnotationFilterCriteria} to this filter.
     *
     * @param criteria the criteria to add
     */
    public void add(AnnotationFilterCriteria criteria) {
        getFilterCriteria().add(criteria);
    }
}
/**
* @author julianjray
*/
public static class NamespaceFilter extends Filter {
private NamespaceFilter(KamInfo kamInfo) {
super(kamInfo);
}
/**
* Adds a new NamespaceFilterCritera to the Filter
*
* @param criteria
*/
public void add(NamespaceFilterCriteria criteria) {
getFilterCriteria().add(criteria);
}
}
/**
* @author julianjray
*/
public static class CitationFilter extends Filter {
private CitationFilter(KamInfo kamInfo) {
super(kamInfo);
}
/**
* Adds a new CitationFilterCriteria to the Filter
*
* @param citationFilterCriteria
*/
public void add(CitationFilterCriteria citationFilterCriteria) {
getFilterCriteria().add(citationFilterCriteria);
}
}
/**
* @author julianjray
*/
public static class KamFilter extends Filter {
private KamFilter(KamInfo kamInfo) {
super(kamInfo);
}
public void add(CitationFilterCriteria citationFilterCriteria) {
getFilterCriteria().add(citationFilterCriteria);
}
/**
* Adds a new AnnotationFilterCriteria to the Filter
*
* @param annotationFilterCriteria
*/
public void add(AnnotationFilterCriteria annotationFilterCriteria) {
getFilterCriteria().add(annotationFilterCriteria);
}
/**
* Adds a new RelationshipTypeFilterCriteria to the Filter
*
* @param relationshipTypeFilterCriteria
*/
public void add(
RelationshipTypeFilterCriteria relationshipTypeFilterCriteria) {
getFilterCriteria().add(relationshipTypeFilterCriteria);
}
/**
* Adds a new BelDocumentFilterCriteria to the Filter
*
* @param belDocumentFilterCriteria
*/
public void add(BelDocumentFilterCriteria belDocumentFilterCriteria) {
getFilterCriteria().add(belDocumentFilterCriteria);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
} else if (obj == null) {
return false;
} else if (!(obj instanceof KamFilter)) {
return false;
} else {
KamFilter other = (KamFilter) obj;
return (BELUtilities.equals(getKamInfo(), other.getKamInfo()) && BELUtilities
.equals(getFilterCriteria(), other.getFilterCriteria()));
}
}
@Override
public int hashCode() {
final int prime = 31;
int hash = 0;
int criteriaHash = 0;
final List<FilterCriteria> criteria = getFilterCriteria();
if (criteria != null) {
for (FilterCriteria criterion : criteria) {
criteriaHash ^= criterion.hashCode();
}
}
hash += getKamInfo().hashCode();
hash *= prime;
hash += criteriaHash;
return hash;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.main;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.function.Function;
import org.apache.camel.CamelContext;
import org.apache.camel.Component;
import org.apache.camel.ExtendedCamelContext;
import org.apache.camel.PropertyBindingException;
import org.apache.camel.spi.ExtendedPropertyConfigurerGetter;
import org.apache.camel.spi.PropertyConfigurer;
import org.apache.camel.support.PropertyBindingSupport;
import org.apache.camel.support.service.ServiceHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.OrderedProperties;
import org.apache.camel.util.StringHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public final class MainHelper {
private static final Logger LOG = LoggerFactory.getLogger(MainHelper.class);
private MainHelper() {
}
public static String toEnvVar(String name) {
return name.toUpperCase(Locale.US).replaceAll("[^\\w]", "-").replace('-', '_');
}
public static Optional<String> lookupPropertyFromSysOrEnv(String name) {
String answer = System.getProperty(name);
if (answer == null) {
answer = System.getenv(toEnvVar(name));
}
return Optional.ofNullable(answer);
}
public static Properties loadEnvironmentVariablesAsProperties(String[] prefixes) {
Properties answer = new OrderedProperties();
if (prefixes == null || prefixes.length == 0) {
return answer;
}
for (String prefix : prefixes) {
final String pk = prefix.toUpperCase(Locale.US).replaceAll("[^\\w]", "-");
final String pk2 = pk.replace('-', '_');
System.getenv().forEach((k, v) -> {
k = k.toUpperCase(Locale.US);
if (k.startsWith(pk) || k.startsWith(pk2)) {
String key = k.toLowerCase(Locale.ENGLISH).replace('_', '.');
answer.put(key, v);
}
});
}
return answer;
}
public static Properties loadJvmSystemPropertiesAsProperties(String[] prefixes) {
Properties answer = new OrderedProperties();
if (prefixes == null || prefixes.length == 0) {
return answer;
}
for (String prefix : prefixes) {
final String pk = prefix.toUpperCase(Locale.US).replaceAll("[^\\w]", "-");
final String pk2 = pk.replace('-', '.');
System.getProperties().forEach((k, v) -> {
String key = k.toString().toUpperCase(Locale.US);
if (key.startsWith(pk) || key.startsWith(pk2)) {
answer.put(k.toString(), v);
}
});
}
return answer;
}
public static String optionKey(String key) {
// as we ignore case for property names we should use keys in same case and without dashes
key = StringHelper.dashToCamelCase(key);
return key;
}
public static boolean setPropertiesOnTarget(CamelContext context, Object target, Object source) throws Exception {
ObjectHelper.notNull(context, "context");
ObjectHelper.notNull(target, "target");
boolean rc = false;
PropertyConfigurer targetConfigurer = null;
if (target instanceof Component) {
// the component needs to be initialized to have the configurer ready
ServiceHelper.initService(target);
targetConfigurer = ((Component) target).getComponentPropertyConfigurer();
}
if (targetConfigurer == null) {
String name = target.getClass().getName();
// see if there is a configurer for it
targetConfigurer = context.adapt(ExtendedCamelContext.class)
.getConfigurerResolver().resolvePropertyConfigurer(name, context);
}
PropertyConfigurer sourceConfigurer = null;
if (source instanceof Component) {
// the component needs to be initialized to have the configurer ready
ServiceHelper.initService(source);
sourceConfigurer = ((Component) source).getComponentPropertyConfigurer();
}
if (sourceConfigurer == null) {
String name = source.getClass().getName();
// see if there is a configurer for it
sourceConfigurer = context.adapt(ExtendedCamelContext.class)
.getConfigurerResolver().resolvePropertyConfigurer(name, context);
}
if (targetConfigurer != null && sourceConfigurer instanceof ExtendedPropertyConfigurerGetter) {
ExtendedPropertyConfigurerGetter getter = (ExtendedPropertyConfigurerGetter) sourceConfigurer;
for (String key : getter.getAllOptions(source).keySet()) {
Object value = getter.getOptionValue(source, key, true);
if (value != null) {
rc |= targetConfigurer.configure(context, target, key, value, true);
}
}
}
return rc;
}
public static boolean setPropertiesOnTarget(
CamelContext context, Object target, Map<String, Object> properties,
String optionPrefix, boolean failIfNotSet, boolean ignoreCase,
Map<String, String> autoConfiguredProperties) {
ObjectHelper.notNull(context, "context");
ObjectHelper.notNull(target, "target");
ObjectHelper.notNull(properties, "properties");
boolean rc = false;
PropertyConfigurer configurer = null;
if (target instanceof Component) {
// the component needs to be initialized to have the configurer ready
ServiceHelper.initService(target);
configurer = ((Component) target).getComponentPropertyConfigurer();
}
if (configurer == null) {
String name = target.getClass().getName();
// see if there is a configurer for it (use bootstrap)
configurer = context.adapt(ExtendedCamelContext.class)
.getBootstrapConfigurerResolver().resolvePropertyConfigurer(name, context);
}
try {
// keep a reference of the original keys
Map<String, Object> backup = new LinkedHashMap<>(properties);
rc = PropertyBindingSupport.build()
.withMandatory(failIfNotSet)
.withRemoveParameters(true)
.withConfigurer(configurer)
.withIgnoreCase(ignoreCase)
.bind(context, target, properties);
for (Map.Entry<String, Object> entry : backup.entrySet()) {
if (entry.getValue() != null && !properties.containsKey(entry.getKey())) {
String prefix = optionPrefix;
if (prefix != null && !prefix.endsWith(".")) {
prefix = "." + prefix;
}
LOG.debug("Configured property: {}{}={} on bean: {}", prefix, entry.getKey(), entry.getValue(), target);
autoConfiguredProperties.put(prefix + entry.getKey(), entry.getValue().toString());
}
}
} catch (PropertyBindingException e) {
String key = e.getOptionKey();
if (key == null) {
String prefix = e.getOptionPrefix();
if (prefix != null && !prefix.endsWith(".")) {
prefix = "." + prefix;
}
key = prefix != null
? prefix + "." + e.getPropertyName()
: e.getPropertyName();
}
if (failIfNotSet) {
// enrich the error with more precise details with option prefix and key
throw new PropertyBindingException(
e.getTarget(), e.getPropertyName(), e.getValue(), optionPrefix, key, e.getCause());
} else {
LOG.debug("Error configuring property (" + key + ") with name: " + e.getPropertyName() + ") on bean: " + target
+ " with value: " + e.getValue() + ". This exception is ignored as failIfNotSet=false.",
e);
}
}
return rc;
}
public static void computeProperties(
String keyPrefix, String key, Properties prop, Map<PropertyOptionKey, Map<String, Object>> properties,
Function<String, Iterable<Object>> supplier) {
if (key.startsWith(keyPrefix)) {
// grab name
final int dot = key.indexOf('.', keyPrefix.length());
final String name = dot == -1 ? key.substring(keyPrefix.length()) : key.substring(keyPrefix.length(), dot);
// enabled is a virtual property
if ("enabled".equals(name)) {
return;
}
// skip properties as its already keyPrefix earlier
if ("properties".equals(name)) {
return;
}
// determine if the service is enabled or not by taking into account two options:
//
// 1. ${keyPrefix}.enabled = true|false
// 2. ${keyPrefix}.${name}.enabled = true|false
//
// The option [2] has the higher priority so as example:
//
// camel.component.enabled = false
// camel.component.seda.enabled = true
//
// enables auto configuration of the seda component only
if (!isServiceEnabled(keyPrefix, name, prop)) {
return;
}
String prefix = dot == -1 ? "" : key.substring(0, dot + 1);
String option = dot == -1 ? "" : key.substring(dot + 1);
String value = prop.getProperty(key, "");
// enabled is a virtual property
if ("enabled".equalsIgnoreCase(option)) {
return;
}
validateOptionAndValue(key, option, value);
Iterable<Object> targets = supplier.apply(name);
for (Object target : targets) {
PropertyOptionKey pok = new PropertyOptionKey(target, prefix);
Map<String, Object> values = properties.computeIfAbsent(pok, k -> new LinkedHashMap<>());
// we ignore case for property keys (so we should store them in canonical style
values.put(optionKey(option), value);
}
}
}
public static boolean isServiceEnabled(String prefix, String name, Properties properties) {
ObjectHelper.notNull(prefix, "prefix");
ObjectHelper.notNull(name, "name");
ObjectHelper.notNull(properties, "properties");
if (!prefix.endsWith(".")) {
prefix = prefix + ".";
}
final String group = properties.getProperty(prefix + "enabled", "true");
final String item = properties.getProperty(prefix + name + ".enabled", group);
return Boolean.parseBoolean(item);
}
public static void validateOptionAndValue(String key, String option, String value) {
if (ObjectHelper.isEmpty(option)) {
throw new IllegalArgumentException("Error configuring property: " + key + " because option is empty");
}
if (ObjectHelper.isEmpty(value)) {
throw new IllegalArgumentException("Error configuring property: " + key + " because value is empty");
}
}
}
| |
package gov.nih.nci.evs.api.controller;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
import org.springframework.boot.test.json.JacksonTester;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Sets;
import gov.nih.nci.evs.api.model.Concept;
import gov.nih.nci.evs.api.model.Terminology;
import gov.nih.nci.evs.api.properties.TestProperties;
/**
 * Integration tests for ContentController organized around proper handling of
 * property qualifiers. This is based on work from EVSRESTAPI-69.
 *
 * <p>Requires a running application context with the "ncit" terminology
 * loaded; requests go through {@link MockMvc}.
 */
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
@AutoConfigureMockMvc
public class QualifierTests {

  /** The logger. */
  private static final Logger log = LoggerFactory.getLogger(QualifierTests.class);

  /** The mvc. */
  @Autowired
  private MockMvc mvc;

  /** The test properties. */
  @Autowired
  TestProperties testProperties;

  /** The object mapper. */
  private ObjectMapper objectMapper;

  /** The base url. */
  private String baseUrl = "";

  /** The meta base url. */
  private String metaBaseUrl = "";

  /**
   * Sets up the object mapper and url prefixes before each test.
   */
  @Before
  public void setUp() {
    objectMapper = new ObjectMapper();
    JacksonTester.initFields(this, objectMapper);
    baseUrl = "/api/v1/concept";
    metaBaseUrl = "/api/v1/metadata";
  }

  /**
   * Performs a GET expected to return 200 and returns the response body.
   *
   * @param url the url to fetch
   * @return the response content
   * @throws Exception the exception
   */
  private String getOkContent(final String url) throws Exception {
    log.info("Testing url - " + url);
    final MvcResult result = mvc.perform(get(url)).andExpect(status().isOk()).andReturn();
    final String content = result.getResponse().getContentAsString();
    log.info(" content = " + content);
    return content;
  }

  /**
   * Performs a GET expected to return 404 and logs the response body.
   *
   * @param url the url to fetch
   * @throws Exception the exception
   */
  private void expectNotFound(final String url) throws Exception {
    log.info("Testing url - " + url);
    final MvcResult result = mvc.perform(get(url)).andExpect(status().isNotFound()).andReturn();
    log.info(" content = " + result.getResponse().getContentAsString());
  }

  /**
   * Fetches and deserializes a single concept.
   *
   * @param url the url to fetch
   * @return the concept
   * @throws Exception the exception
   */
  private Concept getConcept(final String url) throws Exception {
    return new ObjectMapper().readValue(getOkContent(url), Concept.class);
  }

  /**
   * Fetches and deserializes a concept list.
   *
   * @param url the url to fetch
   * @return the concept list
   * @throws Exception the exception
   */
  private List<Concept> getConceptList(final String url) throws Exception {
    return new ObjectMapper().readValue(getOkContent(url), new TypeReference<List<Concept>>() {
      // n/a
    });
  }

  /**
   * Test alt definition qualifiers.
   *
   * @throws Exception the exception
   */
  @Test
  public void testAltDefinitionQualifiers() throws Exception {

    // Test with "by code"
    final Concept concept = getConcept(baseUrl + "/ncit/C101669?include=definitions");
    assertThat(concept).isNotNull();
    assertThat(concept.getCode()).isEqualTo("C101669");
    assertThat(concept.getDefinitions().size()).isGreaterThan(0);
    // At least one ALT_DEFINITION carries qualifiers
    assertThat(concept.getDefinitions().stream()
        .filter(d -> "ALT_DEFINITION".equals(d.getType()) && d.getQualifiers().size() > 0).count())
            .isGreaterThan(0);
    // ... and at least one of those qualifiers is an "attribution"
    assertThat(concept.getDefinitions().stream()
        .filter(d -> "ALT_DEFINITION".equals(d.getType()) && d.getQualifiers().size() > 0)
        .flatMap(d -> d.getQualifiers().stream()).filter(q -> q.getType().equals("attribution"))
        .count()).isGreaterThan(0);
  }

  /**
   * Test definition qualifiers.
   *
   * @throws Exception the exception
   */
  @Test
  public void testDefinitionQualifiers() throws Exception {

    // Test with "by code"
    final Concept concept = getConcept(baseUrl + "/ncit/C101046?include=definitions");
    assertThat(concept).isNotNull();
    assertThat(concept.getCode()).isEqualTo("C101046");
    assertThat(concept.getDefinitions().size()).isGreaterThan(0);
    // At least one non-ALT definition carries qualifiers
    assertThat(concept.getDefinitions().stream()
        .filter(d -> !"ALT_DEFINITION".equals(d.getType()) && d.getQualifiers().size() > 0).count())
            .isGreaterThan(0);
    // ... and at least one of those qualifiers is an "attribution"
    assertThat(concept.getDefinitions().stream()
        .filter(d -> !"ALT_DEFINITION".equals(d.getType()) && d.getQualifiers().size() > 0)
        .flatMap(d -> d.getQualifiers().stream()).filter(q -> q.getType().equals("attribution"))
        .count()).isGreaterThan(0);
  }

  /**
   * Test full syn.
   *
   * @throws Exception the exception
   */
  @Test
  public void testFullSyn() throws Exception {

    // Test with "by code"
    final Concept concept = getConcept(baseUrl + "/ncit/C100065?include=synonyms");
    assertThat(concept).isNotNull();
    assertThat(concept.getCode()).isEqualTo("C100065");
    assertThat(concept.getSynonyms().size()).isGreaterThan(0);
    // At least one synonym has all FULL_SYN qualifier fields populated
    assertThat(concept.getSynonyms().stream().filter(s -> s.getSource() != null
        && s.getSubSource() != null && s.getCode() != null && s.getTermGroup() != null).count())
            .isGreaterThan(0);
  }

  /**
   * Test mappings.
   *
   * @throws Exception the exception
   */
  @Test
  public void testMappings() throws Exception {

    // Test with "by code"
    final Concept concept = getConcept(baseUrl + "/ncit/C101034?include=maps");
    assertThat(concept).isNotNull();
    assertThat(concept.getCode()).isEqualTo("C101034");
    assertThat(concept.getMaps().size()).isGreaterThan(0);
    // At least one map has all Maps_To qualifier fields populated
    assertThat(concept.getMaps().stream()
        .filter(m -> m.getType() != null && m.getTargetCode() != null
            && m.getTargetTerminologyVersion() != null && m.getTargetTermGroup() != null
            && m.getTargetTerminology() != null)
        .count()).isGreaterThan(0);
  }

  /**
   * Test go annotation.
   *
   * @throws Exception the exception
   */
  @Test
  public void testGoAnnotation() throws Exception {

    // Test with "by code"
    final Concept concept = getConcept(baseUrl + "/ncit/C19799?include=properties");
    assertThat(concept).isNotNull();
    assertThat(concept.getCode()).isEqualTo("C19799");
    assertThat(concept.getProperties().size()).isGreaterThan(0);
    assertThat(concept.getProperties().stream()
        .filter(p -> p.getType().contentEquals("GO_Annotation")).count()).isGreaterThan(0);
    // Each of the four GO_Annotation qualifier types must be present
    for (final String qualifierType : new String[] {
        "go-evi", "go-id", "go-source", "source-date"
    }) {
      assertThat(
          concept.getProperties().stream().filter(p -> p.getType().contentEquals("GO_Annotation"))
              .flatMap(p -> p.getQualifiers().stream())
              .filter(q -> q.getType().contentEquals(qualifierType)).count()).isGreaterThan(0);
    }
  }

  /**
   * Test no property qualifier overlap.
   *
   * @throws Exception the exception
   */
  @Test
  public void testNoPropertyQualifierOverlap() throws Exception {

    // Get properties
    final List<Concept> list1 = getConceptList(metaBaseUrl + "/ncit/properties");
    assertThat(list1).isNotEmpty();

    // Get qualifiers
    final List<Concept> list2 = getConceptList(metaBaseUrl + "/ncit/qualifiers");
    assertThat(list2).isNotEmpty();

    // list1 and list2 should not have any codes in common
    final Set<String> codes1 = list1.stream().map(c -> c.getCode()).collect(Collectors.toSet());
    final Set<String> codes2 = list2.stream().map(c -> c.getCode()).collect(Collectors.toSet());
    assertThat(Sets.intersection(codes1, codes2).size()).isEqualTo(0);

    // list1 and list2 should not have any names in common
    final Set<String> names1 = list1.stream().map(c -> c.getName()).collect(Collectors.toSet());
    final Set<String> names2 = list2.stream().map(c -> c.getName()).collect(Collectors.toSet());
    assertThat(Sets.intersection(names1, names2).size()).isEqualTo(0);
  }

  /**
   * Test no property qualifier overlap via lookup.
   *
   * @throws Exception the exception
   */
  @Test
  public void testNoPropertyQualifierOverlapViaLookup() throws Exception {

    // Get properties
    final List<Concept> list1 = getConceptList(metaBaseUrl + "/ncit/properties");
    assertThat(list1).isNotEmpty();

    // Get qualifiers
    final List<Concept> list2 = getConceptList(metaBaseUrl + "/ncit/qualifiers");
    assertThat(list2).isNotEmpty();

    // Take each property and look it up as a qualifier, expect 404
    for (final Concept property : list1) {
      expectNotFound(metaBaseUrl + "/ncit/qualifier/" + property.getCode());
    }

    // Take each qualifier and look it up as a property, expect 404
    for (final Concept qualifier : list2) {
      expectNotFound(metaBaseUrl + "/ncit/property/" + qualifier.getCode());
    }
  }

  /**
   * Test property not used.
   *
   * @throws Exception the exception
   */
  @Test
  public void testPropertyNotUsed() throws Exception {

    // Remodeled properties: P379, P377, P388, P365, P382, P392, P380.
    // Spot-check the first and last of the list.
    expectNotFound(metaBaseUrl + "/ncit/property/P379");
    expectNotFound(metaBaseUrl + "/ncit/property/P380");
  }

  /**
   * Test common property used.
   *
   * @throws Exception the exception
   */
  @Test
  public void testCommonPropertyUsed() throws Exception {

    // Common model fields must not be exposed as properties.
    // "Maps_To" removed for ReportWriter; "name" and "label" excluded by design.
    for (final String name : new String[] {
        "code", "Preferred_Name", "DEFINITION", "ALT_DEFINITION", "FULL_SYN"
    }) {
      // Expect not to find these as properties
      expectNotFound(metaBaseUrl + "/ncit/property/" + name);
    }
  }

  /**
   * Test qualifier different label pref name.
   *
   * @throws Exception the exception
   */
  @Test
  public void testQualifierAttr() throws Exception {

    // attr
    final Concept concept = getConcept(metaBaseUrl + "/ncit/qualifier/attribution");
    assertThat(concept).isNotNull();

    // Assert that the "preferred name" synonym does match the concept name
    assertThat(
        concept.getSynonyms().stream().filter(c -> c.getType().equals("Preferred_Name")).count())
            .isGreaterThan(0);
    assertThat(concept.getSynonyms().stream().filter(c -> c.getType().equals("Preferred_Name"))
        .findFirst().get().getName()).isEqualTo("attribution");
  }

  /**
   * Test all qualifiers can be individually resolved.
   *
   * @throws Exception the exception
   */
  @Test
  public void testAllQualifiers() throws Exception {

    // Get qualifiers
    final List<Concept> list = getConceptList(metaBaseUrl + "/ncit/qualifiers");
    assertThat(list).isNotEmpty();

    // Assert that qualifiers don't contain any "remodeled qualifiers"
    final String content = getOkContent(metaBaseUrl + "/terminologies");
    final Terminology terminology =
        new ObjectMapper().readValue(content, new TypeReference<List<Terminology>>() {
          // n/a
        }).stream().filter(t -> t.getTerminology().equals("ncit")).findFirst().get();
    assertThat(list.stream()
        .filter(c -> terminology.getMetadata().isRemodeledQualifier(c.getCode())).count())
            .isEqualTo(0);

    // Take each qualifier and look it up as a qualifier, expect 200
    for (final Concept qualifier : list) {
      getOkContent(metaBaseUrl + "/ncit/qualifier/" + qualifier.getCode());
    }
  }

}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.ui.laf.darcula;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.ui.laf.DarculaMetalTheme;
import com.intellij.ide.ui.laf.IdeaLaf;
import com.intellij.ide.ui.laf.LafManagerImpl;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.ColorUtil;
import com.intellij.util.Alarm;
import com.intellij.util.containers.hash.HashMap;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import sun.awt.AppContext;
import javax.swing.*;
import javax.swing.plaf.*;
import javax.swing.plaf.basic.BasicLookAndFeel;
import javax.swing.plaf.metal.DefaultMetalTheme;
import javax.swing.plaf.metal.MetalLookAndFeel;
import javax.swing.text.html.HTMLEditorKit;
import javax.swing.text.html.StyleSheet;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.URL;
import java.util.*;
import java.util.List;
/**
* @author Konstantin Bulenkov
*/
public class DarculaLaf extends BasicLookAndFeel {
public static final String NAME = "Darcula";
BasicLookAndFeel base;
private static Disposable myDisposable;
private static Alarm myMnemonicAlarm;
private static boolean myAltPressed;
public DarculaLaf() {
try {
if (SystemInfo.isWindows || SystemInfo.isLinux) {
base = new IdeaLaf();
} else {
final String name = UIManager.getSystemLookAndFeelClassName();
base = (BasicLookAndFeel)Class.forName(name).newInstance();
}
}
catch (Exception e) {
log(e);
}
}
private void callInit(String method, UIDefaults defaults) {
try {
final Method superMethod = BasicLookAndFeel.class.getDeclaredMethod(method, UIDefaults.class);
superMethod.setAccessible(true);
superMethod.invoke(base, defaults);
}
catch (Exception e) {
log(e);
}
}
  /**
   * Reports an exception raised during LaF setup without propagating it —
   * a broken look-and-feel should not prevent startup.
   *
   * @param e the exception to report
   */
  @SuppressWarnings("UnusedParameters")
  private static void log(Exception e) {
    // everything is gonna be alright
    e.printStackTrace();
  }
  /**
   * Builds the Darcula defaults table: takes the delegate LaF's defaults,
   * applies Linux font substitutions, IDEA-specific defaults, stylesheet and
   * combo-box patches, then installs the metal theme. Falls back to
   * {@code super.getDefaults()} if anything fails.
   *
   * <p>NOTE(review): the patch calls below appear order-sensitive (defaults
   * are layered on top of each other) — do not reorder without verifying.
   */
  @Override
  public UIDefaults getDefaults() {
    try {
      // BasicLookAndFeel.getDefaults() is protected; call it reflectively for
      // both a fresh MetalLookAndFeel (combo-box maps) and the delegate LaF.
      final Method superMethod = BasicLookAndFeel.class.getDeclaredMethod("getDefaults");
      superMethod.setAccessible(true);
      final UIDefaults metalDefaults = (UIDefaults)superMethod.invoke(new MetalLookAndFeel());
      final UIDefaults defaults = (UIDefaults)superMethod.invoke(base);
      if (SystemInfo.isLinux) {
        if (!Registry.is("darcula.use.native.fonts.on.linux")) {
          // Force DejaVu Sans 13 for every *.font key when native fonts are off
          Font font = findFont("DejaVu Sans");
          if (font != null) {
            for (Object key : defaults.keySet()) {
              if (key instanceof String && ((String)key).endsWith(".font")) {
                defaults.put(key, new FontUIResource(font.deriveFont(13f)));
              }
            }
          }
        } else if (Arrays.asList("CN", "JP", "KR", "TW").contains(Locale.getDefault().getCountry())) {
          // CJK locales: switch to the logical Dialog font, keeping size/style
          for (Object key : defaults.keySet()) {
            if (key instanceof String && ((String)key).endsWith(".font")) {
              final Font font = defaults.getFont(key);
              if (font != null) {
                defaults.put(key, new FontUIResource("Dialog", font.getStyle(), font.getSize()));
              }
            }
          }
        }
      }
      LafManagerImpl.initInputMapDefaults(defaults);
      initIdeaDefaults(defaults);
      patchStyledEditorKit(defaults);
      patchComboBox(metalDefaults, defaults);
      defaults.remove("Spinner.arrowButtonBorder");
      defaults.put("Spinner.arrowButtonSize", JBUI.size(16, 5).asUIResource());
      MetalLookAndFeel.setCurrentTheme(createMetalTheme());
      if (SystemInfo.isWindows && Registry.is("ide.win.frame.decoration")) {
        JFrame.setDefaultLookAndFeelDecorated(true);
        JDialog.setDefaultLookAndFeelDecorated(true);
      }
      if (SystemInfo.isLinux && JBUI.isHiDPI()) {
        applySystemFonts(defaults);
      }
      defaults.put("EditorPane.font", defaults.getFont("TextField.font"));
      return defaults;
    }
    catch (Exception e) {
      log(e);
    }
    // Reflection or patching failed — fall back to the plain basic defaults
    return super.getDefaults();
  }
/**
 * Copies every {@link Font} entry from the platform look-and-feel's defaults into
 * {@code defaults}, so the IDE picks up the system font configuration.
 * Reflection is needed because {@link BasicLookAndFeel#getDefaults()} is protected.
 * Failures are logged and otherwise ignored (best effort).
 */
private static void applySystemFonts(UIDefaults defaults) {
  try {
    final String systemLafClassName = UIUtil.getSystemLookAndFeelClassName();
    final Object systemLaf = Class.forName(systemLafClassName).newInstance();
    final Method getDefaultsMethod = BasicLookAndFeel.class.getDeclaredMethod("getDefaults");
    getDefaultsMethod.setAccessible(true);
    final UIDefaults systemDefaults = (UIDefaults)getDefaultsMethod.invoke(systemLaf);
    for (Map.Entry<Object, Object> entry : systemDefaults.entrySet()) {
      final Object value = entry.getValue();
      if (value instanceof Font) {
        defaults.put(entry.getKey(), value);
      }
    }
  }
  catch (Exception e) {
    log(e);
  }
}
/**
 * Factory for the Metal theme installed by {@link #getDefaults()}.
 * Subclasses may override to supply a different theme.
 */
protected DefaultMetalTheme createMetalTheme() {
  return new DarculaMetalTheme();
}
/**
 * Looks up an installed font by its exact name.
 *
 * @param name the font name to match, as reported by {@link Font#getName()}
 * @return the first matching installed font, or {@code null} when none matches
 */
private static Font findFont(String name) {
  final Font[] installed = GraphicsEnvironment.getLocalGraphicsEnvironment().getAllFonts();
  for (int i = 0; i < installed.length; i++) {
    final Font candidate = installed[i];
    if (candidate.getName().equals(name)) {
      return candidate;
    }
  }
  return null;
}
/**
 * Replaces the combo-box input and action maps with the ones from the Metal
 * look-and-feel defaults, so combo boxes keep Metal's keyboard behavior.
 */
private static void patchComboBox(UIDefaults metalDefaults, UIDefaults defaults) {
  for (String key : new String[]{"ComboBox.ancestorInputMap", "ComboBox.actionMap"}) {
    defaults.remove(key);
    defaults.put(key, metalDefaults.get(key));
  }
}
/**
 * Installs the Darcula style sheet for styled editor kits. On HiDPI screens the
 * "@2x" variant of the CSS resource is used. The sheet is also pushed into
 * HTMLEditorKit's per-AppContext default-styles slot via reflection, so HTML
 * components created later pick it up as well.
 */
@SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
private void patchStyledEditorKit(UIDefaults defaults) {
  URL url = getClass().getResource(getPrefix() + (JBUI.isHiDPI() ? "@2x.css" : ".css"));
  StyleSheet styleSheet = UIUtil.loadStyleSheet(url);
  defaults.put("StyledEditorKit.JBDefaultStyle", styleSheet);
  try {
    // DEFAULT_STYLES_KEY is private in HTMLEditorKit; reflection is required to
    // override the AppContext-scoped default style sheet.
    Field keyField = HTMLEditorKit.class.getDeclaredField("DEFAULT_STYLES_KEY");
    keyField.setAccessible(true);
    AppContext.getAppContext().put(keyField.get(null), UIUtil.loadStyleSheet(url));
  }
  catch (Exception e) {
    log(e);
  }
}
/**
 * Resource name prefix ("darcula") used to locate the bundled {@code .properties}
 * and {@code .css} files. Subclasses may override to reuse the loading machinery
 * with their own resources.
 */
protected String getPrefix() {
  return "darcula";
}
/**
 * Invokes a no-argument method declared on {@link BasicLookAndFeel} on the delegate
 * {@code base} look-and-feel via reflection (the lifecycle methods are protected,
 * hence the {@code setAccessible} call). Failures are logged.
 *
 * @param method name of the no-arg method declared on {@link BasicLookAndFeel}
 */
private void call(String method) {
  try {
    final Method superMethod = BasicLookAndFeel.class.getDeclaredMethod(method);
    superMethod.setAccessible(true);
    superMethod.invoke(base);
  }
  catch (Exception e) {
    // Renamed from "ignore": the exception is logged, not ignored, matching the
    // sibling reflective helpers in this class.
    log(e);
  }
}
/**
 * Delegates component-defaults initialization to the {@code base} look-and-feel
 * via the reflective {@code callInit} helper.
 */
public void initComponentDefaults(UIDefaults defaults) {
  callInit("initComponentDefaults", defaults);
}
/**
 * Applies IDEA-specific defaults on top of the values loaded from the property
 * files, most notably a complete keyboard input map for tables (navigation,
 * selection extension, clipboard, editing). Note that plain ENTER is deliberately
 * not bound to "selectNextRowCell" (see the commented-out entry below).
 */
@SuppressWarnings({"HardCodedStringLiteral"})
protected void initIdeaDefaults(UIDefaults defaults) {
  loadDefaults(defaults);
  defaults.put("Table.ancestorInputMap", new UIDefaults.LazyInputMap(new Object[] {
                 "ctrl C", "copy",
                 "ctrl V", "paste",
                 "ctrl X", "cut",
                   "COPY", "copy",
                  "PASTE", "paste",
                    "CUT", "cut",
         "control INSERT", "copy",
           "shift INSERT", "paste",
           "shift DELETE", "cut",
                  "RIGHT", "selectNextColumn",
               "KP_RIGHT", "selectNextColumn",
                   "LEFT", "selectPreviousColumn",
                "KP_LEFT", "selectPreviousColumn",
                   "DOWN", "selectNextRow",
                "KP_DOWN", "selectNextRow",
                     "UP", "selectPreviousRow",
                  "KP_UP", "selectPreviousRow",
            "shift RIGHT", "selectNextColumnExtendSelection",
         "shift KP_RIGHT", "selectNextColumnExtendSelection",
             "shift LEFT", "selectPreviousColumnExtendSelection",
          "shift KP_LEFT", "selectPreviousColumnExtendSelection",
             "shift DOWN", "selectNextRowExtendSelection",
          "shift KP_DOWN", "selectNextRowExtendSelection",
               "shift UP", "selectPreviousRowExtendSelection",
            "shift KP_UP", "selectPreviousRowExtendSelection",
                "PAGE_UP", "scrollUpChangeSelection",
              "PAGE_DOWN", "scrollDownChangeSelection",
                   "HOME", "selectFirstColumn",
                    "END", "selectLastColumn",
          "shift PAGE_UP", "scrollUpExtendSelection",
        "shift PAGE_DOWN", "scrollDownExtendSelection",
             "shift HOME", "selectFirstColumnExtendSelection",
              "shift END", "selectLastColumnExtendSelection",
           "ctrl PAGE_UP", "scrollLeftChangeSelection",
         "ctrl PAGE_DOWN", "scrollRightChangeSelection",
              "ctrl HOME", "selectFirstRow",
               "ctrl END", "selectLastRow",
     "ctrl shift PAGE_UP", "scrollRightExtendSelection",
   "ctrl shift PAGE_DOWN", "scrollLeftExtendSelection",
        "ctrl shift HOME", "selectFirstRowExtendSelection",
         "ctrl shift END", "selectLastRowExtendSelection",
                    "TAB", "selectNextColumnCell",
              "shift TAB", "selectPreviousColumnCell",
                //"ENTER", "selectNextRowCell",
            "shift ENTER", "selectPreviousRowCell",
                 "ctrl A", "selectAll",
                 "meta A", "selectAll",
                 "ESCAPE", "cancel",
                     "F2", "startEditing"
  }));
}
/**
 * Loads the look-and-feel defaults from the bundled property files and applies them.
 * <p>
 * Reads {@code <prefix>.properties} first, then the OS-specific
 * {@code <prefix>_<os>.properties} (the latter overrides shared keys). Keys that start
 * with {@code "<prefix>."} act as global fallbacks: every existing default whose last
 * dot-separated segment matches such a key is overridden. Finally, each property is
 * written into {@code defaults} verbatim, parsed by {@link #parseValue}.
 *
 * @param defaults the UI defaults table to populate
 */
protected void loadDefaults(UIDefaults defaults) {
  final Properties properties = new Properties();
  final String osSuffix = SystemInfo.isMac ? "mac" : SystemInfo.isWindows ? "windows" : "linux";
  try {
    // Streams are now closed reliably and a missing resource no longer causes a
    // NullPointerException (which escaped the IOException catch below).
    loadPropertiesResource(properties, getPrefix() + ".properties");
    loadPropertiesResource(properties, getPrefix() + "_" + osSuffix + ".properties");
    HashMap<String, Object> darculaGlobalSettings = new HashMap<String, Object>();
    final String prefix = getPrefix() + ".";
    for (String key : properties.stringPropertyNames()) {
      if (key.startsWith(prefix)) {
        darculaGlobalSettings.put(key.substring(prefix.length()), parseValue(key, properties.getProperty(key)));
      }
    }
    // Apply global fallbacks to every existing default whose key suffix matches.
    for (Object key : defaults.keySet()) {
      if (key instanceof String && ((String)key).contains(".")) {
        final String s = (String)key;
        final String darculaKey = s.substring(s.lastIndexOf('.') + 1);
        if (darculaGlobalSettings.containsKey(darculaKey)) {
          defaults.put(key, darculaGlobalSettings.get(darculaKey));
        }
      }
    }
    // Specific keys always win over the global fallbacks applied above.
    for (String key : properties.stringPropertyNames()) {
      final String value = properties.getProperty(key);
      defaults.put(key, parseValue(key, value));
    }
  }
  catch (IOException e) {log(e);}
}

/**
 * Loads a classpath property resource into {@code properties}, guarding against
 * a missing resource and always closing the stream.
 */
private void loadPropertiesResource(Properties properties, String resourceName) throws IOException {
  final InputStream stream = getClass().getResourceAsStream(resourceName);
  if (stream == null) {
    // getResourceAsStream returns null when the resource is absent.
    log(new IOException("Cannot find look-and-feel resource: " + resourceName));
    return;
  }
  try {
    properties.load(stream);
  }
  finally {
    stream.close();
  }
}
/**
 * Converts a raw property-file string into the object stored in the UI defaults table.
 * <p>
 * The parse strategy is chosen from the key suffix and the value shape:
 * <ul>
 *   <li>{@code "null"} maps to {@code null};</li>
 *   <li>keys ending in {@code Insets}: four comma-separated integers;</li>
 *   <li>keys ending in {@code Border}/{@code border}: either four integers
 *       (an empty border) or a border class name instantiated reflectively;</li>
 *   <li>otherwise the value is tried, in order, as a hex color, an integer,
 *       an icon reference ("AllIcons." prefix or ".png" suffix), or a boolean.</li>
 * </ul>
 *
 * @param key   the defaults key; its suffix selects the parse strategy
 * @param value the raw string from the property file
 * @return the parsed value, or the original string when no strategy applied
 */
protected Object parseValue(String key, @NotNull String value) {
  if ("null".equals(value)) {
    return null;
  }
  if (key.endsWith("Insets")) {
    return parseInsets(value);
  } else if (key.endsWith("Border") || key.endsWith("border")) {
    try {
      if (StringUtil.split(value, ",").size() == 4) {
        return new BorderUIResource.EmptyBorderUIResource(parseInsets(value));
      } else {
        return Class.forName(value).newInstance();
      }
    } catch (Exception e) {
      log(e);
    }
  } else {
    final Color color = parseColor(value);
    final Integer invVal = getInteger(value);
    final Boolean boolVal = "true".equals(value) ? Boolean.TRUE : "false".equals(value) ? Boolean.FALSE : null;
    Icon icon = value.startsWith("AllIcons.") ? IconLoader.getIcon(value) : null;
    if (icon == null && value.endsWith(".png")) {
      icon = IconLoader.findIcon(value, DarculaLaf.class, true);
    }
    // Priority on ambiguity: color > integer > icon > boolean > raw string.
    if (color != null) {
      return new ColorUIResource(color);
    } else if (invVal != null) {
      return invVal;
    } else if (icon != null) {
      return new IconUIResource(icon);
    } else if (boolVal != null) {
      return boolVal;
    }
  }
  return value;
}
/**
 * Parses four comma-separated integers ("top,left,bottom,right") into insets.
 *
 * @param value the comma-separated list; must contain at least four entries
 * @return the parsed insets as a UI resource
 */
private static Insets parseInsets(String value) {
  final List<String> parts = StringUtil.split(value, ",");
  final int[] v = new int[4];
  for (int i = 0; i < 4; i++) {
    v[i] = Integer.parseInt(parts.get(i));
  }
  return new InsetsUIResource(v[0], v[1], v[2], v[3]);
}
/**
 * Parses a hex color string. An 8-character value is interpreted as {@code RRGGBBAA}
 * (the trailing two digits being the alpha channel); any other value is delegated
 * to {@link ColorUtil#fromHex(String, Color)} with a {@code null} fallback.
 * Note: an 8-character value whose first six digits fail to parse yields
 * {@code null} and does NOT fall through to the plain hex path.
 */
@SuppressWarnings("UseJBColor")
private static Color parseColor(String value) {
  if (value != null && value.length() == 8) {
    final Color color = ColorUtil.fromHex(value.substring(0, 6));
    if (color != null) {
      try {
        int alpha = Integer.parseInt(value.substring(6, 8), 16);
        return new ColorUIResource(new Color(color.getRed(), color.getGreen(), color.getBlue(), alpha));
      } catch (Exception ignore){}
    }
    return null;
  }
  return ColorUtil.fromHex(value, null);
}
/**
 * Parses the value as a decimal integer.
 *
 * @return the boxed integer, or {@code null} when the string is not a valid int
 */
private static Integer getInteger(String value) {
  try {
    return Integer.valueOf(value);
  }
  catch (NumberFormatException ignored) {
    return null;
  }
}
/** Returns the look-and-feel display name (the {@code NAME} constant of this class). */
@Override
public String getName() {
  return NAME;
}
/** The ID is identical to the display name. */
@Override
public String getID() {
  return getName();
}
/** Returns a short human-readable description of this look-and-feel. */
@Override
public String getDescription() {
  return "IntelliJ Dark Look and Feel";
}
/** Always reports itself as native. */
@Override
public boolean isNativeLookAndFeel() {
  return true;
}
/** Supported on every platform. */
@Override
public boolean isSupportedLookAndFeel() {
  return true;
}
/** Delegates system-color initialization to the {@code base} look-and-feel via reflection. */
@Override
protected void initSystemColorDefaults(UIDefaults defaults) {
  callInit("initSystemColorDefaults", defaults);
}
/** Delegates UI class-defaults initialization to the {@code base} look-and-feel via reflection. */
@Override
protected void initClassDefaults(UIDefaults defaults) {
  callInit("initClassDefaults", defaults);
}
/**
 * Initializes the delegate look-and-feel and installs an IDE event dispatcher that
 * tracks the Alt key state, scheduling a repaint of mnemonic-bearing components
 * shortly after each Alt press/release. Both the alarm and the dispatcher are tied
 * to {@code myDisposable}, which is registered with the application lifetime.
 */
@Override
public void initialize() {
  try {
    base.initialize();
  } catch (Exception ignore) {}
  myDisposable = Disposer.newDisposable();
  Application application = ApplicationManager.getApplication();
  if (application != null) {
    Disposer.register(application, myDisposable);
  }
  myMnemonicAlarm = new Alarm(Alarm.ThreadToUse.SHARED_THREAD, myDisposable);
  IdeEventQueue.getInstance().addDispatcher(new IdeEventQueue.EventDispatcher() {
    @Override
    public boolean dispatch(AWTEvent e) {
      if (e instanceof KeyEvent && ((KeyEvent)e).getKeyCode() == KeyEvent.VK_ALT) {
        // Remember the Alt state and debounce the repaint by 10 ms.
        myAltPressed = e.getID() == KeyEvent.KEY_PRESSED;
        myMnemonicAlarm.cancelAllRequests();
        final Component focusOwner = IdeFocusManager.findInstance().getFocusOwner();
        if (focusOwner != null) {
          myMnemonicAlarm.addRequest(new Runnable() {
            @Override
            public void run() {
              repaintMnemonics(focusOwner, myAltPressed);
            }
          }, 10);
        }
      }
      // Never consume the event; other dispatchers still see it.
      return false;
    }
  }, myDisposable);
}
/** Whether the Alt key is currently held down, as tracked by the dispatcher installed in {@code initialize()}. */
public static boolean isAltPressed() {
  return myAltPressed;
}
/**
 * Repaints every component in the focus owner's window that displays a mnemonic,
 * so mnemonic underlines are shown/hidden to match the Alt key state.
 * Bails out if the Alt state changed since the repaint was scheduled.
 *
 * @param focusOwner the component currently owning keyboard focus
 * @param pressed    the Alt state at the time the repaint was scheduled
 */
private static void repaintMnemonics(@NotNull Component focusOwner, boolean pressed) {
  if (pressed != myAltPressed) {
    return;
  }
  final Window window = SwingUtilities.windowForComponent(focusOwner);
  if (window == null) {
    return;
  }
  for (Component child : window.getComponents()) {
    if (!(child instanceof JComponent)) {
      continue;
    }
    for (JComponent c : UIUtil.findComponentsOfType((JComponent)child, JComponent.class)) {
      if (hasDisplayedMnemonic(c)) {
        c.repaint();
      }
    }
  }
}

/** True when the component is a label or button that currently displays a mnemonic. */
private static boolean hasDisplayedMnemonic(JComponent c) {
  if (c instanceof JLabel) {
    return ((JLabel)c).getDisplayedMnemonicIndex() != -1;
  }
  return c instanceof AbstractButton && ((AbstractButton)c).getDisplayedMnemonicIndex() != -1;
}
/**
 * Tears down this look-and-feel: uninitializes the delegate and disposes the
 * Disposable that owns the mnemonic alarm and the Alt-key event dispatcher.
 */
@Override
public void uninitialize() {
  try {
    // Bug fix: this previously called base.initialize(), re-initializing the
    // delegate during teardown instead of releasing it.
    base.uninitialize();
  } catch (Exception ignore) {}
  Disposer.dispose(myDisposable);
  myDisposable = null;
}
/**
 * Delegates {@link BasicLookAndFeel#loadSystemColors} to the {@code base}
 * look-and-feel via reflection (the method is protected in the superclass).
 * Failures are logged and otherwise suppressed.
 */
@Override
protected void loadSystemColors(UIDefaults defaults, String[] systemColors, boolean useNative) {
  try {
    final Method superMethod = BasicLookAndFeel.class.getDeclaredMethod("loadSystemColors",
                                                                        UIDefaults.class,
                                                                        String[].class,
                                                                        boolean.class);
    superMethod.setAccessible(true);
    superMethod.invoke(base, defaults, systemColors, useNative);
  }
  catch (Exception e) {
    // Renamed from "ignore": the exception is logged, matching log()'s intent.
    log(e);
  }
}
/** This look-and-feel can draw its own window decorations. */
@Override
public boolean getSupportsWindowDecorations() {
  return true;
}
/**
 * Loads a named icon from the bundled look-and-feel icon directory.
 * NOTE(review): the boolean argument to IconLoader.findIcon presumably toggles
 * strict/deferred lookup — verify against the IconLoader API before relying on it.
 *
 * @param iconName file name of the icon resource
 * @return the icon, or {@code null} when not found
 */
public static Icon loadIcon(String iconName) {
  return IconLoader.findIcon("/com/intellij/ide/ui/laf/icons/" + iconName, DarculaLaf.class, true);
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
/**
* The {@link DelayedAllocationService} listens to cluster state changes and checks
* if there are unassigned shards with delayed allocation (unassigned shards that have
* the delay marker). These are shards that have become unassigned due to a node leaving
* and which were assigned the delay marker based on the index delay setting
* {@link UnassignedInfo#INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING}
* (see {@link AllocationService#deassociateDeadNodes(RoutingAllocation)}).
* This class is responsible for choosing the next (closest) delay expiration of a
* delayed shard to schedule a reroute to remove the delay marker.
* The actual removal of the delay marker happens in
* {@link AllocationService#removeDelayMarkers(RoutingAllocation)}, triggering yet
* another cluster change event.
*/
public class DelayedAllocationService extends AbstractLifecycleComponent implements ClusterStateListener {

    static final String CLUSTER_UPDATE_TASK_SOURCE = "delayed_allocation_reroute";

    final ThreadPool threadPool;
    private final ClusterService clusterService;
    private final AllocationService allocationService;

    // Holder for the single currently scheduled reroute; CAS-managed so that
    // cancel/replace races between the cluster state thread and the scheduler
    // thread resolve safely.
    AtomicReference<DelayedRerouteTask> delayedRerouteTask = new AtomicReference<>(); // package private to access from tests

    /**
     * represents a delayed scheduling of the reroute action that can be cancelled.
     */
    class DelayedRerouteTask extends ClusterStateUpdateTask {
        final TimeValue nextDelay; // delay until submitting the reroute command
        final long baseTimestampNanos; // timestamp (in nanos) upon which delay was calculated
        volatile ScheduledFuture future;
        final AtomicBoolean cancelScheduling = new AtomicBoolean();

        DelayedRerouteTask(TimeValue nextDelay, long baseTimestampNanos) {
            this.nextDelay = nextDelay;
            this.baseTimestampNanos = baseTimestampNanos;
        }

        /** Absolute (nanos) point in time at which this task is due to run. */
        public long scheduledTimeToRunInNanos() {
            return baseTimestampNanos + nextDelay.nanos();
        }

        /**
         * Best-effort cancellation: sets the flag (checked again in doRun), cancels
         * the scheduled future, and clears this task from the holder if still current.
         */
        public void cancelScheduling() {
            cancelScheduling.set(true);
            FutureUtils.cancel(future);
            removeIfSameTask(this);
        }

        /**
         * Schedules the reroute submission after {@code nextDelay}. The scheduled
         * runnable only submits a cluster state update task, so it runs on the
         * SAME (direct) executor.
         */
        public void schedule() {
            future = threadPool.schedule(nextDelay, ThreadPool.Names.SAME, new AbstractRunnable() {
                @Override
                protected void doRun() throws Exception {
                    // Re-check cancellation: cancelScheduling() may have raced with
                    // the future firing.
                    if (cancelScheduling.get()) {
                        return;
                    }
                    clusterService.submitStateUpdateTask(CLUSTER_UPDATE_TASK_SOURCE, DelayedRerouteTask.this);
                }

                @Override
                public void onFailure(Exception e) {
                    logger.warn("failed to submit schedule/execute reroute post unassigned shard", e);
                    removeIfSameTask(DelayedRerouteTask.this);
                }
            });
        }

        @Override
        public ClusterState execute(ClusterState currentState) throws Exception {
            // Clear the holder before rerouting so a new task can be scheduled for
            // any remaining delayed shards.
            removeIfSameTask(this);
            return allocationService.reroute(currentState, "assign delayed unassigned shards");
        }

        @Override
        public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
            if (oldState == newState) {
                // no state changed, check when we should remove the delay flag from the shards the next time.
                // if cluster state changed, we can leave the scheduling of the next delay up to the clusterChangedEvent
                // this should not be needed, but we want to be extra safe here
                scheduleIfNeeded(currentNanoTime(), newState);
            }
        }

        @Override
        public void onFailure(String source, Exception e) {
            removeIfSameTask(this);
            logger.warn("failed to schedule/execute reroute post unassigned shard", e);
        }
    }

    @Inject
    public DelayedAllocationService(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                                    AllocationService allocationService) {
        super(settings);
        this.threadPool = threadPool;
        this.clusterService = clusterService;
        this.allocationService = allocationService;
        // Listen early so delay scheduling happens before other listeners react.
        clusterService.addFirst(this);
    }

    @Override
    protected void doStart() {
        // Nothing to start: listener registration happens in the constructor.
    }

    @Override
    protected void doStop() {
        // Nothing to stop: any pending task is cancelled in doClose().
    }

    @Override
    protected void doClose() {
        clusterService.remove(this);
        removeTaskAndCancel();
    }

    /** override this to control time based decisions during delayed allocation */
    protected long currentNanoTime() {
        return System.nanoTime();
    }

    @Override
    public void clusterChanged(ClusterChangedEvent event) {
        long currentNanoTime = currentNanoTime();
        // Only the elected master schedules delayed-allocation reroutes.
        if (event.state().nodes().isLocalNodeElectedMaster()) {
            scheduleIfNeeded(currentNanoTime, event.state());
        }
    }

    /** Atomically detaches the current task (if any) and cancels its scheduling. */
    private void removeTaskAndCancel() {
        DelayedRerouteTask existingTask = delayedRerouteTask.getAndSet(null);
        if (existingTask != null) {
            logger.trace("cancelling existing delayed reroute task");
            existingTask.cancelScheduling();
        }
    }

    /** Clears the holder only if it still references {@code expectedTask}. */
    private void removeIfSameTask(DelayedRerouteTask expectedTask) {
        delayedRerouteTask.compareAndSet(expectedTask, null);
    }

    /**
     * Figure out if an existing scheduled reroute is good enough or whether we need to cancel and reschedule.
     */
    private void scheduleIfNeeded(long currentNanoTime, ClusterState state) {
        assertClusterStateThread();
        long nextDelayNanos = UnassignedInfo.findNextDelayedAllocation(currentNanoTime, state);
        if (nextDelayNanos < 0) {
            logger.trace("no need to schedule reroute - no delayed unassigned shards");
            removeTaskAndCancel();
        } else {
            TimeValue nextDelay = TimeValue.timeValueNanos(nextDelayNanos);
            final boolean earlierRerouteNeeded;
            DelayedRerouteTask existingTask = delayedRerouteTask.get();
            DelayedRerouteTask newTask = new DelayedRerouteTask(nextDelay, currentNanoTime);
            if (existingTask == null) {
                earlierRerouteNeeded = true;
            } else if (newTask.scheduledTimeToRunInNanos() < existingTask.scheduledTimeToRunInNanos()) {
                // we need an earlier delayed reroute
                logger.trace("cancelling existing delayed reroute task as delayed reroute has to happen [{}] earlier",
                    TimeValue.timeValueNanos(existingTask.scheduledTimeToRunInNanos() - newTask.scheduledTimeToRunInNanos()));
                existingTask.cancelScheduling();
                earlierRerouteNeeded = true;
            } else {
                earlierRerouteNeeded = false;
            }
            if (earlierRerouteNeeded) {
                logger.info("scheduling reroute for delayed shards in [{}] ({} delayed shards)", nextDelay,
                    UnassignedInfo.getNumberOfDelayedUnassigned(state));
                // Only the cluster state thread mutates the holder here, so getAndSet
                // should observe either the task we inspected above or null.
                DelayedRerouteTask currentTask = delayedRerouteTask.getAndSet(newTask);
                assert existingTask == currentTask || currentTask == null;
                newTask.schedule();
            } else {
                logger.trace("no need to reschedule delayed reroute - currently scheduled delayed reroute in [{}] is enough", nextDelay);
            }
        }
    }

    // protected so that it can be overridden (and disabled) by unit tests
    protected void assertClusterStateThread() {
        ClusterService.assertClusterStateThread();
    }
}
| |
package de.fzi.osh.device.battery;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Iterator;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.logging.Logger;
import de.fzi.osh.core.data.Json;
import de.fzi.osh.device.battery.configuration.BatteryConfiguration;
import de.fzi.osh.device.battery.data.BatteryStateData;
import de.fzi.osh.device.time.Time;
import de.fzi.osh.device.battery.data.BatterySchedulerData;
import de.fzi.osh.types.flexibilities.Flexibility;
import de.fzi.osh.types.flexibilities.SchedulingResult;
import de.fzi.osh.types.flexibilities.Task;
import de.fzi.osh.types.math.IntInterval;
import de.fzi.osh.wamp.schedule.GetScheduleResponse;
/**
* Schedules battery operations. The battery is permanently running and automatically creates a flexibility and a task for each day.
* Updates are issued through adaptation.
*
*
* flex + task flex + task flex + task
* ------------|----------------------------------------------|-------------------------------
* next day next day + 1
*
 * Every time the flexibilities are polled, they are updated to better reflect the current state. To ensure that adaptations do not get blocked by
 * previous changes to the flexibilities, the power and energy corridors for past values are removed.
*
* @author K. Foerderer
*
*/
public class BatteryScheduler implements Runnable {
private static Logger log = Logger.getLogger(BatteryScheduler.class.getName());

// Static configuration of the battery device this scheduler controls.
private BatteryConfiguration configuration;
// Device handle used to query the current battery state (e.g. state of charge).
private Battery battery;
/**
 * Holds all the data needed for operation (flexibilities, tasks, scheduled power).
 * Volatile because it is replaced wholesale by restoreState and read concurrently.
 */
private volatile BatterySchedulerData data;
/**
 * Name of file used to store the scheduler data
 */
private String schedulerDataFile;
/**
 * Constructor. Restores previously persisted scheduler state, if available.
 *
 * @param battery the battery device whose operation is to be scheduled
 */
public BatteryScheduler(Battery battery) {
  this.battery = battery;
  this.configuration = battery.getConfiguration();
  // load scheduler data if available
  schedulerDataFile = battery.getConfiguration().schedulerDataFile;
  log.fine("Loading persistence data from file " + schedulerDataFile );
  restoreState(schedulerDataFile);
}
/**
 * Returns the scheduler data (flexibilities, tasks, scheduled power).
 *
 * @return the live scheduler data object (not a copy)
 */
public BatterySchedulerData getSchedulerData() {
  return data;
}
/**
 * Restores the scheduler state from the given JSON file, falling back to a fresh
 * {@link BatterySchedulerData} instance when the file cannot be read or parsed.
 *
 * @param filename path of the JSON persistence file
 */
private void restoreState(String filename) {
  final BatterySchedulerData restored = Json.readFile(filename, BatterySchedulerData.class);
  if(restored != null) {
    data = restored;
  } else {
    log.warning("Scheduler data could not be restored correctly.");
    data = new BatterySchedulerData();
  }
}
/**
 * Writes the scheduler data to a file (JSON). Synchronized so concurrent
 * schedule mutations cannot interleave with serialization.
 *
 * @param filename path of the JSON persistence file
 */
private synchronized void saveState(String filename) {
  Json.writeFile(filename, data);
}
/**
 * Creates a flexibility and the corresponding task covering the whole day that contains
 * {@code time} (floored to local midnight), unless such a pair already exists.
 * The task initially schedules 0 W for the entire day; the flexibility offers the
 * configured power corridor and an energy corridor derived from the expected stored
 * energy at the start of that day. The new state is persisted.
 *
 * @param time epoch seconds of any instant within the desired day
 * @return the id of the existing or newly created task for that day
 */
private int addFlexibilityForDay(long time) {
  // Look up an existing flexibility exactly once; the previous implementation
  // performed the same getTaskIdForTime() lookup twice with identical arguments.
  final int existingTaskId = getTaskIdForTime(time, false);
  if(existingTaskId >= 0) {
    // there already is a flexibility
    return existingTaskId;
  }
  // use zoned date time to accommodate for daylight savings
  ZonedDateTime day = ZonedDateTime.ofInstant(Instant.ofEpochSecond(time), ZoneId.systemDefault());
  day = day.truncatedTo(ChronoUnit.DAYS);
  // create flexibility
  Flexibility flexibility = new Flexibility();
  // set id to a new value
  flexibility.id = data.idCounter++;
  // newly created flexibility, so its task shares the same id
  flexibility.taskId = flexibility.id;
  // flexibility is adaptable
  flexibility.adaptable = true;
  // the flexibility starts at 00:00 and runs for one day
  ZonedDateTime nextDay = day.plus(1, ChronoUnit.DAYS);
  flexibility.stoppingTime.min = nextDay.toEpochSecond();
  flexibility.stoppingTime.max = flexibility.stoppingTime.min;
  flexibility.runningTime.max = (int)(nextDay.toEpochSecond() - day.toEpochSecond());
  flexibility.runningTime.min = flexibility.runningTime.max;
  // offer power flexibility within the bounds specified in the configuration
  flexibility.powerCorridor.put(0, new IntInterval(-configuration.maxFlexibilityDischarge, configuration.maxFlexibilityCharge));
  // offer the remaining stored energy / remaining free storage space according to current schedule
  int storedEnergy = getExpectedStoredEnergy(flexibility.stoppingTime.min - flexibility.runningTime.max);
  int availableEnergy = storedEnergy - configuration.minStateOfCharge * configuration.nominalCapacity / 100 - configuration.flexibilityEnergyBuffer;
  int availableStorage = configuration.maxStateOfCharge * configuration.nominalCapacity / 100 - storedEnergy - configuration.flexibilityEnergyBuffer;
  // the corridor is expressed in watt-seconds (Wh * 3600)
  flexibility.energyCorridor.put(0, new IntInterval(-availableEnergy * 60 * 60, availableStorage * 60 * 60));
  // create a task with the same id
  Task task = new Task();
  task.adaptable = true;
  task.flexibilityId = flexibility.id;
  task.id = flexibility.taskId;
  task.power = new TreeMap<Integer, Integer>();
  task.power.put(0, 0);
  task.runningTime = flexibility.runningTime.max;
  task.startingTime = flexibility.stoppingTime.min - task.runningTime;
  synchronized (this) {
    // actual scheduling
    data.scheduledPower.put(task.startingTime, 0);
    // save data
    data.flexibilities.put(flexibility.id, flexibility);
    data.tasks.put(task.id, task);
    // persistence
    saveState(schedulerDataFile);
  }
  return task.id;
}
/**
 * Returns the expected amount of stored energy (Wh) at the given future time,
 * obtained by integrating the currently scheduled power from now up to {@code time},
 * starting from the battery's present state of charge.
 *
 * @param time target time in epoch seconds
 * @return expected stored energy in Wh; the current energy if {@code time} is now or in the past
 */
public synchronized int getExpectedStoredEnergy(long time) {
  // current energy content [Wh] derived from state of charge [%] and nominal capacity [Wh]
  int storedEnergy = battery.getCurrentStateData().stateOfCharge * configuration.nominalCapacity / 100;
  if(time <= Time.service().now()) {
    return storedEnergy;
  }
  NavigableMap<Long, Integer> schedule = data.scheduledPower;
  long t = Time.service().now();
  Long nextTime;
  // Integrate the scheduled power interval by interval, but only up to the requested
  // time: previously the loop ran over the entire remaining schedule, so power
  // scheduled after 'time' was incorrectly included in the result.
  while(t < time && (nextTime = schedule.higherKey(t)) != null) {
    // clamp the step so the last interval ends exactly at 'time'
    long stepEnd = Math.min(nextTime, time);
    // NOTE(review): assumes the schedule always contains an entry at or before t;
    // floorEntry(t) would be null otherwise — verify how scheduledPower is populated.
    int power = schedule.floorEntry(t).getValue();
    // delta Energy [Wh] = delta Time [s] / (60 * 60) [s/h] * -power [W]
    storedEnergy += (int)((stepEnd - t) * -power / (60.0 * 60));
    // do time step
    t = stepEnd;
  }
  return storedEnergy;
}
/**
 * Returns the schedule (flexibilities and their tasks) covering [from, to]. Per-day
 * flexibility/task pairs are created on demand, and the corridors of the current day
 * are rebuilt so that past portions no longer constrain adaptations.
 * @see de.fzi.osh.alljoyn.interfaces.Flexibilities#getFlexibilities(long)
 *
 * @param from interval start, epoch seconds
 * @param to interval end, epoch seconds
 * @return the packaged schedule, or {@code null} when the span exceeds 48 hours
 */
public GetScheduleResponse getSchedule(long from, long to) {
  // < 48 hours
  if(to - from > 2 * 24 * 60 * 60) {
    return null;
  }
  // get flexibilities or create them if they are new
  int fromTaskId = getTaskIdForTime(from, false);
  if(fromTaskId < 0) {
    fromTaskId = addFlexibilityForDay(from);
  }
  int toTaskId = getTaskIdForTime(to, true);
  if(toTaskId < 0) {
    toTaskId = addFlexibilityForDay(to);
  }
  // package flexibility and task data and adapt them to reflect the current state
  Flexibility[] flexibilities;
  Task[] tasks;
  if(fromTaskId == toTaskId) {
    // same day
    flexibilities = new Flexibility[1];
    tasks = new Task[1];
    tasks[0] = data.tasks.get(fromTaskId);
    flexibilities[0] = data.flexibilities.get(tasks[0].flexibilityId);
    // remove old limitations and set new ones
    long now = Time.service().now();
    int relativeNow = (int)(now - tasks[0].startingTime);
    // offer power flexibility within the bounds specified in the configuration
    flexibilities[0].powerCorridor = new TreeMap<Integer, IntInterval>();
    // deal with target soc settings
    if(now < data.targetSocTime && data.targetSOC > 0) {
      // freeze the corridor at 0 W until the target-SOC time is reached
      relativeNow = (int)(data.targetSocTime - tasks[0].startingTime);
      flexibilities[0].powerCorridor.put(0, new IntInterval(0, 0));
    } else {
      // NOTE(review): both branches insert the same (0,0) interval; only the
      // relativeNow shift above differs. Confirm this else branch is intentional.
      flexibilities[0].powerCorridor.put(0, new IntInterval(0, 0));
    }
    flexibilities[0].powerCorridor.put(relativeNow < 0 ? 0 : relativeNow, new IntInterval(-configuration.maxFlexibilityDischarge, configuration.maxFlexibilityCharge));
    // offer the remaining stored energy / remaining free storage space according to current schedule
    int storedEnergy = getExpectedStoredEnergy(now);
    int availableEnergy = storedEnergy - configuration.minStateOfCharge * configuration.nominalCapacity / 100 - configuration.flexibilityEnergyBuffer;
    int availableStorage = configuration.maxStateOfCharge * configuration.nominalCapacity / 100 - storedEnergy - configuration.flexibilityEnergyBuffer;
    // past portion of the corridor is unconstrained; constraints start at relativeNow
    flexibilities[0].energyCorridor = new TreeMap<Integer, IntInterval>();
    flexibilities[0].energyCorridor.put(0, new IntInterval(Integer.MIN_VALUE, Integer.MAX_VALUE));
    flexibilities[0].energyCorridor.put(relativeNow-1 < 0 ? 0 : relativeNow-1, new IntInterval(Integer.MIN_VALUE, Integer.MAX_VALUE));
    flexibilities[0].energyCorridor.put(relativeNow < 0 ? 0 : relativeNow, new IntInterval(Math.min(0, -availableEnergy * 60 * 60), Math.max(0, availableStorage * 60 * 60)));
  } else {
    // different days
    flexibilities = new Flexibility[2];
    tasks = new Task[2];
    tasks[0] = data.tasks.get(fromTaskId);
    flexibilities[0] = data.flexibilities.get(tasks[0].flexibilityId);
    tasks[1] = data.tasks.get(toTaskId);
    flexibilities[1] = data.flexibilities.get(tasks[1].flexibilityId);
    // remove old limitations and set new ones
    long now = Time.service().now();
    int relativeNow = (int)(now - tasks[0].startingTime);
    // offer power flexibility within the bounds specified in the configuration
    flexibilities[0].powerCorridor = new TreeMap<Integer, IntInterval>();
    // deal with target soc settings
    if(now < data.targetSocTime && data.targetSOC > 0) {
      // freeze the corridor at 0 W until the target-SOC time is reached
      relativeNow = (int)(data.targetSocTime - tasks[0].startingTime);
      flexibilities[0].powerCorridor.put(0, new IntInterval(0, 0));
    } else {
      // NOTE(review): same duplicated branch as in the same-day case above — verify.
      flexibilities[0].powerCorridor.put(0, new IntInterval(0, 0));
    }
    flexibilities[0].powerCorridor.put(relativeNow < 0 ? 0 : relativeNow, new IntInterval(-configuration.maxFlexibilityDischarge, configuration.maxFlexibilityCharge));
    // offer the remaining stored energy / remaining free storage space according to current schedule
    int storedEnergy = getExpectedStoredEnergy(now);
    int availableEnergy = storedEnergy - configuration.minStateOfCharge * configuration.nominalCapacity / 100 - configuration.flexibilityEnergyBuffer;
    int availableStorage = configuration.maxStateOfCharge * configuration.nominalCapacity / 100 - storedEnergy - configuration.flexibilityEnergyBuffer;
    flexibilities[0].energyCorridor = new TreeMap<Integer, IntInterval>();
    flexibilities[0].energyCorridor.put(0, new IntInterval(Integer.MIN_VALUE, Integer.MAX_VALUE));
    flexibilities[0].energyCorridor.put(relativeNow-1 < 0 ? 0 : relativeNow-1, new IntInterval(Integer.MIN_VALUE, Integer.MAX_VALUE));
    flexibilities[0].energyCorridor.put(relativeNow < 0 ? 0 : relativeNow, new IntInterval(Math.min(0, -availableEnergy * 60 * 60), Math.max(0, availableStorage * 60 * 60)));
    // deal with target soc settings
    if(tasks[1].startingTime < data.targetSocTime && data.targetSOC > 0) {
      relativeNow = (int)(data.targetSocTime - tasks[1].startingTime);
      flexibilities[1].powerCorridor.put(0, new IntInterval(0, 0));
      flexibilities[1].powerCorridor.put(relativeNow, new IntInterval(-configuration.maxFlexibilityDischarge, configuration.maxFlexibilityCharge));
    } else {
      flexibilities[1].powerCorridor.put(0, new IntInterval(-configuration.maxFlexibilityDischarge, configuration.maxFlexibilityCharge));
    }
    // flexibilities[1] is always in the future, hence only adapt energy restrictions
    storedEnergy = getExpectedStoredEnergy(tasks[1].startingTime);
    availableEnergy = storedEnergy - configuration.minStateOfCharge * configuration.nominalCapacity / 100 - configuration.flexibilityEnergyBuffer;
    availableStorage = configuration.maxStateOfCharge * configuration.nominalCapacity / 100 - storedEnergy - configuration.flexibilityEnergyBuffer;
    flexibilities[1].energyCorridor.put(0, new IntInterval(-availableEnergy * 60 * 60, availableStorage * 60 * 60));
  }
  // package it into a bundle
  // NOTE(review): this local 'data' shadows the BatterySchedulerData field of the
  // same name for the remainder of the method — consider renaming.
  GetScheduleResponse data = new GetScheduleResponse();
  data.from = Time.service().now();
  data.to = to;
  data.constraints = new String[] {""};
  int[] flexibilityIds = new int[flexibilities.length];
  int i = 0;
  for(Flexibility f : flexibilities) {
    flexibilityIds[i] = f.id;
    i++;
  }
  data.flexibilities = flexibilityIds;
  int[] taskIds = new int[tasks.length];
  i = 0;
  for(Task t : tasks) {
    taskIds[i] = t.id;
    i++;
  }
  data.tasks = taskIds;
  // publish
  return data;
}
/**
 * Returns the flexibility with the given id.
 * @see de.fzi.osh.alljoyn.interfaces.Flexibilities#getFlexibility(int)
 *
 * @param id the flexibility id
 * @return the flexibility, or {@code null} when unknown
 */
public Flexibility getFlexibility(int id) {
  return data.flexibilities.get(id);
}
/**
 * Returns the task with the given id.
 *
 * @param id the task id
 * @return the task, or {@code null} when unknown
 */
public Task getTask(int id) {
  return data.tasks.get(id);
}
/**
 * Adapts the power profile of an already scheduled, adaptable flexibility.
 * <p>
 * The supplied map covers the window [firstKey, lastKey] relative to the task start;
 * the last key only marks the end of the update and is overwritten with the power
 * value previously in effect there, carrying it forward. The update is first
 * validated against a temporary copy of the task's profile, then applied to both
 * the task and the global scheduled-power map.
 * Note: the caller-supplied {@code powers} map is mutated (the end marker is set),
 * matching the previous behavior.
 *
 * @param id     the flexibility id
 * @param powers power updates keyed by seconds relative to the task's starting time
 * @return the outcome of the scheduling attempt
 */
public SchedulingResult adaptScheduledFlexibility(int id, NavigableMap<Integer, Integer> powers) {
  // get flexibility data
  final Flexibility flexibility = data.flexibilities.get(id);
  if(flexibility == null) {
    log.warning("Adaptation rejected: unknown flexibility id " + id + ".");
    return SchedulingResult.UnknownFlexibility;
  }
  // only adaptable flexibilities may be adapted
  if(!flexibility.adaptable) {
    log.warning("Adaptation rejected: flexibility " + id + " is not adaptable.");
    return SchedulingResult.Illegal;
  }
  // check if this is a valid update; therefore do a temporary schedule update
  final Task task = data.tasks.get(flexibility.taskId);
  final NavigableMap<Integer, Integer> updatedPowers = new TreeMap<Integer, Integer>(task.power);
  // the last key only marks the end of the update: carry the previous power forward
  powers.put(powers.lastKey(), updatedPowers.floorEntry(powers.lastKey()).getValue());
  // delete values that get overwritten (clearing the sub-map view mutates updatedPowers)
  updatedPowers.subMap(powers.firstKey(), true, powers.lastKey(), true).clear();
  // write new data
  updatedPowers.putAll(powers);
  // check if the adapted flexibility is still valid
  if(!flexibility.checkValidity(task.startingTime, updatedPowers)) {
    log.warning("Adaptation rejected: invalid power data for flexibility " + id + ".");
    return SchedulingResult.InvalidData;
  }
  // valid update
  synchronized (this) {
    // do the update
    task.power = updatedPowers;
    // remove old power data within the adapted window
    data.scheduledPower.subMap(task.startingTime + powers.firstKey(), true, task.startingTime + powers.lastKey(), true).clear();
    // write new power data
    for(Map.Entry<Integer, Integer> entry : powers.entrySet()) {
      data.scheduledPower.put(task.startingTime + entry.getKey(), entry.getValue());
    }
    // compress data
    task.compress();
    // no persistence for short term adaptations needed
  }
  return SchedulingResult.Ok;
}
/**
 * Schedules a flexibility: validates the requested power profile against the
 * flexibility's constraints and, on success, writes it into the task and the
 * global power schedule (persisted).
 * @see de.fzi.osh.alljoyn.interfaces.Flexibilities#scheduleFlexibility(int, Map)
 *
 * @param id           id of the flexibility to schedule
 * @param startingTime epoch second at which the profile shall start
 * @param powers       offset in seconds (relative to start) -> power value
 * @return Ok on success, otherwise UnknownFlexibility / Illegal / InvalidData
 */
public SchedulingResult scheduleFlexibility(int id, long startingTime, NavigableMap<Integer, Integer> powers) {
    // get flexibility data
    Flexibility flexibility = data.flexibilities.get(id);
    if(flexibility == null) {
        return SchedulingResult.UnknownFlexibility;
    }
    // has it started?
    Task task = data.tasks.get(flexibility.taskId);
    if(task.startingTime < Time.service().now()) {
        // already running => use adapt
        return SchedulingResult.Illegal;
    }
    // do the power fit the flexibility
    if(flexibility.checkValidity(startingTime, powers) == false) {
        String flexString = "{";
        for(Map.Entry<Integer, Integer> entry : powers.entrySet()) {
            flexString += entry.getKey() + ": " + entry.getValue() + ", ";
        }
        flexString = flexString.substring(0, flexString.length() - 2) + "}";
        log.info("Received invalid flexibility schedule:");
        log.info(flexString);
        return SchedulingResult.InvalidData;
    }
    // adapt task
    // NOTE(review): task.startingTime is not updated to the startingTime
    // parameter, yet the unscheduling below is keyed on task.startingTime
    // while the new entries are keyed on startingTime — confirm callers
    // always pass startingTime == task.startingTime, otherwise stale power
    // values may remain in the schedule.
    task.power = powers;
    // all other data doesn't change, since there is no flexibility in those parameters
    /* NOT NEEDED:
    // check for conflicts with other tasks
    //
    // [!] keep in mind that a schedule could include periods with power = 0
    //
    for(Task scheduledTask : data.tasks.values()) {
        // [a,b] intersects [x,y] <=> x <= b AND y >= a
        // here [a,b) and [x,y)
        if( scheduledTask.startingTime < task.startingTime + power.lastKey() &&
                scheduledTask.startingTime + scheduledTask.runningTime > task.startingTime) {
            return SchedulingResult.Conflict.getValue();
        }
    }*/
    // compress data
    task.compress();
    // actual scheduling
    synchronized (this) {
        // unschedule old power values
        for(Iterator<Map.Entry<Long, Integer>> iterator = data.scheduledPower.subMap(task.startingTime, true, task.startingTime + task.runningTime, true).entrySet().iterator();iterator.hasNext();) {
            iterator.next();
            iterator.remove();
        }
        // schedule new power values (absolute time = start + relative offset)
        for(Map.Entry<Integer, Integer> entry : powers.entrySet()) {
            data.scheduledPower.put(entry.getKey() + startingTime, entry.getValue());
        }
        //persistence
        saveState(schedulerDataFile);
    }
    log.info("Flexibility '" + id + "' has been scheduled.");
    return SchedulingResult.Ok;
}
/**
 * Unschedules the task with the given id: its power profile is reset to a
 * single 0 W entry and its entries are removed from the global power schedule.
 * The task itself is kept, since the battery keeps running and simply does
 * nothing.
 * @see de.fzi.osh.alljoyn.interfaces.Flexibilities#unscheduleFlexibility(int)
 *
 * @param id id used to look up the task.
 *           NOTE(review): the original javadoc spoke of a flexibility id, but
 *           the lookup is performed on data.tasks — confirm callers pass a
 *           task id (or that both ids always coincide).
 * @return Ok on success, UnknownTask if no such task exists, Illegal if the
 *         task has already started (use adaptScheduledFlexibility instead)
 */
public synchronized SchedulingResult unscheduleFlexibility(int id) {
    Task task = data.tasks.get(id);
    if (task == null) {
        return SchedulingResult.UnknownTask;
    }
    // can only unschedule tasks that have not started yet
    // => use adaptability to make changes [!]
    if (task.startingTime < Time.service().now()) {
        return SchedulingResult.Illegal;
    }
    // The method itself is synchronized; the former nested synchronized(this)
    // block was redundant (the monitor is reentrant) and has been removed.
    // Remove scheduled power data over the task's whole runtime; clearing the
    // subMap view clears the corresponding range of the backing map.
    data.scheduledPower
            .subMap(task.startingTime, true, task.startingTime + task.runningTime, true)
            .clear();
    // reset task data
    task.power.clear();
    task.power.put(0, 0);
    // persistence
    saveState(schedulerDataFile);
    return SchedulingResult.Ok;
}
/**
 * Returns the tasks of all scheduled flexibilities.
 * @see de.fzi.osh.alljoyn.interfaces.Flexibilities#getScheduledFlexibilities()
 *
 * @return array containing every currently known task
 */
public Task[] getScheduledFlexibilities() {
    Task[] result = new Task[data.tasks.size()];
    return data.tasks.values().toArray(result);
}
/**
 * Returns the power scheduled for the given time.
 *
 * @param epochSeconds point in time, in epoch seconds
 * @return the power value of the latest schedule entry at or before the given
 *         time, or 0 if no such entry exists (previously this case caused a
 *         NullPointerException on an empty schedule)
 */
public synchronized int getScheduledPower(long epochSeconds) {
    Map.Entry<Long, Integer> entry = data.scheduledPower.floorEntry(epochSeconds);
    if (entry == null) {
        // nothing scheduled at or before this time
        return 0;
    }
    return entry.getValue();
}
/**
 * Checks whether the schedule can be held or not and adapts it if necessary.
 *
 * Simulates the battery's stored energy while following the scheduled load
 * profile from now on. Whenever an energy bound would be violated, the
 * offending schedule entry and the matching task power entry are set to 0 W
 * and the simulation step is rolled back.
 *
 * @param storedEnergy currently stored energy [Wh]
 * @param minEnergy    lower bound for the stored energy [Wh]
 * @param maxEnergy    upper bound for the stored energy [Wh]
 * @param schedule     epoch second -> power [W]; adapted in place
 * @return epoch second count of the earliest inconsistency. 0 if everything seems fine.
 */
public long fixSchedule(int storedEnergy, int minEnergy, int maxEnergy, NavigableMap<Long, Integer> schedule) {
    // follow scheduled load profile and evaluate consumption
    long result = 0;
    long time = Time.service().now();
    Long nextTime;
    while((nextTime = schedule.higherKey(time)) != null) {
        // NOTE(review): NPE if there is no entry at or before 'time' — confirm
        // the schedule always contains such an anchor entry.
        int power = schedule.floorEntry(time).getValue();
        // delta Energy [Wh] = delta Time [s] / (60 * 60) [s/h] * -power [W]
        // NOTE(review): the formula comment says -power but the code adds
        // +power — confirm the sign convention of scheduled power values.
        int tmp = storedEnergy; // remember, in case this step must be undone
        storedEnergy += (int)((nextTime - time) * power / (60.0 * 60));
        if( storedEnergy > maxEnergy && power > 0 ||
                storedEnergy < minEnergy && power < 0) {
            // illegal state [!], make an adaptation: zero the active entry
            Map.Entry<Long, Integer> entry = schedule.floorEntry(time);
            if(entry == null) {
                schedule.put(time, 0); // setting 0 is always valid
            } else {
                schedule.put(entry.getKey(), 0);
            }
            // now change the task to reflect this change
            int id = getTaskIdForTime(time, false);
            Task task = data.tasks.get(id);
            if(null == task) {
                log.warning("Task " + id + " was not found.");
            } else {
                NavigableMap<Integer, Integer> taskPower = new TreeMap<Integer, Integer>(task.power);
                Map.Entry<Integer, Integer> taskEntry = taskPower.floorEntry((int)(time - task.startingTime));
                if(null == taskEntry) {
                    task.power.put((int)(time - task.startingTime), 0);
                } else {
                    task.power.put(taskEntry.getKey(), 0);
                }
            }
            // undo the last step
            // delta Energy [Wh] = delta Time [s] / (60 * 60) [s/h] * -power [W]
            storedEnergy = tmp;
            // resulting stored energy is not an acceptable state;
            // remember only the earliest inconsistency
            if(0 == result) {
                result = time;
            }
        }
        // do time step
        time = nextTime;
    }
    // compress current task
    Task task = data.tasks.get(getTaskIdForTime(Time.service().now(), false));
    if(null != task) {
        task.compress();
    }
    return result;
}
/**
 * Returns the id of the task whose execution window contains the given time.
 *
 * @param epochSecond  point in time, in epoch seconds
 * @param endInclusive whether a time exactly at a task's end still counts as
 *                     belonging to that task
 * @return the task's id, or -1 if no task covers the given time
 */
public int getTaskIdForTime(long epochSecond, boolean endInclusive) {
    for (Task task : data.tasks.values()) {
        long end = task.startingTime + task.runningTime;
        boolean inside = task.startingTime <= epochSecond
                && (endInclusive ? epochSecond <= end : epochSecond < end);
        if (inside) {
            return task.id;
        }
    }
    return -1;
}
/**
 * Background tasks like cleaning up, executed from time to time.
 *
 * Removes finished tasks and their flexibilities, prunes schedule entries
 * older than 24 hours, then either keeps steering towards a pending target
 * SOC or runs a plausibility check of the schedule against the current
 * battery state, publishing a change notification if adaptations were made.
 * All exceptions are caught and logged so the scheduler loop keeps running.
 */
public void run() {
    try {
        // get current epoch seconds
        long now = 0;
        // clean up old schedule data
        synchronized (this) {
            now = Time.service().now();
            // clean up all finished tasks
            Iterator<Map.Entry<Integer, Task>> iterator = data.tasks.entrySet().iterator();
            while(iterator.hasNext()) {
                Task task = iterator.next().getValue();
                if(now > task.startingTime + task.runningTime) {
                    // remove task and flex
                    log.finest("Removing old task and corresponding flexibility: " + task.id);
                    data.flexibilities.remove(task.flexibilityId);
                    iterator.remove();
                }
            }
            Iterator<Map.Entry<Long, Integer>> scheduleIterator = data.scheduledPower.entrySet().iterator();
            while(scheduleIterator.hasNext()) {
                Map.Entry<Long, Integer> entry = scheduleIterator.next();
                // do not remove instantly if it is in the past for debugging purposes
                if(entry.getKey() != 0 && entry.getKey() + 24 * 60 * 60 < now) {
                    // entry is older than 24 hours => remove
                    scheduleIterator.remove();
                }
            }
        }
        if(data.targetSocTime > now && data.targetSOC > 0) {
            // trying to achieve target soc
            setTargetSoc(data.targetSOC, data.targetSocTime);
        } else {
            // usual business
            synchronized(this) {
                // retrieve relevant data
                BatteryStateData currentState = battery.getCurrentStateData();
                if(null != currentState) {
                    BatteryConfiguration configuration = battery.getConfiguration();
                    // calculate necessary values
                    int storedEnergy = currentState.stateOfCharge * configuration.nominalCapacity / 100;
                    int minEnergy = configuration.minStateOfCharge * configuration.nominalCapacity / 100;
                    int maxEnergy = configuration.maxStateOfCharge * configuration.nominalCapacity / 100;
                    // do a plausibility check once in a while and make adaptations if needed
                    long time = fixSchedule(storedEnergy, minEnergy, maxEnergy, data.scheduledPower);
                    //persistence
                    saveState(schedulerDataFile);
                    if(time > 0) {
                        // adaptations have been made !
                        int id = getTaskIdForTime(time, true);
                        if(id < 0) { // this should never happen. Nevertheless, check for debugging.
                            log.severe("Scheduled power without scheduled task!");
                        }
                        battery.publishScheduleChanged(time);
                        log.info("Changed schedule.");
                    }
                }
            }
        }
    } catch(Exception e) {
        e.printStackTrace();
        log.severe("Scheduler loop:" + e.toString());
    }
}
/**
 * Changes the schedule so that the battery reaches the given state of charge
 * at the given time, overwriting whatever was scheduled in that window. The
 * requested charge change is spread over the window in steps of roughly 100 W
 * (capped by the configured charge/discharge limits) and persisted.
 *
 * @param soc  target state of charge in percent
 * @param time epoch second at which the target shall be reached
 */
public synchronized void setTargetSoc(int soc, long time) {
    data.targetSOC = soc;
    data.targetSocTime = time;
    long t = Time.service().now();
    int duration = (int)(time - t);
    // [Ws]
    // NOTE(review): if time <= now, duration is <= 0 and the division in the
    // loop below misbehaves — confirm callers always pass a future time.
    int changeInCharge = (soc - battery.getCurrentStateData().stateOfCharge) * configuration.nominalCapacity * 60 * 60 / 100;
    // delete everything between now and $time
    for(Iterator<Map.Entry<Long, Integer>> iterator = data.scheduledPower.subMap(t, time).entrySet().iterator(); iterator.hasNext(); ){
        iterator.next();
        iterator.remove();
    }
    do {
        int averagePower = changeInCharge / duration;
        if(averagePower < 0) {
            // loading
            // NOTE(review): for some negative values this "floor" moves the
            // value towards (or onto) zero, e.g. -150 -> 0, which would make
            // the division by averagePower below throw — verify the intended
            // 100 W rounding here and in the else branch.
            averagePower -= (averagePower % 100) - 100; // floor, to next 100 W step
            averagePower = Math.max(averagePower, -configuration.maxFlexibilityCharge);
        } else {
            // unloading
            averagePower += (averagePower % 100) - 100; // ceil, to next 100 W step
            averagePower = Math.min(averagePower, configuration.maxFlexibilityDischarge);
        }
        data.scheduledPower.put(t, averagePower);
        // advance far enough that the remaining change shrinks each iteration
        int step = Math.abs(changeInCharge / averagePower) + 1;
        changeInCharge -= averagePower * step;
        duration -= step;
        t += step;
    }while(t < time);
    // done
    data.scheduledPower.put(time, 0);
    // save changes
    saveState(schedulerDataFile);
}
/**
 * Schedules a power value for a given time.
 * (Bypasses tasks and flexibilities by writing directly into the schedule
 * map; note that the change is not persisted here.)
 *
 * @param time  epoch second from which the power value applies
 * @param power power value to schedule
 */
public synchronized void schedulePower(long time, int power) {
    data.scheduledPower.put(time, power);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.test.system;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.test.system.process.ClusterProcessManager;
import org.apache.hadoop.test.system.process.RemoteProcess;
/**
 * Abstract class which represents a cluster having multiple daemons.
 * Concrete subclasses supply the client used to talk to each daemon process.
 */
@SuppressWarnings("unchecked")
public abstract class AbstractDaemonCluster {

  private static final Log LOG = LogFactory.getLog(AbstractDaemonCluster.class);
  /** Exception patterns to ignore when asserting a clean run; may be null. */
  private String[] excludeExpList;
  private Configuration conf;
  protected ClusterProcessManager clusterManager;
  /** Role -> clients of all daemons playing that role, in creation order. */
  private Map<Enum<?>, List<AbstractDaemonClient>> daemons =
    new LinkedHashMap<Enum<?>, List<AbstractDaemonClient>>();

  /**
   * Constructor to create a cluster client.<br/>
   *
   * @param conf
   *          Configuration to be used while constructing the cluster.
   * @param rcluster
   *          process manager instance to be used for managing the daemons.
   *
   * @throws IOException
   */
  public AbstractDaemonCluster(Configuration conf,
      ClusterProcessManager rcluster) throws IOException {
    this.conf = conf;
    this.clusterManager = rcluster;
    // NOTE: invokes the overridable createAllClients()/abstract createClient()
    // during construction; subclasses must not rely on their own state there.
    createAllClients();
  }

  /**
   * The method returns the cluster manager. The system test cases require an
   * instance of HadoopDaemonRemoteCluster to invoke certain operation on the
   * daemon.
   *
   * @return instance of clusterManager
   */
  public ClusterProcessManager getClusterManager() {
    return clusterManager;
  }

  /** Creates one daemon client per remote process, grouped by role. */
  protected void createAllClients() throws IOException {
    for (RemoteProcess p : clusterManager.getAllProcesses()) {
      List<AbstractDaemonClient> dms = daemons.get(p.getRole());
      if (dms == null) {
        dms = new ArrayList<AbstractDaemonClient>();
        daemons.put(p.getRole(), dms);
      }
      dms.add(createClient(p));
    }
  }

  /**
   * Method to create the daemon client.<br/>
   *
   * @param process
   *          to manage the daemon.
   * @return instance of the daemon client
   *
   * @throws IOException
   */
  protected abstract AbstractDaemonClient<DaemonProtocol>
      createClient(RemoteProcess process) throws IOException;

  /**
   * Get the global cluster configuration which was used to create the
   * cluster. <br/>
   *
   * @return global configuration of the cluster.
   */
  public Configuration getConf() {
    return conf;
  }

  /**
   * Return the client handle of all the Daemons.<br/>
   * (The original javadoc here was a broken, nested comment; fixed.)
   *
   * @return map of role to daemon clients' list.
   */
  public Map<Enum<?>, List<AbstractDaemonClient>> getDaemons() {
    return daemons;
  }

  /**
   * Checks if the cluster is ready for testing. <br/>
   * Algorithm for checking is as follows : <br/>
   * <ul>
   * <li> Wait for Daemon to come up </li>
   * <li> Check if daemon is ready </li>
   * <li> If one of the daemon is not ready, return false </li>
   * </ul>
   *
   * @return true if whole cluster is ready.
   *
   * @throws IOException
   */
  public boolean isReady() throws IOException {
    for (List<AbstractDaemonClient> set : daemons.values()) {
      for (AbstractDaemonClient daemon : set) {
        waitForDaemon(daemon);
        if (!daemon.isReady()) {
          return false;
        }
      }
    }
    return true;
  }

  /**
   * Blocks until the given daemon accepts a connection, retrying every ten
   * seconds. Stops early (with the interrupt status restored) if the waiting
   * thread is interrupted.
   */
  protected void waitForDaemon(AbstractDaemonClient d) {
    final int TEN_SEC = 10000;
    while (true) {
      try {
        LOG.info("Waiting for daemon at " + d.getHostName() + " to come up.");
        LOG.info("Daemon might not be " +
            "ready or the call to setReady() method hasn't been " +
            "injected to " + d.getClass() + " ");
        d.connect();
        break;
      } catch (IOException e) {
        try {
          Thread.sleep(TEN_SEC);
        } catch (InterruptedException ie) {
          // Previously swallowed silently; restore the interrupt status and
          // stop waiting instead of spinning.
          Thread.currentThread().interrupt();
          return;
        }
      }
    }
  }

  /**
   * Starts the cluster daemons.
   * @throws IOException
   */
  public void start() throws IOException {
    clusterManager.start();
  }

  /**
   * Stops the cluster daemons.
   * @throws IOException
   */
  public void stop() throws IOException {
    clusterManager.stop();
  }

  /**
   * Connect to daemon RPC ports.
   * @throws IOException
   */
  public void connect() throws IOException {
    for (List<AbstractDaemonClient> set : daemons.values()) {
      for (AbstractDaemonClient daemon : set) {
        daemon.connect();
      }
    }
  }

  /**
   * Disconnect to daemon RPC ports.
   * @throws IOException
   */
  public void disconnect() throws IOException {
    for (List<AbstractDaemonClient> set : daemons.values()) {
      for (AbstractDaemonClient daemon : set) {
        daemon.disconnect();
      }
    }
  }

  /**
   * Ping all the daemons of the cluster.
   * @throws IOException
   */
  public void ping() throws IOException {
    for (List<AbstractDaemonClient> set : daemons.values()) {
      for (AbstractDaemonClient daemon : set) {
        LOG.info("Daemon is : " + daemon.getHostName() + " pinging....");
        daemon.ping();
      }
    }
  }

  /**
   * Connect to the cluster and ensure that it is clean to run tests.
   * @throws Exception
   */
  public void setUp() throws Exception {
    while (!isReady()) {
      Thread.sleep(1000);
    }
    connect();
    ping();
    clearAllControlActions();
    ensureClean();
    populateExceptionCounts();
  }

  /**
   * This is mainly used for the test cases to set the list of exceptions
   * that will be excluded.
   * @param excludeExpList list of exceptions to exclude
   */
  public void setExcludeExpList(String[] excludeExpList) {
    this.excludeExpList = excludeExpList;
  }

  /** Clears any pending control actions on every daemon. */
  public void clearAllControlActions() throws IOException {
    for (List<AbstractDaemonClient> set : daemons.values()) {
      for (AbstractDaemonClient daemon : set) {
        // Fixed copy-pasted log text that previously claimed to be pinging.
        LOG.info("Daemon is : " + daemon.getHostName()
            + " clearing control actions....");
        daemon.getProxy().clearActions();
      }
    }
  }

  /**
   * Ensure that the cluster is clean to run tests.
   * @throws IOException
   */
  public void ensureClean() throws IOException {
  }

  /**
   * Ensure that cluster is clean. Disconnect from the RPC ports of the daemons.
   * @throws IOException
   */
  public void tearDown() throws IOException {
    ensureClean();
    clearAllControlActions();
    assertNoExceptionMessages();
    disconnect();
  }

  /**
   * Populate the exception counts in all the daemons so that it can be checked when
   * the testcase has finished running.<br/>
   * @throws IOException
   */
  protected void populateExceptionCounts() throws IOException {
    for (List<AbstractDaemonClient> lst : daemons.values()) {
      for (AbstractDaemonClient d : lst) {
        d.populateExceptionCount(excludeExpList);
      }
    }
  }

  /**
   * Assert no exception has been thrown during the sequence of the actions.
   * <br/>
   * @throws IOException
   */
  protected void assertNoExceptionMessages() throws IOException {
    for (List<AbstractDaemonClient> lst : daemons.values()) {
      for (AbstractDaemonClient d : lst) {
        d.assertNoExceptionsOccurred(excludeExpList);
      }
    }
  }
}
| |
package com.valadian.bergecraft;
import java.text.DecimalFormat;
import java.util.HashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.bukkit.Material;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.entity.Damageable;
import org.bukkit.entity.Entity;
import org.bukkit.entity.HumanEntity;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.EntityDamageEvent.DamageCause;
import org.bukkit.event.inventory.InventoryCloseEvent;
import org.bukkit.event.player.PlayerJoinEvent;
import org.bukkit.event.player.PlayerTeleportEvent;
import org.bukkit.event.player.PlayerTeleportEvent.TeleportCause;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.PlayerInventory;
import org.bukkit.potion.PotionEffect;
import org.bukkit.potion.PotionEffectType;
import vg.civcraft.mc.civmodcore.ACivMod;
import vg.civcraft.mc.civmodcore.annotations.CivConfig;
import vg.civcraft.mc.civmodcore.annotations.CivConfigs;
import vg.civcraft.mc.civmodcore.annotations.CivConfigType;
import com.valadian.bergecraft.bergeypvp.WeaponTimer;
public class BergeyPvp extends ACivMod{

    @Override
    protected String getPluginName() {
        return "BergeyPVP";
    }

    // NOTE: initialized via the overridable getPluginName(); fine as long as
    // overrides do not depend on subclass state during construction.
    protected final Logger log_ = Logger.getLogger(getPluginName());
    HashMap<Player,WeaponTimer> cooldowns = new HashMap<Player,WeaponTimer>();
    DecimalFormat df = new DecimalFormat("#.00");

    /**
     * Logs a message and, when the debug_messages config flag is enabled,
     * also sends it to the given players as chat messages.
     *
     * @param level   log level
     * @param message text to log/send
     * @param players optional recipients; may be null or contain null entries
     */
    @CivConfig(name="debug_messages", def="false", type = CivConfigType.Bool)
    public void Log(Level level, String message, Player... players){
        log_.log(level, message);
        if(players!=null){
            for(Player player: players){
                if(player!=null && GetConfig()!=null && GetConfig().get("debug_messages").getBool()){
                    player.sendMessage(message);
                }
            }
        }
    }

    /** Convenience overload that logs at INFO level. */
    public void Log(String message, Player... players){
        Log(Level.INFO,message, players);
    }

    /**
     * Rebalances player-dealt melee damage: replaces vanilla's strength and
     * sharpness bonuses with the configured, flatter per-level bonuses.
     */
    @CivConfigs({
        @CivConfig(name="bergey_pvp_weapons", def="false", type = CivConfigType.Bool),
        @CivConfig(name="bergey_pvp_weapon_cooldown", def="3000",type=CivConfigType.Int),
        @CivConfig(name="nerf_sharpness", def="true", type = CivConfigType.Bool),
        @CivConfig(name="sharpness_damage_per_level", type=CivConfigType.Double, def="0.66"),
        @CivConfig(name="nerf_strength", def="true", type = CivConfigType.Bool),
        @CivConfig(name="strength_multiplier", type=CivConfigType.Double, def="1.5")
    })
    @EventHandler(priority = EventPriority.LOW, ignoreCancelled = true)
    public void onPlayerDamage(EntityDamageByEntityEvent event) {
        // Only damage dealt by players is adjusted. (The original nested a
        // second, always-false instanceof re-check inside this branch; that
        // dead code has been removed.)
        if (!(event.getDamager() instanceof Player)) {
            return;
        }
        Player player = (Player)event.getDamager();
        ItemStack item = player.getItemInHand();
        Entity entity = event.getEntity();
        Player defender = null;
        if (entity instanceof Player) {
            defender = (Player)entity;
        }
        if (GetConfig().get("nerf_strength").getBool()) {
            //Apply Strength Nerf
            final double strengthMultiplier = GetConfig().get("strength_multiplier").getDouble();
            if (player.hasPotionEffect(PotionEffectType.INCREASE_DAMAGE)) {
                for (PotionEffect effect : player.getActivePotionEffects()) {
                    if (effect.getType().equals(PotionEffectType.INCREASE_DAMAGE)) {
                        final int potionLevel = effect.getAmplifier() + 1;
                        // undo the multiplicative strength buff, then add a
                        // flat bonus per potion level instead
                        final double unbuffedDamage = event.getDamage() / (1.3 * potionLevel + 1);
                        final double newDamage = unbuffedDamage + (potionLevel * strengthMultiplier);
                        Log("STR NERF: Pre STR"+potionLevel+": "+df.format(unbuffedDamage)+". Reduce dam from: "+df.format(event.getDamage())+" to: "+df.format(newDamage), player, defender);
                        event.setDamage(newDamage);
                        break;
                    }
                }
            }
        }
        if (GetConfig().get("nerf_sharpness").getBool()) {
            //Apply Sharp Nerf
            int sharpness = item.getEnchantmentLevel(Enchantment.DAMAGE_ALL);
            final double sharpnessOffset = GetConfig().get("sharpness_damage_per_level").getDouble();
            if(sharpness>0){
                // undo the 1.25 damage per sharpness level, then add the
                // configured damage per level instead
                final double unbuffedDamage = event.getDamage() - 1.25 * sharpness;
                final double newDamage = unbuffedDamage + sharpnessOffset * sharpness;
                Log("STR NERF: Pre Sharp"+sharpness+": "+df.format(unbuffedDamage)+". Reduce dam from: "+df.format(event.getDamage())+" to: "+df.format(newDamage), player, defender);
                event.setDamage(newDamage);
            }
        }
    }

    /**
     * Re-maps incoming damage on players from vanilla's linear armor
     * reduction onto a diminishing-returns curve: the raw damage is
     * reconstructed from the vanilla reduction, then reduced by
     * defense / (defense + bergey_armor_50_perc_mit) (and analogously for
     * protection enchantments when the damage cause allows it).
     */
    @CivConfigs ({
        @CivConfig(name="bergey_armor", def="true", type = CivConfigType.Bool),
        @CivConfig(name="bergey_armor_50_perc_mit", def="10",type=CivConfigType.Int),
        @CivConfig(name="bergey_prot", def="true", type = CivConfigType.Bool),
        @CivConfig(name="bergey_prot_50_perc_mit", def="7",type=CivConfigType.Int),
        @CivConfig(name="bergey_prot_scale", def="0.5",type=CivConfigType.Double),
        @CivConfig(name="bergey_linear_prot_epf", def="false",type=CivConfigType.Bool),
    })
    @EventHandler(priority = EventPriority.LOWEST) // ignoreCancelled=false
    public void onPlayerTakeDamage(EntityDamageEvent event) {
        if (!GetConfig().get("bergey_armor").getBool()) {
            return;
        }
        double damage = event.getDamage();
        if (damage <= 0.0000001D) {
            return;
        }
        DamageCause cause = event.getCause();
        if (!isCommonDamage(cause)) {
            return;
        }
        // protection enchantments only count for attack/projectile damage
        boolean factorProt = cause.equals(DamageCause.ENTITY_ATTACK) ||
            cause.equals(DamageCause.PROJECTILE);
        Entity entity = event.getEntity();
        if (!(entity instanceof Player)) {
            return;
        }
        Player defender = (Player)entity;
        double defense = getDefense(defender);
        double epf = getAverageEPF(defender);
        double bergey_epf = getAverageBergeyEPF(defender);
        double vanilla_reduction = defense * 0.04;
        double vanilla_protection_reduction = 0;
        if(factorProt){
            vanilla_protection_reduction = epf * 0.04;
        }
        // reconstruct the pre-armor damage from vanilla's reductions
        double vanilla_damage_taken_ratio = (1 - vanilla_reduction) * (1 - vanilla_protection_reduction);
        double originalDamage = damage / vanilla_damage_taken_ratio;
        double bergey_reduction = defense / (defense + GetConfig().get("bergey_armor_50_perc_mit").getInt());
        double bergey_prot_reduction = 0;
        if(factorProt){
            bergey_prot_reduction = bergey_epf / (bergey_epf + GetConfig().get("bergey_prot_50_perc_mit").getInt()) * GetConfig().get("bergey_prot_scale").getDouble();
        }
        double bergey_damage_taken_ratio = (1 - bergey_reduction) * (1 - bergey_prot_reduction);
        double newDamage = originalDamage * bergey_damage_taken_ratio;
        DecimalFormat df = new DecimalFormat("#.##");
        if(factorProt) {
            Log(Level.INFO, "[Vanilla] Armor: "+df.format(vanilla_reduction)+", Enchant: "+df.format(vanilla_protection_reduction)+"\n"+
                "[Vanilla] Damage: "+df.format(damage*(1-vanilla_reduction)*(1-vanilla_protection_reduction))+", \n"+
                "[Bergey ] Armor: "+df.format(bergey_reduction)+", Enchant: "+df.format(bergey_prot_reduction)+"\n"+
                "[Bergey ] Damage: "+df.format(damage*(1-bergey_reduction)*(1-bergey_prot_reduction))+", \n"+
                "Damage Before: "+df.format(damage)+ " Damage After: "+df.format(newDamage),defender);
        }
        else {
            Log(Level.INFO, "[Vanilla] Armor: "+df.format(vanilla_reduction)+", \n"+
                "[Vanilla] Damage: "+df.format(damage*(1-vanilla_reduction))+", \n"+
                "[Bergey ] Armor: "+df.format(bergey_reduction)+"\n"+
                "[Bergey ] Damage: "+df.format(damage*(1-bergey_reduction))+", \n"+
                "Raw Damage Before: "+df.format(damage)+ " Damage After: "+df.format(newDamage),defender);
        }
        event.setDamage(newDamage);
    }

    /** Damage causes the armor rebalancing applies to. */
    private boolean isCommonDamage(DamageCause cause)
    {
        return cause.equals(DamageCause.ENTITY_ATTACK) ||
            cause.equals(DamageCause.PROJECTILE) ||
            cause.equals(DamageCause.FIRE) ||
            cause.equals(DamageCause.LAVA) ||
            cause.equals(DamageCause.CONTACT) ||
            cause.equals(DamageCause.ENTITY_EXPLOSION) ||
            cause.equals(DamageCause.LIGHTNING) ||
            cause.equals(DamageCause.BLOCK_EXPLOSION);
    }

    /** Sum of vanilla armor points over the player's equipped armor pieces. */
    private double getDefense(Player player)
    {
        PlayerInventory inv = player.getInventory();
        ItemStack boots = inv.getBoots();
        ItemStack helmet = inv.getHelmet();
        ItemStack chest = inv.getChestplate();
        ItemStack pants = inv.getLeggings();
        int def = 0;
        if(helmet!=null){
            if(helmet.getType() == Material.LEATHER_HELMET)def = def + 1;
            else if(helmet.getType() == Material.GOLD_HELMET)def = def + 2;
            else if(helmet.getType() == Material.CHAINMAIL_HELMET)def = def + 2;
            else if(helmet.getType() == Material.IRON_HELMET)def = def + 2;
            else if(helmet.getType() == Material.DIAMOND_HELMET)def = def + 3;
        }
        //
        if(boots!=null){
            if(boots.getType() == Material.LEATHER_BOOTS)def = def + 1;
            else if(boots.getType() == Material.GOLD_BOOTS)def = def + 1;
            else if(boots.getType() == Material.CHAINMAIL_BOOTS)def = def + 1;
            else if(boots.getType() == Material.IRON_BOOTS)def = def + 2;
            else if(boots.getType() == Material.DIAMOND_BOOTS)def = def + 3;
        }
        //
        if(pants!=null){
            if(pants.getType() == Material.LEATHER_LEGGINGS)def = def + 2;
            else if(pants.getType() == Material.GOLD_LEGGINGS)def = def + 3;
            else if(pants.getType() == Material.CHAINMAIL_LEGGINGS)def = def + 4;
            else if(pants.getType() == Material.IRON_LEGGINGS)def = def + 5;
            else if(pants.getType() == Material.DIAMOND_LEGGINGS)def = def + 6;
        }
        //
        if(chest!=null){
            if(chest.getType() == Material.LEATHER_CHESTPLATE)def = def + 3;
            else if(chest.getType() == Material.GOLD_CHESTPLATE)def = def + 5;
            else if(chest.getType() == Material.CHAINMAIL_CHESTPLATE)def = def + 5;
            else if(chest.getType() == Material.IRON_CHESTPLATE)def = def + 6;
            else if(chest.getType() == Material.DIAMOND_CHESTPLATE)def = def + 8;
        }
        return def;
    }

    /**
     * Summed environmental-protection EPF over all armor pieces (level 4
     * counted as 5, like vanilla's max EPF), scaled by 0.75.
     */
    private double getAverageEPF(Player player)
    {
        PlayerInventory inv = player.getInventory();
        int epf = 0;
        for (ItemStack armor : inv.getArmorContents()) {
            if (armor == null) {
                // empty armor slot (getArmorContents may contain nulls);
                // previously this caused a NullPointerException
                continue;
            }
            int level = armor.getEnchantmentLevel(Enchantment.PROTECTION_ENVIRONMENTAL);
            if(level == 4)
                level = 5;
            epf += level;
        }
        return epf*0.75;
    }

    /**
     * EPF used for the bergey protection curve: either the vanilla-style EPF
     * or, when bergey_linear_prot_epf is set, a linear 1.25 per level.
     */
    private double getAverageBergeyEPF(Player player)
    {
        if(!GetConfig().get("bergey_linear_prot_epf").getBool()){
            return getAverageEPF(player);
        }
        PlayerInventory inv = player.getInventory();
        // accumulate as double: the former int accumulator silently
        // truncated the fractional part of level * 1.25
        double epf = 0;
        for (ItemStack armor : inv.getArmorContents()) {
            if (armor == null) {
                // empty armor slot; previously caused a NullPointerException
                continue;
            }
            epf += armor.getEnchantmentLevel(Enchantment.PROTECTION_ENVIRONMENTAL) * 1.25;
        }
        return epf*0.75;
    }

    @EventHandler(priority = EventPriority.LOWEST)
    public void onPlayerLogin(PlayerJoinEvent event){
        setMaxHealth(event.getPlayer());
    }

    @EventHandler(priority = EventPriority.LOWEST)
    public void onPlayerCloseInventory(InventoryCloseEvent event){
        HumanEntity human = event.getPlayer();
        if(human instanceof Player){
            setMaxHealth((Player) human);
        }
    }

    /**
     * Scales the player's maximum health with the total maximum durability of
     * the worn armor, using a diminishing-returns curve on top of the
     * configured base health.
     */
    @CivConfigs ({
        @CivConfig(name="bergey_health", def="true", type = CivConfigType.Bool),
        @CivConfig(name="bergey_base_health", def="20.0",type=CivConfigType.Double),
        @CivConfig(name="bergey_max_bonus_health", def="29.0",type=CivConfigType.Double),
        @CivConfig(name="bergey_health_bonus_50_perc_durability", def="850",type=CivConfigType.Double)
    })
    public void setMaxHealth(Player player){
        if (!GetConfig().get("bergey_health").getBool()) {
            return;
        }
        double maxHealth = GetConfig().get("bergey_base_health").getDouble();
        double durability = 0;
        for (ItemStack armor : player.getInventory().getArmorContents()) {
            if (armor == null) {
                // empty armor slot; previously caused a NullPointerException
                continue;
            }
            durability += armor.getType().getMaxDurability();
        }
        maxHealth += GetConfig().get("bergey_max_bonus_health").getDouble() *
            durability / (durability + GetConfig().get("bergey_health_bonus_50_perc_durability").getDouble());
        if(maxHealth != ((Damageable) player).getMaxHealth()){
            log_.log(Level.INFO, "Setting Player: "+player.getName()+" to "+maxHealth+" health");
            // clamp current health first so setMaxHealth never leaves the
            // player above the new maximum
            if(((Damageable)player).getHealth()>maxHealth)
            {
                player.setHealth(maxHealth);
            }
            player.setMaxHealth(maxHealth);
        }
    }

    /** Restores the vanilla maximum health of 20. */
    public void resetMaxHealth(Player player){
        double maxHealth = 20.0d;
        if(maxHealth != ((Damageable) player).getMaxHealth()){
            log_.log(Level.INFO, "Setting Player: "+player.getName()+" to "+maxHealth+" health");
            if(((Damageable)player).getHealth()>maxHealth)
            {
                player.setHealth(maxHealth);
            }
            player.setMaxHealth(maxHealth);
        }
    }

    /** Cancels ender pearl teleports when they are disabled via config. */
    @CivConfig(name="ender_pearl_teleportation", def="true", type = CivConfigType.Bool)
    @EventHandler(priority = EventPriority.LOWEST, ignoreCancelled = true)
    public void onTeleport(PlayerTeleportEvent event) {
        TeleportCause cause = event.getCause();
        if (cause.equals(TeleportCause.ENDER_PEARL) &&
                !GetConfig().get("ender_pearl_teleportation").getBool()) {
            event.setCancelled(true);
            event.getPlayer().sendMessage("Ender pearls are disabled in Bergecraft PVP mode.");
        }
    }
}
| |
/*
* Copyright 2015 Yan Zhenjie
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yanzhenjie.nohttp;
import android.text.TextUtils;
import com.yanzhenjie.nohttp.tools.CounterOutputStream;
import com.yanzhenjie.nohttp.tools.HeaderUtil;
import com.yanzhenjie.nohttp.tools.IOUtils;
import com.yanzhenjie.nohttp.tools.LinkedMultiValueMap;
import com.yanzhenjie.nohttp.tools.MultiValueMap;
import org.json.JSONObject;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.HttpCookie;
import java.net.Proxy;
import java.net.URLEncoder;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLSocketFactory;
/**
* <p>
* Implement all the methods of the base class {@link IBasicRequest}.
* </p>
* Created in Nov 4, 2015 8:28:50 AM.
*
* @author Yan Zhenjie.
*/
public abstract class BasicRequest implements IBasicRequest {
private final String boundary = createBoundary();
private final String startBoundary = "--" + boundary;
private final String endBoundary = startBoundary + "--";
/**
* Request priority.
*/
private Priority mPriority = Priority.DEFAULT;
/**
* The sequence.
*/
private int sequence;
/**
* Target address.
*/
private String url;
/**
* Request method.
*/
private RequestMethod mRequestMethod;
/**
* MultipartFormEnable.
*/
private boolean isMultipartFormEnable = false;
/**
* Proxy server.
*/
private Proxy mProxy;
/**
* SSLSockets.
*/
private SSLSocketFactory mSSLSocketFactory = null;
/**
* HostnameVerifier.
*/
private HostnameVerifier mHostnameVerifier = null;
/**
* Connect timeout of request.
*/
private int mConnectTimeout = NoHttp.getConnectTimeout();
/**
* Read data timeout.
*/
private int mReadTimeout = NoHttp.getReadTimeout();
/**
* Request heads.
*/
private Headers mHeaders;
/**
* After the failure of retries.
*/
private int mRetryCount;
/**
* The params encoding.
*/
private String mParamEncoding;
/**
* Param collection.
*/
private MultiValueMap<String, Object> mParamKeyValues;
/**
* RequestBody.
*/
private InputStream mRequestBody;
/**
* Redirect handler.
*/
private RedirectHandler mRedirectHandler;
/**
* Request queue
*/
private BlockingQueue<?> blockingQueue;
/**
* The record has started.
*/
private boolean isStart = false;
/**
* The request is completed.
*/
private boolean isFinished = false;
/**
* Has been canceled.
*/
private boolean isCanceled = false;
/**
* Cancel sign.
*/
private Object mCancelSign;
/**
* Tag of request.
*/
private Object mTag;
/**
* Create a request, RequestMethod is {@link RequestMethod#GET}.
*
* @param url request address, like: http://www.yanzhenjie.com.
*/
public BasicRequest(String url) {
this(url, RequestMethod.GET);
}
/**
* Create a request.
*
* @param url request adress, like: http://www.yanzhenjie.com.
* @param requestMethod request method, like {@link RequestMethod#GET}, {@link RequestMethod#POST}.
*/
public BasicRequest(String url, RequestMethod requestMethod) {
this.url = url;
mRequestMethod = requestMethod;
mHeaders = new HttpHeaders();
mHeaders.set(Headers.HEAD_KEY_ACCEPT, Headers.HEAD_VALUE_ACCEPT_ALL);
mHeaders.set(Headers.HEAD_KEY_ACCEPT_ENCODING, Headers.HEAD_VALUE_ACCEPT_ENCODING_GZIP_DEFLATE);
mHeaders.set(Headers.HEAD_KEY_ACCEPT_LANGUAGE, HeaderUtil.systemAcceptLanguage());
mHeaders.set(Headers.HEAD_KEY_USER_AGENT, UserAgent.instance());
mParamKeyValues = new LinkedMultiValueMap<>();
}
@Override
public IBasicRequest setPriority(Priority priority) {
this.mPriority = priority;
return this;
}
@Override
public Priority getPriority() {
return mPriority;
}
@Override
public IBasicRequest setSequence(int sequence) {
this.sequence = sequence;
return this;
}
@Override
public int getSequence() {
return this.sequence;
}
/**
 * Orders requests for the dispatch queue: higher {@link Priority} first,
 * then FIFO (lower sequence number first) among equal priorities.
 *
 * @param another the request to compare against.
 * @return negative if this request should run earlier, positive if later.
 */
@Override
public final int compareTo(IBasicRequest another) {
    final Priority me = getPriority();
    final Priority it = another.getPriority();
    // Integer.compare avoids the overflow pitfall of subtracting raw ints.
    return me == it
            ? Integer.compare(getSequence(), another.getSequence())
            : Integer.compare(it.ordinal(), me.ordinal());
}
/**
 * Resolves the final request URL. Params are appended to the query string
 * unless they will travel in the request body instead.
 */
@Override
public String url() {
    StringBuilder target = new StringBuilder(url);
    // Params belong in the body only when there is no caller-defined body
    // and the method (POST/PUT/...) is allowed to carry one.
    boolean paramsInBody = !hasDefineRequestBody() && getRequestMethod().allowRequestBody();
    if (!paramsInBody) {
        buildUrl(target);
    }
    return target.toString();
}
/**
 * Appends the common (string) params to the URL as a query string.
 *
 * <p>Fix: the old check ({@code contains("?") && contains("=")}) emitted a
 * second "?" for URLs such as {@code http://host/path?token}. Any existing
 * "?" now joins with "&" unless the URL already ends in a separator.</p>
 *
 * @param urlBuilder url StringBuilder.
 */
private void buildUrl(StringBuilder urlBuilder) {
    StringBuilder paramBuilder = buildCommonParams(getParamKeyValues(), getParamsEncoding());
    if (paramBuilder.length() <= 0) return;
    if (url.contains("?")) {
        // Query part already present: join unless it ends with "?" or "&".
        if (!url.endsWith("?") && !url.endsWith("&")) urlBuilder.append("&");
    } else {
        urlBuilder.append("?");
    }
    urlBuilder.append(paramBuilder);
}
@Override
public RequestMethod getRequestMethod() {
return mRequestMethod;
}
@Override
public IBasicRequest setMultipartFormEnable(boolean enable) {
validateMethodForBody("Form body");
isMultipartFormEnable = enable;
return this;
}
@Override
public boolean isMultipartFormEnable() {
return isMultipartFormEnable || hasBinary();
}
@Override
public IBasicRequest setProxy(Proxy proxy) {
this.mProxy = proxy;
return this;
}
@Override
public Proxy getProxy() {
return mProxy;
}
@Override
public IBasicRequest setSSLSocketFactory(SSLSocketFactory socketFactory) {
mSSLSocketFactory = socketFactory;
return this;
}
@Override
public SSLSocketFactory getSSLSocketFactory() {
return mSSLSocketFactory;
}
@Override
public IBasicRequest setHostnameVerifier(HostnameVerifier hostnameVerifier) {
mHostnameVerifier = hostnameVerifier;
return this;
}
@Override
public HostnameVerifier getHostnameVerifier() {
return mHostnameVerifier;
}
@Override
public IBasicRequest setConnectTimeout(int connectTimeout) {
mConnectTimeout = connectTimeout;
return this;
}
@Override
public int getConnectTimeout() {
return mConnectTimeout;
}
@Override
public IBasicRequest setReadTimeout(int readTimeout) {
mReadTimeout = readTimeout;
return this;
}
@Override
public int getReadTimeout() {
return mReadTimeout;
}
@Override
public IBasicRequest addHeader(String key, String value) {
mHeaders.add(key, value);
return this;
}
@Override
public IBasicRequest setHeader(String key, String value) {
mHeaders.set(key, value);
return this;
}
@Override
public IBasicRequest addHeader(HttpCookie cookie) {
if (cookie != null)
mHeaders.add(Headers.HEAD_KEY_COOKIE, cookie.getName() + "=" + cookie.getValue());
return this;
}
@Override
public IBasicRequest removeHeader(String key) {
mHeaders.remove(key);
return this;
}
@Override
public IBasicRequest removeAllHeader() {
mHeaders.clear();
return this;
}
@Override
public Headers headers() {
return mHeaders;
}
@Override
public IBasicRequest setAccept(String accept) {
mHeaders.set(Headers.HEAD_KEY_ACCEPT, accept);
return this;
}
@Override
public IBasicRequest setAcceptLanguage(String acceptLanguage) {
mHeaders.set(Headers.HEAD_KEY_ACCEPT_LANGUAGE, acceptLanguage);
return this;
}
/**
 * Computes the byte length of the request body by streaming it through a
 * counting sink instead of buffering it in memory.
 *
 * @return the number of bytes {@link #onWriteRequestBody(OutputStream)} would
 * produce; on an I/O error, the count accumulated up to the failure.
 */
@Override
public long getContentLength() {
    // CounterOutputStream discards the data and only records how much was
    // "written"; the write paths special-case it for binaries.
    CounterOutputStream outputStream = new CounterOutputStream();
    try {
        onWriteRequestBody(outputStream);
    } catch (IOException e) {
        // Deliberate best-effort: log and return the partial count.
        Logger.e(e);
    }
    return outputStream.get();
}
@Override
public IBasicRequest setContentType(String contentType) {
mHeaders.set(Headers.HEAD_KEY_CONTENT_TYPE, contentType);
return this;
}
/**
 * Resolves the Content-Type header: an explicitly set header value wins;
 * otherwise a multipart or url-encoded form type is derived from the
 * request method and form mode.
 */
@Override
public String getContentType() {
    String explicit = mHeaders.getValue(Headers.HEAD_KEY_CONTENT_TYPE, 0);
    if (!TextUtils.isEmpty(explicit)) {
        return explicit;
    }
    boolean multipart = getRequestMethod().allowRequestBody() && isMultipartFormEnable();
    return multipart
            ? Headers.HEAD_VALUE_ACCEPT_MULTIPART_FORM_DATA + "; boundary=" + boundary
            : Headers.HEAD_VALUE_ACCEPT_APPLICATION_X_WWW_FORM_URLENCODED + "; charset=" + getParamsEncoding();
}
@Override
public IBasicRequest setUserAgent(String userAgent) {
mHeaders.set(Headers.HEAD_KEY_USER_AGENT, userAgent);
return this;
}
@Override
public IBasicRequest setRetryCount(int count) {
this.mRetryCount = count;
return this;
}
@Override
public int getRetryCount() {
return mRetryCount;
}
@Override
public IBasicRequest setParamsEncoding(String encoding) {
this.mParamEncoding = encoding;
return this;
}
@Override
public String getParamsEncoding() {
if (TextUtils.isEmpty(mParamEncoding))
mParamEncoding = "utf-8";
return mParamEncoding;
}
@Override
public IBasicRequest add(String key, int value) {
add(key, Integer.toString(value));
return this;
}
@Override
public IBasicRequest add(String key, long value) {
add(key, Long.toString(value));
return this;
}
@Override
public IBasicRequest add(String key, boolean value) {
add(key, String.valueOf(value));
return this;
}
@Override
public IBasicRequest add(String key, char value) {
add(key, String.valueOf(value));
return this;
}
@Override
public IBasicRequest add(String key, double value) {
add(key, Double.toString(value));
return this;
}
@Override
public IBasicRequest add(String key, float value) {
add(key, Float.toString(value));
return this;
}
@Override
public IBasicRequest add(String key, short value) {
add(key, Integer.toString(value));
return this;
}
@Override
public IBasicRequest add(String key, byte value) {
add(key, Integer.toString(value));
return this;
}
@Override
public IBasicRequest add(String key, String value) {
if (value != null) {
mParamKeyValues.add(key, value);
}
return this;
}
@Override
public IBasicRequest set(String key, String value) {
if (value != null)
mParamKeyValues.set(key, value);
return this;
}
/**
 * Validate method for request body.
 *
 * <p>Rejects attempts to attach body content (forms, binaries, custom
 * streams) to request methods that cannot carry a body.</p>
 *
 * @param methodObject human-readable name of the feature being validated,
 * used to build the exception message.
 * @throws IllegalArgumentException if the current method disallows a body.
 */
private void validateMethodForBody(String methodObject) {
    if (!getRequestMethod().allowRequestBody())
        throw new IllegalArgumentException(methodObject + " only supports these request methods: " +
                "POST/PUT/PATCH/DELETE.");
}
@Override
public IBasicRequest add(String key, Binary binary) {
validateMethodForBody("The Binary param");
mParamKeyValues.add(key, binary);
return this;
}
@Override
public IBasicRequest set(String key, Binary binary) {
validateMethodForBody("The Binary param");
mParamKeyValues.set(key, binary);
return this;
}
@Override
public IBasicRequest add(String key, File file) {
validateMethodForBody("The File param");
add(key, new FileBinary(file));
return this;
}
@Override
public IBasicRequest set(String key, File file) {
validateMethodForBody("The File param");
set(key, new FileBinary(file));
return this;
}
@Override
public IBasicRequest add(String key, List<Binary> binaries) {
validateMethodForBody("The List<Binary> param");
if (binaries != null) {
for (Binary binary : binaries)
mParamKeyValues.add(key, binary);
}
return this;
}
@Override
public IBasicRequest set(String key, List<Binary> binaries) {
validateMethodForBody("The List<Binary> param");
mParamKeyValues.remove(key);
add(key, binaries);
return this;
}
@Override
public IBasicRequest add(Map<String, String> params) {
if (params != null) {
for (Map.Entry<String, String> stringEntry : params.entrySet())
add(stringEntry.getKey(), stringEntry.getValue());
}
return this;
}
@Override
public IBasicRequest set(Map<String, String> params) {
if (params != null) {
for (Map.Entry<String, String> stringEntry : params.entrySet())
set(stringEntry.getKey(), stringEntry.getValue());
}
return this;
}
@Override
public IBasicRequest remove(String key) {
mParamKeyValues.remove(key);
return this;
}
@Override
public IBasicRequest removeAll() {
mParamKeyValues.clear();
return this;
}
@Override
public MultiValueMap<String, Object> getParamKeyValues() {
return mParamKeyValues;
}
/**
 * Validate param null.
 *
 * @param body request body.
 * @param contentType content type.
 * @throws NullPointerException if the body is null or the content type is
 * null/empty.
 */
private void validateParamForBody(Object body, String contentType) {
    // NullPointerException (rather than IllegalArgumentException) is kept
    // for compatibility with existing callers that catch it.
    if (body == null || TextUtils.isEmpty(contentType))
        throw new NullPointerException("The requestBody and contentType must not be null.");
}
/**
 * Installs a caller-supplied request body. Only stream types whose length
 * can be measured up front (byte-array or file backed) are accepted.
 */
@Override
public IBasicRequest setDefineRequestBody(InputStream requestBody, String contentType) {
    validateMethodForBody("Request body");
    validateParamForBody(requestBody, contentType);
    boolean supported = requestBody instanceof ByteArrayInputStream
            || requestBody instanceof FileInputStream;
    if (!supported) {
        throw new IllegalArgumentException("Can only accept ByteArrayInputStream and FileInputStream type of " +
                "stream");
    }
    this.mRequestBody = requestBody;
    mHeaders.set(Headers.HEAD_KEY_CONTENT_TYPE, contentType);
    return this;
}
/**
 * Installs a string request body, encoded with the configured param charset;
 * that charset is advertised in the Content-Type header.
 */
@Override
public IBasicRequest setDefineRequestBody(String requestBody, String contentType) {
    validateMethodForBody("Request body");
    validateParamForBody(requestBody, contentType);
    try {
        mRequestBody = IOUtils.toInputStream(requestBody, getParamsEncoding());
        mHeaders.set(Headers.HEAD_KEY_CONTENT_TYPE, contentType + "; charset=" + getParamsEncoding());
    } catch (UnsupportedEncodingException e) {
        // Fallback when the configured charset is unsupported: IOUtils picks
        // the encoding (presumably the platform default — verify against
        // IOUtils.toInputStream) and no charset attribute is advertised.
        mRequestBody = IOUtils.toInputStream(requestBody);
        mHeaders.set(Headers.HEAD_KEY_CONTENT_TYPE, contentType);
    }
    return this;
}
@Override
public IBasicRequest setDefineRequestBodyForJson(String jsonBody) {
setDefineRequestBody(jsonBody, Headers.HEAD_VALUE_ACCEPT_APPLICATION_JSON);
return this;
}
@Override
public IBasicRequest setDefineRequestBodyForJson(JSONObject jsonBody) {
setDefineRequestBody(jsonBody.toString(), Headers.HEAD_VALUE_ACCEPT_APPLICATION_JSON);
return this;
}
@Override
public IBasicRequest setDefineRequestBodyForXML(String xmlBody) {
setDefineRequestBody(xmlBody, Headers.HEAD_VALUE_ACCEPT_APPLICATION_XML);
return this;
}
/**
 * Reports whether any param value is a {@link Binary} upload.
 *
 * @return true when at least one binary value is present, false otherwise.
 */
protected boolean hasBinary() {
    for (String paramKey : mParamKeyValues.keySet()) {
        for (Object paramValue : mParamKeyValues.getValues(paramKey)) {
            if (paramValue instanceof Binary) {
                return true;
            }
        }
    }
    return false;
}
/**
* Is there a custom request inclusions.
*
* @return Returns true representatives have, return false on behalf of the no.
*/
protected boolean hasDefineRequestBody() {
return mRequestBody != null;
}
/**
* To getList custom inclusions.
*
* @return {@link InputStream}.
*/
protected InputStream getDefineRequestBody() {
return mRequestBody;
}
@Override
public void onPreExecute() {
    // Intentionally empty hook: subclasses may override to run preparatory,
    // possibly time-consuming work before the request executes.
}
/**
 * Writes the request body to {@code writer}, choosing (in priority order) a
 * caller-defined body, multipart form data, or url-encoded params.
 *
 * @param writer destination stream; may be a CounterOutputStream when only
 * the content length is being measured.
 * @throws IOException on write failure.
 */
@Override
public void onWriteRequestBody(OutputStream writer) throws IOException {
    if (hasDefineRequestBody()) {
        writeRequestBody(writer);
    } else if (isMultipartFormEnable()) {
        writeFormStreamData(writer);
    } else {
        writeParamStreamData(writer);
    }
}
/**
 * Send request requestBody.
 *
 * @param writer {@link OutputStream}.
 * @throws IOException write error.
 */
protected void writeRequestBody(OutputStream writer) throws IOException {
    if (mRequestBody != null) {
        if (writer instanceof CounterOutputStream) {
            // Length-measuring pass: hands available() to the counter instead
            // of streaming the data. NOTE(review): this relies on
            // CounterOutputStream.write(int) treating its argument as a byte
            // COUNT rather than a single byte — confirm against
            // CounterOutputStream's implementation.
            writer.write(mRequestBody.available());
        } else {
            // Real pass: stream the body, then release it — the stream is
            // single-use, so it is closed and nulled after writing.
            IOUtils.write(mRequestBody, writer);
            IOUtils.closeQuietly(mRequestBody);
            mRequestBody = null;
        }
    }
}
/**
 * Send form data.
 *
 * <p>Writes every param as a multipart part, then the closing boundary.
 * Cleanup: {@code instanceof} is null-safe, so the redundant
 * {@code value != null} guards were dropped.</p>
 *
 * @param writer {@link OutputStream}.
 * @throws IOException write error.
 */
protected void writeFormStreamData(OutputStream writer) throws IOException {
    Set<String> keys = mParamKeyValues.keySet();
    for (String key : keys) {
        List<Object> values = mParamKeyValues.getValues(key);
        for (Object value : values) {
            if (!isCanceled()) {
                if (value instanceof String) {
                    if (!(writer instanceof CounterOutputStream))
                        Logger.i(key + "=" + value);
                    writeFormString(writer, key, (String) value);
                } else if (value instanceof Binary) {
                    if (!(writer instanceof CounterOutputStream))
                        Logger.i(key + " is Binary");
                    writeFormBinary(writer, key, (Binary) value);
                }
                // Every part is terminated by CRLF, matching the original flow.
                writer.write("\r\n".getBytes());
            }
        }
    }
    writer.write((endBoundary).getBytes());
}
/**
 * Send text data in a form.
 *
 * @param writer {@link OutputStream}
 * @param key equivalent to form the name of the input label, {@code "Content-Disposition: form-data; name=key"}.
 * @param value equivalent to form the value of the input label.
 * @throws IOException Write the data may be abnormal.
 */
private void writeFormString(OutputStream writer, String key, String value) throws IOException {
    String charset = getParamsEncoding();
    StringBuilder head = new StringBuilder(startBoundary);
    head.append("\r\n");
    head.append("Content-Disposition: form-data; name=\"").append(key).append("\"\r\n");
    head.append("Content-Type: text/plain; charset=").append(charset).append("\r\n\r\n");
    writer.write(head.toString().getBytes(charset));
    writer.write(value.getBytes(charset));
}
/**
 * Send binary data in a form.
 *
 * <p>Writes the multipart headers for one binary part, then either records
 * the payload length (measuring pass) or streams the payload itself.</p>
 */
private void writeFormBinary(OutputStream writer, String key, Binary value) throws IOException {
    // A binary that was individually cancelled is skipped entirely.
    if (!value.isCanceled()) {
        String binaryFieldBuilder = startBoundary + "\r\n" +
                "Content-Disposition: form-data; name=\"" + key + "\"" + "; filename=\"" + value.getFileName() +
                "\"\r\n"
                + "Content-Type: " + value.getMimeType() + "\r\n"
                + "Content-Transfer-Encoding: binary\r\n\r\n";
        writer.write(binaryFieldBuilder.getBytes());
        if (writer instanceof CounterOutputStream) {
            // Measuring pass: the cast selects CounterOutputStream's
            // long-accepting write, adding the declared length to the count
            // instead of streaming the data.
            ((CounterOutputStream) writer).write(value.getLength());
        } else {
            value.onWriteBinary(writer);
        }
    }
}
/**
 * Write params.
 *
 * @param writer {@link OutputStream}.
 * @throws IOException IOException.
 */
private void writeParamStreamData(OutputStream writer) throws IOException {
    StringBuilder paramBuilder = buildCommonParams(mParamKeyValues, getParamsEncoding());
    if (paramBuilder.length() == 0) {
        return;
    }
    String params = paramBuilder.toString();
    Logger.i("Body: " + params);
    IOUtils.write(params.getBytes(), writer);
}
@Override
public IBasicRequest setRedirectHandler(RedirectHandler redirectHandler) {
mRedirectHandler = redirectHandler;
return this;
}
@Override
public RedirectHandler getRedirectHandler() {
return mRedirectHandler;
}
@Override
public IBasicRequest setTag(Object tag) {
this.mTag = tag;
return this;
}
@Override
public Object getTag() {
return this.mTag;
}
@Override
public void setQueue(BlockingQueue<?> queue) {
blockingQueue = queue;
}
@Override
public boolean inQueue() {
return blockingQueue != null && blockingQueue.contains(this);
}
@Override
public void start() {
this.isStart = true;
}
@Override
public boolean isStarted() {
return isStart;
}
@Override
public void finish() {
this.isFinished = true;
}
@Override
public boolean isFinished() {
return isFinished;
}
/**
 * Cancels the request at most once: closes any caller-defined body, removes
 * the request from its dispatch queue, and cancels every pending binary
 * upload. Subsequent calls are no-ops.
 */
@Override
public void cancel() {
    if (!isCanceled) {
        isCanceled = true;
        if (mRequestBody != null)
            IOUtils.closeQuietly(mRequestBody);
        if (blockingQueue != null)
            blockingQueue.remove(this);
        // cancel file upload
        Set<String> keys = mParamKeyValues.keySet();
        for (String key : keys) {
            List<Object> values = mParamKeyValues.getValues(key);
            for (Object value : values)
                // instanceof is null-safe; the old null check was redundant.
                if (value instanceof Binary)
                    ((Binary) value).cancel();
        }
    }
}
@Override
public boolean isCanceled() {
return isCanceled;
}
public IBasicRequest setCancelSign(Object sign) {
this.mCancelSign = sign;
return this;
}
@Override
public void cancelBySign(Object sign) {
if (mCancelSign == sign)
cancel();
}
////////// static module /////////
/**
 * Split joint non form data.
 *
 * <p>Joins every string-valued param as {@code key=encodedValue} pairs
 * separated by {@code "&"}. Non-CharSequence values (e.g. binaries) are
 * skipped — they only make sense in multipart bodies.</p>
 *
 * @param paramMap param map.
 * @param encodeCharset charset used to URL-encode values.
 * @return the joined query string, without a leading "&".
 */
public static StringBuilder buildCommonParams(MultiValueMap<String, Object> paramMap, String encodeCharset) {
    StringBuilder paramBuilder = new StringBuilder();
    for (String key : paramMap.keySet()) {
        for (Object value : paramMap.getValues(key)) {
            // instanceof is null-safe; the old null check was redundant.
            if (value instanceof CharSequence) {
                paramBuilder.append("&").append(key).append("=");
                try {
                    paramBuilder.append(URLEncoder.encode(value.toString(), encodeCharset));
                } catch (UnsupportedEncodingException e) {
                    // Best effort: keep the raw value rather than dropping it.
                    Logger.e("Encoding " + encodeCharset + " format is not supported by the system.");
                    paramBuilder.append(value.toString());
                }
            }
        }
    }
    // Strip the "&" that precedes the first pair.
    if (paramBuilder.length() > 0)
        paramBuilder.deleteCharAt(0);
    return paramBuilder;
}
/**
 * Randomly generated boundary mark.
 *
 * <p>Fix: the digit branch previously read {@code (char) (int) time % '\t'} —
 * the casts bind tighter than {@code %}, so it appended an int (via
 * {@code append(int)}) and the casts were no-ops. The intent — append one
 * character from 0-8, A-Z or a-z per iteration — is now written explicitly.</p>
 *
 * @return Random code.
 */
public static String createBoundary() {
    StringBuilder sb = new StringBuilder("----NoHttpFormBoundary");
    for (int t = 1; t < 12; t++) {
        long time = System.currentTimeMillis() + t;
        long branch = time % 3L;
        if (branch == 0L) {
            sb.append((char) ('0' + time % 9L));   // '0'..'8'
        } else if (branch == 1L) {
            sb.append((char) ('A' + time % 26L));  // 'A'..'Z'
        } else {
            sb.append((char) ('a' + time % 26L));  // 'a'..'z'
        }
    }
    return sb.toString();
}
}
| |
package uk.ac.ox.zoo.seeg.abraid.mp.dataacquisition.acquirers.healthmap;
import org.joda.time.DateTime;
import org.joda.time.DateTimeUtils;
import org.junit.Before;
import org.junit.Test;
import uk.ac.ox.zoo.seeg.abraid.mp.common.domain.DiseaseOccurrence;
import uk.ac.ox.zoo.seeg.abraid.mp.common.domain.Provenance;
import uk.ac.ox.zoo.seeg.abraid.mp.common.web.WebServiceClientException;
import uk.ac.ox.zoo.seeg.abraid.mp.dataacquisition.acquirers.DataAcquisitionException;
import uk.ac.ox.zoo.seeg.abraid.mp.dataacquisition.acquirers.ManualValidationEnforcer;
import uk.ac.ox.zoo.seeg.abraid.mp.dataacquisition.acquirers.healthmap.domain.HealthMapLocation;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static com.googlecode.catchexception.CatchException.catchException;
import static com.googlecode.catchexception.CatchException.caughtException;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;
/**
* Tests the HealthMapDataAcquirer class.
*
* Copyright (c) 2014 University of Oxford
*/
public class HealthMapDataAcquirerTest {
private HealthMapWebService webService;
private HealthMapDataConverter dataConverter;
private HealthMapLookupData lookupData;
private ManualValidationEnforcer manualValidationEnforcer;
/**
 * Recreates every mock before each test so stubbings and recorded
 * interactions never leak between test methods.
 */
@Before
public void resetMocks() {
    webService = mock(HealthMapWebService.class);
    dataConverter = mock(HealthMapDataConverter.class);
    lookupData = mock(HealthMapLookupData.class);
    manualValidationEnforcer = mock(ManualValidationEnforcer.class);
}
@Test
public void acquiresDataFromWebServiceOnFirstRunWithDefaultStartDateSet() {
// Arrange
fixCurrentDateTime();
DateTime defaultStartDate = new DateTime("2004-02-01T01:02:03+0000");
DateTime endDate = DateTime.now();
Provenance provenance = new Provenance();
List<HealthMapLocation> locations = new ArrayList<>();
when(webService.getDefaultStartDate()).thenReturn(defaultStartDate);
when(webService.getEndDateDaysAfterStartDate()).thenReturn(null);
when(lookupData.getHealthMapProvenance()).thenReturn(provenance);
when(webService.sendRequest(eq(defaultStartDate), eq(endDate))).thenReturn(locations);
// Act
HealthMapDataAcquirer dataAcquisition = new HealthMapDataAcquirer(webService, dataConverter, lookupData, manualValidationEnforcer);
dataAcquisition.acquireDataFromWebService();
// Assert
verify(dataConverter).convert(same(locations), eq(endDate));
}
@Test
public void acquiresDataFromWebServiceOnFirstRunWithDefaultStartDateDaysBeforeNowSet() {
    // Arrange
    fixCurrentDateTime();
    int defaultStartDateDaysBeforeNow = 3;
    // Derive the expected start date from the stubbed value so the two can
    // never drift apart (previously the literal 3 was duplicated).
    DateTime startDate = DateTime.now().minusDays(defaultStartDateDaysBeforeNow);
    DateTime endDate = DateTime.now();
    Provenance provenance = new Provenance();
    List<HealthMapLocation> locations = new ArrayList<>();
    when(webService.getDefaultStartDate()).thenReturn(null);
    when(webService.getDefaultStartDateDaysBeforeNow()).thenReturn(defaultStartDateDaysBeforeNow);
    when(webService.getEndDateDaysAfterStartDate()).thenReturn(null);
    when(lookupData.getHealthMapProvenance()).thenReturn(provenance);
    when(webService.sendRequest(eq(startDate), eq(endDate))).thenReturn(locations);
    // Act
    HealthMapDataAcquirer dataAcquisition = new HealthMapDataAcquirer(webService, dataConverter, lookupData, manualValidationEnforcer);
    dataAcquisition.acquireDataFromWebService();
    // Assert
    verify(dataConverter).convert(same(locations), eq(endDate));
}
@Test
public void acquiresDataFromWebServiceOnFirstRunWithDefaultStartDateAndDefaultStartDateDaysBeforeNowSet() {
// Arrange
fixCurrentDateTime();
DateTime defaultStartDate = new DateTime("2004-02-01T01:02:03+0000");
DateTime endDate = DateTime.now();
Provenance provenance = new Provenance();
List<HealthMapLocation> locations = new ArrayList<>();
when(webService.getDefaultStartDate()).thenReturn(defaultStartDate);
when(webService.getDefaultStartDateDaysBeforeNow()).thenReturn(3);
when(webService.getEndDateDaysAfterStartDate()).thenReturn(null);
when(lookupData.getHealthMapProvenance()).thenReturn(provenance);
when(webService.sendRequest(eq(defaultStartDate), eq(endDate))).thenReturn(locations);
// Act
HealthMapDataAcquirer dataAcquisition = new HealthMapDataAcquirer(webService, dataConverter, lookupData, manualValidationEnforcer);
dataAcquisition.acquireDataFromWebService();
// Assert
verify(dataConverter).convert(same(locations), eq(endDate));
}
@Test
public void webServiceDoesNotDuplicateDataIfAlreadyRun() {
// Arrange
fixCurrentDateTime();
DateTime startDate = new DateTime("2004-02-01T01:02:03+0000");
DateTime defaultStartDate = new DateTime("2006-02-01T01:02:03+0000");
int defaultStartDateDaysBeforeNow = 3;
DateTime endDate = DateTime.now();
Provenance provenance = new Provenance();
provenance.setLastRetrievalEndDate(startDate);
List<HealthMapLocation> locations = new ArrayList<>();
when(webService.getDefaultStartDate()).thenReturn(defaultStartDate);
when(webService.getDefaultStartDateDaysBeforeNow()).thenReturn(defaultStartDateDaysBeforeNow);
when(webService.getEndDateDaysAfterStartDate()).thenReturn(null);
when(lookupData.getHealthMapProvenance()).thenReturn(provenance);
when(webService.sendRequest(eq(startDate), eq(endDate))).thenReturn(locations);
// Act
HealthMapDataAcquirer dataAcquisition = new HealthMapDataAcquirer(webService, dataConverter, lookupData, manualValidationEnforcer);
dataAcquisition.acquireDataFromWebService();
// Assert
verify(dataConverter).convert(same(locations), eq(endDate));
}
@Test
public void acquiresDataFromWebServiceWithEndDateDaysAfterStartDateSet() {
// Arrange
DateTime startDate = new DateTime("2004-02-01T01:02:03+0000");
int endDateDaysAfterStartDate = 3;
DateTime endDate = startDate.plusDays(endDateDaysAfterStartDate);
Provenance provenance = new Provenance();
provenance.setLastRetrievalEndDate(startDate);
List<HealthMapLocation> locations = new ArrayList<>();
when(webService.getDefaultStartDate()).thenReturn(null);
when(webService.getDefaultStartDateDaysBeforeNow()).thenReturn(null);
when(webService.getEndDateDaysAfterStartDate()).thenReturn(endDateDaysAfterStartDate);
when(lookupData.getHealthMapProvenance()).thenReturn(provenance);
when(webService.sendRequest(eq(startDate), eq(endDate))).thenReturn(locations);
// Act
HealthMapDataAcquirer dataAcquisition = new HealthMapDataAcquirer(webService, dataConverter, lookupData, manualValidationEnforcer);
dataAcquisition.acquireDataFromWebService();
// Assert
verify(dataConverter).convert(same(locations), eq(endDate));
}
@Test
public void acquiresDataFromWebServiceWithEndDateDaysAfterStartDateSetButBeyondNow() {
// Arrange
fixCurrentDateTime();
DateTime startDate = new DateTime("2100-02-01T01:02:03+0000");
int endDateDaysAfterStartDate = 3;
DateTime endDate = DateTime.now();
Provenance provenance = new Provenance();
provenance.setLastRetrievalEndDate(startDate);
List<HealthMapLocation> locations = new ArrayList<>();
when(webService.getDefaultStartDate()).thenReturn(null);
when(webService.getDefaultStartDateDaysBeforeNow()).thenReturn(null);
when(webService.getEndDateDaysAfterStartDate()).thenReturn(endDateDaysAfterStartDate);
when(lookupData.getHealthMapProvenance()).thenReturn(provenance);
when(webService.sendRequest(eq(startDate), eq(endDate))).thenReturn(locations);
// Act
HealthMapDataAcquirer dataAcquisition = new HealthMapDataAcquirer(webService, dataConverter, lookupData, manualValidationEnforcer);
dataAcquisition.acquireDataFromWebService();
// Assert
verify(dataConverter).convert(same(locations), eq(endDate));
}
@Test
public void doesNotAcquireDataIfWebServiceRequestFails() {
// Arrange
fixCurrentDateTime();
DateTime startDate = new DateTime("2004-02-01T01:02:03+0000");
DateTime endDate = DateTime.now();
Provenance provenance = new Provenance();
provenance.setLastRetrievalEndDate(startDate);
when(lookupData.getHealthMapProvenance()).thenReturn(provenance);
when(webService.sendRequest(eq(startDate), eq(endDate))).thenThrow(new WebServiceClientException(""));
when(webService.getEndDateDaysAfterStartDate()).thenReturn(null);
// Act
HealthMapDataAcquirer dataAcquisition = new HealthMapDataAcquirer(webService, dataConverter, lookupData, manualValidationEnforcer);
catchException(dataAcquisition).acquireDataFromWebService();
// Assert
//noinspection unchecked
verify(dataConverter, never()).convert(anyList(), any(DateTime.class));
assertThat(caughtException()).isInstanceOf(DataAcquisitionException.class);
assertThat(caughtException().getCause()).isInstanceOf(WebServiceClientException.class);
}
@Test
public void acquiresDataFromWebServiceCallsManualValidationEnforcerAfterSuccess() {
// Arrange
fixCurrentDateTime();
DateTime defaultStartDate = new DateTime("2004-02-01T01:02:03+0000");
DateTime endDate = DateTime.now();
Provenance provenance = new Provenance();
List<HealthMapLocation> locations = new ArrayList<>();
Set<DiseaseOccurrence> occurrences = new HashSet<>();
when(webService.getDefaultStartDate()).thenReturn(defaultStartDate);
when(webService.getEndDateDaysAfterStartDate()).thenReturn(null);
when(lookupData.getHealthMapProvenance()).thenReturn(provenance);
when(webService.sendRequest(eq(defaultStartDate), eq(endDate))).thenReturn(locations);
when(dataConverter.convert(same(locations), eq(endDate))).thenReturn(occurrences);
// Act
HealthMapDataAcquirer dataAcquisition = new HealthMapDataAcquirer(webService, dataConverter, lookupData, manualValidationEnforcer);
dataAcquisition.acquireDataFromWebService();
// Assert
verify(dataConverter).convert(same(locations), eq(endDate));
verify(manualValidationEnforcer).addRandomSubsetToManualValidation(same(occurrences));
}
@Test
public void acquiresDataFromWebServiceDoesNotCallsManualValidationEnforcerAfterFailure() {
// Arrange
fixCurrentDateTime();
DateTime defaultStartDate = new DateTime("2004-02-01T01:02:03+0000");
DateTime endDate = DateTime.now();
Provenance provenance = new Provenance();
List<HealthMapLocation> locations = new ArrayList<>();
when(webService.getDefaultStartDate()).thenReturn(defaultStartDate);
when(webService.getEndDateDaysAfterStartDate()).thenReturn(null);
when(lookupData.getHealthMapProvenance()).thenReturn(provenance);
when(webService.sendRequest(eq(defaultStartDate), eq(endDate))).thenReturn(locations);
when(dataConverter.convert(same(locations), eq(endDate))).thenReturn(null);
// Act
HealthMapDataAcquirer dataAcquisition = new HealthMapDataAcquirer(webService, dataConverter, lookupData, manualValidationEnforcer);
dataAcquisition.acquireDataFromWebService(); // null occurrences
// Assert
verify(manualValidationEnforcer, never()).addRandomSubsetToManualValidation(anySetOf(DiseaseOccurrence.class));
}
private void fixCurrentDateTime() {
    // This ensures that DateTime.now() always returns a particular date/time, so that equality comparisons work
    // NOTE(review): the fixed clock is never reset — consider an @After method
    // calling DateTimeUtils.setCurrentMillisSystem() so later test classes are
    // not pinned to this time.
    DateTimeUtils.setCurrentMillisFixed(DateTime.now().getMillis());
}
}
| |
/*
Copyright (c) 2012, Thomas Schueler, http://www.thomasschueler.de
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the author nor the
names of the contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THOMAS SCHUELER BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package utils;
import processing.core.*;
//Velocity: How fast each pixel is moving up or down
//Density: How much "fluid" is in each pixel.
//*note*
//Density isn't conserved as far as I know.
//Changing the velocity ends up changing the density too.
public class GridSolver implements Runnable {

    private final int cellSize;

    // Per-cell state, indexed as grid[x / cellSize][y / cellSize].
    // Velocity: how fast each cell is moving up or down.
    // Density: how much "fluid" is in each cell. NOTE: density is not strictly
    // conserved — changing the velocity changes the density too.
    private float[][] velocity;
    private float[][] density;
    private float[][] oldVelocity;
    private float[][] oldDensity;

    // Written by the solver thread, read elsewhere — volatile for visibility.
    private volatile boolean isUpdated = false;
    private volatile boolean hasBeenReturned = false;
    // Controls the solver thread's main loop; set false by stop() so run() can exit.
    private volatile boolean running = false;

    private final PApplet mainApplet;
    private final PGraphics canvas;
    private Thread t;

    private final float friction = 0.58f;
    private final float speed = 20;

    // Fixed-timestep bookkeeping.
    private long previousTime;
    private long currentTime;
    private final float timeScale = 1; // higher = faster fluid
    private final int fixedDeltaTime = (int) (10 / timeScale);
    private final float fixedDeltaTimeSeconds = (float) fixedDeltaTime / 1000;
    private float leftOverDeltaTime = 0;

    /**
     * Creates a solver whose grid covers the whole applet window.
     *
     * @param sizeOfCells edge length of one grid cell in pixels
     * @param _mainApplet parent Processing applet (used for timing, canvas and dispose hook)
     */
    public GridSolver(int sizeOfCells, PApplet _mainApplet) {
        mainApplet = _mainApplet;
        mainApplet.registerDispose(this);
        canvas = mainApplet.createGraphics(mainApplet.width, mainApplet.height, PConstants.P2D);
        cellSize = sizeOfCells;
        velocity = new float[canvas.width / cellSize][canvas.height / cellSize];
        density = new float[canvas.width / cellSize][canvas.height / cellSize];
    }

    /** Starts the background solver thread. */
    public void start() {
        running = true;
        t = new Thread(this);
        t.start();
    }

    /** Solver loop: advance physics with a fixed timestep, render, then sleep. */
    public void run() {
        // Initialize here so the first frame does not see a huge delta.
        previousTime = mainApplet.millis();
        // FIX: the loop used to be while(true); stop() could never terminate it.
        while (running) {
            isUpdated = false;
            currentTime = mainApplet.millis();
            long deltaTimeMS = currentTime - previousTime;
            previousTime = currentTime;
            // How many fixed steps are needed to make up for the elapsed time.
            int timeStepAmt = (int) (((float) deltaTimeMS + leftOverDeltaTime) / (float) fixedDeltaTime);
            // Carry the fractional remainder into the next frame.
            leftOverDeltaTime += deltaTimeMS - (timeStepAmt * (float) fixedDeltaTime);
            if (timeStepAmt > 15) {
                timeStepAmt = 15; // too much accumulation can freeze the program!
            }
            // FIX: the catch-up loop previously ran exactly once, ignoring the
            // timeStepAmt that the fixed-timestep bookkeeping computed.
            for (int iteration = 0; iteration < timeStepAmt; iteration++) {
                this.solve(fixedDeltaTimeSeconds * timeScale);
            }
            this.draw();
            hasBeenReturned = false;
            isUpdated = true;
            try {
                // FIX: this.wait(100) was called without owning the monitor and
                // threw IllegalMonitorStateException every iteration; sleep paces
                // the loop as intended.
                Thread.sleep(100);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return;
            }
        }
    }

    /** Signals the solver thread to exit and wakes it from its sleep. */
    public void stop() {
        running = false;
        Thread worker = t;
        t = null;
        if (worker != null) {
            worker.interrupt();
        }
    }

    // this will magically be called by the parent once the user hits stop
    // this functionality hasn't been tested heavily so if it doesn't work, file a bug
    public void dispose() {
        stop();
    }

    /** Renders the grid onto the off-screen canvas, one rect per cell. */
    private void draw() {
        canvas.beginDraw();
        canvas.colorMode(PConstants.HSB, 255);
        canvas.noStroke();
        for (int x = 0; x < velocity.length; x++) {
            for (int y = 0; y < velocity[x].length; y++) {
                // Sine probably isn't needed, but it's pretty and looks more organic.
                canvas.fill(127 + 127 * PApplet.sin(density[x][y] * 0.0004f), 255, 127 + 127 * PApplet.sin(velocity[x][y] * 0.01f));
                canvas.rect(x * cellSize, y * cellSize, cellSize, cellSize);
            }
        }
        canvas.endDraw();
    }

    /**
     * Returns a snapshot of the rendered fluid, or null if the current frame
     * has already been handed out (each frame is returned at most once).
     */
    public PImage getFluid() {
        if (!hasBeenReturned) {
            hasBeenReturned = true;
            return canvas.get();
        }
        return null;
    }

    /*
     * "Fluid" solving, based on
     * http://www.cs.ubc.ca/~rbridson/fluidsimulation/GameFluids2007.pdf
     * Imagine each cell as a spring pulling on its neighbours as it moves;
     * the pull propagates through the grid and decays due to friction.
     */

    /**
     * Advances the simulation by one fixed timestep:
     *   velocity = friction * oldVelocity + (sumOfAdjacentOldDensities - oldDensity * 4) * timeStep * speed
     *   density  = oldDensity + velocity
     */
    private void solve(float timeStep) {
        // FIX: clone() on a float[][] copies only the outer array, so the old
        // and current state shared the same rows and the snapshot was useless.
        oldDensity = deepCopy(density);
        oldVelocity = deepCopy(velocity);
        for (int x = 0; x < velocity.length; x++) {
            for (int y = 0; y < velocity[x].length; y++) {
                velocity[x][y] = friction * oldVelocity[x][y] + ((getAdjacentDensitySum(x, y) - oldDensity[x][y] * 4) * timeStep * speed);
                density[x][y] = oldDensity[x][y] + velocity[x][y];
            }
        }
    }

    /** Adds an impulse to the velocity of the given cell. */
    public void setVelocity(int x, int y, float force) {
        velocity[x][y] += force;
    }

    /** Sum of the four neighbouring old densities, clamping at the grid edges. */
    private float getAdjacentDensitySum(int x, int y) {
        int maxX = oldDensity.length - 1;
        int maxY = oldDensity[x].length - 1;
        float sum = 0;
        sum += oldDensity[Math.max(x - 1, 0)][y];
        sum += oldDensity[Math.min(x + 1, maxX)][y];
        sum += oldDensity[x][Math.max(y - 1, 0)];
        sum += oldDensity[x][Math.min(y + 1, maxY)];
        return sum;
    }

    /** Row-by-row copy of a 2-D float array (a plain clone() would share rows). */
    private static float[][] deepCopy(float[][] source) {
        float[][] copy = new float[source.length][];
        for (int i = 0; i < source.length; i++) {
            copy[i] = source[i].clone();
        }
        return copy;
    }
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
/**
 * Persists and restores Kafka partition offsets for SDC cluster-streaming pipelines
 * on a Hadoop {@code FileSystem}, using a main offset file, a backup offset file and
 * a marker file so the checkpoint sequence survives a crash mid-write.
 *
 * <p>Write protocol: create marker, overwrite main offset file, delete marker.
 * If the marker is still present on read, the main file may be corrupted and the
 * backup is used instead (and then copied back into the main file).
 */
public final class SdcClusterOffsetHelper {
  private static final Logger LOG = LoggerFactory.getLogger(SdcClusterOffsetHelper.class);

  private static final String SDC_STREAMING_OFFSET_FILE = "offset.json";
  private static final String SDC_STREAMING_BACKUP_OFFSET_FILE = "backup_offset.json";
  private static final String SDC_STREAMING_OFFSET_MARKER_FILE = "offset_marker";
  private static final String SDC_STREAMING_OFFSET_VERSION = "1";

  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
  private static final String CANNOT_RENAME_FROM_TO_TEMPLATE = "Cannot rename from {} to {}";

  private final Path checkPointFilePath;
  private final Path backupCheckPointFilePath;
  private final Path checkPointMarkerFilePath;
  private final FileSystem fs;
  // Minimum millis between checkpoints; negative disables throttling.
  private final long duration;
  // Epoch millis of the last successful checkpoint; -1 until the first one.
  private long lastOffsetStoredTime;

  /**
   * @param checkPointPath directory under which the offset/backup/marker files live
   * @param fs file system holding the checkpoint files
   * @param duration minimum interval between checkpoints in millis (negative = always checkpoint)
   */
  public SdcClusterOffsetHelper(Path checkPointPath, FileSystem fs, long duration) {
    this.fs = fs;
    this.duration = duration;
    this.lastOffsetStoredTime = -1;
    this.checkPointFilePath = new Path(checkPointPath, SDC_STREAMING_OFFSET_FILE);
    this.backupCheckPointFilePath = new Path(checkPointPath, SDC_STREAMING_BACKUP_OFFSET_FILE);
    this.checkPointMarkerFilePath = new Path(checkPointPath, SDC_STREAMING_OFFSET_MARKER_FILE);
    LOG.info("SDC Checkpoint File Path : {}", checkPointPath.toString());
  }

  /**
   * Tells us whether SDC is check pointing: true if either the main or the
   * backup offset file exists.
   */
  public boolean isSDCCheckPointing() {
    try {
      return fs.exists(checkPointFilePath) || fs.exists(backupCheckPointFilePath);
    } catch (IOException ex) {
      LOG.error("Error doing isSDCCheckPointing", ex);
      throw new RuntimeException(Utils.format("Error checking exists on hdfs path: {}. Reason: {}", checkPointFilePath.toString(), ex.toString()), ex);
    }
  }

  /**
   * Tells us whether a checkpoint can be done now: only at the configured time
   * interval (a separate timer thread would be overkill here). We checkpoint
   * after the current batch is read when the interval since the last checkpoint
   * is at least {@link #duration}.
   */
  private boolean canCheckPoint() {
    long currentTime = System.currentTimeMillis();
    return lastOffsetStoredTime < 0 || duration < 0 || ((currentTime - lastOffsetStoredTime) >= duration);
  }

  /**
   * Writes the offsets to the main offset file. Called either after backing up
   * the existing (uncorrupted) main file, or to repair a corrupted main file
   * with known-good offsets.
   */
  private void writeOffsetsToMainOffsetFile(Map<Integer, Long> partitionToOffsetMap) throws IOException {
    LOG.info("Saving the following offset {} to {}", partitionToOffsetMap, checkPointFilePath);
    // Create (or overwrite) the marker file to signal a main-offset-file write is in flight.
    try (OutputStream ignored = fs.create(checkPointMarkerFilePath, true)) {
      // NOOP - the marker's existence is the signal; no content is written.
    }
    // If the write below fails or leaves a corrupted file, the backup file still has the previous offsets.
    try (OutputStream os = fs.create(checkPointFilePath, true)) {
      OBJECT_MAPPER.writeValue(os, new ClusterSourceOffsetJson(serializeKafkaPartitionOffset(partitionToOffsetMap), SDC_STREAMING_OFFSET_VERSION));
    }
    // If the delete fails we are still good: the next read will fall back to the backup offset file
    // (not optimal, but deterministic).
    boolean deleted = fs.delete(checkPointMarkerFilePath, false);
    LOG.warn("Status {} for Deleting Marker File {}", deleted, checkPointMarkerFilePath);
    // If the write failed above we never reach this line, so the timestamp is only
    // advanced on success — deliberately not in a finally block.
    lastOffsetStoredTime = System.currentTimeMillis();
  }

  /**
   * Checkpoints the given partition-to-offset map if the throttle interval has
   * elapsed, rotating main -> backup first when the main file is healthy.
   */
  public void saveOffsets(Map<Integer, Long> partitionToOffsetMap) {
    if (canCheckPoint()) {
      try {
        // Only rotate if the marker file does not exist, meaning the current main
        // offset file is not corrupted (or this is the very first run with no files).
        if (!fs.exists(checkPointMarkerFilePath)) {
          // Delete the backup file only if it exists.
          if (fs.exists(backupCheckPointFilePath)) {
            LOG.info(
                "Deleting the Backup Offset file {} before renaming Main Offset File {} to Backup Offset File {}",
                backupCheckPointFilePath,
                checkPointFilePath,
                backupCheckPointFilePath
            );
            // If this fails we will still have the main offset file.
            boolean deleted = fs.delete(backupCheckPointFilePath, false);
            LOG.warn("Status {} for Deleting Backup Offset File {}", deleted, backupCheckPointFilePath);
          }
          // If the main offset file does not exist we can't back up (the first
          // offset save will not have both offset files yet).
          if (fs.exists(checkPointFilePath)) {
            LOG.info("Renaming Main Offset File {} to Backup Offset File {}", checkPointFilePath, backupCheckPointFilePath);
            // If this fails we will still have the main offset file.
            if (!fs.rename(checkPointFilePath, backupCheckPointFilePath)) {
              throw new IOException(Utils.format(CANNOT_RENAME_FROM_TO_TEMPLATE, checkPointFilePath, backupCheckPointFilePath));
            }
          }
        }
        writeOffsetsToMainOffsetFile(partitionToOffsetMap);
      } catch (IOException ex) {
        LOG.error("Error when serializing partition offset", ex);
        throw new RuntimeException(Utils.format("Error writing offset To to hdfs path: {}. Reason: {}", checkPointFilePath.toString(), ex.toString()), ex);
      }
    }
  }

  /**
   * Reads and deserializes one offset file.
   * Most wrong information in the offset file surfaces as IOException
   * (IllegalStateException if the topic has fewer partitions than the offset);
   * e.g. empty partition offset, empty map, or undeserializable content.
   */
  private Map<Integer, Long> readClusterOffsetFile(Path checkPointFilePath, int numberOfPartitions) throws IOException {
    if (!fs.exists(checkPointFilePath)) {
      throw new IOException(Utils.format("Checkpoint file path {} does not exist", checkPointFilePath));
    }
    final ClusterSourceOffsetJson clusterSourceOffsetJson;
    // Close the HDFS stream deterministically instead of relying on Jackson's
    // AUTO_CLOSE_SOURCE default; also drops the redundant InputStream cast.
    try (InputStream is = fs.open(checkPointFilePath)) {
      clusterSourceOffsetJson = OBJECT_MAPPER.readValue(is, ClusterSourceOffsetJson.class);
    }
    String lastSourceOffset = clusterSourceOffsetJson.getOffset();
    if (!StringUtils.isEmpty(lastSourceOffset)) {
      return deserializeKafkaPartitionOffset(lastSourceOffset, numberOfPartitions);
    } else {
      throw new IOException("Partition Offset Cannot be empty");
    }
  }

  /**
   * Restores partition offsets, preferring the main offset file; falls back to
   * the backup file (and repairs the main file) when the marker indicates the
   * last write did not complete.
   */
  public Map<Integer, Long> readOffsets(int numberOfPartitions) {
    Map<Integer, Long> offsets;
    Path currentCheckPointFilePath = this.checkPointFilePath;
    try {
      if (fs.exists(checkPointMarkerFilePath)) {
        // The marker exists, so the main offset file is probably corrupted;
        // force a read from the backup file instead.
        currentCheckPointFilePath = this.backupCheckPointFilePath;
        LOG.info(Utils.format("Checkpoint marker file present {}, which means the main offset file {} is corrupted", checkPointMarkerFilePath, checkPointFilePath));
        LOG.info("Trying the backup offset file {}", backupCheckPointFilePath);
        offsets = readClusterOffsetFile(currentCheckPointFilePath, numberOfPartitions);
        LOG.info("Updating the probably corrupted Main Offset File {} to the offsets from backup file {}", checkPointFilePath, backupCheckPointFilePath);
        // After this, both the main and the backup offset file have the same content.
        writeOffsetsToMainOffsetFile(offsets);
      } else {
        // Normal path: read the main offset file.
        offsets = readClusterOffsetFile(currentCheckPointFilePath, numberOfPartitions);
      }
      return offsets;
    } catch (IOException ex) {
      LOG.error("Error when deserializing partition offset from Check Point Path : {}. Reason : {}", currentCheckPointFilePath, ex);
      throw new RuntimeException(Utils.format("Error reading offset from hdfs path: {}. Reason: {}", currentCheckPointFilePath.toString(), ex.toString()), ex);
    }
  }

  /** Serializes the partition-to-offset map as a JSON object string. */
  private String serializeKafkaPartitionOffset(Map<Integer, Long> partitionsToOffset) throws IOException {
    return OBJECT_MAPPER.writeValueAsString(partitionsToOffset);
  }

  /**
   * Deserializes the partition-to-offset JSON, validating the partition count and
   * seeding any partitions newer than the stored offset with offset 0.
   */
  @SuppressWarnings("unchecked")
  private Map<Integer, Long> deserializeKafkaPartitionOffset(String partitionOffset, int numberOfPartitions) throws IOException {
    Map<Integer, Long> partitionToOffsetMap = new HashMap<>();
    int greatestPartitionFromOffset = -1;
    if (!StringUtils.isEmpty(partitionOffset)) {
      Map<String, Object> deserializedPartitionOffset = OBJECT_MAPPER.readValue(partitionOffset, Map.class);
      if (deserializedPartitionOffset.isEmpty()) {
        throw new IOException("Partition Offset cannot be empty");
      }
      // More partitions in the offset than the topic has can happen when a topic
      // is deleted and recreated with fewer partitions. Users should either delete
      // the sdc checkpoint folder or use a new consumer group.
      Utils.checkState(
          deserializedPartitionOffset.size() <= numberOfPartitions,
          "More number of partitions found in the offset than the number of partitions for the topic." +
              " The topic may have been deleted and recreated with less partitions," +
              " please use a new consumer group or delete the checkpoint directory"
      );
      for (Map.Entry<String, Object> partitionOffsetEntry : deserializedPartitionOffset.entrySet()) {
        int partition = Integer.parseInt(partitionOffsetEntry.getKey());
        Long offset = Long.parseLong(partitionOffsetEntry.getValue().toString());
        partitionToOffsetMap.put(partition, offset);
        greatestPartitionFromOffset = (partition > greatestPartitionFromOffset) ? partition : greatestPartitionFromOffset;
      }
    }
    // Add any partitions newer than the stored offset, starting them at offset 0.
    for (int partition = greatestPartitionFromOffset + 1; partition < numberOfPartitions; partition++) {
      partitionToOffsetMap.put(partition, 0L);
    }
    LOG.info("Starting offsets: {}", partitionToOffsetMap);
    return partitionToOffsetMap;
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.planPrinter;
import com.facebook.presto.Session;
import com.facebook.presto.SystemSessionProperties;
import com.facebook.presto.cost.PlanNodeCostEstimate;
import com.facebook.presto.cost.PlanNodeStatsEstimate;
import com.facebook.presto.cost.StatsAndCosts;
import com.facebook.presto.execution.StageInfo;
import com.facebook.presto.execution.StageStats;
import com.facebook.presto.metadata.FunctionManager;
import com.facebook.presto.metadata.OperatorNotFoundException;
import com.facebook.presto.metadata.Signature;
import com.facebook.presto.metadata.TableHandle;
import com.facebook.presto.operator.StageExecutionDescriptor;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorTableLayoutHandle;
import com.facebook.presto.spi.predicate.Domain;
import com.facebook.presto.spi.predicate.Marker;
import com.facebook.presto.spi.predicate.NullableValue;
import com.facebook.presto.spi.predicate.Range;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.statistics.ColumnStatisticMetadata;
import com.facebook.presto.spi.statistics.TableStatisticType;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.InterpretedFunctionInvoker;
import com.facebook.presto.sql.planner.OrderingScheme;
import com.facebook.presto.sql.planner.Partitioning;
import com.facebook.presto.sql.planner.PartitioningScheme;
import com.facebook.presto.sql.planner.PlanFragment;
import com.facebook.presto.sql.planner.SubPlan;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.TypeProvider;
import com.facebook.presto.sql.planner.iterative.GroupReference;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.ApplyNode;
import com.facebook.presto.sql.planner.plan.AssignUniqueId;
import com.facebook.presto.sql.planner.plan.Assignments;
import com.facebook.presto.sql.planner.plan.DeleteNode;
import com.facebook.presto.sql.planner.plan.DistinctLimitNode;
import com.facebook.presto.sql.planner.plan.EnforceSingleRowNode;
import com.facebook.presto.sql.planner.plan.ExceptNode;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.ExchangeNode.Scope;
import com.facebook.presto.sql.planner.plan.ExplainAnalyzeNode;
import com.facebook.presto.sql.planner.plan.FilterNode;
import com.facebook.presto.sql.planner.plan.GroupIdNode;
import com.facebook.presto.sql.planner.plan.IndexJoinNode;
import com.facebook.presto.sql.planner.plan.IndexSourceNode;
import com.facebook.presto.sql.planner.plan.IntersectNode;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.LateralJoinNode;
import com.facebook.presto.sql.planner.plan.LimitNode;
import com.facebook.presto.sql.planner.plan.MarkDistinctNode;
import com.facebook.presto.sql.planner.plan.MetadataDeleteNode;
import com.facebook.presto.sql.planner.plan.OutputNode;
import com.facebook.presto.sql.planner.plan.PlanFragmentId;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import com.facebook.presto.sql.planner.plan.PlanVisitor;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.RemoteSourceNode;
import com.facebook.presto.sql.planner.plan.RowNumberNode;
import com.facebook.presto.sql.planner.plan.SampleNode;
import com.facebook.presto.sql.planner.plan.SemiJoinNode;
import com.facebook.presto.sql.planner.plan.SortNode;
import com.facebook.presto.sql.planner.plan.SpatialJoinNode;
import com.facebook.presto.sql.planner.plan.StatisticAggregations;
import com.facebook.presto.sql.planner.plan.StatisticAggregationsDescriptor;
import com.facebook.presto.sql.planner.plan.StatisticsWriterNode;
import com.facebook.presto.sql.planner.plan.TableFinishNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.TableWriterNode;
import com.facebook.presto.sql.planner.plan.TopNNode;
import com.facebook.presto.sql.planner.plan.TopNRowNumberNode;
import com.facebook.presto.sql.planner.plan.UnionNode;
import com.facebook.presto.sql.planner.plan.UnnestNode;
import com.facebook.presto.sql.planner.plan.ValuesNode;
import com.facebook.presto.sql.planner.plan.WindowNode;
import com.facebook.presto.sql.tree.ComparisonExpression;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.FunctionCall;
import com.facebook.presto.sql.tree.SymbolReference;
import com.facebook.presto.util.GraphvizPrinter;
import com.google.common.base.CaseFormat;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Streams;
import io.airlift.slice.Slice;
import io.airlift.units.Duration;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.facebook.presto.execution.StageInfo.getAllStages;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION;
import static com.facebook.presto.sql.planner.planPrinter.PlanNodeStatsSummarizer.aggregateStageStats;
import static com.facebook.presto.sql.planner.planPrinter.TextRenderer.formatDouble;
import static com.facebook.presto.sql.planner.planPrinter.TextRenderer.formatPositions;
import static com.facebook.presto.sql.planner.planPrinter.TextRenderer.indentString;
import static com.google.common.base.CaseFormat.UPPER_UNDERSCORE;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static java.lang.String.format;
import static java.util.Arrays.stream;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
public class PlanPrinter
{
private final PlanRepresentation representation;
private final FunctionManager functionManager;
/**
 * Builds the renderable representation of {@code planRoot} (and records total
 * CPU/scheduled time when per-node runtime stats are available).
 *
 * @param stats optional per-node runtime statistics; empty for purely logical plans
 */
private PlanPrinter(
        PlanNode planRoot,
        TypeProvider types,
        Optional<StageExecutionDescriptor> stageExecutionStrategy,
        FunctionManager functionManager,
        StatsAndCosts estimatedStatsAndCosts,
        Session session,
        Optional<Map<PlanNodeId, PlanNodeStats>> stats)
{
    requireNonNull(planRoot, "planRoot is null");
    requireNonNull(types, "types is null");
    requireNonNull(functionManager, "functionManager is null");
    requireNonNull(estimatedStatsAndCosts, "estimatedStatsAndCosts is null");
    requireNonNull(stats, "stats is null");

    this.functionManager = functionManager;

    // BUG FIX: the two aggregations were swapped — total CPU time was summing
    // getPlanNodeScheduledTime() and total scheduled time was summing
    // getPlanNodeCpuTime().
    Optional<Duration> totalCpuTime = stats.map(s -> new Duration(s.values().stream()
            .mapToLong(planNode -> planNode.getPlanNodeCpuTime().toMillis())
            .sum(), MILLISECONDS));
    Optional<Duration> totalScheduledTime = stats.map(s -> new Duration(s.values().stream()
            .mapToLong(planNode -> planNode.getPlanNodeScheduledTime().toMillis())
            .sum(), MILLISECONDS));

    this.representation = new PlanRepresentation(planRoot, types, totalCpuTime, totalScheduledTime);
    Visitor visitor = new Visitor(stageExecutionStrategy, types, estimatedStatsAndCosts, session, stats);
    planRoot.accept(visitor, null);
}
/**
 * Renders the captured plan as indented text.
 *
 * @param verbose whether to include verbose per-node details
 * @param level initial indentation level
 */
public String toText(boolean verbose, int level)
{
    TextRenderer renderer = new TextRenderer(verbose, level);
    return renderer.render(representation);
}
/** Renders the captured plan as JSON. */
public String toJson()
{
    JsonRenderer renderer = new JsonRenderer();
    return renderer.render(representation);
}
/**
 * Renders a single fragment's plan as JSON, deriving the type provider from
 * the given symbol map.
 */
public static String jsonFragmentPlan(PlanNode root, Map<Symbol, Type> symbols, FunctionManager functionManager, Session session)
{
    // De-duplicate entries before collecting: toImmutableMap rejects duplicate keys.
    TypeProvider typeProvider = TypeProvider.copyOf(symbols.entrySet().stream()
            .distinct()
            .collect(toImmutableMap(Map.Entry::getKey, Map.Entry::getValue)));
    PlanPrinter printer = new PlanPrinter(root, typeProvider, Optional.empty(), functionManager, StatsAndCosts.empty(), session, Optional.empty());
    return printer.toJson();
}
/**
 * Renders a logical plan as non-verbose indented text with no runtime stats
 * and no stage execution strategy.
 */
public static String textLogicalPlan(PlanNode plan, TypeProvider types, FunctionManager functionManager, StatsAndCosts estimatedStatsAndCosts, Session session, int level)
{
return new PlanPrinter(plan, types, Optional.empty(), functionManager, estimatedStatsAndCosts, session, Optional.empty()).toText(false, level);
}
/**
 * Renders a logical plan as indented text with optional verbosity, but no
 * stage execution strategy or runtime stats; delegates to the full overload.
 */
public static String textLogicalPlan(
PlanNode plan,
TypeProvider types,
FunctionManager functionManager,
StatsAndCosts estimatedStatsAndCosts,
Session session,
int level,
boolean verbose)
{
return textLogicalPlan(plan, types, Optional.empty(), functionManager, estimatedStatsAndCosts, session, Optional.empty(), level, verbose);
}
/**
 * Full text-rendering entry point: builds a PlanPrinter over the plan and
 * renders it as indented text.
 *
 * @param stageExecutionStrategy present when rendering a fragment of a distributed plan
 * @param stats per-node runtime statistics, if the plan has executed
 * @param level initial indentation level
 * @param verbose whether to include verbose per-node details
 */
public static String textLogicalPlan(
PlanNode plan,
TypeProvider types,
Optional<StageExecutionDescriptor> stageExecutionStrategy,
FunctionManager functionManager,
StatsAndCosts estimatedStatsAndCosts,
Session session,
Optional<Map<PlanNodeId, PlanNodeStats>> stats,
int level,
boolean verbose)
{
return new PlanPrinter(plan, types, stageExecutionStrategy, functionManager, estimatedStatsAndCosts, session, stats).toText(verbose, level);
}
/**
 * Renders an executed distributed plan, one formatted section per stage,
 * including runtime statistics aggregated across all stages.
 */
public static String textDistributedPlan(StageInfo outputStageInfo, FunctionManager functionManager, Session session, boolean verbose)
{
    List<StageInfo> allStages = getAllStages(Optional.of(outputStageInfo));

    // Collect every stage's fragment; formatFragment needs the full set to
    // resolve symbol types across fragment boundaries.
    List<PlanFragment> allFragments = new ArrayList<>();
    for (StageInfo stage : allStages) {
        allFragments.add(stage.getPlan());
    }

    Map<PlanNodeId, PlanNodeStats> aggregatedStats = aggregateStageStats(allStages);

    StringBuilder output = new StringBuilder();
    for (StageInfo stageInfo : allStages) {
        output.append(formatFragment(functionManager, session, stageInfo.getPlan(), Optional.of(stageInfo), Optional.of(aggregatedStats), verbose, allFragments));
    }
    return output.toString();
}
/**
 * Renders an unexecuted distributed plan (no stage info or runtime stats),
 * one formatted section per fragment.
 */
public static String textDistributedPlan(SubPlan plan, FunctionManager functionManager, Session session, boolean verbose)
{
    List<PlanFragment> fragments = plan.getAllFragments();
    StringBuilder output = new StringBuilder();
    for (PlanFragment fragment : fragments) {
        output.append(formatFragment(functionManager, session, fragment, Optional.empty(), Optional.empty(), verbose, fragments));
    }
    return output.toString();
}
/**
 * Formats one plan fragment: a header with its id and partitioning, optional
 * stage runtime statistics, the output layout/partitioning, the stage execution
 * strategy, and finally the fragment's plan tree rendered at indent level 1.
 *
 * @param stageInfo present only when the fragment has executed
 * @param planNodeStats per-node stats aggregated across all stages, if available
 * @param allFragments every fragment of the plan, used to build a type provider
 *        that can resolve symbols from remote sources
 */
private static String formatFragment(FunctionManager functionManager, Session session, PlanFragment fragment, Optional<StageInfo> stageInfo, Optional<Map<PlanNodeId, PlanNodeStats>> planNodeStats, boolean verbose, List<PlanFragment> allFragments)
{
StringBuilder builder = new StringBuilder();
builder.append(format("Fragment %s [%s]\n",
fragment.getId(),
fragment.getPartitioning()));
if (stageInfo.isPresent()) {
StageStats stageStats = stageInfo.get().getStageStats();
// Mean and population standard deviation of input positions across tasks,
// to surface skew between tasks of the stage.
double avgPositionsPerTask = stageInfo.get().getTasks().stream().mapToLong(task -> task.getStats().getProcessedInputPositions()).average().orElse(Double.NaN);
double squaredDifferences = stageInfo.get().getTasks().stream().mapToDouble(task -> Math.pow(task.getStats().getProcessedInputPositions() - avgPositionsPerTask, 2)).sum();
double sdAmongTasks = Math.sqrt(squaredDifferences / stageInfo.get().getTasks().size());
builder.append(indentString(1))
.append(format("CPU: %s, Scheduled: %s, Input: %s (%s); per task: avg.: %s std.dev.: %s, Output: %s (%s)\n",
stageStats.getTotalCpuTime().convertToMostSuccinctTimeUnit(),
stageStats.getTotalScheduledTime().convertToMostSuccinctTimeUnit(),
formatPositions(stageStats.getProcessedInputPositions()),
stageStats.getProcessedInputDataSize(),
formatDouble(avgPositionsPerTask),
formatDouble(sdAmongTasks),
formatPositions(stageStats.getOutputPositions()),
stageStats.getOutputDataSize()));
}
PartitioningScheme partitioningScheme = fragment.getPartitioningScheme();
builder.append(indentString(1))
.append(format("Output layout: [%s]\n",
Joiner.on(", ").join(partitioningScheme.getOutputLayout())));
boolean replicateNullsAndAny = partitioningScheme.isReplicateNullsAndAny();
// Render each partitioning argument: constants as "type(value)", otherwise
// the symbol name.
List<String> arguments = partitioningScheme.getPartitioning().getArguments().stream()
.map(argument -> {
if (argument.isConstant()) {
NullableValue constant = argument.getConstant();
String printableValue = castToVarchar(constant.getType(), constant.getValue(), functionManager, session);
return constant.getType().getDisplayName() + "(" + printableValue + ")";
}
return argument.getSymbol().toString();
})
.collect(toImmutableList());
builder.append(indentString(1));
if (replicateNullsAndAny) {
builder.append(format("Output partitioning: %s (replicate nulls and any) [%s]%s\n",
partitioningScheme.getPartitioning().getHandle(),
Joiner.on(", ").join(arguments),
formatHash(partitioningScheme.getHashColumn())));
}
else {
builder.append(format("Output partitioning: %s [%s]%s\n",
partitioningScheme.getPartitioning().getHandle(),
Joiner.on(", ").join(arguments),
formatHash(partitioningScheme.getHashColumn())));
}
builder.append(indentString(1)).append(format("Stage Execution Strategy: %s\n", fragment.getStageExecutionDescriptor().getStageExecutionStrategy()));
// Union the symbol tables of ALL fragments (deduplicated), so RemoteSource
// symbols defined in other fragments resolve while rendering this one.
TypeProvider typeProvider = TypeProvider.copyOf(allFragments.stream()
.flatMap(f -> f.getSymbols().entrySet().stream())
.distinct()
.collect(toImmutableMap(Map.Entry::getKey, Map.Entry::getValue)));
builder.append(textLogicalPlan(fragment.getRoot(), typeProvider, Optional.of(fragment.getStageExecutionDescriptor()), functionManager, fragment.getStatsAndCosts(), session, planNodeStats, 1, verbose))
.append("\n");
return builder.toString();
}
/**
 * Renders a logical plan in Graphviz DOT format by wrapping it in a synthetic
 * single-distribution fragment.
 */
public static String graphvizLogicalPlan(PlanNode plan, TypeProvider types)
{
// TODO: This should move to something like GraphvizRenderer
// Synthetic fragment: id "graphviz_plan", ungrouped execution, empty stats —
// only the plan tree itself matters for DOT output.
PlanFragment fragment = new PlanFragment(
new PlanFragmentId("graphviz_plan"),
plan,
types.allTypes(),
SINGLE_DISTRIBUTION,
ImmutableList.of(plan.getId()),
new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), plan.getOutputSymbols()),
StageExecutionDescriptor.ungroupedExecution(),
StatsAndCosts.empty(),
Optional.empty());
return GraphvizPrinter.printLogical(ImmutableList.of(fragment));
}
/** Renders a distributed plan in Graphviz DOT format. */
public static String graphvizDistributedPlan(SubPlan plan)
{
return GraphvizPrinter.printDistributed(plan);
}
private class Visitor
extends PlanVisitor<Void, Void>
{
private final Optional<StageExecutionDescriptor> stageExecutionStrategy;
private final TypeProvider types;
private final StatsAndCosts estimatedStatsAndCosts;
private final Optional<Map<PlanNodeId, PlanNodeStats>> stats;
private final Session session;
/**
 * @param stageExecutionStrategy present when rendering a distributed fragment
 * @param stats per-node runtime statistics, if the plan has executed
 */
public Visitor(Optional<StageExecutionDescriptor> stageExecutionStrategy, TypeProvider types, StatsAndCosts estimatedStatsAndCosts, Session session, Optional<Map<PlanNodeId, PlanNodeStats>> stats)
{
this.stageExecutionStrategy = requireNonNull(stageExecutionStrategy, "stageExecutionStrategy is null");
this.types = requireNonNull(types, "types is null");
this.estimatedStatsAndCosts = requireNonNull(estimatedStatsAndCosts, "estimatedStatsAndCosts is null");
this.stats = requireNonNull(stats, "stats is null");
this.session = requireNonNull(session, "session is null");
}
@Override
// Adds the ExplainAnalyze node itself, then recurses into its children.
public Void visitExplainAnalyze(ExplainAnalyzeNode node, Void context)
{
addNode(node, "ExplainAnalyze");
return processChildren(node, context);
}
@Override
// Renders a join node: equi-criteria plus optional filter, distribution and
// sort-expression details, then recurses into both inputs.
public Void visitJoin(JoinNode node, Void context)
{
    List<Expression> joinExpressions = node.getCriteria().stream()
            .map(JoinNode.EquiJoinClause::toExpression)
            .collect(Collectors.toCollection(ArrayList::new));
    node.getFilter().ifPresent(joinExpressions::add);

    NodeRepresentation nodeOutput;
    if (node.isCrossJoin()) {
        // A cross join has neither equi-criteria nor a filter.
        checkState(joinExpressions.isEmpty());
        nodeOutput = addNode(node, "CrossJoin");
    }
    else {
        String label = node.getType().getJoinLabel();
        String details = format("[%s]%s", Joiner.on(" AND ").join(joinExpressions), formatHash(node.getLeftHashSymbol(), node.getRightHashSymbol()));
        nodeOutput = addNode(node, label, details);
    }

    node.getDistributionType().ifPresent(distributionType -> nodeOutput.appendDetails("Distribution: %s", distributionType));
    node.getSortExpressionContext().ifPresent(sortContext -> nodeOutput.appendDetails("SortExpression[%s]", sortContext.getSortExpression()));
    node.getLeft().accept(this, context);
    node.getRight().accept(this, context);
    return null;
}
@Override
public Void visitSpatialJoin(SpatialJoinNode node, Void context)
{
NodeRepresentation nodeOutput = addNode(node,
node.getType().getJoinLabel(),
format("[%s]", node.getFilter()));
nodeOutput.appendDetailsLine("Distribution: %s", node.getDistributionType());
node.getLeft().accept(this, context);
node.getRight().accept(this, context);
return null;
}
@Override
public Void visitSemiJoin(SemiJoinNode node, Void context)
{
NodeRepresentation nodeOutput = addNode(node,
"SemiJoin",
format("[%s = %s]%s",
node.getSourceJoinSymbol(),
node.getFilteringSourceJoinSymbol(),
formatHash(node.getSourceHashSymbol(), node.getFilteringSourceHashSymbol())));
node.getDistributionType().ifPresent(distributionType -> nodeOutput.appendDetailsLine("Distribution: %s", distributionType));
node.getSource().accept(this, context);
node.getFilteringSource().accept(this, context);
return null;
}
@Override
public Void visitIndexSource(IndexSourceNode node, Void context)
{
NodeRepresentation nodeOutput = addNode(node,
"IndexSource",
format("[%s, lookup = %s]", node.getIndexHandle(), node.getLookupSymbols()));
for (Map.Entry<Symbol, ColumnHandle> entry : node.getAssignments().entrySet()) {
if (node.getOutputSymbols().contains(entry.getKey())) {
nodeOutput.appendDetailsLine("%s := %s", entry.getKey(), entry.getValue());
}
}
return null;
}
@Override
public Void visitIndexJoin(IndexJoinNode node, Void context)
{
List<Expression> joinExpressions = new ArrayList<>();
for (IndexJoinNode.EquiJoinClause clause : node.getCriteria()) {
joinExpressions.add(new ComparisonExpression(ComparisonExpression.Operator.EQUAL,
clause.getProbe().toSymbolReference(),
clause.getIndex().toSymbolReference()));
}
addNode(node,
format("%sIndexJoin", node.getType().getJoinLabel()),
format("[%s]%s", Joiner.on(" AND ").join(joinExpressions), formatHash(node.getProbeHashSymbol(), node.getIndexHashSymbol())));
node.getProbeSource().accept(this, context);
node.getIndexSource().accept(this, context);
return null;
}
@Override
public Void visitLimit(LimitNode node, Void context)
{
addNode(node,
format("Limit%s", node.isPartial() ? "Partial" : ""),
format("[%s]", node.getCount()));
return processChildren(node, context);
}
@Override
public Void visitDistinctLimit(DistinctLimitNode node, Void context)
{
addNode(node,
format("DistinctLimit%s", node.isPartial() ? "Partial" : ""),
format("[%s]%s", node.getLimit(), formatHash(node.getHashSymbol())));
return processChildren(node, context);
}
@Override
public Void visitAggregation(AggregationNode node, Void context)
{
String type = "";
if (node.getStep() != AggregationNode.Step.SINGLE) {
type = format("(%s)", node.getStep().toString());
}
if (node.isStreamable()) {
type = format("%s(STREAMING)", type);
}
String key = "";
if (!node.getGroupingKeys().isEmpty()) {
key = node.getGroupingKeys().toString();
}
NodeRepresentation nodeOutput = addNode(node,
format("Aggregate%s%s%s", type, key, formatHash(node.getHashSymbol())));
for (Map.Entry<Symbol, AggregationNode.Aggregation> entry : node.getAggregations().entrySet()) {
if (entry.getValue().getMask().isPresent()) {
nodeOutput.appendDetailsLine("%s := %s (mask = %s)", entry.getKey(), entry.getValue().getCall(), entry.getValue().getMask().get());
}
else {
nodeOutput.appendDetailsLine("%s := %s", entry.getKey(), entry.getValue().getCall());
}
}
return processChildren(node, context);
}
@Override
public Void visitGroupId(GroupIdNode node, Void context)
{
// grouping sets are easier to understand in terms of inputs
List<List<Symbol>> inputGroupingSetSymbols = node.getGroupingSets().stream()
.map(set -> set.stream()
.map(symbol -> node.getGroupingColumns().get(symbol))
.collect(Collectors.toList()))
.collect(Collectors.toList());
NodeRepresentation nodeOutput = addNode(node, "GroupId", format("%s", inputGroupingSetSymbols));
for (Map.Entry<Symbol, Symbol> mapping : node.getGroupingColumns().entrySet()) {
nodeOutput.appendDetailsLine("%s := %s", mapping.getKey(), mapping.getValue());
}
return processChildren(node, context);
}
@Override
public Void visitMarkDistinct(MarkDistinctNode node, Void context)
{
addNode(node,
"MarkDistinct",
format("[distinct=%s marker=%s]%s", formatOutputs(types, node.getDistinctSymbols()), node.getMarkerSymbol(), formatHash(node.getHashSymbol())));
return processChildren(node, context);
}
@Override
public Void visitWindow(WindowNode node, Void context)
{
List<String> partitionBy = Lists.transform(node.getPartitionBy(), Functions.toStringFunction());
List<String> args = new ArrayList<>();
if (!partitionBy.isEmpty()) {
List<Symbol> prePartitioned = node.getPartitionBy().stream()
.filter(node.getPrePartitionedInputs()::contains)
.collect(toImmutableList());
List<Symbol> notPrePartitioned = node.getPartitionBy().stream()
.filter(column -> !node.getPrePartitionedInputs().contains(column))
.collect(toImmutableList());
StringBuilder builder = new StringBuilder();
if (!prePartitioned.isEmpty()) {
builder.append("<")
.append(Joiner.on(", ").join(prePartitioned))
.append(">");
if (!notPrePartitioned.isEmpty()) {
builder.append(", ");
}
}
if (!notPrePartitioned.isEmpty()) {
builder.append(Joiner.on(", ").join(notPrePartitioned));
}
args.add(format("partition by (%s)", builder));
}
if (node.getOrderingScheme().isPresent()) {
OrderingScheme orderingScheme = node.getOrderingScheme().get();
args.add(format("order by (%s)", Stream.concat(
orderingScheme.getOrderBy().stream()
.limit(node.getPreSortedOrderPrefix())
.map(symbol -> "<" + symbol + " " + orderingScheme.getOrdering(symbol) + ">"),
orderingScheme.getOrderBy().stream()
.skip(node.getPreSortedOrderPrefix())
.map(symbol -> symbol + " " + orderingScheme.getOrdering(symbol)))
.collect(Collectors.joining(", "))));
}
NodeRepresentation nodeOutput = addNode(node, "Window", format("[%s]%s", Joiner.on(", ").join(args), formatHash(node.getHashSymbol())));
for (Map.Entry<Symbol, WindowNode.Function> entry : node.getWindowFunctions().entrySet()) {
FunctionCall call = entry.getValue().getFunctionCall();
String frameInfo = formatFrame(entry.getValue().getFrame());
nodeOutput.appendDetailsLine("%s := %s(%s) %s", entry.getKey(), call.getName(), Joiner.on(", ").join(call.getArguments()), frameInfo);
}
return processChildren(node, context);
}
@Override
public Void visitTopNRowNumber(TopNRowNumberNode node, Void context)
{
List<String> partitionBy = node.getPartitionBy().stream()
.map(Functions.toStringFunction())
.collect(toImmutableList());
List<String> orderBy = node.getOrderingScheme().getOrderBy().stream()
.map(input -> input + " " + node.getOrderingScheme().getOrdering(input))
.collect(toImmutableList());
List<String> args = new ArrayList<>();
args.add(format("partition by (%s)", Joiner.on(", ").join(partitionBy)));
args.add(format("order by (%s)", Joiner.on(", ").join(orderBy)));
NodeRepresentation nodeOutput = addNode(node,
"TopNRowNumber",
format("[%s limit %s]%s", Joiner.on(", ").join(args), node.getMaxRowCountPerPartition(), formatHash(node.getHashSymbol())));
nodeOutput.appendDetailsLine("%s := %s", node.getRowNumberSymbol(), "row_number()");
return processChildren(node, context);
}
@Override
public Void visitRowNumber(RowNumberNode node, Void context)
{
List<String> partitionBy = Lists.transform(node.getPartitionBy(), Functions.toStringFunction());
List<String> args = new ArrayList<>();
if (!partitionBy.isEmpty()) {
args.add(format("partition by (%s)", Joiner.on(", ").join(partitionBy)));
}
if (node.getMaxRowCountPerPartition().isPresent()) {
args.add(format("limit = %s", node.getMaxRowCountPerPartition().get()));
}
NodeRepresentation nodeOutput = addNode(node,
"RowNumber",
format("[%s]%s", Joiner.on(", ").join(args), formatHash(node.getHashSymbol())));
nodeOutput.appendDetailsLine("%s := %s", node.getRowNumberSymbol(), "row_number()");
return processChildren(node, context);
}
@Override
public Void visitTableScan(TableScanNode node, Void context)
{
TableHandle table = node.getTable();
NodeRepresentation nodeOutput;
if (stageExecutionStrategy.isPresent()) {
nodeOutput = addNode(node,
"TableScan",
format("[%s, grouped = %s]", table, stageExecutionStrategy.get().isScanGroupedExecution(node.getId())));
}
else {
nodeOutput = addNode(node, "TableScan", format("[%s]", table));
}
printTableScanInfo(nodeOutput, node);
return null;
}
@Override
public Void visitValues(ValuesNode node, Void context)
{
NodeRepresentation nodeOutput = addNode(node, "Values");
for (List<Expression> row : node.getRows()) {
nodeOutput.appendDetailsLine("(" + Joiner.on(", ").join(row) + ")");
}
return null;
}
@Override
public Void visitFilter(FilterNode node, Void context)
{
return visitScanFilterAndProjectInfo(node, Optional.of(node), Optional.empty(), context);
}
@Override
public Void visitProject(ProjectNode node, Void context)
{
if (node.getSource() instanceof FilterNode) {
return visitScanFilterAndProjectInfo(node, Optional.of((FilterNode) node.getSource()), Optional.of(node), context);
}
return visitScanFilterAndProjectInfo(node, Optional.empty(), Optional.of(node), context);
}
private Void visitScanFilterAndProjectInfo(
PlanNode node,
Optional<FilterNode> filterNode,
Optional<ProjectNode> projectNode,
Void context)
{
checkState(projectNode.isPresent() || filterNode.isPresent());
PlanNode sourceNode;
if (filterNode.isPresent()) {
sourceNode = filterNode.get().getSource();
}
else {
sourceNode = projectNode.get().getSource();
}
Optional<TableScanNode> scanNode;
if (sourceNode instanceof TableScanNode) {
scanNode = Optional.of((TableScanNode) sourceNode);
}
else {
scanNode = Optional.empty();
}
String formatString = "[";
String operatorName = "";
List<Object> arguments = new LinkedList<>();
if (scanNode.isPresent()) {
operatorName += "Scan";
formatString += "table = %s, ";
TableHandle table = scanNode.get().getTable();
arguments.add(table);
if (stageExecutionStrategy.isPresent()) {
formatString += "grouped = %s, ";
arguments.add(stageExecutionStrategy.get().isScanGroupedExecution(scanNode.get().getId()));
}
}
if (filterNode.isPresent()) {
operatorName += "Filter";
formatString += "filterPredicate = %s, ";
arguments.add(filterNode.get().getPredicate());
}
if (formatString.length() > 1) {
formatString = formatString.substring(0, formatString.length() - 2);
}
formatString += "]";
if (projectNode.isPresent()) {
operatorName += "Project";
}
List<PlanNodeId> allNodes = Stream.of(scanNode, filterNode, projectNode)
.filter(Optional::isPresent)
.map(Optional::get)
.map(PlanNode::getId)
.collect(toList());
NodeRepresentation nodeOutput = addNode(
node,
operatorName,
format(formatString, arguments.toArray(new Object[0])),
allNodes,
ImmutableList.of(sourceNode),
ImmutableList.of());
if (projectNode.isPresent()) {
printAssignments(nodeOutput, projectNode.get().getAssignments());
}
if (scanNode.isPresent()) {
printTableScanInfo(nodeOutput, scanNode.get());
PlanNodeStats nodeStats = stats.map(s -> s.get(node.getId())).orElse(null);
if (nodeStats != null) {
// Add to 'details' rather than 'statistics', since these stats are node-specific
nodeOutput.appendDetails("Input: %s (%s)", formatPositions(nodeStats.getPlanNodeInputPositions()), nodeStats.getPlanNodeInputDataSize().toString());
double filtered = 100.0d * (nodeStats.getPlanNodeInputPositions() - nodeStats.getPlanNodeOutputPositions()) / nodeStats.getPlanNodeInputPositions();
nodeOutput.appendDetailsLine(", Filtered: %s%%", formatDouble(filtered));
}
return null;
}
sourceNode.accept(this, context);
return null;
}
        /**
         * Appends layout information, per-output-column constraints, and constraints
         * on non-output columns for a table scan to the given node representation.
         */
        private void printTableScanInfo(NodeRepresentation nodeOutput, TableScanNode node)
        {
            TableHandle table = node.getTable();
            if (node.getLayout().isPresent()) {
                // TODO: find a better way to do this
                ConnectorTableLayoutHandle layout = node.getLayout().get().getConnectorHandle();
                // Only print the layout when it adds information beyond the table handle itself.
                if (!table.getConnectorHandle().toString().equals(layout.toString())) {
                    nodeOutput.appendDetailsLine("LAYOUT: %s", layout);
                }
            }
            TupleDomain<ColumnHandle> predicate = node.getCurrentConstraint();
            if (predicate.isNone()) {
                // The constraint is unsatisfiable: the scan produces no rows.
                nodeOutput.appendDetailsLine(":: NONE");
            }
            else {
                // first, print output columns and their constraints
                for (Map.Entry<Symbol, ColumnHandle> assignment : node.getAssignments().entrySet()) {
                    ColumnHandle column = assignment.getValue();
                    nodeOutput.appendDetailsLine("%s := %s", assignment.getKey(), column);
                    printConstraint(nodeOutput, column, predicate);
                }
                // then, print constraints for columns that are not in the output
                if (!predicate.isAll()) {
                    Set<ColumnHandle> outputs = ImmutableSet.copyOf(node.getAssignments().values());
                    predicate.getDomains().get()
                            .entrySet().stream()
                            .filter(entry -> !outputs.contains(entry.getKey()))
                            .forEach(entry -> {
                                ColumnHandle column = entry.getKey();
                                nodeOutput.appendDetailsLine("%s", column);
                                printConstraint(nodeOutput, column, predicate);
                            });
                }
            }
        }
@Override
public Void visitUnnest(UnnestNode node, Void context)
{
addNode(node,
"Unnest",
format("[replicate=%s, unnest=%s]", formatOutputs(types, node.getReplicateSymbols()), formatOutputs(types, node.getUnnestSymbols().keySet())));
return processChildren(node, context);
}
@Override
public Void visitOutput(OutputNode node, Void context)
{
NodeRepresentation nodeOutput = addNode(node, "Output", format("[%s]", Joiner.on(", ").join(node.getColumnNames())));
for (int i = 0; i < node.getColumnNames().size(); i++) {
String name = node.getColumnNames().get(i);
Symbol symbol = node.getOutputSymbols().get(i);
if (!name.equals(symbol.toString())) {
nodeOutput.appendDetailsLine("%s := %s", name, symbol);
}
}
return processChildren(node, context);
}
@Override
public Void visitTopN(TopNNode node, Void context)
{
Iterable<String> keys = Iterables.transform(node.getOrderingScheme().getOrderBy(), input -> input + " " + node.getOrderingScheme().getOrdering(input));
addNode(node,
format("TopN%s", node.getStep() == TopNNode.Step.PARTIAL ? "Partial" : ""),
format("[%s by (%s)]", node.getCount(), Joiner.on(", ").join(keys)));
return processChildren(node, context);
}
@Override
public Void visitSort(SortNode node, Void context)
{
Iterable<String> keys = Iterables.transform(node.getOrderingScheme().getOrderBy(), input -> input + " " + node.getOrderingScheme().getOrdering(input));
boolean isPartial = false;
if (SystemSessionProperties.isDistributedSortEnabled(session)) {
isPartial = true;
}
addNode(node,
format("%sSort", isPartial ? "Partial" : ""),
format("[%s]", Joiner.on(", ").join(keys)));
return processChildren(node, context);
}
@Override
public Void visitRemoteSource(RemoteSourceNode node, Void context)
{
addNode(node,
format("Remote%s", node.getOrderingScheme().isPresent() ? "Merge" : "Source"),
format("[%s]", Joiner.on(',').join(node.getSourceFragmentIds())),
ImmutableList.of(),
ImmutableList.of(),
node.getSourceFragmentIds());
return null;
}
@Override
public Void visitUnion(UnionNode node, Void context)
{
addNode(node, "Union");
return processChildren(node, context);
}
@Override
public Void visitIntersect(IntersectNode node, Void context)
{
addNode(node, "Intersect");
return processChildren(node, context);
}
@Override
public Void visitExcept(ExceptNode node, Void context)
{
addNode(node, "Except");
return processChildren(node, context);
}
@Override
public Void visitTableWriter(TableWriterNode node, Void context)
{
NodeRepresentation nodeOutput = addNode(node, "TableWriter");
for (int i = 0; i < node.getColumnNames().size(); i++) {
String name = node.getColumnNames().get(i);
Symbol symbol = node.getColumns().get(i);
nodeOutput.appendDetailsLine("%s := %s", name, symbol);
}
if (node.getStatisticsAggregation().isPresent()) {
verify(node.getStatisticsAggregationDescriptor().isPresent(), "statisticsAggregationDescriptor is not present");
printStatisticAggregations(nodeOutput, node.getStatisticsAggregation().get(), node.getStatisticsAggregationDescriptor().get());
}
return processChildren(node, context);
}
@Override
public Void visitStatisticsWriterNode(StatisticsWriterNode node, Void context)
{
addNode(node, "StatisticsWriter", format("[%s]", node.getTarget()));
return processChildren(node, context);
}
@Override
public Void visitTableFinish(TableFinishNode node, Void context)
{
NodeRepresentation nodeOutput = addNode(node, "TableCommit", format("[%s]", node.getTarget()));
if (node.getStatisticsAggregation().isPresent()) {
verify(node.getStatisticsAggregationDescriptor().isPresent(), "statisticsAggregationDescriptor is not present");
printStatisticAggregations(nodeOutput, node.getStatisticsAggregation().get(), node.getStatisticsAggregationDescriptor().get());
}
return processChildren(node, context);
}
private void printStatisticAggregations(NodeRepresentation nodeOutput, StatisticAggregations aggregations, StatisticAggregationsDescriptor<Symbol> descriptor)
{
nodeOutput.appendDetailsLine("Collected statistics:");
printStatisticAggregationsInfo(nodeOutput, descriptor.getTableStatistics(), descriptor.getColumnStatistics(), aggregations.getAggregations());
nodeOutput.appendDetailsLine(indentString(1) + "grouped by => [%s]", getStatisticGroupingSetsInfo(descriptor.getGrouping()));
}
private String getStatisticGroupingSetsInfo(Map<String, Symbol> columnMappings)
{
return columnMappings.entrySet().stream()
.map(entry -> format("%s := %s", entry.getValue(), entry.getKey()))
.collect(joining(", "));
}
private void printStatisticAggregationsInfo(
NodeRepresentation nodeOutput,
Map<TableStatisticType, Symbol> tableStatistics,
Map<ColumnStatisticMetadata, Symbol> columnStatistics,
Map<Symbol, AggregationNode.Aggregation> aggregations)
{
nodeOutput.appendDetailsLine("aggregations =>");
for (Map.Entry<TableStatisticType, Symbol> tableStatistic : tableStatistics.entrySet()) {
nodeOutput.appendDetailsLine(indentString(1) + "%s => [%s := %s]",
tableStatistic.getValue(),
tableStatistic.getKey(),
aggregations.get(tableStatistic.getValue()).getCall());
}
for (Map.Entry<ColumnStatisticMetadata, Symbol> columnStatistic : columnStatistics.entrySet()) {
nodeOutput.appendDetailsLine(
indentString(1) + "%s[%s] => [%s := %s]",
columnStatistic.getKey().getStatisticType(),
columnStatistic.getKey().getColumnName(),
columnStatistic.getValue(),
aggregations.get(columnStatistic.getValue()).getCall());
}
}
@Override
public Void visitSample(SampleNode node, Void context)
{
addNode(node, "Sample", format("[%s: %s]", node.getSampleType(), node.getSampleRatio()));
return processChildren(node, context);
}
@Override
public Void visitExchange(ExchangeNode node, Void context)
{
if (node.getOrderingScheme().isPresent()) {
OrderingScheme orderingScheme = node.getOrderingScheme().get();
List<String> orderBy = orderingScheme.getOrderBy()
.stream()
.map(input -> input + " " + orderingScheme.getOrdering(input))
.collect(toImmutableList());
addNode(node,
format("%sMerge", UPPER_UNDERSCORE.to(CaseFormat.UPPER_CAMEL, node.getScope().toString())),
format("[%s]", Joiner.on(", ").join(orderBy)));
}
else if (node.getScope() == Scope.LOCAL) {
addNode(node,
"LocalExchange",
format("[%s%s]%s (%s)",
node.getPartitioningScheme().getPartitioning().getHandle(),
node.getPartitioningScheme().isReplicateNullsAndAny() ? " - REPLICATE NULLS AND ANY" : "",
formatHash(node.getPartitioningScheme().getHashColumn()),
Joiner.on(", ").join(node.getPartitioningScheme().getPartitioning().getArguments())));
}
else {
addNode(node,
format("%sExchange", UPPER_UNDERSCORE.to(CaseFormat.UPPER_CAMEL, node.getScope().toString())),
format("[%s%s]%s",
node.getType(),
node.getPartitioningScheme().isReplicateNullsAndAny() ? " - REPLICATE NULLS AND ANY" : "",
formatHash(node.getPartitioningScheme().getHashColumn())));
}
return processChildren(node, context);
}
@Override
public Void visitDelete(DeleteNode node, Void context)
{
addNode(node, "Delete", format("[%s]", node.getTarget()));
return processChildren(node, context);
}
@Override
public Void visitMetadataDelete(MetadataDeleteNode node, Void context)
{
addNode(node, "MetadataDelete", format("[%s]", node.getTarget()));
return processChildren(node, context);
}
@Override
public Void visitEnforceSingleRow(EnforceSingleRowNode node, Void context)
{
addNode(node, "EnforceSingleRow");
return processChildren(node, context);
}
@Override
public Void visitAssignUniqueId(AssignUniqueId node, Void context)
{
addNode(node, "AssignUniqueId");
return processChildren(node, context);
}
@Override
public Void visitGroupReference(GroupReference node, Void context)
{
addNode(node, "GroupReference", format("[%s]", node.getGroupId()), ImmutableList.of());
return null;
}
@Override
public Void visitApply(ApplyNode node, Void context)
{
NodeRepresentation nodeOutput = addNode(node, "Apply", format("[%s]", node.getCorrelation()));
printAssignments(nodeOutput, node.getSubqueryAssignments());
return processChildren(node, context);
}
@Override
public Void visitLateralJoin(LateralJoinNode node, Void context)
{
addNode(node, "Lateral", format("[%s]", node.getCorrelation()));
return processChildren(node, context);
}
@Override
protected Void visitPlan(PlanNode node, Void context)
{
throw new UnsupportedOperationException("not yet implemented: " + node.getClass().getName());
}
private Void processChildren(PlanNode node, Void context)
{
for (PlanNode child : node.getSources()) {
child.accept(this, context);
}
return null;
}
private void printAssignments(NodeRepresentation nodeOutput, Assignments assignments)
{
for (Map.Entry<Symbol, Expression> entry : assignments.getMap().entrySet()) {
if (entry.getValue() instanceof SymbolReference && ((SymbolReference) entry.getValue()).getName().equals(entry.getKey().getName())) {
// skip identity assignments
continue;
}
nodeOutput.appendDetailsLine("%s := %s", entry.getKey(), entry.getValue());
}
}
private void printConstraint(NodeRepresentation nodeOutput, ColumnHandle column, TupleDomain<ColumnHandle> constraint)
{
checkArgument(!constraint.isNone());
Map<ColumnHandle, Domain> domains = constraint.getDomains().get();
if (!constraint.isAll() && domains.containsKey(column)) {
nodeOutput.appendDetailsLine(" :: %s", formatDomain(domains.get(column).simplify()));
}
}
        /**
         * Renders a column {@link Domain} as a bracketed list of its allowed
         * values and ranges, e.g. "[NULL, [1, 5), [42]]".
         */
        private String formatDomain(Domain domain)
        {
            ImmutableList.Builder<String> parts = ImmutableList.builder();
            if (domain.isNullAllowed()) {
                parts.add("NULL");
            }
            Type type = domain.getType();
            domain.getValues().getValuesProcessor().consume(
                    // Ranges: "[x]" for a single value, bounded intervals otherwise.
                    ranges -> {
                        for (Range range : ranges.getOrderedRanges()) {
                            StringBuilder builder = new StringBuilder();
                            if (range.isSingleValue()) {
                                String value = castToVarchar(type, range.getSingleValue(), functionManager, session);
                                builder.append('[').append(value).append(']');
                            }
                            else {
                                // '['/']' for inclusive (EXACTLY) bounds, '('/')' otherwise.
                                builder.append((range.getLow().getBound() == Marker.Bound.EXACTLY) ? '[' : '(');
                                if (range.getLow().isLowerUnbounded()) {
                                    builder.append("<min>");
                                }
                                else {
                                    builder.append(castToVarchar(type, range.getLow().getValue(), functionManager, session));
                                }
                                builder.append(", ");
                                if (range.getHigh().isUpperUnbounded()) {
                                    builder.append("<max>");
                                }
                                else {
                                    builder.append(castToVarchar(type, range.getHigh().getValue(), functionManager, session));
                                }
                                builder.append((range.getHigh().getBound() == Marker.Bound.EXACTLY) ? ']' : ')');
                            }
                            parts.add(builder.toString());
                        }
                    },
                    discreteValues -> discreteValues.getValues().stream()
                            .map(value -> castToVarchar(type, value, functionManager, session))
                            .sorted() // Sort so the values will be printed in predictable order
                            .forEach(parts::add),
                    allOrNone -> {
                        if (allOrNone.isAll()) {
                            parts.add("ALL VALUES");
                        }
                    });
            return "[" + Joiner.on(", ").join(parts.build()) + "]";
        }
        /**
         * Adds a node with no identifier detail.
         */
        public NodeRepresentation addNode(PlanNode node, String name)
        {
            return addNode(node, name, "");
        }
        /**
         * Adds a node using the node's own sources as its children.
         */
        public NodeRepresentation addNode(PlanNode node, String name, String identifier)
        {
            return addNode(node, name, identifier, node.getSources());
        }
        /**
         * Adds a node that represents exactly one plan node id and no remote sources.
         */
        public NodeRepresentation addNode(PlanNode node, String name, String identifier, List<PlanNode> children)
        {
            return addNode(node, name, identifier, ImmutableList.of(node.getId()), children, ImmutableList.of());
        }
        /**
         * Records a rendered node. {@code allNodes} lists every plan node id this
         * entry aggregates (several when operators are fused, see
         * visitScanFilterAndProjectInfo); estimated stats and costs are looked up
         * per id, falling back to unknown() when no estimate exists.
         */
        public NodeRepresentation addNode(PlanNode rootNode, String name, String identifier, List<PlanNodeId> allNodes, List<PlanNode> children, List<PlanFragmentId> remoteSources)
        {
            List<PlanNodeId> childrenIds = children.stream().map(PlanNode::getId).collect(toImmutableList());
            List<PlanNodeStatsEstimate> estimatedStats = allNodes.stream()
                    .map(nodeId -> estimatedStatsAndCosts.getStats().getOrDefault(nodeId, PlanNodeStatsEstimate.unknown()))
                    .collect(toList());
            List<PlanNodeCostEstimate> estimatedCosts = allNodes.stream()
                    .map(nodeId -> estimatedStatsAndCosts.getCosts().getOrDefault(nodeId, PlanNodeCostEstimate.unknown()))
                    .collect(toList());
            NodeRepresentation nodeOutput = new NodeRepresentation(
                    rootNode.getId(),
                    name,
                    rootNode.getClass().getSimpleName(),
                    identifier,
                    rootNode.getOutputSymbols(),
                    stats.map(s -> s.get(rootNode.getId())),
                    estimatedStats,
                    estimatedCosts,
                    childrenIds,
                    remoteSources);
            representation.addNode(nodeOutput);
            return nodeOutput;
        }
}
private static String castToVarchar(Type type, Object value, FunctionManager functionManager, Session session)
{
if (value == null) {
return "NULL";
}
try {
Signature coercion = functionManager.getCoercion(type, VARCHAR);
Slice coerced = (Slice) new InterpretedFunctionInvoker(functionManager).invoke(coercion, session.toConnectorSession(), value);
return coerced.toStringUtf8();
}
catch (OperatorNotFoundException e) {
return "<UNREPRESENTABLE VALUE>";
}
}
private static String formatFrame(WindowNode.Frame frame)
{
StringBuilder builder = new StringBuilder(frame.getType().toString());
frame.getOriginalStartValue().ifPresent(value -> builder.append(" ").append(value));
builder.append(" ").append(frame.getStartType());
frame.getOriginalEndValue().ifPresent(value -> builder.append(" ").append(value));
builder.append(" ").append(frame.getEndType());
return builder.toString();
}
private static String formatHash(Optional<Symbol>... hashes)
{
List<Symbol> symbols = stream(hashes)
.filter(Optional::isPresent)
.map(Optional::get)
.collect(toList());
if (symbols.isEmpty()) {
return "";
}
return "[" + Joiner.on(", ").join(symbols) + "]";
}
private static String formatOutputs(TypeProvider types, Iterable<Symbol> outputs)
{
return Streams.stream(outputs)
.map(input -> input + ":" + types.get(input).getDisplayName())
.collect(Collectors.joining(", "));
}
}
| |
package ca.uhn.fhir.model.api;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2015 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import ca.uhn.fhir.model.primitive.BoundCodeDt;
import ca.uhn.fhir.model.primitive.CodeDt;
import ca.uhn.fhir.model.primitive.DecimalDt;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.model.primitive.XhtmlDt;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import ca.uhn.fhir.model.valueset.BundleEntryTransactionMethodEnum;
import ca.uhn.fhir.util.ElementUtil;
public class BundleEntry extends BaseBundle {
//@formatter:off
/* ****************************************************
* NB: add any new fields to the isEmpty() method!!!
*****************************************************/
//@formatter:on
private TagList myCategories;
private InstantDt myDeletedAt;
private StringDt myDeletedByEmail;
private StringDt myDeletedByName;
private StringDt myDeletedComment;
private IdDt myDeletedResourceId;
private CodeDt myDeletedResourceType;
private StringDt myDeletedResourceVersion;
private StringDt myLinkAlternate;
private StringDt myLinkBase;
private StringDt myLinkSearch;
private StringDt myLinkSelf;
private InstantDt myPublished;
private IResource myResource;
private DecimalDt myScore;
private BoundCodeDt<BundleEntrySearchModeEnum> mySearchMode;
private BoundCodeDt<BundleEntryTransactionMethodEnum> myTransactionOperation;
private XhtmlDt mySummary;
private StringDt myTitle;
private InstantDt myUpdated;
	/**
	 * Adds and returns a new, empty category {@link Tag} on this entry.
	 *
	 * @deprecated Tags will become immutable in a future release of HAPI, so
	 *             {@link #addCategory(String, String, String)} should be used instead
	 */
	@Deprecated
	public Tag addCategory() {
		Tag retVal = new Tag();
		getCategories().add(retVal);
		return retVal;
	}
public void addCategory(String theScheme, String theTerm, String theLabel) {
getCategories().add(new Tag(theScheme, theTerm, theLabel));
}
public void addCategory(Tag theTag) {
getCategories().add(theTag);
}
	/**
	 * Returns the category (tag) list for this entry, creating it lazily on
	 * first access. Never returns null.
	 */
	public TagList getCategories() {
		if (myCategories == null) {
			myCategories = new TagList();
		}
		return myCategories;
	}
	/**
	 * Gets the date/time that this entry was deleted. Created lazily, so this
	 * never returns null; callers should check emptiness of the returned value.
	 */
	public InstantDt getDeletedAt() {
		if (myDeletedAt == null) {
			myDeletedAt = new InstantDt();
		}
		return myDeletedAt;
	}
public StringDt getDeletedByEmail() {
if (myDeletedByEmail == null) {
myDeletedByEmail = new StringDt();
}
return myDeletedByEmail;
}
public StringDt getDeletedByName() {
if (myDeletedByName == null) {
myDeletedByName = new StringDt();
}
return myDeletedByName;
}
public StringDt getDeletedComment() {
if (myDeletedComment == null) {
myDeletedComment = new StringDt();
}
return myDeletedComment;
}
public IdDt getDeletedResourceId() {
if (myDeletedResourceId == null) {
myDeletedResourceId = new IdDt();
}
return myDeletedResourceId;
}
public CodeDt getDeletedResourceType() {
if (myDeletedResourceType == null) {
myDeletedResourceType = new CodeDt();
}
return myDeletedResourceType;
}
public StringDt getDeletedResourceVersion() {
if (myDeletedResourceVersion == null) {
myDeletedResourceVersion = new StringDt();
}
return myDeletedResourceVersion;
}
/**
* @deprecated Setting IDs on bundle entries is redundant since resources already have an ID field. Instead of
* providing an ID using this method, set the ID on the resource using {@link IResource#setId(IdDt)} or
* if this entry represents a deleted resource, use {@link #setDeletedResourceId(IdDt)}.
*/
@Override
@Deprecated
public IdDt getId() {
return super.getId();
}
public StringDt getLinkAlternate() {
if (myLinkAlternate == null) {
myLinkAlternate = new StringDt();
}
return myLinkAlternate;
}
/**
* @deprecated Use resource ID to determine base URL
*/
@Deprecated
public StringDt getLinkBase() {
if (myLinkBase == null) {
myLinkBase = new StringDt();
}
return myLinkBase;
}
public StringDt getLinkSearch() {
if (myLinkSearch == null) {
myLinkSearch = new StringDt();
}
return myLinkSearch;
}
public StringDt getLinkSelf() {
if (myLinkSelf == null) {
myLinkSelf = new StringDt();
}
return myLinkSelf;
}
public InstantDt getPublished() {
if (myPublished == null) {
myPublished = new InstantDt();
}
return myPublished;
}
public IResource getResource() {
return myResource;
}
public DecimalDt getScore() {
if (myScore == null) {
myScore = new DecimalDt();
}
return myScore;
}
public XhtmlDt getSummary() {
if (mySummary == null) {
mySummary = new XhtmlDt();
}
return mySummary;
}
public StringDt getTitle() {
if (myTitle == null) {
myTitle = new StringDt();
}
return myTitle;
}
	/**
	 * @deprecated <b>DSTU2 Note:</b> As of DSTU2, bundle entries no longer have an updated time (this bit of metadata
	 *             has been moved to the resource <meta/> element so it is redundant here). In preparation for
	 *             DSTU2, it is recommended that you migrate code away from using this method and over to using resource
	 *             metadata instead.
	 */
	@Deprecated
	public InstantDt getUpdated() {
		if (myUpdated == null) {
			myUpdated = new InstantDt();
		}
		// Fall back to the resource's own UPDATED metadata when this entry has no value of its own
		if (myUpdated.isEmpty() && myResource != null) {
			InstantDt resourceUpdated = ResourceMetadataKeyEnum.UPDATED.get(myResource);
			if (resourceUpdated != null && !resourceUpdated.isEmpty()) {
				return resourceUpdated;
			}
		}
		return myUpdated;
	}
/**
 * Returns {@code true} when this entry, including inherited content, holds no
 * populated fields.
 */
@Override
public boolean isEmpty() {
    //@formatter:off
    // myLinkBase and myLinkSearch were missing from this check even though both
    // are settable fields of this entry; they are now included for consistency
    // with the other link fields.
    return super.isEmpty() &&
        ElementUtil.isEmpty(
            myDeletedResourceId, myDeletedResourceType, myDeletedResourceVersion, myDeletedAt,
            myScore, mySearchMode, myTransactionOperation, myCategories,
            myLinkAlternate, myLinkSelf, myLinkBase, myLinkSearch, myPublished, myResource, mySummary,
            myTitle, myUpdated, myDeletedByEmail, myDeletedByName, myDeletedComment);
    //@formatter:on
}
/**
 * Sets the date/time that this entry was deleted.
 *
 * @param theDeletedAt the deletion instant (may be null to clear the value)
 */
public void setDeleted(InstantDt theDeletedAt) {
    myDeletedAt = theDeletedAt;
}
/** Sets the deleted-by e-mail value for this entry. */
public void setDeletedByEmail(StringDt theDeletedByEmail) {
    myDeletedByEmail = theDeletedByEmail;
}
/** Sets the deleted-by name value for this entry. */
public void setDeletedByName(StringDt theDeletedByName) {
    // The old null-check that pre-created a StringDt here was dead code: the
    // field was unconditionally overwritten immediately afterwards.
    myDeletedByName = theDeletedByName;
}
/** Sets the deletion comment for this entry. */
public void setDeletedComment(StringDt theDeletedComment) {
    myDeletedComment = theDeletedComment;
}
/** Sets the ID of the deleted resource this entry refers to. */
public void setDeletedResourceId(IdDt theDeletedResourceId) {
    myDeletedResourceId = theDeletedResourceId;
}
/** Sets the type code of the deleted resource this entry refers to. */
public void setDeletedResourceType(CodeDt theDeletedResourceType) {
    myDeletedResourceType = theDeletedResourceType;
}
/** Sets the version of the deleted resource this entry refers to. */
public void setDeletedResourceVersion(StringDt theDeletedResourceVersion) {
    myDeletedResourceVersion = theDeletedResourceVersion;
}
/**
 * @deprecated Bundle entries no longer have an ID in DSTU2, as ID is explicitly stated in the resource itself.
 */
@Override
@Deprecated
public void setId(IdDt theId) {
    // Kept for backwards compatibility; delegates straight to the superclass.
    super.setId(theId);
}
/** Sets the alternate link for this entry. */
public void setLinkAlternate(StringDt theLinkAlternate) {
    myLinkAlternate = theLinkAlternate;
}
/**
 * @deprecated Use resource ID to determine base URL
 */
@Deprecated
public void setLinkBase(StringDt theLinkBase) {
    // Retained for backwards compatibility only.
    myLinkBase = theLinkBase;
}
/** Sets the search link for this entry. */
public void setLinkSearch(StringDt theLinkSearch) {
    myLinkSearch = theLinkSearch;
}
/** Sets the self link for this entry. */
public void setLinkSelf(StringDt theLinkSelf) {
    // The old lazy initialization of myLinkSelf was dead code (the field was
    // unconditionally overwritten right after), so it has been removed.
    myLinkSelf = theLinkSelf;
}
/**
 * Sets the published timestamp.
 *
 * @param thePublished the new value; {@code null} is rejected by the validation call
 */
public void setPublished(InstantDt thePublished) {
    Validate.notNull(thePublished, "Published may not be null");
    myPublished = thePublished;
}
/** Sets the resource carried by this entry. */
public void setResource(IResource theResource) {
    myResource = theResource;
}
/** Sets the score for this entry. */
public void setScore(DecimalDt theScore) {
    myScore = theScore;
}
/**
 * @deprecated <b>DSTU2 Note:</b> As of DSTU2, bundle entries no longer have an updated time (this bit of metadata
 * has been moved to the resource <meta/> element so it is redundant here). In preparation for
 * DSTU2, it is recommended that you migrate code away from using this method and over to using resource
 * metadata instead.
 *
 * @param theUpdated the new value; {@code null} is rejected by the validation call
 */
@Deprecated
public void setUpdated(InstantDt theUpdated) {
    Validate.notNull(theUpdated, "Updated may not be null");
    myUpdated = theUpdated;
}
/** Short debug form: the resource's simple class name (or a no-resource marker) plus the entry ID. */
@Override
public String toString() {
    final ToStringBuilder builder = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
    if (getResource() == null) {
        builder.append("No resource");
    } else {
        builder.append("type", getResource().getClass().getSimpleName());
    }
    builder.append("id", getId());
    return builder.toString();
}
/** Returns the search mode, lazily creating an empty bound code on first access. */
public BoundCodeDt<BundleEntrySearchModeEnum> getSearchMode() {
    BoundCodeDt<BundleEntrySearchModeEnum> mode = mySearchMode;
    if (mode == null) {
        mode = new BoundCodeDt<BundleEntrySearchModeEnum>(BundleEntrySearchModeEnum.VALUESET_BINDER);
        mySearchMode = mode;
    }
    return mode;
}
/** Sets the search mode for this entry. */
public void setSearchMode(BoundCodeDt<BundleEntrySearchModeEnum> theSearchMode) {
    mySearchMode = theSearchMode;
}
/** Returns the transaction method, lazily creating an empty bound code on first access. */
public BoundCodeDt<BundleEntryTransactionMethodEnum> getTransactionMethod() {
    BoundCodeDt<BundleEntryTransactionMethodEnum> method = myTransactionOperation;
    if (method == null) {
        method = new BoundCodeDt<BundleEntryTransactionMethodEnum>(BundleEntryTransactionMethodEnum.VALUESET_BINDER);
        myTransactionOperation = method;
    }
    return method;
}
/** Sets the transaction method for this entry. */
public void setTransactionMethod(BoundCodeDt<BundleEntryTransactionMethodEnum> theTransactionOperation) {
    myTransactionOperation = theTransactionOperation;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.server.lookup;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.query.lookup.LookupExtractor;
import org.apache.druid.server.lookup.cache.polling.OffHeapPollingCache;
import org.apache.druid.server.lookup.cache.polling.OnHeapPollingCache;
import org.apache.druid.server.lookup.cache.polling.PollingCacheFactory;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
 * Tests for {@code PollingLookup}, parameterized over the available polling
 * cache implementations (off-heap and on-heap).
 */
@RunWith(Parameterized.class)
public class PollingLookupTest extends InitializedNullHandlingTest
{
  // Snapshot served by the mock fetcher on the very first poll.
  private static final Map<String, String> FIRST_LOOKUP_MAP = ImmutableMap.of(
      "foo", "bar",
      "bad", "bar",
      "how about that", "foo",
      "empty string", ""
  );
  // Snapshot served on every poll after the first one.
  private static final Map<String, String> SECOND_LOOKUP_MAP = ImmutableMap.of(
      "new-foo", "new-bar",
      "new-bad", "new-bar"
  );
  // Poll interval in milliseconds; the timing-sensitive test below is sized off this.
  private static final long POLL_PERIOD = 1000L;

  @Rule
  public ExpectedException expectedException = ExpectedException.none();

  /**
   * Stateful fetcher: serves {@code FIRST_LOOKUP_MAP} on the first
   * {@code fetchAll()} call and {@code SECOND_LOOKUP_MAP} afterwards.
   */
  @JsonTypeName("mock")
  private static class MockDataFetcher implements DataFetcher
  {
    // Number of fetchAll() calls seen so far; flips the served snapshot after the first.
    private int callNumber = 0;

    @Override
    public Iterable fetchAll()
    {
      if (callNumber == 0) {
        callNumber++;
        return FIRST_LOOKUP_MAP.entrySet();
      }
      return SECOND_LOOKUP_MAP.entrySet();
    }

    // The single-key and bulk fetch paths are not exercised by these tests.
    @Nullable
    @Override
    public Object fetch(Object key)
    {
      return null;
    }

    @Override
    public Iterable fetch(Iterable keys)
    {
      return null;
    }

    @Override
    public List reverseFetchKeys(Object value)
    {
      return null;
    }

    // All instances behave identically, hence type-based equality only.
    @Override
    @SuppressWarnings("EqualsHashCode")
    public boolean equals(Object obj)
    {
      return obj instanceof MockDataFetcher;
    }
  }

  // One test run per polling-cache provider implementation.
  @Parameterized.Parameters
  public static Collection<Object[]> inputData()
  {
    return Arrays.asList(new Object[][]{
        {new OffHeapPollingCache.OffHeapPollingCacheProvider()},
        {new OnHeapPollingCache.OnHeapPollingCacheProvider<>()}
    });
  }

  private final PollingCacheFactory pollingCacheFactory;
  private final DataFetcher dataFetcher = new MockDataFetcher();
  private PollingLookup pollingLookup;

  public PollingLookupTest(PollingCacheFactory pollingCacheFactory)
  {
    this.pollingCacheFactory = pollingCacheFactory;
  }

  @Before
  public void setUp()
  {
    pollingLookup = new PollingLookup(POLL_PERIOD, dataFetcher, pollingCacheFactory);
  }

  @After
  public void tearDown()
  {
    if (pollingLookup != null) {
      pollingLookup.close();
    }
    pollingLookup = null;
  }

  // Applying a lookup after close() must fail fast with an ISE.
  @Test(expected = ISE.class)
  public void testClose()
  {
    pollingLookup.close();
    pollingLookup.apply("key");
  }

  @Test
  public void testApply()
  {
    assertMapLookup(FIRST_LOOKUP_MAP, pollingLookup);
  }

  // After sleeping two poll periods the second snapshot should have been picked up.
  @Test(timeout = POLL_PERIOD * 3)
  public void testApplyAfterDataChange() throws InterruptedException
  {
    assertMapLookup(FIRST_LOOKUP_MAP, pollingLookup);
    Thread.sleep(POLL_PERIOD * 2);
    assertMapLookup(SECOND_LOOKUP_MAP, pollingLookup);
  }

  @Test
  public void testUnapply()
  {
    Assert.assertEquals(
        "reverse lookup should match",
        Sets.newHashSet("foo", "bad"),
        Sets.newHashSet(pollingLookup.unapply("bar"))
    );
    Assert.assertEquals(
        "reverse lookup should match",
        Sets.newHashSet("how about that"),
        Sets.newHashSet(pollingLookup.unapply("foo"))
    );
    Assert.assertEquals(
        "reverse lookup should match",
        Sets.newHashSet("empty string"),
        Sets.newHashSet(pollingLookup.unapply(""))
    );
    Assert.assertEquals(
        "reverse lookup of none existing value should be empty list",
        Collections.emptyList(),
        pollingLookup.unapply("does't exist")
    );
  }

  @Test
  public void testBulkApply()
  {
    Map<String, String> map = pollingLookup.applyAll(FIRST_LOOKUP_MAP.keySet());
    Assert.assertEquals(FIRST_LOOKUP_MAP, Maps.transformValues(map, new Function<String, String>()
    {
      @Override
      public String apply(String input)
      {
        //make sure to rewrite null strings as empty.
        return NullHandling.nullToEmptyIfNeeded(input);
      }
    }));
  }

  // Cache keys must differ between distinct lookup instances.
  @Test
  public void testGetCacheKey()
  {
    PollingLookup pollingLookup2 = new PollingLookup(1L, dataFetcher, pollingCacheFactory);
    Assert.assertFalse(Arrays.equals(pollingLookup2.getCacheKey(), pollingLookup.getCacheKey()));
  }

  @Test
  public void testCanGetKeySet()
  {
    Assert.assertFalse(pollingLookup.canGetKeySet());
  }

  @Test
  public void testKeySet()
  {
    expectedException.expect(UnsupportedOperationException.class);
    pollingLookup.keySet();
  }

  // Asserts every entry of the expected map resolves through the lookup,
  // normalizing empty/null according to the active null-handling mode.
  private void assertMapLookup(Map<String, String> map, LookupExtractor lookup)
  {
    for (Map.Entry<String, String> entry : map.entrySet()) {
      String key = entry.getKey();
      String val = entry.getValue();
      Assert.assertEquals("non-null check", NullHandling.emptyToNullIfNeeded(val), lookup.apply(key));
    }
  }
}
| |
/*
* Copyright 2016 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.primitives.impl;
import com.google.common.collect.ImmutableMultiset;
import com.google.common.collect.Maps;
import com.google.common.collect.Multiset;
import org.onlab.util.Tools;
import org.onosproject.store.service.AsyncConsistentMultimap;
import org.onosproject.store.service.Versioned;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collector;
import java.util.stream.Collectors;
/**
* An {@link AsyncConsistentMultimap} that maps its operation to operations to
* a differently typed {@link AsyncConsistentMultimap} by transcoding operation
* inputs and outputs while maintaining version numbers.
*
* @param <K2> key type of other map
* @param <V2> value type of other map
* @param <K1> key type of this map
* @param <V1> value type of this map
*/
public class TranscodingAsyncConsistentMultimap<K1, V1, K2, V2>
implements AsyncConsistentMultimap<K1, V1> {
private final AsyncConsistentMultimap<K2, V2> backingMap;
private final Function<K1, K2> keyEncoder;
private final Function<K2, K1> keyDecoder;
private final Function<V2, V1> valueDecoder;
private final Function<V1, V2> valueEncoder;
private final Function<? extends Versioned<V2>,
? extends Versioned<V1>> versionedValueTransform;
private final Function<Versioned<Collection<? extends V2>>,
Versioned<Collection<? extends V1>>> versionedValueCollectionDecode;
private final Function<Collection<? extends V1>, Collection<V2>>
valueCollectionEncode;
public TranscodingAsyncConsistentMultimap(
AsyncConsistentMultimap<K2, V2> backingMap,
Function<K1, K2> keyEncoder,
Function<K2, K1> keyDecoder,
Function<V2, V1> valueDecoder,
Function<V1, V2> valueEncoder) {
this.backingMap = backingMap;
this.keyEncoder = k -> k == null ? null : keyEncoder.apply(k);
this.keyDecoder = k -> k == null ? null : keyDecoder.apply(k);
this.valueDecoder = v -> v == null ? null : valueDecoder.apply(v);
this.valueEncoder = v -> v == null ? null : valueEncoder.apply(v);
this.versionedValueTransform = v -> v == null ? null :
v.map(valueDecoder);
this.versionedValueCollectionDecode = v -> v == null ? null :
new Versioned<>(
v.value()
.stream()
.map(valueDecoder)
.collect(Collectors.toSet()),
v.version(),
v.creationTime());
this.valueCollectionEncode = v -> v == null ? null :
v.stream().map(valueEncoder).collect(Collectors.toSet());
}
@Override
public CompletableFuture<Integer> size() {
return backingMap.size();
}
@Override
public CompletableFuture<Boolean> isEmpty() {
return backingMap.isEmpty();
}
@Override
public CompletableFuture<Boolean> containsKey(K1 key) {
try {
return backingMap.containsKey(keyEncoder.apply(key));
} catch (Exception e) {
return Tools.exceptionalFuture(e);
}
}
@Override
public CompletableFuture<Boolean> containsValue(V1 value) {
try {
return backingMap.containsValue(valueEncoder.apply(value));
} catch (Exception e) {
return Tools.exceptionalFuture(e);
}
}
@Override
public CompletableFuture<Boolean> containsEntry(K1 key, V1 value) {
try {
return backingMap.containsEntry(keyEncoder.apply(key),
valueEncoder.apply(value));
} catch (Exception e) {
return Tools.exceptionalFuture(e);
}
}
@Override
public CompletableFuture<Boolean> put(K1 key, V1 value) {
try {
return backingMap.put(keyEncoder.apply(key),
valueEncoder.apply(value));
} catch (Exception e) {
return Tools.exceptionalFuture(e);
}
}
@Override
public CompletableFuture<Boolean> remove(K1 key, V1 value) {
try {
return backingMap.remove(keyEncoder.apply(key), valueEncoder
.apply(value));
} catch (Exception e) {
return Tools.exceptionalFuture(e);
}
}
@Override
public CompletableFuture<Boolean> removeAll(
K1 key, Collection<? extends V1> values) {
try {
return backingMap.removeAll(
keyEncoder.apply(key),
values.stream().map(valueEncoder).collect(
Collectors.toSet()));
} catch (Exception e) {
return Tools.exceptionalFuture(e);
}
}
@Override
public CompletableFuture<Versioned<Collection<? extends V1>>>
removeAll(K1 key) {
try {
return backingMap.removeAll(keyEncoder.apply(key))
.thenApply(versionedValueCollectionDecode);
} catch (Exception e) {
return Tools.exceptionalFuture(e);
}
}
@Override
public CompletableFuture<Boolean>
putAll(K1 key, Collection<? extends V1> values) {
try {
return backingMap.putAll(keyEncoder.apply(key),
valueCollectionEncode.apply(values));
} catch (Exception e) {
return Tools.exceptionalFuture(e);
}
}
@Override
public CompletableFuture<Versioned<Collection<? extends V1>>>
replaceValues(K1 key, Collection<V1> values) {
try {
return backingMap.replaceValues(keyEncoder.apply(key),
valueCollectionEncode.apply(values))
.thenApply(versionedValueCollectionDecode);
} catch (Exception e) {
return Tools.exceptionalFuture(e);
}
}
@Override
public CompletableFuture<Void> clear() {
return backingMap.clear();
}
@Override
public CompletableFuture<Versioned<Collection<? extends V1>>> get(K1 key) {
try {
return backingMap.get(keyEncoder.apply(key))
.thenApply(versionedValueCollectionDecode);
} catch (Exception e) {
return Tools.exceptionalFuture(e);
}
}
@Override
public CompletableFuture<Set<K1>> keySet() {
return backingMap.keySet().thenApply(s -> s.stream()
.map(keyDecoder)
.collect(Collectors.toSet()));
}
@Override
public CompletableFuture<Multiset<K1>> keys() {
return backingMap.keys().thenApply(s -> s.stream().map(keyDecoder)
.collect(new MultisetCollector<>()));
}
@Override
public CompletableFuture<Multiset<V1>> values() {
return backingMap.values().thenApply(s -> s.stream().map(valueDecoder)
.collect(new MultisetCollector<>()));
}
@Override
public CompletableFuture<Collection<Map.Entry<K1, V1>>> entries() {
return backingMap.entries().thenApply(s -> s.stream()
.map(e -> Maps.immutableEntry(keyDecoder.apply(e.getKey()),
valueDecoder.apply(e.getValue())))
.collect(Collectors.toSet()));
}
@Override
public CompletableFuture<Map<K1, Collection<V1>>> asMap() {
throw new UnsupportedOperationException("Unsupported operation.");
}
@Override
public String name() {
return backingMap.name();
}
@Override
public void addStatusChangeListener(Consumer<Status> listener) {
backingMap.addStatusChangeListener(listener);
}
@Override
public void removeStatusChangeListener(Consumer<Status> listener) {
backingMap.removeStatusChangeListener(listener);
}
@Override
public Collection<Consumer<Status>> statusChangeListeners() {
return backingMap.statusChangeListeners();
}
private class MultisetCollector<T> implements Collector<T,
ImmutableMultiset.Builder<T>,
Multiset<T>> {
@Override
public Supplier<ImmutableMultiset.Builder<T>> supplier() {
return ImmutableMultiset::builder;
}
@Override
public BiConsumer<ImmutableMultiset.Builder<T>, T> accumulator() {
return ((builder, t) -> builder.add(t));
}
@Override
public BinaryOperator<ImmutableMultiset.Builder<T>> combiner() {
return (a, b) -> {
a.addAll(b.build());
return a;
};
}
@Override
public Function<ImmutableMultiset.Builder<T>, Multiset<T>> finisher() {
return ImmutableMultiset.Builder::build;
}
@Override
public Set<Characteristics> characteristics() {
return EnumSet.of(Characteristics.UNORDERED);
}
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.process;
import com.intellij.execution.TaskExecutor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.util.ConcurrencyUtil;
import com.intellij.util.Consumer;
import com.intellij.util.io.BaseDataReader;
import com.intellij.util.io.BaseInputStreamReader;
import com.intellij.util.io.BaseOutputReader;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.*;
import java.nio.charset.Charset;
import java.util.concurrent.*;
import static com.intellij.util.io.BaseDataReader.AdaptiveSleepingPolicy;
/**
 * Base {@link ProcessHandler} that attaches reader threads to an OS
 * {@link Process}' output/error streams and forwards the text to registered
 * process listeners.
 */
public class BaseOSProcessHandler extends ProcessHandler implements TaskExecutor {
  // NOTE(review): the logger category ("OSProcessHandlerBase") does not match the
  // class name — presumably historical; confirm before changing it.
  private static final Logger LOG = Logger.getInstance("#com.intellij.execution.process.OSProcessHandlerBase");

  @NotNull protected final Process myProcess;
  // Echoed once to the SYSTEM output stream in startNotify(); may be absent.
  @Nullable protected final String myCommandLine;
  protected final ProcessWaitFor myWaitFor;
  // Charset used to decode process output; null falls back to the platform default.
  @Nullable protected final Charset myCharset;

  /**
   * @param process     the OS process to manage
   * @param commandLine textual command line shown to listeners; may be null
   * @param charset     output decoding charset; may be null
   */
  public BaseOSProcessHandler(@NotNull final Process process, @Nullable final String commandLine, @Nullable Charset charset) {
    myProcess = process;
    myCommandLine = commandLine;
    myCharset = charset;
    myWaitFor = new ProcessWaitFor(process, this);
  }

  /**
   * Override this method in order to execute the task with a custom pool
   *
   * @param task a task to run
   */
  protected Future<?> executeOnPooledThread(Runnable task) {
    return ExecutorServiceHolder.ourThreadExecutorsService.submit(task);
  }

  @Override
  public Future<?> executeTask(Runnable task) {
    return executeOnPooledThread(task);
  }

  @NotNull
  public Process getProcess() {
    return myProcess;
  }

  // Subclasses may opt into an adaptive sleep between non-blocking reads.
  protected boolean useAdaptiveSleepingPolicyWhenReadingOutput() {
    return false;
  }

  /**
   * Override this method to read process output and error streams in blocking mode
   *
   * @return true to read non-blocking but sleeping, false for blocking read
   */
  protected boolean useNonBlockingRead() {
    return !Registry.is("output.reader.blocking.mode", false);
  }

  protected boolean processHasSeparateErrorStream() {
    return true;
  }

  @Override
  public void startNotify() {
    if (myCommandLine != null) {
      notifyTextAvailable(myCommandLine + '\n', ProcessOutputTypes.SYSTEM);
    }
    // Readers are created only once listeners have been told the process started.
    addProcessListener(new ProcessAdapter() {
      @Override
      public void startNotified(final ProcessEvent event) {
        try {
          final BaseDataReader stdoutReader = createOutputDataReader(getPolicy());
          final BaseDataReader stderrReader = processHasSeparateErrorStream() ? createErrorDataReader(getPolicy()) : null;
          myWaitFor.setTerminationCallback(new Consumer<Integer>() {
            @Override
            public void consume(Integer exitCode) {
              try {
                // tell readers that no more attempts to read process' output should be made
                if (stderrReader != null) stderrReader.stop();
                stdoutReader.stop();
                try {
                  if (stderrReader != null) stderrReader.waitFor();
                  stdoutReader.waitFor();
                }
                catch (InterruptedException ignore) {
                }
              }
              finally {
                // Termination is reported even if waiting for the readers was interrupted.
                onOSProcessTerminated(exitCode);
              }
            }
          });
        }
        finally {
          // One-shot listener: detach itself once the readers are attached.
          removeProcessListener(this);
        }
      }
    });
    super.startNotify();
  }

  // Chooses the read strategy from the registry flag and subclass preferences.
  private BaseDataReader.SleepingPolicy getPolicy() {
    if (useNonBlockingRead()) {
      return useAdaptiveSleepingPolicyWhenReadingOutput() ? new AdaptiveSleepingPolicy() : BaseDataReader.SleepingPolicy.SIMPLE;
    }
    else {
      //use blocking read policy
      return BaseDataReader.SleepingPolicy.BLOCKING;
    }
  }

  @NotNull
  protected BaseDataReader createErrorDataReader(BaseDataReader.SleepingPolicy sleepingPolicy) {
    return new SimpleOutputReader(createProcessErrReader(), ProcessOutputTypes.STDERR, sleepingPolicy);
  }

  @NotNull
  protected BaseDataReader createOutputDataReader(BaseDataReader.SleepingPolicy sleepingPolicy) {
    return new SimpleOutputReader(createProcessOutReader(), ProcessOutputTypes.STDOUT, sleepingPolicy);
  }

  protected void onOSProcessTerminated(final int exitCode) {
    notifyProcessTerminated(exitCode);
  }

  protected Reader createProcessOutReader() {
    return createInputStreamReader(myProcess.getInputStream());
  }

  protected Reader createProcessErrReader() {
    return createInputStreamReader(myProcess.getErrorStream());
  }

  private Reader createInputStreamReader(InputStream streamToRead) {
    Charset charset = charsetNotNull();
    return new BaseInputStreamReader(streamToRead, charset);
  }

  private Charset charsetNotNull() {
    Charset charset = getCharset();
    if (charset == null) {
      // use default charset
      charset = Charset.defaultCharset();
    }
    return charset;
  }

  @Override
  protected void destroyProcessImpl() {
    try {
      closeStreams();
    }
    finally {
      doDestroyProcess();
    }
  }

  protected void doDestroyProcess() {
    getProcess().destroy();
  }

  @Override
  protected void detachProcessImpl() {
    final Runnable runnable = new Runnable() {
      @Override
      public void run() {
        closeStreams();
        myWaitFor.detach();
        notifyProcessDetached();
      }
    };
    // Detach asynchronously so the caller is not blocked on stream teardown.
    executeOnPooledThread(runnable);
  }

  // Closes the process' stdin only; stdout/stderr are owned by the readers.
  protected void closeStreams() {
    try {
      myProcess.getOutputStream().close();
    }
    catch (IOException e) {
      LOG.warn(e);
    }
  }

  @Override
  public boolean detachIsDefault() {
    return false;
  }

  @Override
  public OutputStream getProcessInput() {
    return myProcess.getOutputStream();
  }

  @Nullable
  public String getCommandLine() {
    return myCommandLine;
  }

  @Nullable
  public Charset getCharset() {
    return myCharset;
  }

  /** Holder for the shared thread pool used for reader and termination tasks. */
  public static class ExecutorServiceHolder {
    private static final ExecutorService ourThreadExecutorsService = createServiceImpl();

    private static ThreadPoolExecutor createServiceImpl() {
      ThreadFactory factory = ConcurrencyUtil.newNamedThreadFactory("OSProcessHandler pooled thread");
      return new ThreadPoolExecutor(10, Integer.MAX_VALUE, 60L, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(), factory);
    }

    public static Future<?> submit(Runnable task) {
      return ourThreadExecutorsService.submit(task);
    }
  }

  /** Reader that forwards each chunk of process output to the handler's listeners. */
  private class SimpleOutputReader extends BaseOutputReader {
    private final Key myProcessOutputType;

    private SimpleOutputReader(@NotNull Reader reader, @NotNull Key processOutputType, SleepingPolicy sleepingPolicy) {
      super(reader, sleepingPolicy);
      myProcessOutputType = processOutputType;
      start();
    }

    @Override
    protected Future<?> executeOnPooledThread(Runnable runnable) {
      return BaseOSProcessHandler.this.executeOnPooledThread(runnable);
    }

    @Override
    protected void onTextAvailable(@NotNull String text) {
      notifyTextAvailable(text, myProcessOutputType);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.core.session;
import org.apache.jackrabbit.api.security.authorization.PrivilegeManager;
import org.apache.jackrabbit.core.HierarchyManager;
import org.apache.jackrabbit.core.ItemManager;
import org.apache.jackrabbit.core.ItemValidator;
import org.apache.jackrabbit.core.RepositoryContext;
import org.apache.jackrabbit.core.RepositoryImpl;
import org.apache.jackrabbit.core.SessionImpl;
import org.apache.jackrabbit.core.WorkspaceImpl;
import org.apache.jackrabbit.core.config.WorkspaceConfig;
import org.apache.jackrabbit.core.data.DataStore;
import org.apache.jackrabbit.core.id.NodeId;
import org.apache.jackrabbit.core.id.NodeIdFactory;
import org.apache.jackrabbit.core.nodetype.NodeTypeManagerImpl;
import org.apache.jackrabbit.core.nodetype.NodeTypeRegistry;
import org.apache.jackrabbit.core.observation.ObservationManagerImpl;
import org.apache.jackrabbit.core.security.AccessManager;
import org.apache.jackrabbit.core.security.authorization.Permission;
import org.apache.jackrabbit.core.security.authorization.PrivilegeManagerImpl;
import org.apache.jackrabbit.core.state.SessionItemStateManager;
import org.apache.jackrabbit.core.value.ValueFactoryImpl;
import org.apache.jackrabbit.spi.Name;
import org.apache.jackrabbit.spi.Path;
import org.apache.jackrabbit.spi.commons.conversion.IllegalNameException;
import org.apache.jackrabbit.spi.commons.conversion.MalformedPathException;
import org.apache.jackrabbit.spi.commons.conversion.NamePathResolver;
import javax.jcr.AccessDeniedException;
import javax.jcr.NamespaceException;
import javax.jcr.NamespaceRegistry;
import javax.jcr.RepositoryException;
import javax.jcr.UnsupportedRepositoryOperationException;
import javax.jcr.ValueFactory;
/**
* Component context of a session. This class keeps track of the internal
* components associated with a session.
*/
public class SessionContext implements NamePathResolver {
/**
 * The repository context of this session.
 */
private final RepositoryContext repositoryContext;

/**
 * This session.
 */
private final SessionImpl session;

/**
 * The state of this session.
 */
private final SessionState state;

/**
 * The value factory of this session
 */
private final ValueFactory valueFactory;

/**
 * The item validator of this session
 */
private final ItemValidator itemValidator;

/**
 * Node type manager of this session
 */
private final NodeTypeManagerImpl nodeTypeManager;

/**
 * Privilege manager of this session.
 */
private final PrivilegeManagerImpl privilegeManager;

/**
 * The namespace registry exposed for this session context that includes
 * permission checks.
 */
private final NamespaceRegistry nsRegistry;

/**
 * The workspace of this session
 */
private final WorkspaceImpl workspace;

// The managers below are injected after construction through their setters;
// they are volatile so a reader sees the fully published instance.

/**
 * The item state manager of this session
 */
private volatile SessionItemStateManager itemStateManager;

/**
 * The item manager of this session
 */
private volatile ItemManager itemManager;

/**
 * The access manager of this session
 */
private volatile AccessManager accessManager;

/**
 * The observation manager of this session.
 */
private volatile ObservationManagerImpl observationManager;
/**
 * Creates a component context for the given session.
 *
 * @param repositoryContext repository context of the session
 * @param session the session
 * @param workspaceConfig workspace configuration
 * @throws RepositoryException if the workspace can not be accessed
 */
public SessionContext(
        RepositoryContext repositoryContext, SessionImpl session,
        WorkspaceConfig workspaceConfig) throws RepositoryException {
    assert repositoryContext != null;
    assert session != null;
    this.repositoryContext = repositoryContext;
    this.session = session;
    this.state = new SessionState(this);
    this.valueFactory =
            new ValueFactoryImpl(session, repositoryContext.getDataStore());
    // NOTE(review): 'this' escapes to the components below before construction
    // completes; the volatile managers are still null at this point.
    this.itemValidator = new ItemValidator(this);
    this.nodeTypeManager = new NodeTypeManagerImpl(this);
    this.privilegeManager = new PrivilegeManagerImpl(repositoryContext.getPrivilegeRegistry(), session);
    this.nsRegistry = new PermissionAwareNamespaceRegistry();
    this.workspace = new WorkspaceImpl(this, workspaceConfig);
}
//-------------------------------------------< per-repository components >
/**
 * Returns the repository context of the session.
 *
 * @return repository context (never null; asserted in the constructor)
 */
public RepositoryContext getRepositoryContext() {
    return repositoryContext;
}
/**
 * Returns this repository.
 *
 * @return the repository, obtained from the repository context
 */
public RepositoryImpl getRepository() {
    return repositoryContext.getRepository();
}
/**
 * Returns the root node identifier of the repository.
 *
 * @return identifier of the repository's root node
 */
public NodeId getRootNodeId() {
    return repositoryContext.getRootNodeId();
}
/**
 * Returns the data store of this repository, or <code>null</code>
 * if a data store is not configured.
 *
 * @return data store, or <code>null</code>
 */
public DataStore getDataStore() {
    // Delegates to the shared repository context.
    return repositoryContext.getDataStore();
}
/**
 * Returns the node type registry of this repository.
 *
 * @return node type registry
 */
public NodeTypeRegistry getNodeTypeRegistry() {
    // Delegates to the shared repository context.
    return repositoryContext.getNodeTypeRegistry();
}
//----------------------------------------------< per-session components >
/**
 * Returns this session.
 *
 * @return the session this context belongs to
 */
public SessionImpl getSessionImpl() {
    return session;
}
/**
 * Returns the state of this session.
 *
 * @return session state (created for this context in the constructor)
 */
public SessionState getSessionState() {
    return state;
}
/**
 * Returns the value factory of this session.
 *
 * @return value factory (backed by the repository's data store)
 */
public ValueFactory getValueFactory() {
    return valueFactory;
}
/**
 * Returns the item validator of this session.
 *
 * @return item validator (created for this context in the constructor)
 */
public ItemValidator getItemValidator() {
    return itemValidator;
}
/**
 * Returns the node type manager of this session.
 *
 * @return node type manager (created for this context in the constructor)
 */
public NodeTypeManagerImpl getNodeTypeManager() {
    return nodeTypeManager;
}
/**
 * Returns the privilege manager of this session.
 *
 * @return the privilege manager (backed by the repository's privilege registry)
 */
public PrivilegeManagerImpl getPrivilegeManager() {
    return privilegeManager;
}
/**
 * Returns a namespace registry instance which asserts that the editing
 * session is allowed to modify the namespace registry.
 *
 * @return permission-checking namespace registry of this session context
 */
public NamespaceRegistry getNamespaceRegistry() {
    return nsRegistry;
}
/**
 * Returns the workspace of this session.
 *
 * @return workspace (created for this context in the constructor)
 */
public WorkspaceImpl getWorkspace() {
    return workspace;
}
/**
 * Returns the item state manager of this session.
 *
 * @return item state manager; asserted non-null, so the corresponding
 *         setter must have been invoked beforehand
 */
public SessionItemStateManager getItemStateManager() {
    assert itemStateManager != null;
    return itemStateManager;
}
/**
 * Sets the item state manager of this session.
 *
 * @param itemStateManager item state manager (must not be null)
 */
public void setItemStateManager(SessionItemStateManager itemStateManager) {
    assert itemStateManager != null;
    this.itemStateManager = itemStateManager;
}
/**
 * Returns the hierarchy manager of this session.
 *
 * @return hierarchy manager, obtained from the item state manager
 */
public HierarchyManager getHierarchyManager() {
    assert itemStateManager != null;
    return itemStateManager.getHierarchyMgr();
}
/**
 * Returns the item manager of this session.
 *
 * @return item manager; asserted non-null, so the corresponding setter
 *         must have been invoked beforehand
 */
public ItemManager getItemManager() {
    assert itemManager != null;
    return itemManager;
}
/**
 * Sets the item manager of this session.
 *
 * @param itemManager item manager (must not be null)
 */
public void setItemManager(ItemManager itemManager) {
    assert itemManager != null;
    this.itemManager = itemManager;
}
public AccessManager getAccessManager() {
assert accessManager != null;
return accessManager;
}
public void setAccessManager(AccessManager accessManager) {
assert accessManager != null;
this.accessManager = accessManager;
}
public ObservationManagerImpl getObservationManager() {
assert observationManager != null;
return observationManager;
}
public void setObservationManager(
ObservationManagerImpl observationManager) {
assert observationManager != null;
this.observationManager = observationManager;
}
public NodeIdFactory getNodeIdFactory() {
return repositoryContext.getNodeIdFactory();
}
//--------------------------------------------------------< NameResolver >
/**
 * Parses a JCR name into a qualified name. Delegates to the session's
 * own resolver so that session-local namespace mappings are honored.
 *
 * @param name JCR name
 * @return qualified name
 * @throws IllegalNameException if the name is not a valid JCR name
 * @throws NamespaceException if the prefix is not mapped
 */
public Name getQName(String name)
        throws IllegalNameException, NamespaceException {
    return session.getQName(name);
}
/**
 * Formats a qualified name as a JCR name using session-local mappings.
 *
 * @param name qualified name
 * @return JCR name
 * @throws NamespaceException if the namespace URI is not mapped
 */
public String getJCRName(Name name) throws NamespaceException {
    return session.getJCRName(name);
}
//--------------------------------------------------------< PathResolver >
/**
 * Parses a JCR path into a qualified path.
 *
 * @param path JCR path
 * @return qualified path
 * @throws MalformedPathException if the path is not a valid JCR path
 * @throws IllegalNameException if a name within the path is invalid
 * @throws NamespaceException if a prefix is not mapped
 */
public Path getQPath(String path)
        throws MalformedPathException, IllegalNameException,
        NamespaceException {
    return session.getQPath(path);
}
/**
 * Parses a JCR path into a qualified path, optionally normalizing an
 * identifier-based path.
 *
 * @param path JCR path
 * @param normalizeIdentifier whether to normalize identifier paths
 * @return qualified path
 * @throws MalformedPathException if the path is not a valid JCR path
 * @throws IllegalNameException if a name within the path is invalid
 * @throws NamespaceException if a prefix is not mapped
 */
public Path getQPath(String path, boolean normalizeIdentifier)
        throws MalformedPathException, IllegalNameException,
        NamespaceException {
    return session.getQPath(path, normalizeIdentifier);
}
/**
 * Formats a qualified path as a JCR path using session-local mappings.
 *
 * @param path qualified path
 * @return JCR path
 * @throws NamespaceException if a namespace URI is not mapped
 */
public String getJCRPath(Path path) throws NamespaceException {
    return session.getJCRPath(path);
}
//--------------------------------------------------------------< Object >
/**
 * Dumps the session internals to a string.
 *
 * @return string representation of session internals
 */
@Override
public String toString() {
    // Same output as simple concatenation; null components render as "null".
    StringBuilder dump = new StringBuilder();
    dump.append(session).append(":\n");
    dump.append(itemManager).append("\n");
    dump.append(itemStateManager);
    return dump.toString();
}
//--------------------------------------------------------------------------
/**
* Permission aware namespace registry implementation that makes sure that
* modifications of the namespace registry are only allowed if the editing
* session has the corresponding permissions.
*/
private class PermissionAwareNamespaceRegistry implements NamespaceRegistry {
private final NamespaceRegistry nsRegistry = repositoryContext.getNamespaceRegistry();
public void registerNamespace(String prefix, String uri) throws NamespaceException, UnsupportedRepositoryOperationException, AccessDeniedException, RepositoryException {
session.getAccessManager().checkRepositoryPermission(Permission.NAMESPACE_MNGMT);
nsRegistry.registerNamespace(prefix, uri);
}
public void unregisterNamespace(String prefix) throws NamespaceException, UnsupportedRepositoryOperationException, AccessDeniedException, RepositoryException {
session.getAccessManager().checkRepositoryPermission(Permission.NAMESPACE_MNGMT);
nsRegistry.unregisterNamespace(prefix);
}
public String[] getPrefixes() throws RepositoryException {
return nsRegistry.getPrefixes();
}
public String[] getURIs() throws RepositoryException {
return nsRegistry.getURIs();
}
public String getURI(String prefix) throws NamespaceException, RepositoryException {
return nsRegistry.getURI(prefix);
}
public String getPrefix(String uri) throws NamespaceException, RepositoryException {
return nsRegistry.getPrefix(uri);
}
}
}
| |
/**
* JBoss, Home of Professional Open Source.
* Copyright 2014-2022 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.model;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.envers.Audited;
import org.hibernate.envers.RelationTargetAuditMode;
import org.jboss.pnc.common.util.StringUtils;
import org.jboss.pnc.enums.ArtifactQuality;
import org.jboss.pnc.enums.BuildCategory;
import javax.persistence.Cacheable;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.PersistenceException;
import javax.persistence.PreRemove;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.persistence.UniqueConstraint;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import java.time.Instant;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
/**
* Created by <a href="mailto:matejonnet@gmail.com">Matej Lazar</a> on 2014-11-23.
*
* Class that maps the artifacts created and/or used by the builds of the projects. The "type" indicates the genesis of
* the artifact, whether it has been imported from external repositories, or built internally.
*
 * The repoType indicates the type of repository which is used to distribute the artifact. The repoType also
 * determines the format of the identifier field.
*
*/
@Cacheable
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
@Entity
@Table(
        uniqueConstraints = @UniqueConstraint(
                name = "uk_artifact_name",
                columnNames = { "identifier", "sha256", "targetRepository_id" }),
        indexes = { @Index(name = "idx_artifact_targetRepository", columnList = "targetRepository_id"),
                @Index(name = "idx_artifact_identifier", columnList = "identifier"),
                @Index(name = "idx_artifact_filename", columnList = "filename"),
                @Index(name = "idx_artifact_md5", columnList = "md5"),
                @Index(name = "idx_artifact_sha1", columnList = "sha1"),
                @Index(name = "idx_artifact_sha256", columnList = "sha256"),
                @Index(name = "idx_artifact_creation_user", columnList = "creationuser_id"),
                @Index(name = "idx_artifact_modification_user", columnList = "modificationUser_id"),
                @Index(name = "idx_artifact_buildrecord", columnList = "buildrecord_id") })
public class Artifact implements GenericEntity<Integer> {

    // NOTE(review): the serialVersionUID implies Serializable is inherited via
    // GenericEntity — confirm against that interface's declaration.
    private static final long serialVersionUID = 1L;

    /** Name of the database sequence backing the {@link #id} generator. */
    public static final String SEQUENCE_NAME = "artifact_id_seq";

    /** Sequence-generated primary key (initial value 100, allocation size 1). */
    @Id
    @SequenceGenerator(name = SEQUENCE_NAME, sequenceName = SEQUENCE_NAME, initialValue = 100, allocationSize = 1)
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = SEQUENCE_NAME)
    private Integer id;

    /**
     * Contains a string which uniquely identifies the artifact in a repository. For example, for a maven artifact this
     * is the GATVC (groupId:artifactId:type:version[:qualifier]). The format of the identifier string is determined by
     * the repoType.
     */
    @NotNull
    @Size(max = 1024)
    private String identifier;

    /**
     * Package URL with format scheme:type/namespace/name@version?qualifiers#subpath. A purl is a URL string used to
     * identify and locate a software package in a mostly universal and uniform way across programming languages, package
     * managers, packaging conventions, tools, APIs and databases. Such a package URL is useful to reliably reference
     * the same software package using a simple and expressive syntax and conventions based on familiar URLs. See
     * https://github.com/package-url/purl-spec
     */
    @Size(max = 1024)
    private String purl;

    // Content digests. The max sizes (32/40/64 chars) match hex-encoded
    // MD5/SHA-1/SHA-256 respectively — TODO confirm hex encoding is used by writers.
    @NotNull
    @Size(max = 32)
    private String md5;

    @NotNull
    @Size(max = 40)
    private String sha1;

    @NotNull
    @Size(max = 64)
    private String sha256;

    // Artifact size — presumably in bytes; not declared here, confirm at the writer.
    private Long size;

    // Quality label; audited because changes must be traceable (see modificationUser).
    @Audited
    @NotNull
    @Enumerated(EnumType.STRING)
    private ArtifactQuality artifactQuality;

    @Audited
    @NotNull
    @Enumerated(EnumType.STRING)
    private BuildCategory buildCategory;

    /**
     * The type of repository which hosts this artifact (Maven, NPM, etc). This field determines the format of the
     * identifier string.
     */
    @JoinColumn(foreignKey = @ForeignKey(name = "fk_artifact_targetRepository"))
    @NotNull
    @ManyToOne(cascade = CascadeType.REFRESH)
    private TargetRepository targetRepository;

    // File name of the artifact, without path.
    @Size(max = 255)
    private String filename;

    /**
     * Path to repository where the artifact file is available.
     */
    @Size(max = 500)
    @Column(length = 500)
    private String deployPath;

    /**
     * The record of the build which produced this artifact.
     */
    @ManyToOne
    @JoinColumn(foreignKey = @ForeignKey(name = "fk_artifact_buildrecord"))
    private BuildRecord buildRecord;

    /**
     * The list of builds which depend on this artifact. For example, if the build downloaded this artifact as a Maven
     * dependency. Inverse side of BuildRecord.dependencies.
     */
    @Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
    @ManyToMany(mappedBy = "dependencies")
    private Set<BuildRecord> dependantBuildRecords;

    /**
     * The location from which this artifact was originally downloaded for import
     */
    @Size(max = 500)
    @Column(unique = false, length = 500)
    private String originUrl;

    /**
     * The date when this artifact was originally imported
     */
    private Date importDate;

    /**
     * The product milestone releases which distribute this artifact. Inverse side of
     * ProductMilestone.deliveredArtifacts.
     */
    @Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
    @ManyToMany(mappedBy = "deliveredArtifacts")
    private Set<ProductMilestone> deliveredInProductMilestones;

    /**
     * User who created the artifact (either triggering the build or e.g. creating via Deliverable Analyzer)
     */
    @ManyToOne
    @JoinColumn(foreignKey = @ForeignKey(name = "fk_artifact_creation_user"), updatable = false)
    private User creationUser;

    /**
     * User who last changed any audited field related to the Quality labels
     */
    @Audited(targetAuditMode = RelationTargetAuditMode.NOT_AUDITED)
    @ManyToOne
    @JoinColumn(foreignKey = @ForeignKey(name = "fk_artifact_modification_user"), updatable = true)
    private User modificationUser;

    // Immutable after insert (updatable = false); defaulted in the no-arg constructor.
    @Column(columnDefinition = "timestamp with time zone", updatable = false)
    private Date creationTime;

    // Audited together with the quality fields; see setModificationTime for null handling.
    @Audited
    @Column(columnDefinition = "timestamp with time zone")
    private Date modificationTime;

    /**
     * Reason for the setting of the Quality level
     */
    @Audited
    @Size(max = 200)
    @Column(length = 200)
    private String qualityLevelReason;

    /**
     * Returns a non-persistent (identifier, sha256) composite view of this artifact,
     * matching the columns of the uk_artifact_name unique constraint.
     *
     * @return identifier/sha256 value object
     */
    @Transient
    public IdentifierSha256 getIdentifierSha256() {
        return new IdentifierSha256(identifier, sha256);
    }

    /**
     * Try to use the {@link Artifact.Builder} instead.
     *
     * Basic no-arg constructor. Initializes the buildRecords and dependantBuildRecords to empty set.
     * Also defaults creationTime and modificationTime to "now".
     */
    Artifact() {
        dependantBuildRecords = new HashSet<>();
        deliveredInProductMilestones = new HashSet<>();
        creationTime = Date.from(Instant.now());
        modificationTime = Date.from(Instant.now());
    }

    /**
     * JPA lifecycle guard: only TEMPORARY or DELETED artifacts may be removed.
     *
     * @throws PersistenceException if the artifact has any other quality level
     */
    @PreRemove
    public void preRemove() {
        if (artifactQuality != ArtifactQuality.TEMPORARY && artifactQuality != ArtifactQuality.DELETED) {
            throw new PersistenceException(
                    "The non-temporary artifacts cannot be deleted! Only deletion of temporary artifacts is supported ");
        }
    }

    /**
     * Gets the id.
     *
     * @return the id
     */
    @Override
    public Integer getId() {
        return id;
    }

    /**
     * Sets the id.
     *
     * @param id the new id
     */
    @Override
    public void setId(Integer id) {
        this.id = id;
    }

    /**
     * Gets the identifier.
     *
     * The identifier should contain different logic depending on the artifact type: i.e Maven should contain the GAV,
     * NPM and CocoaPOD should be identified differently
     *
     * @return the identifier
     */
    public String getIdentifier() {
        return identifier;
    }

    /**
     * Sets the identifier.
     *
     * @param identifier the new identifier
     */
    public void setIdentifier(String identifier) {
        this.identifier = identifier;
    }

    /** @return the package URL (purl) of this artifact, may be null */
    public String getPurl() {
        return purl;
    }

    /** @param purl the package URL (purl) of this artifact */
    public void setPurl(String purl) {
        this.purl = purl;
    }

    /** @return the MD5 digest of the artifact content */
    public String getMd5() {
        return md5;
    }

    /** @param md5 the MD5 digest of the artifact content */
    public void setMd5(String md5) {
        this.md5 = md5;
    }

    /** @return the SHA-1 digest of the artifact content */
    public String getSha1() {
        return sha1;
    }

    /** @param sha1 the SHA-1 digest of the artifact content */
    public void setSha1(String sha1) {
        this.sha1 = sha1;
    }

    /** @return the SHA-256 digest of the artifact content */
    public String getSha256() {
        return sha256;
    }

    /** @param sha256 the SHA-256 digest of the artifact content */
    public void setSha256(String sha256) {
        this.sha256 = sha256;
    }

    /** @return the quality label of this artifact */
    public ArtifactQuality getArtifactQuality() {
        return artifactQuality;
    }

    /** @param artifactQuality the new quality label (audited field) */
    public void setArtifactQuality(ArtifactQuality artifactQuality) {
        this.artifactQuality = artifactQuality;
    }

    /** @return the build category of this artifact */
    public BuildCategory getBuildCategory() {
        return buildCategory;
    }

    /** @param buildCategory the new build category (audited field) */
    public void setBuildCategory(BuildCategory buildCategory) {
        this.buildCategory = buildCategory;
    }

    /**
     * Check if this artifact has an associated build record
     *
     * @return true if there is a build record for this artifact, false otherwise
     */
    public boolean isBuilt() {
        return buildRecord != null;
    }

    /**
     * Check if this artifact was imported from a remote URL
     *
     * @return true if there is an originUrl
     */
    public boolean isImported() {
        return (originUrl != null && !originUrl.isEmpty());
    }

    /**
     * An artifact is trusted when it was built internally, or when its origin URL /
     * target repository is considered trusted by {@link TargetRepository}.
     *
     * @return true if this artifact is trusted
     */
    public boolean isTrusted() {
        return (isBuilt() || TargetRepository.isTrusted(originUrl, targetRepository));
    }

    /**
     * Gets the filename.
     *
     * @return the filename
     */
    public String getFilename() {
        return filename;
    }

    /**
     * Sets the filename.
     *
     * @param filename the new filename
     */
    public void setFilename(String filename) {
        this.filename = filename;
    }

    /**
     * Gets the deploy url.
     *
     * @return the deploy url
     */
    public String getDeployPath() {
        return deployPath;
    }

    /**
     * Sets the deploy path.
     *
     * @param deployPath the new deploy url
     */
    public void setDeployPath(String deployPath) {
        this.deployPath = deployPath;
    }

    /**
     * Gets the build record which produced this artifact.
     *
     * @return the build record
     */
    public BuildRecord getBuildRecord() {
        return buildRecord;
    }

    /**
     * Sets the build record which produced this artifact.
     *
     * Keeps both sides of the bidirectional association in sync: detaches this
     * artifact from the previous record's builtArtifacts and attaches it to the
     * new one. Also copies the record's user into creationUser/modificationUser.
     *
     * @param buildRecord the build record
     */
    public void setBuildRecord(BuildRecord buildRecord) {
        if (this.buildRecord != null) {
            this.buildRecord.getBuiltArtifacts().remove(this);
        }
        if (buildRecord != null) {
            buildRecord.getBuiltArtifacts().add(this);
            // The user who produced the BuildRecord is saved as the creationUser and modificationUser of the Artifact
            this.creationUser = buildRecord.getUser();
            this.modificationUser = buildRecord.getUser();
        }
        this.buildRecord = buildRecord;
    }

    /** @return the builds which depend on this artifact */
    public Set<BuildRecord> getDependantBuildRecords() {
        return dependantBuildRecords;
    }

    /** @param buildRecords replaces the set of dependant builds (does not sync the inverse side) */
    public void setDependantBuildRecords(Set<BuildRecord> buildRecords) {
        this.dependantBuildRecords = buildRecords;
    }

    /** Adds a dependant build and syncs the inverse side (buildRecord.dependencies). */
    public void addDependantBuildRecord(BuildRecord buildRecord) {
        dependantBuildRecords.add(buildRecord);
        buildRecord.getDependencies().add(this);
    }

    /** Removes a dependant build and syncs the inverse side (buildRecord.dependencies). */
    public void removeDependantBuildRecord(BuildRecord buildRecord) {
        dependantBuildRecords.remove(buildRecord);
        buildRecord.getDependencies().remove(this);
    }

    /** @return URL this artifact was originally downloaded from, null when built internally */
    public String getOriginUrl() {
        return originUrl;
    }

    /** @param originUrl URL this artifact was originally downloaded from */
    public void setOriginUrl(String originUrl) {
        this.originUrl = originUrl;
    }

    /** @return date this artifact was imported, null when built internally */
    public Date getImportDate() {
        return importDate;
    }

    /** @param importDate date this artifact was imported */
    public void setImportDate(Date importDate) {
        this.importDate = importDate;
    }

    /** @return product milestones which deliver this artifact */
    public Set<ProductMilestone> getDeliveredInProductMilestones() {
        return deliveredInProductMilestones;
    }

    /** @param deliveredInProductMilestones replaces the milestone set (does not sync the inverse side) */
    public void setDeliveredInProductMilestones(Set<ProductMilestone> deliveredInProductMilestones) {
        this.deliveredInProductMilestones = deliveredInProductMilestones;
    }

    /** Adds a delivering milestone and syncs the inverse side (deliveredArtifacts). */
    public boolean addDeliveredInProductMilestone(ProductMilestone productMilestone) {
        productMilestone.getDeliveredArtifacts().add(this);
        return deliveredInProductMilestones.add(productMilestone);
    }

    /** Removes a delivering milestone and syncs the inverse side (deliveredArtifacts). */
    public boolean removeDeliveredInProductMilestone(ProductMilestone productMilestone) {
        productMilestone.getDeliveredArtifacts().remove(this);
        return deliveredInProductMilestones.remove(productMilestone);
    }

    /** @return the artifact size (presumably bytes — confirm at the writer), may be null */
    public Long getSize() {
        return size;
    }

    /** @param size the artifact size */
    public void setSize(Long size) {
        this.size = size;
    }

    /** @return the repository hosting this artifact */
    public TargetRepository getTargetRepository() {
        return targetRepository;
    }

    /** @param targetRepository the repository hosting this artifact */
    public void setTargetRepository(TargetRepository targetRepository) {
        this.targetRepository = targetRepository;
    }

    /**
     * @return the creationUser
     */
    public User getCreationUser() {
        return creationUser;
    }

    /**
     * @param creationUser The user who created this artifact
     */
    public void setCreationUser(User creationUser) {
        this.creationUser = creationUser;
    }

    /**
     * @return the modificationUser
     */
    public User getModificationUser() {
        return modificationUser;
    }

    /**
     * @param modificationUser The user who last modified the Quality label of this artifact
     */
    public void setModificationUser(User modificationUser) {
        this.modificationUser = modificationUser;
    }

    /**
     * @return the creationTime
     */
    public Date getCreationTime() {
        return creationTime;
    }

    /**
     * @param creationTime The time at which this artifact was created
     */
    public void setCreationTime(Date creationTime) {
        this.creationTime = creationTime;
    }

    /**
     * @return the modificationTime
     */
    public Date getModificationTime() {
        return modificationTime;
    }

    /**
     * Null is silently ignored so that an existing timestamp is never cleared.
     * NOTE(review): setCreationTime has no such guard — see Builder.build().
     *
     * @param modificationTime The time at which the Quality label of this artifact was last modified
     */
    public void setModificationTime(Date modificationTime) {
        if (modificationTime != null) {
            this.modificationTime = modificationTime;
        }
    }

    /**
     * @return the qualityLevelReason
     */
    public String getQualityLevelReason() {
        return qualityLevelReason;
    }

    /**
     * Blank strings are normalized to null via StringUtils.nullIfBlank.
     *
     * @param qualityLevelReason The reason for the Quality level setting (change) of this artifact
     */
    public void setQualityLevelReason(String qualityLevelReason) {
        this.qualityLevelReason = StringUtils.nullIfBlank(qualityLevelReason);
    }

    @Override
    public String toString() {
        String tr = (targetRepository == null) ? "targetRepository=null" : targetRepository.toString();
        return "Artifact [id: " + id + ", identifier=" + identifier + ", artifactQuality=" + artifactQuality
                + ", buildCategory=" + buildCategory + ", " + tr + "]";
    }

    /**
     * Returns a human-readable summary of the fields that identify an artifact.
     *
     * @return formatted identifier/sha256/repository/deploy-path/quality string
     */
    public String getDescriptiveString() {
        Integer trId = (targetRepository == null) ? null : targetRepository.getId();
        return String.format(
                "Identifier=%s, Sha256=%s, Target repository=%s, Deploy path=%s, Quality=%s",
                identifier,
                sha256,
                trId,
                deployPath,
                artifactQuality);
    }

    // Equality is based solely on the database id; unsaved entities (id == null)
    // are only equal to themselves.
    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (!(o instanceof Artifact))
            return false;
        return id != null && id.equals(((Artifact) o).getId());
    }

    @Override
    public int hashCode() {
        // Because the id is generated when the entity is stored to DB, we need to have constant hash code to achieve
        // equals+hashCode consistency across all JPA object states
        return 31;
    }

    /** @return a fresh {@link Builder} */
    public static Builder builder() {
        return Builder.newBuilder();
    }

    /**
     * Fluent builder for {@link Artifact}. Defaults artifactQuality to NEW and
     * buildCategory to STANDARD when unset.
     */
    public static class Builder {

        private Integer id;

        private String identifier;

        private String purl;

        private String md5;

        private String sha1;

        private String sha256;

        private Long size;

        private ArtifactQuality artifactQuality;

        private BuildCategory buildCategory;

        private TargetRepository targetRepository;

        private String filename;

        private String deployPath;

        private Set<BuildRecord> dependantBuildRecords;

        private BuildRecord buildRecord;

        private Set<ProductMilestone> deliveredInProductMilestones;

        private String originUrl;

        private Date importDate;

        private User creationUser;

        private User modificationUser;

        private Date creationTime;

        private Date modificationTime;

        private String qualityLevelReason;

        private Builder() {
            dependantBuildRecords = new HashSet<>();
            deliveredInProductMilestones = new HashSet<>();
        }

        public static Builder newBuilder() {
            return new Builder();
        }

        /**
         * Assembles the Artifact.
         *
         * NOTE(review): setCreationTime(creationTime) is called unconditionally, so
         * when no creationTime was set on the builder the constructor's "now" default
         * is overwritten with null, while modificationTime keeps its default because
         * setModificationTime ignores null. Confirm whether this asymmetry is intended.
         *
         * @return a new Artifact populated from this builder
         */
        public Artifact build() {
            Artifact artifact = new Artifact();
            artifact.setId(id);
            artifact.setIdentifier(identifier);
            artifact.setPurl(purl);
            artifact.setMd5(md5);
            artifact.setSha1(sha1);
            artifact.setSha256(sha256);
            artifact.setSize(size);
            if (artifactQuality == null) {
                artifactQuality = ArtifactQuality.NEW;
            }
            artifact.setArtifactQuality(artifactQuality);
            if (buildCategory == null) {
                buildCategory = BuildCategory.STANDARD;
            }
            artifact.setBuildCategory(buildCategory);
            artifact.setTargetRepository(targetRepository);
            artifact.setFilename(filename);
            artifact.setDeployPath(deployPath);
            if (dependantBuildRecords != null) {
                artifact.setDependantBuildRecords(dependantBuildRecords);
            }
            artifact.setBuildRecord(buildRecord);
            artifact.setDeliveredInProductMilestones(deliveredInProductMilestones);
            artifact.setOriginUrl(originUrl);
            artifact.setImportDate(importDate);
            artifact.setCreationUser(creationUser);
            artifact.setModificationUser(modificationUser);
            artifact.setCreationTime(creationTime);
            artifact.setModificationTime(modificationTime);
            artifact.setQualityLevelReason(qualityLevelReason);
            return artifact;
        }

        public Builder id(Integer id) {
            this.id = id;
            return this;
        }

        public Builder identifier(String identifier) {
            this.identifier = identifier;
            return this;
        }

        public Builder purl(String purl) {
            this.purl = purl;
            return this;
        }

        public Builder md5(String md5) {
            this.md5 = md5;
            return this;
        }

        public Builder sha1(String sha1) {
            this.sha1 = sha1;
            return this;
        }

        public Builder sha256(String sha256) {
            this.sha256 = sha256;
            return this;
        }

        public Builder size(Long size) {
            this.size = size;
            return this;
        }

        public Builder artifactQuality(ArtifactQuality artifactQuality) {
            this.artifactQuality = artifactQuality;
            return this;
        }

        public Builder buildCategory(BuildCategory buildCategory) {
            this.buildCategory = buildCategory;
            return this;
        }

        public Builder targetRepository(TargetRepository targetRepository) {
            this.targetRepository = targetRepository;
            return this;
        }

        public Builder filename(String filename) {
            this.filename = filename;
            return this;
        }

        public Builder deployPath(String deployPath) {
            this.deployPath = deployPath;
            return this;
        }

        public Builder buildRecord(BuildRecord buildRecord) {
            this.buildRecord = buildRecord;
            return this;
        }

        /** Adds a single dependant build to the current set. */
        public Builder dependantBuildRecord(BuildRecord dependantBuildRecord) {
            this.dependantBuildRecords.add(dependantBuildRecord);
            return this;
        }

        /** Replaces the dependant-build set wholesale (null disables the copy in build()). */
        public Builder dependantBuildRecords(Set<BuildRecord> dependantBuildRecords) {
            this.dependantBuildRecords = dependantBuildRecords;
            return this;
        }

        public Builder deliveredInProductMilestones(Set<ProductMilestone> deliveredInProductMilestones) {
            this.deliveredInProductMilestones = deliveredInProductMilestones;
            return this;
        }

        public Builder originUrl(String originUrl) {
            this.originUrl = originUrl;
            return this;
        }

        public Builder importDate(Date importDate) {
            this.importDate = importDate;
            return this;
        }

        public Builder creationUser(User creationUser) {
            this.creationUser = creationUser;
            return this;
        }

        public Builder modificationUser(User modificationUser) {
            this.modificationUser = modificationUser;
            return this;
        }

        public Builder creationTime(Date creationTime) {
            this.creationTime = creationTime;
            return this;
        }

        public Builder modificationTime(Date modificationTime) {
            this.modificationTime = modificationTime;
            return this;
        }

        public Builder qualityLevelReason(String qualityLevelReason) {
            this.qualityLevelReason = qualityLevelReason;
            return this;
        }
    }

    /**
     * Immutable (identifier, sha256) value object matching the artifact's natural
     * key; equality and hash are based on both fields.
     * NOTE(review): equals/hashCode dereference both fields, so null identifier or
     * sha256 would throw — callers appear to supply non-null values.
     */
    public static class IdentifierSha256 {

        private String identifier;

        private String sha256;

        public IdentifierSha256(String identifier, String sha256) {
            this.identifier = identifier;
            this.sha256 = sha256;
        }

        public String getSha256() {
            return sha256;
        }

        public String getIdentifier() {
            return identifier;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (!(o instanceof IdentifierSha256)) {
                return false;
            }
            IdentifierSha256 that = (IdentifierSha256) o;
            if (!identifier.equals(that.identifier)) {
                return false;
            }
            return sha256.equals(that.sha256);
        }

        @Override
        public int hashCode() {
            int result = identifier.hashCode();
            result = 31 * result + sha256.hashCode();
            return result;
        }
    }
}
| |
package cz.metacentrum.perun.core.api;
import cz.metacentrum.perun.core.impl.AuthzRoles;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.HashMap;
import java.util.List;
import cz.metacentrum.perun.core.api.exceptions.*;
import org.junit.Test;
import cz.metacentrum.perun.core.AbstractPerunIntegrationTest;
import cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl;
import cz.metacentrum.perun.core.impl.PerunSessionImpl;
import java.util.HashSet;
import java.util.Set;
/**
* Integration tests of AuthzResolver
*
* @author Jiri Harazim <harazim@mail.muni.cz>
*/
public class AuthzResolverIntegrationTest extends AbstractPerunIntegrationTest {
// Prefix used for the per-test console banners below.
private static final String CLASS_NAME = "AuthzResolver.";
// Shared LDAP external source used when creating test members/candidates.
final ExtSource extSource = new ExtSource(0, "AuthzResolverExtSource", ExtSourcesManager.EXTSOURCE_LDAP);
@Test
public void isAuthorizedInvalidPrincipal() throws Exception {
    System.out.println(CLASS_NAME + "isAuthorizedInvalidPrincipal");
    // A principal backed only by the INTERNAL ext source must not pass a PERUNADMIN check.
    PerunPrincipal principal = new PerunPrincipal("pepa", ExtSourcesManager.EXTSOURCE_NAME_INTERNAL, ExtSourcesManager.EXTSOURCE_INTERNAL);
    PerunSession invalidSession = new PerunSessionImpl(perun, principal, new PerunClient());
    assertTrue(!AuthzResolver.isAuthorized(invalidSession, Role.PERUNADMIN));
}
@Test
public void setRoleVoAdmin() throws Exception {
    System.out.println(CLASS_NAME + "setRole");
    // Grant VOADMIN to a fresh user and verify it after refreshing his authz context.
    final Vo vo = perun.getVosManager().createVo(sess, new Vo(0, "test123test123", "test123test123"));
    final Member member = createSomeMember(vo);
    final User user = perun.getUsersManagerBl().getUserByMember(sess, member);
    AuthzResolver.setRole(sess, user, vo, Role.VOADMIN);
    final PerunSession userSession = getHisSession(member);
    AuthzResolver.refreshAuthz(userSession);
    assertTrue(AuthzResolver.isAuthorized(userSession, Role.VOADMIN, vo));
}
@Test
public void setRoleVoObserver() throws Exception {
    System.out.println(CLASS_NAME + "setRole");
    // Grant VOOBSERVER to a fresh user and verify it after refreshing his authz context.
    final Vo vo = perun.getVosManager().createVo(sess, new Vo(0, "test123test123", "test123test123"));
    final Member member = createSomeMember(vo);
    final User user = perun.getUsersManagerBl().getUserByMember(sess, member);
    AuthzResolver.setRole(sess, user, vo, Role.VOOBSERVER);
    final PerunSession userSession = getHisSession(member);
    AuthzResolver.refreshAuthz(userSession);
    assertTrue(AuthzResolver.isAuthorized(userSession, Role.VOOBSERVER, vo));
}
@Test
public void unsetRoleVoAdmin() throws Exception {
    System.out.println(CLASS_NAME + "unsetRole");
    // Grant VOADMIN, verify it, then revoke it and verify the role is gone.
    final Vo vo = perun.getVosManager().createVo(sess, new Vo(0, "test123test123", "test123test123"));
    final Member member = createSomeMember(vo);
    final User user = perun.getUsersManagerBl().getUserByMember(sess, member);
    AuthzResolver.setRole(sess, user, vo, Role.VOADMIN);
    final PerunSession userSession = getHisSession(member);
    AuthzResolver.refreshAuthz(userSession);
    assertTrue(AuthzResolver.isAuthorized(userSession, Role.VOADMIN, vo));
    AuthzResolver.unsetRole(sess, user, vo, Role.VOADMIN);
    AuthzResolver.refreshAuthz(userSession);
    assertTrue(!AuthzResolver.isAuthorized(userSession, Role.VOADMIN, vo));
}
@Test(expected = UserNotAdminException.class)
public void unsetRoleWhichNotExists() throws Exception {
    System.out.println(CLASS_NAME + "unsetRole");
    // Revoking a role the user never held must fail with UserNotAdminException.
    final Vo vo = perun.getVosManager().createVo(sess, new Vo(0, "test123test123", "test123test123"));
    final Member member = createSomeMember(vo);
    final User user = perun.getUsersManagerBl().getUserByMember(sess, member);
    AuthzResolver.unsetRole(sess, user, vo, Role.VOADMIN);
}
@Test (expected = UserNotAdminException.class)
public void setUnsuportedRole() throws Exception {
    // Fixed: the banner previously printed "setRole", mislabelling this test in console output.
    System.out.println(CLASS_NAME + "setUnsuportedRole");
    // NOTE(review): the body is identical to unsetRoleWhichNotExists(); judging by the
    // method name it may have been meant to call AuthzResolver.setRole with an
    // unsupported role — confirm the intended scenario.
    final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"test123test123","test123test123"));
    final Member createdMember = createSomeMember(createdVo);
    final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
    AuthzResolver.unsetRole(sess, createdUser, createdVo, Role.VOADMIN);
}
@Test
public void isVoAdmin() throws Exception {
    System.out.println(CLASS_NAME + "isVoAdmin");
    // Neither the test session nor a fresh member is a VO admin initially.
    assertTrue(!AuthzResolver.isVoAdmin(sess));
    final Vo vo = perun.getVosManager().createVo(sess, new Vo(0, "sdf", "sdfh"));
    final Member member = createSomeMember(vo);
    final User user = perun.getUsersManagerBl().getUserByMember(sess, member);
    final PerunSession userSession = getHisSession(member);
    assertTrue(!AuthzResolver.isVoAdmin(userSession));
    // After being added as VO admin and refreshing, the check must pass.
    perun.getVosManager().addAdmin(sess, vo, user);
    AuthzResolver.refreshAuthz(userSession);
    assertTrue(AuthzResolver.isVoAdmin(userSession));
}
@Test
public void isGroupAdmin() throws Exception {
    System.out.println(CLASS_NAME + "isGroupAdmin");
    // Deep-stubbed session whose principal reports the GROUPADMIN role.
    PerunSession mockedSession = mock(PerunSession.class, RETURNS_DEEP_STUBS);
    when(mockedSession.getPerunPrincipal().getRoles().hasRole(Role.GROUPADMIN)).thenReturn(true);
    sess = mockedSession;
    assertTrue(AuthzResolver.isGroupAdmin(sess));
}
@Test
public void isFacilityAdmin() {
    System.out.println(CLASS_NAME + "isFacilityAdmin");
    // Deep-stubbed session whose principal reports the FACILITYADMIN role.
    PerunSession mockedSession = mock(PerunSession.class, RETURNS_DEEP_STUBS);
    when(mockedSession.getPerunPrincipal().getRoles().hasRole(Role.FACILITYADMIN)).thenReturn(true);
    sess = mockedSession;
    assertTrue(AuthzResolver.isFacilityAdmin(sess));
}
@Test
public void isVoAdminUnit() {
    System.out.println(CLASS_NAME + "isVoAdminUnit");
    // Deep-stubbed session whose principal reports the VOADMIN role.
    PerunSession mockedSession = mock(PerunSession.class, RETURNS_DEEP_STUBS);
    when(mockedSession.getPerunPrincipal().getRoles().hasRole(Role.VOADMIN)).thenReturn(true);
    sess = mockedSession;
    assertTrue(AuthzResolver.isVoAdmin(sess));
}
@Test
public void isPerunAdmin() {
    System.out.println(CLASS_NAME + "isPerunAdmin");
    // Deep-stubbed session whose principal reports the PERUNADMIN role.
    PerunSession mockedSession = mock(PerunSession.class, RETURNS_DEEP_STUBS);
    when(mockedSession.getPerunPrincipal().getRoles().hasRole(Role.PERUNADMIN)).thenReturn(true);
    sess = mockedSession;
    assertTrue(AuthzResolver.isPerunAdmin(sess));
}
@Test
public void isAuthorized() throws Exception {
    System.out.println(CLASS_NAME + "isAuthorized");
    // A user added as VO admin must be authorized as VOADMIN in that VO.
    final Vo vo = perun.getVosManager().createVo(sess, new Vo(0, "sdf", "sdfh"));
    final Member member = createSomeMember(vo);
    final User user = perun.getUsersManagerBl().getUserByMember(sess, member);
    final PerunSession userSession = getHisSession(member);
    perun.getVosManager().addAdmin(sess, vo, user);
    AuthzResolver.refreshAuthz(userSession);
    assertTrue(AuthzResolver.isAuthorized(userSession, Role.VOADMIN, vo));
}
@Test
public void addAllSubgroupsToAuthzRoles() throws Exception {
    System.out.println(CLASS_NAME + "addAllSubgroupsToAuthzRoles");
    // Build a three-level group chain A > B > C inside a test VO.
    Vo vo = new Vo(1000, "AuthzResolver-testVo", "AuthzResolver-testVo");
    vo = perun.getVosManagerBl().createVo(sess, vo);
    Group groupA = perun.getGroupsManagerBl().createGroup(sess, vo, new Group("AuthzResolver-testGroupA", "testGroupA"));
    Group groupB = perun.getGroupsManagerBl().createGroup(sess, groupA, new Group("AuthzResolver-testGroupB", "testGroupB"));
    Group groupC = perun.getGroupsManagerBl().createGroup(sess, groupB, new Group("AuthzResolver-testGroupC", "testGroupC"));
    // Start with GROUPADMIN rights on group A only (and on the "Vo" bucket).
    Set<Integer> adminIds = new HashSet<Integer>();
    adminIds.add(groupA.getId());
    HashMap<String, Set<Integer>> rights = new HashMap<String, Set<Integer>>();
    rights.put("Vo", adminIds);
    rights.put("Group", adminIds);
    AuthzRoles roles = new AuthzRoles(Role.GROUPADMIN, rights);
    roles = AuthzResolverBlImpl.addAllSubgroupsToAuthzRoles(sess, roles);
    // GROUPADMIN must now cover A and every transitive subgroup (B, C) but nothing else;
    // the "Vo" bucket stays untouched.
    assertTrue(roles.hasRole(Role.GROUPADMIN));
    assertTrue(!roles.hasRole(Role.VOADMIN));
    assertTrue(roles.get(Role.GROUPADMIN).containsKey("Group"));
    assertTrue(roles.get(Role.GROUPADMIN).containsKey("Vo"));
    assertTrue(roles.get(Role.GROUPADMIN).get("Group").contains(groupA.getId()));
    assertTrue(roles.get(Role.GROUPADMIN).get("Group").contains(groupB.getId()));
    assertTrue(roles.get(Role.GROUPADMIN).get("Group").contains(groupC.getId()));
    assertTrue(roles.get(Role.GROUPADMIN).get("Group").size() == 3);
    assertTrue(roles.get(Role.GROUPADMIN).get("Vo").contains(groupA.getId()));
    assertTrue(roles.get(Role.GROUPADMIN).get("Vo").size() == 1);
}
@Test
public void isAuthorizedInOtherVo() throws Exception {
    System.out.println(CLASS_NAME + "isAuthorizedInOtherVo");
    // Admin rights in one VO must not leak into an unrelated VO.
    final Vo homeVo = perun.getVosManager().createVo(sess, new Vo(0, "som3Vo", "VoSom3Nam3"));
    final Member member = createSomeMember(homeVo);
    final User user = perun.getUsersManagerBl().getUserByMember(sess, member);
    final PerunSession userSession = getHisSession(member);
    perun.getVosManager().addAdmin(sess, homeVo, user);
    AuthzResolver.refreshAuthz(userSession);
    assertTrue("User is not authorized in own VO", AuthzResolver.isAuthorized(userSession, Role.VOADMIN, homeVo));
    final Vo foreignVo = perun.getVosManager().createVo(sess, new Vo(0, "otherVo", "bliblaVo"));
    assertTrue("User is authorized in foreign VO", !AuthzResolver.isAuthorized(userSession, Role.VOADMIN, foreignVo));
}
@Test
public void isAuthorizedWrongRole() throws Exception {
	System.out.println(CLASS_NAME + "isAuthorizedWrongRole");

	// Create a VO and give its (only) member VO admin rights.
	final Vo createdVo = perun.getVosManager().createVo(sess, new Vo(0,"sdf","sdfh"));
	final Member createdMember = createSomeMember(createdVo);
	final User createdUser = perun.getUsersManagerBl().getUserByMember(sess, createdMember);
	final PerunSession adminSession = getHisSession(createdMember);
	perun.getVosManager().addAdmin(sess, createdVo, createdUser);
	AuthzResolver.refreshAuthz(adminSession);

	// Holding VOADMIN must not authorize the user under any other role.
	for (Role unheldRole : new Role[] { Role.FACILITYADMIN, Role.GROUPADMIN, Role.SELF, Role.PERUNADMIN }) {
		assertTrue(!AuthzResolver.isAuthorized(adminSession, unheldRole, createdVo));
	}
}
@Test
public void getPrincipalRoleNames() throws Exception {
	System.out.println(CLASS_NAME + "getPrincipalRoleNames");

	// The "perunTests" principal is configured as PERUNADMIN, so its role-name
	// list must contain that role.
	final PerunPrincipal principal = new PerunPrincipal("perunTests", ExtSourcesManager.EXTSOURCE_NAME_INTERNAL, ExtSourcesManager.EXTSOURCE_INTERNAL);
	final PerunSession session = new PerunSessionImpl(perun, principal, new PerunClient());
	final List<String> names = cz.metacentrum.perun.core.api.AuthzResolver.getPrincipalRoleNames(session);
	assertTrue(names.contains(Role.PERUNADMIN.getRoleName()));
}
// private methods ==============================================================
// Builds a minimal candidate backed by the shared test ext source.
private Candidate setUpCandidate() {
	final Candidate candidate = new Candidate();
	candidate.setId(0);
	candidate.setFirstName("FirstTest");
	candidate.setMiddleName("");
	candidate.setLastName("LastTest");
	candidate.setTitleBefore("");
	candidate.setTitleAfter("");
	candidate.setUserExtSource(new UserExtSource(extSource, "ExtLoginTest"));
	candidate.setAttributes(new HashMap<String,String>());
	return candidate;
}
// Synchronously creates a member of the given VO from a freshly built candidate.
private Member createSomeMember(final Vo createdVo) throws ExtendMembershipException, AlreadyMemberException, WrongAttributeValueException, WrongReferenceAttributeValueException, InternalErrorException, GroupOperationsException {
	return perun.getMembersManagerBl().createMemberSync(sess, createdVo, setUpCandidate());
}
/**
 * Builds a PerunSession acting as the given member's user.
 *
 * <p>Prefers the member's LDAP ext source; when none exists, falls back to the
 * first ext source in the list.</p>
 *
 * @param createdMember member whose identity the session should carry
 * @return session authenticated as the member's user
 * @throws InternalErrorException when the user has no ext sources at all
 */
private PerunSession getHisSession(final Member createdMember) throws InternalErrorException {
	List<UserExtSource> ues = perun.getUsersManagerBl().getUserExtSources(sess, perun.getUsersManagerBl().getUserByMember(sess, createdMember));
	if (ues.size() == 0) {
		throw new InternalErrorException("Empty userExtSource list");
	}
	// BUGFIX: previously this started from a blank `new UserExtSource()`; when no
	// LDAP ext source was present, ue.getExtSource() was null and the
	// PerunPrincipal construction below threw an NPE. Default to the first
	// ext source instead — the list is guaranteed non-empty here.
	UserExtSource ue = ues.get(0);
	for (UserExtSource u : ues) {
		if (u.getExtSource().getType().equals(ExtSourcesManager.EXTSOURCE_LDAP)) {
			ue = u;
			break;
		}
	}
	PerunPrincipal pp1 = new PerunPrincipal(ue.getLogin(), ue.getExtSource().getName(), ue.getExtSource().getType());
	return perun.getPerunSession(pp1, new PerunClient());
}
}
| |
/**********************************************************************************
*
* $Id$
*
***********************************************************************************
*
* Copyright (c) 2007, 2008, 2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.component.gradebook;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.HibernateException;
import org.hibernate.Query;
import org.hibernate.Session;
import org.sakaiproject.service.gradebook.shared.GradebookExistsException;
import org.sakaiproject.service.gradebook.shared.GradebookFrameworkService;
import org.sakaiproject.service.gradebook.shared.GradebookNotFoundException;
import org.sakaiproject.service.gradebook.shared.GradebookService;
import org.sakaiproject.service.gradebook.shared.GradingScaleDefinition;
import org.sakaiproject.tool.gradebook.CourseGrade;
import org.sakaiproject.tool.gradebook.GradeMapping;
import org.sakaiproject.tool.gradebook.Gradebook;
import org.sakaiproject.tool.gradebook.GradingScale;
import org.sakaiproject.tool.gradebook.LetterGradeMapping;
import org.sakaiproject.tool.gradebook.LetterGradePercentMapping;
import org.sakaiproject.tool.gradebook.LetterGradePlusMinusMapping;
import org.sakaiproject.tool.gradebook.PassNotPassMapping;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.HibernateTemplate;
/**
 * Framework-level gradebook operations: creating and deleting gradebooks and
 * managing the system-wide set of grading scales. All persistence goes through
 * the Hibernate session/template provided by {@link BaseHibernateManager}.
 */
public class GradebookFrameworkServiceImpl extends BaseHibernateManager implements GradebookFrameworkService {
    private static final Log log = LogFactory.getLog(GradebookFrameworkServiceImpl.class);

    /** Key of the system property that names the default grading scale. */
    public static final String UID_OF_DEFAULT_GRADING_SCALE_PROPERTY = "uidOfDefaultGradingScale";
    // sakai.properties keys that override the per-gradebook display defaults.
    public static final String PROP_COURSE_POINTS_DISPLAYED = "gradebook.coursepoints.displayed";
    public static final String PROP_COURSE_GRADE_DISPLAYED = "gradebook.coursegrade.displayed";
    public static final String PROP_ASSIGNMENTS_DISPLAYED = "gradebook.assignments.displayed";

    /**
     * Creates a new gradebook with its course grade and one grade mapping per
     * available grading scale, then selects the default mapping.
     *
     * @param uid unique id of the gradebook (must not already exist)
     * @param name display name of the gradebook
     * @throws GradebookExistsException when a gradebook with {@code uid} exists
     */
    public void addGradebook(final String uid, final String name) {
        if(isGradebookDefined(uid)) {
            log.warn("You can not add a gradebook with uid=" + uid + ". That gradebook already exists.");
            throw new GradebookExistsException("You can not add a gradebook with uid=" + uid + ". That gradebook already exists.");
        }
        if (log.isDebugEnabled()) log.debug("Adding gradebook uid=" + uid + " by userUid=" + getUserUid());
        // Make sure the system-wide letter-grade percent mapping exists (no-op
        // when one is already persisted).
        createDefaultLetterGradeMapping(getHardDefaultLetterMapping());
        getHibernateTemplate().execute(new HibernateCallback() {
            public Object doInHibernate(Session session) throws HibernateException {
                // Get available grade mapping templates.
                List gradingScales = session.createQuery("from GradingScale as gradingScale where gradingScale.unavailable=false").list();
                // The application won't be able to run without grade mapping
                // templates, so if for some reason none have been defined yet,
                // do that now.
                if (gradingScales.isEmpty()) {
                    if (log.isInfoEnabled()) log.info("No Grading Scale defined yet. This is probably because you have upgraded or you are working with a new database. Default grading scales will be created. Any customized system-wide grade mappings you may have defined in previous versions will have to be reconfigured.");
                    gradingScales = GradebookFrameworkServiceImpl.this.addDefaultGradingScales(session);
                }
                // Create and save the gradebook
                Gradebook gradebook = new Gradebook(name);
                gradebook.setUid(uid);
                session.save(gradebook);
                // Create the course grade for the gradebook
                CourseGrade cg = new CourseGrade();
                cg.setGradebook(gradebook);
                session.save(cg);
                // According to the specification, Display Assignment Grades is
                // on by default, and Display course grade is off. But can be overridden via properties
                // NOTE(review): serverConfigurationService appears to be inherited
                // from BaseHibernateManager — confirm where it is injected.
                Boolean propAssignmentsDisplayed = serverConfigurationService.getBoolean(PROP_ASSIGNMENTS_DISPLAYED,true);
                gradebook.setAssignmentsDisplayed(propAssignmentsDisplayed);
                Boolean propCourseGradeDisplayed = serverConfigurationService.getBoolean(PROP_COURSE_GRADE_DISPLAYED,false);
                gradebook.setCourseGradeDisplayed(propCourseGradeDisplayed);
                // NOTE(review): read but deliberately unused below — the setter
                // only exists in Sakai 11 (see commented-out call).
                Boolean propCoursePointsDisplayed = serverConfigurationService.getBoolean(PROP_COURSE_POINTS_DISPLAYED,false);
                //Feature is only in Sakai 11
                //gradebook.setCoursePointsDisplayed(propCoursePointsDisplayed);
                String defaultScaleUid = GradebookFrameworkServiceImpl.this.getPropertyValue(UID_OF_DEFAULT_GRADING_SCALE_PROPERTY);
                // Add and save grade mappings based on the templates.
                GradeMapping defaultGradeMapping = null;
                Set gradeMappings = new HashSet();
                for (Iterator iter = gradingScales.iterator(); iter.hasNext();) {
                    GradingScale gradingScale = (GradingScale)iter.next();
                    GradeMapping gradeMapping = new GradeMapping(gradingScale);
                    gradeMapping.setGradebook(gradebook);
                    session.save(gradeMapping);
                    gradeMappings.add(gradeMapping);
                    if (gradingScale.getUid().equals(defaultScaleUid)) {
                        defaultGradeMapping = gradeMapping;
                    }
                }
                // Check for null default: fall back to an arbitrary mapping so
                // the gradebook is never left without a selected mapping.
                if (defaultGradeMapping == null) {
                    defaultGradeMapping = (GradeMapping)gradeMappings.iterator().next();
                    if (log.isWarnEnabled()) log.warn("No default GradeMapping found for new Gradebook=" + gradebook.getUid() + "; will set default to " + defaultGradeMapping.getName());
                }
                gradebook.setSelectedGradeMapping(defaultGradeMapping);
                // The Hibernate mapping as of Sakai 2.2 makes this next
                // call meaningless when it comes to persisting changes at
                // the end of the transaction. It is, however, needed for
                // the mappings to be seen while the transaction remains
                // uncommitted.
                gradebook.setGradeMappings(gradeMappings);
                gradebook.setGrade_type(GradebookService.GRADE_TYPE_POINTS);
                gradebook.setCategory_type(GradebookService.CATEGORY_TYPE_NO_CATEGORY);
                // Update the gradebook with the new selected grade mapping
                session.update(gradebook);
                return null;
            }
        });
    }

    /**
     * Persists the built-in grading scales (letter, letter +/-, pass/not-pass)
     * and makes "LetterGradePlusMinusMapping" the system default.
     *
     * @param session active Hibernate session to save through
     * @return the newly saved {@code GradingScale} instances
     */
    private List addDefaultGradingScales(Session session) throws HibernateException {
        List gradingScales = new ArrayList();
        // Base the default set of templates on the old
        // statically defined GradeMapping classes.
        GradeMapping[] oldGradeMappings = {
            new LetterGradeMapping(),
            new LetterGradePlusMinusMapping(),
            new PassNotPassMapping()
        };
        for (int i = 0; i < oldGradeMappings.length; i++) {
            GradeMapping sampleMapping = oldGradeMappings[i];
            sampleMapping.setDefaultValues();
            GradingScale gradingScale = new GradingScale();
            // Use the legacy class's simple name as the scale uid,
            // e.g. "LetterGradeMapping".
            String uid = sampleMapping.getClass().getName();
            uid = uid.substring(uid.lastIndexOf('.') + 1);
            gradingScale.setUid(uid);
            gradingScale.setUnavailable(false);
            gradingScale.setName(sampleMapping.getName());
            gradingScale.setGrades(new ArrayList(sampleMapping.getGrades()));
            gradingScale.setDefaultBottomPercents(new HashMap(sampleMapping.getGradeMap()));
            session.save(gradingScale);
            if (log.isInfoEnabled()) log.info("Added Grade Mapping " + gradingScale.getUid());
            gradingScales.add(gradingScale);
        }
        setDefaultGradingScale("LetterGradePlusMinusMapping");
        session.flush();
        return gradingScales;
    }

    /**
     * Replaces the system-wide set of grading scales with the given
     * definitions (adds new ones, updates existing, marks missing unavailable).
     */
    public void setAvailableGradingScales(final Collection gradingScaleDefinitions) {
        getHibernateTemplate().execute(new HibernateCallback() {
            public Object doInHibernate(Session session) throws HibernateException {
                mergeGradeMappings(gradingScaleDefinitions, session);
                return null;
            }
        });
    }

    /** Records {@code uid} as the default grading scale for new gradebooks. */
    public void setDefaultGradingScale(String uid) {
        setPropertyValue(UID_OF_DEFAULT_GRADING_SCALE_PROPERTY, uid);
    }

    /**
     * Copies name, grade list and bottom-percent map from a definition bean
     * onto a (new or existing) persistent grading scale.
     */
    private void copyDefinitionToScale(GradingScaleDefinition bean, GradingScale gradingScale) {
        gradingScale.setUnavailable(false);
        gradingScale.setName(bean.getName());
        gradingScale.setGrades(bean.getGrades());
        // Zip the parallel grade / bottom-percent lists into a map; extra
        // elements on either side are silently dropped.
        Map defaultBottomPercents = new HashMap();
        Iterator gradesIter = bean.getGrades().iterator();
        Iterator defaultBottomPercentsIter = bean.getDefaultBottomPercents().iterator();
        while (gradesIter.hasNext() && defaultBottomPercentsIter.hasNext()) {
            String grade = (String)gradesIter.next();
            Double value = (Double)defaultBottomPercentsIter.next();
            defaultBottomPercents.put(grade, value);
        }
        gradingScale.setDefaultBottomPercents(defaultBottomPercents);
    }

    /**
     * Reconciles the persisted grading scales with the supplied definitions:
     * scales no longer listed become unavailable, listed ones are updated in
     * place, and unknown uids are created fresh.
     */
    private void mergeGradeMappings(Collection gradingScaleDefinitions, Session session) throws HibernateException {
        Map newMappingDefinitionsMap = new HashMap();
        HashSet uidsToSet = new HashSet();
        for (Iterator iter = gradingScaleDefinitions.iterator(); iter.hasNext(); ) {
            GradingScaleDefinition bean = (GradingScaleDefinition)iter.next();
            newMappingDefinitionsMap.put(bean.getUid(), bean);
            uidsToSet.add(bean.getUid());
        }
        // Until we move to Hibernate 3 syntax, we need to update one record at a time.
        Query q;
        List gmtList;
        // Toggle any scales that are no longer specified.
        q = session.createQuery("from GradingScale as gradingScale where gradingScale.uid not in (:uidList) and gradingScale.unavailable=false");
        q.setParameterList("uidList", uidsToSet);
        gmtList = q.list();
        for (Iterator iter = gmtList.iterator(); iter.hasNext(); ) {
            GradingScale gradingScale = (GradingScale)iter.next();
            gradingScale.setUnavailable(true);
            session.update(gradingScale);
            if (log.isInfoEnabled()) log.info("Set Grading Scale " + gradingScale.getUid() + " unavailable");
        }
        // Modify any specified scales that already exist.
        q = session.createQuery("from GradingScale as gradingScale where gradingScale.uid in (:uidList)");
        q.setParameterList("uidList", uidsToSet);
        gmtList = q.list();
        for (Iterator iter = gmtList.iterator(); iter.hasNext(); ) {
            GradingScale gradingScale = (GradingScale)iter.next();
            copyDefinitionToScale((GradingScaleDefinition)newMappingDefinitionsMap.get(gradingScale.getUid()), gradingScale);
            // Remove updated uids so only brand-new ones remain below.
            uidsToSet.remove(gradingScale.getUid());
            session.update(gradingScale);
            if (log.isInfoEnabled()) log.info("Updated Grading Scale " + gradingScale.getUid());
        }
        // Add any new scales.
        for (Iterator iter = uidsToSet.iterator(); iter.hasNext(); ) {
            String uid = (String)iter.next();
            GradingScale gradingScale = new GradingScale();
            gradingScale.setUid(uid);
            GradingScaleDefinition bean = (GradingScaleDefinition)newMappingDefinitionsMap.get(uid);
            copyDefinitionToScale(bean, gradingScale);
            session.save(gradingScale);
            if (log.isInfoEnabled()) log.info("Added Grading Scale " + gradingScale.getUid());
        }
        session.flush();
    }

    /**
     * Deletes a gradebook and all of its dependent rows (grading events,
     * grade records, gradable objects, grade mappings), one query at a time.
     *
     * @param uid uid of the gradebook to delete
     * @throws GradebookNotFoundException when no such gradebook exists
     */
    public void deleteGradebook(final String uid)
        throws GradebookNotFoundException {
        if (log.isDebugEnabled()) log.debug("Deleting gradebook uid=" + uid + " by userUid=" + getUserUid());
        final Long gradebookId = getGradebook(uid).getId();
        // Worse of both worlds code ahead. We've been quick-marched
        // into Hibernate 3 sessions, but we're also having to use classic query
        // parsing -- which keeps us from being able to use either Hibernate's new-style
        // bulk delete queries or Hibernate's old-style session.delete method.
        // Instead, we're stuck with going through the Spring template for each
        // deletion one at a time.
        HibernateTemplate hibTempl = getHibernateTemplate();
        // int numberDeleted = hibTempl.bulkUpdate("delete GradingEvent as ge where ge.gradableObject.gradebook.id=?", gradebookId);
        // log.warn("GradingEvent numberDeleted=" + numberDeleted);
        List toBeDeleted;
        int numberDeleted;
        toBeDeleted = hibTempl.find("from GradingEvent as ge where ge.gradableObject.gradebook.id=?", gradebookId);
        numberDeleted = toBeDeleted.size();
        hibTempl.deleteAll(toBeDeleted);
        if (log.isDebugEnabled()) log.debug("Deleted " + numberDeleted + " grading events");
        toBeDeleted = hibTempl.find("from AbstractGradeRecord as gr where gr.gradableObject.gradebook.id=?", gradebookId);
        numberDeleted = toBeDeleted.size();
        hibTempl.deleteAll(toBeDeleted);
        if (log.isDebugEnabled()) log.debug("Deleted " + numberDeleted + " grade records");
        toBeDeleted = hibTempl.find("from GradableObject as go where go.gradebook.id=?", gradebookId);
        numberDeleted = toBeDeleted.size();
        hibTempl.deleteAll(toBeDeleted);
        if (log.isDebugEnabled()) log.debug("Deleted " + numberDeleted + " gradable objects");
        // Detach the selected mapping before deleting the mappings themselves,
        // otherwise the FK from gradebook -> grade mapping blocks the delete.
        Gradebook gradebook = (Gradebook)hibTempl.load(Gradebook.class, gradebookId);
        gradebook.setSelectedGradeMapping(null);
        toBeDeleted = hibTempl.find("from GradeMapping as gm where gm.gradebook.id=?", gradebookId);
        numberDeleted = toBeDeleted.size();
        hibTempl.deleteAll(toBeDeleted);
        if (log.isDebugEnabled()) log.debug("Deleted " + numberDeleted + " grade mappings");
        hibTempl.delete(gradebook);
        hibTempl.flush();
        hibTempl.clear();
    }

    /**
     * Persists the default system-wide letter-grade percent mapping if none
     * exists yet; otherwise does nothing.
     *
     * @param gradeMap letter grade -> bottom percent, must cover exactly the
     *        grades in {@code GradebookService.validLetterGrade}
     */
    private void createDefaultLetterGradeMapping(final Map gradeMap)
    {
        if(getDefaultLetterGradePercentMapping() == null)
        {
            Set keySet = gradeMap.keySet();
            // NOTE(review): the exception messages below name
            // BaseHibernateManager.createDefaultLetterGradePercentMapping even
            // though this method lives here — probably copied; left unchanged.
            if(keySet.size() != GradebookService.validLetterGrade.length) //we only consider letter grade with -/+ now.
                throw new IllegalArgumentException("gradeMap doesn't have right size in BaseHibernateManager.createDefaultLetterGradePercentMapping");
            if(validateLetterGradeMapping(gradeMap) == false)
                throw new IllegalArgumentException("gradeMap contains invalid letter in BaseHibernateManager.createDefaultLetterGradePercentMapping");
            HibernateCallback hc = new HibernateCallback() {
                public Object doInHibernate(Session session) throws HibernateException {
                    LetterGradePercentMapping lgpm = new LetterGradePercentMapping();
                    session.save(lgpm);
                    Map saveMap = new HashMap();
                    for(Iterator iter = gradeMap.keySet().iterator(); iter.hasNext();)
                    {
                        String key = (String) iter.next();
                        saveMap.put(key, gradeMap.get(key));
                    }
                    // NOTE(review): lgpm was constructed just above, so this
                    // null check can never be false; dead guard.
                    if (lgpm != null)
                    {
                        lgpm.setGradeMap(saveMap);
                        lgpm.setMappingType(1);
                        session.update(lgpm);
                    }
                    return null;
                }
            };
            getHibernateTemplate().execute(hc);
        }
    }

    /** Hard-coded fallback letter-grade -> bottom-percent mapping (A+ .. F). */
    private Map getHardDefaultLetterMapping()
    {
        Map gradeMap = new HashMap();
        gradeMap.put("A+", Double.valueOf(100));
        gradeMap.put("A", Double.valueOf(95));
        gradeMap.put("A-", Double.valueOf(90));
        gradeMap.put("B+", Double.valueOf(87));
        gradeMap.put("B", Double.valueOf(83));
        gradeMap.put("B-", Double.valueOf(80));
        gradeMap.put("C+", Double.valueOf(77));
        gradeMap.put("C", Double.valueOf(73));
        gradeMap.put("C-", Double.valueOf(70));
        gradeMap.put("D+", Double.valueOf(67));
        gradeMap.put("D", Double.valueOf(63));
        gradeMap.put("D-", Double.valueOf(60));
        gradeMap.put("F", Double.valueOf(0.0));
        return gradeMap;
    }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.databaselookup;
import java.util.Arrays;
import java.util.List;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.ProvidesModelerMeta;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaFactory;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.shared.SharedObjectInterface;
import org.pentaho.di.trans.DatabaseImpact;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
public class DatabaseLookupMeta extends BaseStepMeta implements StepMetaInterface,
ProvidesModelerMeta {
private static Class<?> PKG = DatabaseLookupMeta.class; // for i18n purposes, needed by Translator2!!

/** Display strings for the lookup comparators; indices match the CONDITION_* constants below. */
public static final String[] conditionStrings = new String[] {
  "=", "<>", "<", "<=", ">", ">=", "LIKE", "BETWEEN", "IS NULL", "IS NOT NULL", };

public static final int CONDITION_EQ = 0;
public static final int CONDITION_NE = 1;
public static final int CONDITION_LT = 2;
public static final int CONDITION_LE = 3;
public static final int CONDITION_GT = 4;
public static final int CONDITION_GE = 5;
public static final int CONDITION_LIKE = 6;
public static final int CONDITION_BETWEEN = 7;
public static final int CONDITION_IS_NULL = 8;
public static final int CONDITION_IS_NOT_NULL = 9;

/** what's the lookup schema name? */
private String schemaName;

/** what's the lookup table? */
private String tablename;

/** database connection */
private DatabaseMeta databaseMeta;

/** which field in input stream to compare with? (one entry per lookup key) */
private String[] streamKeyField1;

/** Extra stream field, only used for the BETWEEN condition */
private String[] streamKeyField2;

/** Comparator per key: =, <>, BETWEEN, ... (see conditionStrings) */
private String[] keyCondition;

/** field in table to compare the stream key against */
private String[] tableKeyField;

/** return these field values after lookup */
private String[] returnValueField;

/** new (renamed) field name for each returned value */
private String[] returnValueNewName;

/** default value in case the lookup finds nothing */
private String[] returnValueDefault;

/** ValueMetaInterface type id of each default value */
private int[] returnValueDefaultType;

/** order by clause appended to the lookup query */
private String orderByClause;

/** Cache values we look up --> faster */
private boolean cached;

/** Limit the cache size to this! */
private int cacheSize;

/** Flag to make it load all data into the cache at startup */
private boolean loadingAllDataInCache;

/** Have the lookup fail if multiple results were found, renders the orderByClause useless */
private boolean failingOnMultipleResults;

/** Have the lookup eat the incoming row when nothing gets found */
private boolean eatingRowOnLookupFailure;
/** Default constructor; delegates to BaseStepMeta for base initialization. */
public DatabaseLookupMeta() {
  super(); // allocate BaseStepMeta
}
/**
 * @return true when lookup results are cached in memory.
 */
public boolean isCached() {
  return cached;
}

/**
 * @param cached
 *          true to cache lookup results in memory.
 */
public void setCached( boolean cached ) {
  this.cached = cached;
}

/**
 * @return the maximum number of cached rows (0 means unlimited).
 */
public int getCacheSize() {
  return cacheSize;
}

/**
 * @param cacheSize
 *          the maximum number of cached rows to allow.
 */
public void setCacheSize( int cacheSize ) {
  this.cacheSize = cacheSize;
}

/**
 * @return the database connection metadata used for the lookup.
 */
@Override
public DatabaseMeta getDatabaseMeta() {
  return databaseMeta;
}

/** @return the lookup table name (ProvidesModelerMeta contract). */
@Override public String getTableName() {
  return tablename;
}

/**
 * @param database
 *          the database connection metadata to use.
 */
public void setDatabaseMeta( DatabaseMeta database ) {
  this.databaseMeta = database;
}

/**
 * @return the comparator per key (=, <>, BETWEEN, ...).
 */
public String[] getKeyCondition() {
  return keyCondition;
}

/**
 * @param keyCondition
 *          the comparator to use per key.
 */
public void setKeyCondition( String[] keyCondition ) {
  this.keyCondition = keyCondition;
}

/**
 * @return the ORDER BY clause appended to the lookup query.
 */
public String getOrderByClause() {
  return orderByClause;
}

/**
 * @param orderByClause
 *          the ORDER BY clause to append to the lookup query.
 */
public void setOrderByClause( String orderByClause ) {
  this.orderByClause = orderByClause;
}

/**
 * @return the default value per return field, used when nothing is found.
 */
public String[] getReturnValueDefault() {
  return returnValueDefault;
}

/**
 * @param returnValueDefault
 *          the default value per return field.
 */
public void setReturnValueDefault( String[] returnValueDefault ) {
  this.returnValueDefault = returnValueDefault;
}

/**
 * @return the ValueMetaInterface type id per default value.
 */
public int[] getReturnValueDefaultType() {
  return returnValueDefaultType;
}

/**
 * @param returnValueDefaultType
 *          the ValueMetaInterface type id per default value.
 */
public void setReturnValueDefaultType( int[] returnValueDefaultType ) {
  this.returnValueDefaultType = returnValueDefaultType;
}

/**
 * @return the table fields whose values are returned by the lookup.
 */
public String[] getReturnValueField() {
  return returnValueField;
}

/**
 * @param returnValueField
 *          the table fields whose values should be returned.
 */
public void setReturnValueField( String[] returnValueField ) {
  this.returnValueField = returnValueField;
}

/**
 * @return the output (renamed) field name per returned value.
 */
public String[] getReturnValueNewName() {
  return returnValueNewName;
}

/**
 * @param returnValueNewName
 *          the output field name per returned value.
 */
public void setReturnValueNewName( String[] returnValueNewName ) {
  this.returnValueNewName = returnValueNewName;
}

/**
 * @return the primary stream field compared per key.
 */
public String[] getStreamKeyField1() {
  return streamKeyField1;
}

/**
 * @param streamKeyField1
 *          the primary stream field compared per key.
 */
public void setStreamKeyField1( String[] streamKeyField1 ) {
  this.streamKeyField1 = streamKeyField1;
}

/**
 * @return the secondary stream field per key (used for BETWEEN).
 */
public String[] getStreamKeyField2() {
  return streamKeyField2;
}

/**
 * @param streamKeyField2
 *          the secondary stream field per key (used for BETWEEN).
 */
public void setStreamKeyField2( String[] streamKeyField2 ) {
  this.streamKeyField2 = streamKeyField2;
}

/**
 * @return the table field compared against per key.
 */
public String[] getTableKeyField() {
  return tableKeyField;
}

/**
 * @param tableKeyField
 *          the table field compared against per key.
 */
public void setTableKeyField( String[] tableKeyField ) {
  this.tableKeyField = tableKeyField;
}

/**
 * @return the lookup table name.
 */
public String getTablename() {
  return tablename;
}

/**
 * @param tablename
 *          the lookup table name to set.
 */
public void setTablename( String tablename ) {
  this.tablename = tablename;
}

/**
 * @return true when the step fails on multiple lookup results.
 */
public boolean isFailingOnMultipleResults() {
  return failingOnMultipleResults;
}

/**
 * @param failOnMultipleResults
 *          true to fail when multiple results are found (renders orderByClause useless).
 */
public void setFailingOnMultipleResults( boolean failOnMultipleResults ) {
  this.failingOnMultipleResults = failOnMultipleResults;
}
/**
 * Loads this step's metadata from its XML node. Key/value arrays are reset
 * first so stale state never survives a reload; readData() reallocates them.
 */
@Override
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
  streamKeyField1 = null;
  returnValueField = null;
  readData( stepnode, databases );
}
/**
 * Allocates the parallel key arrays (size nrkeys) and return-value arrays
 * (size nrvalues). Existing contents are discarded.
 *
 * @param nrkeys   number of lookup key entries
 * @param nrvalues number of returned value entries
 */
public void allocate( int nrkeys, int nrvalues ) {
  streamKeyField1 = new String[nrkeys];
  tableKeyField = new String[nrkeys];
  keyCondition = new String[nrkeys];
  streamKeyField2 = new String[nrkeys];
  returnValueField = new String[nrvalues];
  returnValueNewName = new String[nrvalues];
  returnValueDefault = new String[nrvalues];
  returnValueDefaultType = new int[nrvalues];
}
/**
 * Deep-copies this step metadata: the shallow clone from super.clone() is
 * given its own copies of every key/value array so the clone and the
 * original can be edited independently.
 */
@Override
public Object clone() {
  DatabaseLookupMeta copy = (DatabaseLookupMeta) super.clone();
  copy.streamKeyField1 = Arrays.copyOf( streamKeyField1, streamKeyField1.length );
  copy.tableKeyField = Arrays.copyOf( tableKeyField, tableKeyField.length );
  copy.keyCondition = Arrays.copyOf( keyCondition, keyCondition.length );
  copy.streamKeyField2 = Arrays.copyOf( streamKeyField2, streamKeyField2.length );
  copy.returnValueField = Arrays.copyOf( returnValueField, returnValueField.length );
  copy.returnValueNewName = Arrays.copyOf( returnValueNewName, returnValueNewName.length );
  copy.returnValueDefault = Arrays.copyOf( returnValueDefault, returnValueDefault.length );
  copy.returnValueDefaultType = Arrays.copyOf( returnValueDefaultType, returnValueDefaultType.length );
  return copy;
}
/**
 * Parses the step settings out of the given XML node: connection, cache
 * options, lookup table, and the repeating &lt;key&gt;/&lt;value&gt; entries
 * under &lt;lookup&gt;.
 *
 * @param stepnode  XML node holding this step's settings
 * @param databases shared connections; the "connection" tag is resolved against them
 * @throws KettleXMLException wrapping any parse failure
 */
private void readData( Node stepnode, List<? extends SharedObjectInterface> databases ) throws KettleXMLException {
  try {
    String dtype;
    String csize;
    String con = XMLHandler.getTagValue( stepnode, "connection" );
    databaseMeta = DatabaseMeta.findDatabase( databases, con );
    cached = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "cache" ) );
    loadingAllDataInCache = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "cache_load_all" ) );
    csize = XMLHandler.getTagValue( stepnode, "cache_size" );
    // Missing/non-numeric cache size falls back to 0.
    cacheSize = Const.toInt( csize, 0 );
    schemaName = XMLHandler.getTagValue( stepnode, "lookup", "schema" );
    tablename = XMLHandler.getTagValue( stepnode, "lookup", "table" );
    Node lookup = XMLHandler.getSubNode( stepnode, "lookup" );
    int nrkeys = XMLHandler.countNodes( lookup, "key" );
    int nrvalues = XMLHandler.countNodes( lookup, "value" );
    allocate( nrkeys, nrvalues );
    for ( int i = 0; i < nrkeys; i++ ) {
      Node knode = XMLHandler.getSubNodeByNr( lookup, "key", i );
      streamKeyField1[i] = XMLHandler.getTagValue( knode, "name" );
      tableKeyField[i] = XMLHandler.getTagValue( knode, "field" );
      keyCondition[i] = XMLHandler.getTagValue( knode, "condition" );
      // Missing condition defaults to equality.
      if ( keyCondition[i] == null ) {
        keyCondition[i] = "=";
      }
      streamKeyField2[i] = XMLHandler.getTagValue( knode, "name2" );
    }
    for ( int i = 0; i < nrvalues; i++ ) {
      Node vnode = XMLHandler.getSubNodeByNr( lookup, "value", i );
      returnValueField[i] = XMLHandler.getTagValue( vnode, "name" );
      returnValueNewName[i] = XMLHandler.getTagValue( vnode, "rename" );
      if ( returnValueNewName[i] == null ) {
        returnValueNewName[i] = returnValueField[i]; // default: the same name!
      }
      returnValueDefault[i] = XMLHandler.getTagValue( vnode, "default" );
      dtype = XMLHandler.getTagValue( vnode, "type" );
      returnValueDefaultType[i] = ValueMetaFactory.getIdForValueMeta( dtype );
      if ( returnValueDefaultType[i] < 0 ) {
        // Unknown default value type: fall back to String.
        // logError("unknown default value type: "+dtype+" for value "+value[i]+", default to type: String!");
        returnValueDefaultType[i] = ValueMetaInterface.TYPE_STRING;
      }
    }
    orderByClause = XMLHandler.getTagValue( lookup, "orderby" ); // Optional, can by null
    failingOnMultipleResults = "Y".equalsIgnoreCase( XMLHandler.getTagValue( lookup, "fail_on_multiple" ) );
    eatingRowOnLookupFailure = "Y".equalsIgnoreCase( XMLHandler.getTagValue( lookup, "eat_row_on_failure" ) );
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "DatabaseLookupMeta.ERROR0001.UnableToLoadStepFromXML" ), e );
  }
}
/**
 * Resets this step metadata to its defaults: no connection, caching off,
 * localized default table name, and empty key/value arrays.
 */
@Override
public void setDefault() {
  streamKeyField1 = null;
  returnValueField = null;
  databaseMeta = null;
  cached = false;
  cacheSize = 0;
  schemaName = "";
  tablename = BaseMessages.getString( PKG, "DatabaseLookupMeta.Default.TableName" );
  int nrkeys = 0;
  int nrvalues = 0;
  allocate( nrkeys, nrvalues );
  // NOTE(review): nrkeys and nrvalues are both 0, so the two loops below never
  // execute; they only serve as a template should defaults ever be non-empty.
  for ( int i = 0; i < nrkeys; i++ ) {
    tableKeyField[i] = BaseMessages.getString( PKG, "DatabaseLookupMeta.Default.KeyFieldPrefix" );
    keyCondition[i] = BaseMessages.getString( PKG, "DatabaseLookupMeta.Default.KeyCondition" );
    streamKeyField1[i] = BaseMessages.getString( PKG, "DatabaseLookupMeta.Default.KeyStreamField1" );
    streamKeyField2[i] = BaseMessages.getString( PKG, "DatabaseLookupMeta.Default.KeyStreamField2" );
  }
  for ( int i = 0; i < nrvalues; i++ ) {
    returnValueField[i] = BaseMessages.getString( PKG, "DatabaseLookupMeta.Default.ReturnFieldPrefix" ) + i;
    returnValueNewName[i] = BaseMessages.getString( PKG, "DatabaseLookupMeta.Default.ReturnNewNamePrefix" ) + i;
    returnValueDefault[i] =
      BaseMessages.getString( PKG, "DatabaseLookupMeta.Default.ReturnDefaultValuePrefix" ) + i;
    returnValueDefaultType[i] = ValueMetaInterface.TYPE_STRING;
  }
  orderByClause = "";
  failingOnMultipleResults = false;
  eatingRowOnLookupFailure = false;
}
/**
 * Appends the lookup's return fields to the outgoing row metadata. When the
 * database info row (info[0]) is available, the real value metadata from the
 * table is cloned and renamed; otherwise fields are built from the configured
 * default types.
 */
@Override
public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  if ( Utils.isEmpty( info ) || info[0] == null ) { // null or length 0 : no info from database
    for ( int i = 0; i < getReturnValueNewName().length; i++ ) {
      try {
        ValueMetaInterface v =
          ValueMetaFactory.createValueMeta( getReturnValueNewName()[i], getReturnValueDefaultType()[i] );
        v.setOrigin( name );
        row.addValueMeta( v );
      } catch ( Exception e ) {
        throw new KettleStepException( e );
      }
    }
  } else {
    // Fields not found in the database info row are silently skipped.
    for ( int i = 0; i < returnValueNewName.length; i++ ) {
      ValueMetaInterface v = info[0].searchValueMeta( returnValueField[i] );
      if ( v != null ) {
        ValueMetaInterface copy = v.clone(); // avoid renaming other value meta - PDI-9844
        copy.setName( returnValueNewName[i] );
        copy.setOrigin( name );
        row.addValueMeta( copy );
      }
    }
  }
}
/**
 * Serializes this step's settings to the XML fragment stored in the transformation file.
 *
 * <p>The layout (leading spaces, tag names, nesting under {@code <lookup>}) must stay
 * byte-compatible with what {@code loadXML} reads back, so do not reformat the
 * appended string fragments.</p>
 *
 * @return the XML representation of this step's metadata
 */
@Override
public String getXML() {
  StringBuilder retval = new StringBuilder( 500 );
  // Connection and cache settings live at the top level of the step element.
  retval
    .append( "    " ).append(
      XMLHandler.addTagValue( "connection", databaseMeta == null ? "" : databaseMeta.getName() ) );
  retval.append( "    " ).append( XMLHandler.addTagValue( "cache", cached ) );
  retval.append( "    " ).append( XMLHandler.addTagValue( "cache_load_all", loadingAllDataInCache ) );
  retval.append( "    " ).append( XMLHandler.addTagValue( "cache_size", cacheSize ) );
  // Everything lookup-specific is nested inside <lookup>.
  retval.append( "    <lookup>" ).append( Const.CR );
  retval.append( "      " ).append( XMLHandler.addTagValue( "schema", schemaName ) );
  retval.append( "      " ).append( XMLHandler.addTagValue( "table", tablename ) );
  retval.append( "      " ).append( XMLHandler.addTagValue( "orderby", orderByClause ) );
  retval.append( "      " ).append( XMLHandler.addTagValue( "fail_on_multiple", failingOnMultipleResults ) );
  retval.append( "      " ).append( XMLHandler.addTagValue( "eat_row_on_failure", eatingRowOnLookupFailure ) );
  // One <key> element per stream/table key pair.
  for ( int i = 0; i < streamKeyField1.length; i++ ) {
    retval.append( "      <key>" ).append( Const.CR );
    retval.append( "        " ).append( XMLHandler.addTagValue( "name", streamKeyField1[i] ) );
    retval.append( "        " ).append( XMLHandler.addTagValue( "field", tableKeyField[i] ) );
    retval.append( "        " ).append( XMLHandler.addTagValue( "condition", keyCondition[i] ) );
    retval.append( "        " ).append( XMLHandler.addTagValue( "name2", streamKeyField2[i] ) );
    retval.append( "      </key>" ).append( Const.CR );
  }
  // One <value> element per return field, with its rename, default and type.
  for ( int i = 0; i < returnValueField.length; i++ ) {
    retval.append( "      <value>" ).append( Const.CR );
    retval.append( "        " ).append( XMLHandler.addTagValue( "name", returnValueField[i] ) );
    retval.append( "        " ).append( XMLHandler.addTagValue( "rename", returnValueNewName[i] ) );
    retval.append( "        " ).append( XMLHandler.addTagValue( "default", returnValueDefault[i] ) );
    retval.append( "        " ).append(
      XMLHandler.addTagValue( "type", ValueMetaFactory.getValueMetaName( returnValueDefaultType[i] ) ) );
    retval.append( "      </value>" ).append( Const.CR );
  }
  retval.append( "    </lookup>" ).append( Const.CR );
  return retval.toString();
}
/**
 * Loads this step's settings from a Kettle repository.
 *
 * <p>The attribute codes (e.g. {@code "lookup_key_field"}) are the persistence
 * contract shared with {@link #saveRep}; they must not be changed.</p>
 *
 * @param id_step   the repository id of the step to read
 * @param databases transformation databases to resolve the connection against
 * @throws KettleException if any repository read fails
 */
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {
    databaseMeta = rep.loadDatabaseMetaFromStepAttribute( id_step, "id_connection", databases );
    cached = rep.getStepAttributeBoolean( id_step, "cache" );
    loadingAllDataInCache = rep.getStepAttributeBoolean( id_step, "cache_load_all" );
    cacheSize = (int) rep.getStepAttributeInteger( id_step, "cache_size" );
    schemaName = rep.getStepAttributeString( id_step, "lookup_schema" );
    tablename = rep.getStepAttributeString( id_step, "lookup_table" );
    orderByClause = rep.getStepAttributeString( id_step, "lookup_orderby" );
    failingOnMultipleResults = rep.getStepAttributeBoolean( id_step, "fail_on_multiple" );
    eatingRowOnLookupFailure = rep.getStepAttributeBoolean( id_step, "eat_row_on_failure" );

    // Counts are derived from one representative attribute of each group.
    int nrkeys = rep.countNrStepAttributes( id_step, "lookup_key_field" );
    int nrvalues = rep.countNrStepAttributes( id_step, "return_value_name" );

    allocate( nrkeys, nrvalues );

    for ( int i = 0; i < nrkeys; i++ ) {
      streamKeyField1[i] = rep.getStepAttributeString( id_step, i, "lookup_key_name" );
      tableKeyField[i] = rep.getStepAttributeString( id_step, i, "lookup_key_field" );
      keyCondition[i] = rep.getStepAttributeString( id_step, i, "lookup_key_condition" );
      streamKeyField2[i] = rep.getStepAttributeString( id_step, i, "lookup_key_name2" );
    }

    for ( int i = 0; i < nrvalues; i++ ) {
      returnValueField[i] = rep.getStepAttributeString( id_step, i, "return_value_name" );
      returnValueNewName[i] = rep.getStepAttributeString( id_step, i, "return_value_rename" );
      returnValueDefault[i] = rep.getStepAttributeString( id_step, i, "return_value_default" );
      // Stored as a type name; mapped back to the ValueMetaInterface type id.
      returnValueDefaultType[i] =
        ValueMetaFactory.getIdForValueMeta( rep.getStepAttributeString( id_step, i, "return_value_type" ) );
    }
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "DatabaseLookupMeta.ERROR0002.UnexpectedErrorReadingFromTheRepository" ), e );
  }
}
/**
 * Persists this step's settings to a Kettle repository.
 *
 * <p>Attribute codes mirror {@link #readRep}; the indexed attributes are written
 * one row per key / return field.</p>
 *
 * @param id_transformation repository id of the owning transformation
 * @param id_step           repository id of this step
 * @throws KettleException if any repository write fails
 */
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  try {
    rep.saveDatabaseMetaStepAttribute( id_transformation, id_step, "id_connection", databaseMeta );
    rep.saveStepAttribute( id_transformation, id_step, "cache", cached );
    rep.saveStepAttribute( id_transformation, id_step, "cache_load_all", loadingAllDataInCache );
    rep.saveStepAttribute( id_transformation, id_step, "cache_size", cacheSize );
    rep.saveStepAttribute( id_transformation, id_step, "lookup_schema", schemaName );
    rep.saveStepAttribute( id_transformation, id_step, "lookup_table", tablename );
    rep.saveStepAttribute( id_transformation, id_step, "lookup_orderby", orderByClause );
    rep.saveStepAttribute( id_transformation, id_step, "fail_on_multiple", failingOnMultipleResults );
    rep.saveStepAttribute( id_transformation, id_step, "eat_row_on_failure", eatingRowOnLookupFailure );

    for ( int i = 0; i < streamKeyField1.length; i++ ) {
      rep.saveStepAttribute( id_transformation, id_step, i, "lookup_key_name", streamKeyField1[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "lookup_key_field", tableKeyField[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "lookup_key_condition", keyCondition[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "lookup_key_name2", streamKeyField2[i] );
    }

    for ( int i = 0; i < returnValueField.length; i++ ) {
      rep.saveStepAttribute( id_transformation, id_step, i, "return_value_name", returnValueField[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "return_value_rename", returnValueNewName[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "return_value_default", returnValueDefault[i] );
      // Persist the type as its symbolic name, not the numeric id.
      rep.saveStepAttribute( id_transformation, id_step, i, "return_value_type", ValueMetaFactory
        .getValueMetaName( returnValueDefaultType[i] ) );
    }

    // Also, save the step-database relationship!
    if ( databaseMeta != null ) {
      rep.insertStepDatabase( id_transformation, id_step, databaseMeta.getObjectId() );
    }
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "DatabaseLookupMeta.ERROR0003.UnableToSaveStepToRepository" )
      + id_step, e );
  }
}
/**
 * Verifies the step configuration: that the lookup table exists and contains the
 * configured key and return fields, that the key fields are present in the incoming
 * stream, and that the step receives input. One {@link CheckResult} is added to
 * {@code remarks} per verification.
 *
 * <p>Fix: the {@code first}/{@code error_found}/{@code error_message} state is now
 * reset between the key-field check and the return-field check. Previously the
 * return-field result inherited any key-field errors (stale message, false error
 * status, and a missing "missing return fields" header).</p>
 */
@Override
public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
  Repository repository, IMetaStore metaStore ) {
  CheckResult cr;
  String error_message = "";

  if ( databaseMeta != null ) {
    Database db = new Database( loggingObject, databaseMeta );
    db.shareVariablesWith( transMeta );
    databases = new Database[] { db }; // Keep track of this one for cancelQuery

    try {
      db.connect();

      if ( !Utils.isEmpty( tablename ) ) {
        boolean first = true;
        boolean error_found = false;
        error_message = "";

        RowMetaInterface r =
          db.getTableFieldsMeta(
            db.environmentSubstitute( schemaName ),
            db.environmentSubstitute( tablename ) );
        if ( r != null ) {
          // Check the keys used to do the lookup...
          for ( int i = 0; i < tableKeyField.length; i++ ) {
            String lufield = tableKeyField[i];

            ValueMetaInterface v = r.searchValueMeta( lufield );
            if ( v == null ) {
              if ( first ) {
                first = false;
                error_message +=
                  BaseMessages.getString( PKG, "DatabaseLookupMeta.Check.MissingCompareFieldsInLookupTable" )
                    + Const.CR;
              }
              error_found = true;
              error_message += "\t\t" + lufield + Const.CR;
            }
          }
          if ( error_found ) {
            cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
          } else {
            cr =
              new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
                PKG, "DatabaseLookupMeta.Check.AllLookupFieldsFoundInTable" ), stepMeta );
          }
          remarks.add( cr );

          // Reset the accumulated state so the return-field check reports
          // independently of the key-field check above.
          first = true;
          error_found = false;
          error_message = "";

          // Also check the returned values!
          for ( int i = 0; i < returnValueField.length; i++ ) {
            String lufield = returnValueField[i];

            ValueMetaInterface v = r.searchValueMeta( lufield );
            if ( v == null ) {
              if ( first ) {
                first = false;
                error_message +=
                  BaseMessages.getString( PKG, "DatabaseLookupMeta.Check.MissingReturnFieldsInLookupTable" )
                    + Const.CR;
              }
              error_found = true;
              error_message += "\t\t" + lufield + Const.CR;
            }
          }
          if ( error_found ) {
            cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
          } else {
            cr =
              new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
                PKG, "DatabaseLookupMeta.Check.AllReturnFieldsFoundInTable" ), stepMeta );
          }
          remarks.add( cr );
        } else {
          error_message = BaseMessages.getString( PKG, "DatabaseLookupMeta.Check.CouldNotReadTableInfo" );
          cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
          remarks.add( cr );
        }
      }

      // Look up fields in the input stream <prev>
      if ( prev != null && prev.size() > 0 ) {
        boolean first = true;
        error_message = "";
        boolean error_found = false;

        for ( int i = 0; i < streamKeyField1.length; i++ ) {
          ValueMetaInterface v = prev.searchValueMeta( streamKeyField1[i] );
          if ( v == null ) {
            if ( first ) {
              first = false;
              error_message +=
                BaseMessages.getString( PKG, "DatabaseLookupMeta.Check.MissingFieldsNotFoundInInput" )
                  + Const.CR;
            }
            error_found = true;
            error_message += "\t\t" + streamKeyField1[i] + Const.CR;
          }
        }
        if ( error_found ) {
          cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
        } else {
          cr =
            new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
              PKG, "DatabaseLookupMeta.Check.AllFieldsFoundInInput" ), stepMeta );
        }
        remarks.add( cr );
      } else {
        error_message =
          BaseMessages.getString( PKG, "DatabaseLookupMeta.Check.CouldNotReadFromPreviousSteps" ) + Const.CR;
        cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
        remarks.add( cr );
      }
    } catch ( KettleDatabaseException dbe ) {
      error_message =
        BaseMessages.getString( PKG, "DatabaseLookupMeta.Check.DatabaseErrorWhileChecking" )
          + dbe.getMessage();
      cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
      remarks.add( cr );
    } finally {
      db.disconnect();
    }
  } else {
    error_message = BaseMessages.getString( PKG, "DatabaseLookupMeta.Check.MissingConnectionError" );
    cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
    remarks.add( cr );
  }

  // See if we have input streams leading to this step!
  if ( input.length > 0 ) {
    cr =
      new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
        PKG, "DatabaseLookupMeta.Check.StepIsReceivingInfoFromOtherSteps" ), stepMeta );
    remarks.add( cr );
  } else {
    cr =
      new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(
        PKG, "DatabaseLookupMeta.Check.NoInputReceivedFromOtherSteps" ), stepMeta );
    remarks.add( cr );
  }
}
/**
 * Fetches the field metadata of the configured lookup table from the database.
 *
 * @return the table's row metadata, or {@code null} when no connection is set
 *         or the metadata could not be retrieved (the error is logged)
 */
@Override
public RowMetaInterface getTableFields() {
  if ( databaseMeta == null ) {
    return null;
  }

  Database db = new Database( loggingObject, databaseMeta );
  databases = new Database[] { db }; // Keep track of this one for cancelQuery

  RowMetaInterface fields = null;
  try {
    db.connect();
    fields =
      db.getTableFieldsMeta(
        databaseMeta.environmentSubstitute( schemaName ),
        databaseMeta.environmentSubstitute( tablename ) );
  } catch ( KettleDatabaseException dbe ) {
    logError( BaseMessages.getString( PKG, "DatabaseLookupMeta.ERROR0004.ErrorGettingTableFields" )
      + dbe.getMessage() );
  } finally {
    db.disconnect();
  }
  return fields;
}
/**
 * Creates the runtime step instance bound to this metadata.
 */
@Override
public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
  TransMeta transMeta, Trans trans ) {
  return new DatabaseLookup( stepMeta, stepDataInterface, cnr, transMeta, trans );
}

/**
 * Creates a fresh runtime data holder for one step copy.
 */
@Override
public StepDataInterface getStepData() {
  return new DatabaseLookupData();
}
/**
 * Reports the database impact of this step: one read entry per lookup key and
 * one per return field.
 *
 * NOTE(review): this method dereferences databaseMeta without a null check;
 * presumably impact analysis is only invoked with a connection configured —
 * TODO confirm against callers.
 */
@Override
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepinfo,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
  IMetaStore metaStore ) {
  // The keys are read-only...
  for ( int i = 0; i < streamKeyField1.length; i++ ) {
    ValueMetaInterface v = prev.searchValueMeta( streamKeyField1[i] );
    DatabaseImpact ii =
      new DatabaseImpact(
        DatabaseImpact.TYPE_IMPACT_READ, transMeta.getName(), stepinfo.getName(), databaseMeta
          .getDatabaseName(), tablename, tableKeyField[i], streamKeyField1[i], v != null
          ? v.getOrigin() : "?", "", BaseMessages.getString( PKG, "DatabaseLookupMeta.Impact.Key" ) );
    impact.add( ii );
  }

  // The Return fields are read-only too...
  for ( int i = 0; i < returnValueField.length; i++ ) {
    DatabaseImpact ii =
      new DatabaseImpact(
        DatabaseImpact.TYPE_IMPACT_READ, transMeta.getName(), stepinfo.getName(),
        databaseMeta.getDatabaseName(), tablename, returnValueField[i], "", "", "",
        BaseMessages.getString( PKG, "DatabaseLookupMeta.Impact.ReturnValue" ) );
    impact.add( ii );
  }
}
/**
 * Returns the database connections used by this step; delegates to the
 * superclass when no connection is configured.
 */
@Override
public DatabaseMeta[] getUsedDatabaseConnections() {
  return databaseMeta == null
    ? super.getUsedDatabaseConnections()
    : new DatabaseMeta[] { databaseMeta };
}
/**
 * @return whether a failed lookup silently drops the row instead of emitting it
 *         with default return values.
 */
public boolean isEatingRowOnLookupFailure() {
  return eatingRowOnLookupFailure;
}

/**
 * @param eatingRowOnLookupFailure
 *          whether a failed lookup should drop the row instead of emitting it
 *          with default return values.
 */
public void setEatingRowOnLookupFailure( boolean eatingRowOnLookupFailure ) {
  this.eatingRowOnLookupFailure = eatingRowOnLookupFailure;
}

/**
 * @return the schema name of the lookup table
 */
@Override
public String getSchemaName() {
  return schemaName;
}

// Returns null: this step reports no dedicated "missing connection" message.
// NOTE(review): the interface presumably tolerates null here — confirm with callers.
@Override public String getMissingDatabaseConnectionInformationMessage() {
  return null;
}

/**
 * @param schemaName
 *          the schema name of the lookup table to set
 */
public void setSchemaName( String schemaName ) {
  this.schemaName = schemaName;
}

/** This step supports per-row error handling. */
@Override
public boolean supportsErrorHandling() {
  return true;
}

/**
 * @return whether the whole lookup table is preloaded into the cache
 */
public boolean isLoadingAllDataInCache() {
  return loadingAllDataInCache;
}

/**
 * @param loadingAllDataInCache
 *          whether the whole lookup table should be preloaded into the cache
 */
public void setLoadingAllDataInCache( boolean loadingAllDataInCache ) {
  this.loadingAllDataInCache = loadingAllDataInCache;
}

/** Exposes the runtime return-row metadata from the step data. */
@Override public RowMeta getRowMeta( StepDataInterface stepData ) {
  return (RowMeta) ( (DatabaseLookupData) stepData ).returnMeta;
}

/** @return the database-side return field names */
@Override public List<String> getDatabaseFields() {
  return Arrays.asList( returnValueField );
}

/** @return the stream-side (renamed) return field names */
@Override public List<String> getStreamFields() {
  return Arrays.asList( returnValueNewName );
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionListenerResponseHandler;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.node.NodeClosedException;
import org.elasticsearch.node.ReportingService;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.threadpool.Scheduler;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
public class TransportService extends AbstractLifecycleComponent implements ReportingService<TransportInfo>, TransportMessageListener,
TransportConnectionListener {
private static final Logger logger = LogManager.getLogger(TransportService.class);

// Profile name used for responses that are dispatched locally without hitting the wire.
public static final String DIRECT_RESPONSE_PROFILE = ".direct";
public static final String HANDSHAKE_ACTION_NAME = "internal:transport/handshake";

// Gate flipped by acceptIncomingRequests(); requests arriving before start-up
// completes are held back until this becomes true.
private final AtomicBoolean handleIncomingRequests = new AtomicBoolean();
private final DelegatingTransportMessageListener messageListener = new DelegatingTransportMessageListener();
protected final Transport transport;
protected final ConnectionManager connectionManager;
protected final ThreadPool threadPool;
protected final ClusterName clusterName;
protected final TaskManager taskManager;
// Sender chain produced by the interceptor; all outbound requests go through it.
private final TransportInterceptor.AsyncSender asyncSender;
private final Function<BoundTransportAddress, DiscoveryNode> localNodeFactory;
private final boolean remoteClusterClient;
private final Transport.ResponseHandlers responseHandlers;
private final TransportInterceptor interceptor;

// An LRU (don't really care about concurrency here) that holds the latest timed out requests so if they
// do show up, we can print more descriptive information about them
final Map<Long, TimeoutInfoHolder> timeoutInfoHandlers =
    Collections.synchronizedMap(new LinkedHashMap<Long, TimeoutInfoHolder>(100, .75F, true) {
        @Override
        protected boolean removeEldestEntry(Map.Entry eldest) {
            // Cap the map at 100 entries; access-order iteration makes this an LRU.
            return size() > 100;
        }
    });

public static final TransportInterceptor NOOP_TRANSPORT_INTERCEPTOR = new TransportInterceptor() {};

// tracer log
private final Logger tracerLog;

volatile String[] tracerLogInclude;
volatile String[] tracerLogExclude;

private final RemoteClusterService remoteClusterService;

/** if set will call requests sent to this id to shortcut and executed locally */
volatile DiscoveryNode localNode = null;

// Synthetic connection used to short-circuit requests addressed to the local node.
private final Transport.Connection localNodeConnection = new Transport.Connection() {
    @Override
    public DiscoveryNode getNode() {
        return localNode;
    }

    @Override
    public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options)
        throws TransportException {
        // Dispatch directly on this node instead of going over the network.
        sendLocalRequest(requestId, action, request, options);
    }

    @Override
    public void addCloseListener(ActionListener<Void> listener) {
        // The local connection never closes, so listeners are intentionally ignored.
    }

    @Override
    public boolean isClosed() {
        return false;
    }

    @Override
    public void close() {
        // Nothing to release for the local connection.
    }
};
/**
 * Build the service.
 *
 * @param clusterSettings if non null, the {@linkplain TransportService} will register with the {@link ClusterSettings} for settings
 *                        updates for {@link TransportSettings#TRACE_LOG_EXCLUDE_SETTING} and {@link TransportSettings#TRACE_LOG_INCLUDE_SETTING}.
 */
public TransportService(Settings settings, Transport transport, ThreadPool threadPool, TransportInterceptor transportInterceptor,
                        Function<BoundTransportAddress, DiscoveryNode> localNodeFactory, @Nullable ClusterSettings clusterSettings,
                        Set<String> taskHeaders) {
    // Delegates to the full constructor with a default ClusterConnectionManager.
    this(settings, transport, threadPool, transportInterceptor, localNodeFactory, clusterSettings, taskHeaders,
        new ClusterConnectionManager(settings, transport));
}

/**
 * Build the service with an explicit {@link ConnectionManager}.
 *
 * <p>Registers dynamic-settings consumers (trace include/exclude, slow-operation
 * threshold), wires the interceptor's sender chain, and registers the internal
 * handshake request handler.</p>
 */
public TransportService(Settings settings, Transport transport, ThreadPool threadPool, TransportInterceptor transportInterceptor,
                        Function<BoundTransportAddress, DiscoveryNode> localNodeFactory, @Nullable ClusterSettings clusterSettings,
                        Set<String> taskHeaders, ConnectionManager connectionManager) {
    this.transport = transport;
    transport.setSlowLogThreshold(TransportSettings.SLOW_OPERATION_THRESHOLD_SETTING.get(settings));
    this.threadPool = threadPool;
    this.localNodeFactory = localNodeFactory;
    this.connectionManager = connectionManager;
    this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings);
    setTracerLogInclude(TransportSettings.TRACE_LOG_INCLUDE_SETTING.get(settings));
    setTracerLogExclude(TransportSettings.TRACE_LOG_EXCLUDE_SETTING.get(settings));
    tracerLog = Loggers.getLogger(logger, ".tracer");
    taskManager = createTaskManager(settings, threadPool, taskHeaders);
    this.interceptor = transportInterceptor;
    // All outbound requests flow through the interceptor's sender chain.
    this.asyncSender = interceptor.interceptSender(this::sendRequestInternal);
    this.remoteClusterClient = DiscoveryNode.isRemoteClusterClient(settings);
    remoteClusterService = new RemoteClusterService(settings, this);
    responseHandlers = transport.getResponseHandlers();
    if (clusterSettings != null) {
        clusterSettings.addSettingsUpdateConsumer(TransportSettings.TRACE_LOG_INCLUDE_SETTING, this::setTracerLogInclude);
        clusterSettings.addSettingsUpdateConsumer(TransportSettings.TRACE_LOG_EXCLUDE_SETTING, this::setTracerLogExclude);
        if (remoteClusterClient) {
            remoteClusterService.listenForUpdates(clusterSettings);
        }
        clusterSettings.addSettingsUpdateConsumer(TransportSettings.SLOW_OPERATION_THRESHOLD_SETTING, transport::setSlowLogThreshold);
    }
    // The handshake handler answers with this node's identity, cluster name and version.
    registerRequestHandler(
        HANDSHAKE_ACTION_NAME,
        ThreadPool.Names.SAME,
        false, false,
        HandshakeRequest::new,
        (request, channel, task) -> channel.sendResponse(
            new HandshakeResponse(localNode, clusterName, localNode.getVersion())));
}
/** @return the service managing connections to remote (cross-cluster) clusters */
public RemoteClusterService getRemoteClusterService() {
    return remoteClusterService;
}

/** @return this node's identity; null until {@code doStart()} has run */
public DiscoveryNode getLocalNode() {
    return localNode;
}

/** @return the task manager tracking requests executing on this node */
public TaskManager getTaskManager() {
    return taskManager;
}

// Extension point: subclasses may supply a specialized TaskManager.
protected TaskManager createTaskManager(Settings settings, ThreadPool threadPool, Set<String> taskHeaders) {
    return new TaskManager(settings, threadPool, taskHeaders);
}

/**
 * The executor service for this transport service.
 *
 * @return the executor service
 */
private ExecutorService getExecutorService() {
    return threadPool.generic();
}

// Dynamic-settings consumer for the tracer include patterns.
void setTracerLogInclude(List<String> tracerLogInclude) {
    this.tracerLogInclude = tracerLogInclude.toArray(Strings.EMPTY_ARRAY);
}

// Dynamic-settings consumer for the tracer exclude patterns.
void setTracerLogExclude(List<String> tracerLogExclude) {
    this.tracerLogExclude = tracerLogExclude.toArray(Strings.EMPTY_ARRAY);
}
/**
 * Starts the underlying transport, resolves the local node from the bound
 * address, and (for remote-cluster clients) initiates remote cluster connections.
 * The ordering matters: listeners are attached before the transport starts.
 */
@Override
protected void doStart() {
    transport.setMessageListener(this);
    connectionManager.addListener(this);
    transport.start();
    if (transport.boundAddress() != null && logger.isInfoEnabled()) {
        logger.info("{}", transport.boundAddress());
        for (Map.Entry<String, BoundTransportAddress> entry : transport.profileBoundAddresses().entrySet()) {
            logger.info("profile [{}]: {}", entry.getKey(), entry.getValue());
        }
    }
    // The local node identity is only known once the transport has bound its address.
    localNode = localNodeFactory.apply(transport.boundAddress());

    if (remoteClusterClient) {
        // here we start to connect to the remote clusters
        remoteClusterService.initializeRemoteClusters();
    }
}
/**
 * Stops the service: closes the connection manager, the remote cluster service
 * and the transport, then fails all still-pending response handlers with a
 * {@link NodeClosedException} so callers are never left waiting forever.
 */
@Override
protected void doStop() {
    try {
        IOUtils.close(connectionManager, remoteClusterService, transport::stop);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    } finally {
        // in case the transport is not connected to our local node (thus cleaned on node disconnect)
        // make sure to clean any leftover on going handles
        for (final Transport.ResponseContext holderToNotify : responseHandlers.prune(h -> true)) {
            // callback that an exception happened, but on a different thread since we don't
            // want handlers to worry about stack overflows
            getExecutorService().execute(new AbstractRunnable() {
                @Override
                public void onRejection(Exception e) {
                    // if we get rejected during node shutdown we don't wanna bubble it up
                    logger.debug(
                        () -> new ParameterizedMessage(
                            "failed to notify response handler on rejection, action: {}",
                            holderToNotify.action()),
                        e);
                }
                @Override
                public void onFailure(Exception e) {
                    logger.warn(
                        () -> new ParameterizedMessage(
                            "failed to notify response handler on exception, action: {}",
                            holderToNotify.action()),
                        e);
                }
                @Override
                public void doRun() {
                    // Fail the pending request as if the target node had closed.
                    TransportException ex = new SendRequestTransportException(holderToNotify.connection().getNode(),
                        holderToNotify.action(), new NodeClosedException(localNode));
                    holderToNotify.handler().handleException(ex);
                }
            });
        }
    }
}
@Override
protected void doClose() throws IOException {
    transport.close();
}

/**
 * start accepting incoming requests.
 * when the transport layer starts up it will block any incoming requests until
 * this method is called
 */
public final void acceptIncomingRequests() {
    handleIncomingRequests.set(true);
}

/**
 * @return transport info for reporting, or null if the transport is not bound yet
 */
@Override
public TransportInfo info() {
    BoundTransportAddress boundTransportAddress = boundAddress();
    if (boundTransportAddress == null) {
        return null;
    }
    return new TransportInfo(boundTransportAddress, transport.profileBoundAddresses());
}

/** @return current transport-level statistics */
public TransportStats stats() {
    return transport.getStats();
}

/** @return whether the underlying transport encrypts traffic */
public boolean isTransportSecure() {
    return transport.isSecure();
}

/** @return the address(es) the transport is bound to, or null before start-up */
public BoundTransportAddress boundAddress() {
    return transport.boundAddress();
}

/** @return default seed addresses used for discovery, delegated to the transport */
public List<String> getDefaultSeedAddresses() {
    return transport.getDefaultSeedAddresses();
}
/**
 * Returns <code>true</code> iff the given node is already connected.
 */
public boolean nodeConnected(DiscoveryNode node) {
    // The local node is always considered connected.
    return isLocalNode(node) || connectionManager.nodeConnected(node);
}

/**
 * Connect to the specified node with the default connection profile.
 * The ActionListener will be called on the calling thread or the generic thread pool.
 *
 * @param node the node to connect to
 * @param listener the action listener to notify
 */
public void connectToNode(DiscoveryNode node, ActionListener<Void> listener) throws ConnectTransportException {
    connectToNode(node, null, listener);
}

/**
 * Connect to the specified node with the given connection profile.
 * The ActionListener will be called on the calling thread or the generic thread pool.
 *
 * @param node the node to connect to
 * @param connectionProfile the connection profile to use when connecting to this node
 * @param listener the action listener to notify
 */
public void connectToNode(final DiscoveryNode node, ConnectionProfile connectionProfile, ActionListener<Void> listener) {
    if (isLocalNode(node)) {
        // No network connection is needed to reach ourselves.
        listener.onResponse(null);
        return;
    }
    connectionManager.connectToNode(node, connectionProfile, connectionValidator(node), listener);
}

/**
 * Builds a validator that handshakes over a freshly opened connection and
 * verifies the remote identity matches the node we intended to connect to.
 */
public ConnectionManager.ConnectionValidator connectionValidator(DiscoveryNode node) {
    return (newConnection, actualProfile, listener) -> {
        // We don't validate cluster names to allow for CCS connections.
        handshake(newConnection, actualProfile.getHandshakeTimeout().millis(), cn -> true, ActionListener.map(listener, resp -> {
            final DiscoveryNode remote = resp.discoveryNode;
            if (node.equals(remote) == false) {
                throw new ConnectTransportException(node, "handshake failed. unexpected remote node " + remote);
            }
            return null;
        }));
    };
}
/**
 * Establishes a new connection to the given node. The connection is NOT maintained by this service, it's the callers
 * responsibility to close the connection once it goes out of scope.
 * The ActionListener will be called on the calling thread or the generic thread pool.
 * @param node the node to connect to
 * @param connectionProfile the connection profile to use
 * @param listener the action listener to notify
 */
public void openConnection(final DiscoveryNode node, ConnectionProfile connectionProfile,
                           ActionListener<Transport.Connection> listener) {
    if (isLocalNode(node) == false) {
        connectionManager.openConnection(node, connectionProfile, listener);
        return;
    }
    // Requests to ourselves are short-circuited through the synthetic local connection.
    listener.onResponse(localNodeConnection);
}
/**
 * Executes a high-level handshake using the given connection
 * and returns the discovery node of the node the connection
 * was established with. The handshake will fail if the cluster
 * name on the target node mismatches the local cluster name.
 * The ActionListener will be called on the calling thread or the generic thread pool.
 *
 * @param connection the connection to a specific node
 * @param handshakeTimeout handshake timeout
 * @param listener action listener to notify
 * @throws ConnectTransportException if the connection failed
 * @throws IllegalStateException if the handshake failed
 */
public void handshake(
    final Transport.Connection connection,
    final long handshakeTimeout,
    final ActionListener<DiscoveryNode> listener) {
    // Delegates to the predicate variant, requiring an equal cluster name.
    handshake(connection, handshakeTimeout, clusterName.getEqualityPredicate(),
        ActionListener.map(listener, HandshakeResponse::getDiscoveryNode));
}

/**
 * Executes a high-level handshake using the given connection
 * and returns the discovery node of the node the connection
 * was established with. The handshake will fail if the cluster
 * name on the target node doesn't match the local cluster name.
 * The ActionListener will be called on the calling thread or the generic thread pool.
 *
 * @param connection the connection to a specific node
 * @param handshakeTimeout handshake timeout
 * @param clusterNamePredicate cluster name validation predicate
 * @param listener action listener to notify
 * @throws IllegalStateException if the handshake failed
 */
public void handshake(
    final Transport.Connection connection,
    final long handshakeTimeout, Predicate<ClusterName> clusterNamePredicate,
    final ActionListener<HandshakeResponse> listener) {
    final DiscoveryNode node = connection.getNode();
    sendRequest(connection, HANDSHAKE_ACTION_NAME, HandshakeRequest.INSTANCE,
        TransportRequestOptions.builder().withTimeout(handshakeTimeout).build(),
        new ActionListenerResponseHandler<>(
            new ActionListener<>() {
                @Override
                public void onResponse(HandshakeResponse response) {
                    // Validate cluster name first, then version compatibility.
                    if (clusterNamePredicate.test(response.clusterName) == false) {
                        listener.onFailure(new IllegalStateException("handshake with [" + node + "] failed: remote cluster name ["
                            + response.clusterName.value() + "] does not match " + clusterNamePredicate));
                    } else if (response.version.isCompatible(localNode.getVersion()) == false) {
                        listener.onFailure(new IllegalStateException("handshake with [" + node + "] failed: remote node version ["
                            + response.version + "] is incompatible with local node version [" + localNode.getVersion() + "]"));
                    } else {
                        listener.onResponse(response);
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    listener.onFailure(e);
                }
            }
            , HandshakeResponse::new, ThreadPool.Names.GENERIC
        ));
}
/** Returns the {@link ConnectionManager} that tracks this service's node connections. */
public ConnectionManager getConnectionManager() {
    return connectionManager;
}
/**
 * Payload-less request used for the handshake action; a single shared
 * {@link #INSTANCE} is used for all outgoing handshakes.
 */
static class HandshakeRequest extends TransportRequest {

    public static final HandshakeRequest INSTANCE = new HandshakeRequest();

    // Stream constructor required by the transport layer for wire deserialization.
    HandshakeRequest(StreamInput in) throws IOException {
        super(in);
    }

    // Private: external code must use the shared INSTANCE.
    private HandshakeRequest() {
    }
}
/**
 * Response to a handshake, carrying the responding node's identity
 * (may be absent on the wire — see {@code readOptionalWriteable}),
 * its cluster name, and its version.
 */
public static class HandshakeResponse extends TransportResponse {
    private final DiscoveryNode discoveryNode;
    private final ClusterName clusterName;
    private final Version version;

    public HandshakeResponse(DiscoveryNode discoveryNode, ClusterName clusterName, Version version) {
        this.discoveryNode = discoveryNode;
        this.version = version;
        this.clusterName = clusterName;
    }

    // Wire deserialization; field order must mirror writeTo below.
    public HandshakeResponse(StreamInput in) throws IOException {
        super(in);
        discoveryNode = in.readOptionalWriteable(DiscoveryNode::new);
        clusterName = new ClusterName(in);
        version = Version.readVersion(in);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeOptionalWriteable(discoveryNode);
        clusterName.writeTo(out);
        Version.writeVersion(version, out);
    }

    /** The node that answered the handshake; optional on the wire, so may be null. */
    public DiscoveryNode getDiscoveryNode() {
        return discoveryNode;
    }

    /** The cluster name reported by the remote node. */
    public ClusterName getClusterName() {
        return clusterName;
    }
}
/**
 * Disconnects from the given node. Disconnecting from the local node is a
 * no-op because no real transport connection is held to it.
 */
public void disconnectFromNode(DiscoveryNode node) {
    if (isLocalNode(node) == false) {
        connectionManager.disconnectFromNode(node);
    }
}
/** Registers a listener that is notified of transport-level message events. */
public void addMessageListener(TransportMessageListener listener) {
    messageListener.listeners.add(listener);
}

/** Removes a previously registered message listener; returns true if it was registered. */
public boolean removeMessageListener(TransportMessageListener listener) {
    return messageListener.listeners.remove(listener);
}

/** Registers a listener that is notified of connection open/close events. */
public void addConnectionListener(TransportConnectionListener listener) {
    connectionManager.addListener(listener);
}

/** Removes a previously registered connection listener. */
public void removeConnectionListener(TransportConnectionListener listener) {
    connectionManager.removeListener(listener);
}
/**
 * Sends a request to the given node using the default (empty) request options.
 * If the node is not connected, the {@link NodeNotConnectedException} is delivered
 * to the handler rather than thrown to the caller.
 *
 * @param node    the node to send the request to
 * @param action  the name of the action
 * @param request the request
 * @param handler the response handler
 * @param <T>     the type of the transport response
 */
public <T extends TransportResponse> void sendRequest(final DiscoveryNode node, final String action,
                                                      final TransportRequest request,
                                                      final TransportResponseHandler<T> handler) {
    // Delegate to the options-accepting overload so the connection-resolution and
    // error-notification logic lives in exactly one place.
    sendRequest(node, action, request, TransportRequestOptions.EMPTY, handler);
}
/**
 * Sends a request to the given node with the given options. If the node is not
 * connected, the {@link NodeNotConnectedException} is delivered to the handler
 * rather than thrown to the caller.
 *
 * @param node    the node to send the request to
 * @param action  the name of the action
 * @param request the request
 * @param options the options for this request
 * @param handler the response handler
 * @param <T>     the type of the transport response
 */
public final <T extends TransportResponse> void sendRequest(final DiscoveryNode node, final String action,
                                                            final TransportRequest request,
                                                            final TransportRequestOptions options,
                                                            TransportResponseHandler<T> handler) {
    final Transport.Connection connection;
    try {
        connection = getConnection(node);
    } catch (final NodeNotConnectedException ex) {
        // the caller might not handle this so we invoke the handler
        handler.handleException(ex);
        return;
    }
    sendRequest(connection, action, request, options, handler);
}
/**
 * Sends a request on the specified connection. If there is a failure sending the request, the specified handler is invoked.
 *
 * @param connection the connection to send the request on
 * @param action     the name of the action
 * @param request    the request
 * @param options    the options for this request
 * @param handler    the response handler
 * @param <T>        the type of the transport response
 */
public final <T extends TransportResponse> void sendRequest(final Transport.Connection connection, final String action,
                                                            final TransportRequest request,
                                                            final TransportRequestOptions options,
                                                            final TransportResponseHandler<T> handler) {
    try {
        final TransportResponseHandler<T> delegate;
        if (request.getParentTask().isSet()) {
            // TODO: capture the connection instead so that we can cancel child tasks on the remote connections.
            // Register this request as a child of its parent task; the returned Releasable
            // undoes the registration exactly once, on either response or failure.
            final Releasable unregisterChildNode = taskManager.registerChildNode(request.getParentTask().getId(), connection.getNode());
            // Wrap the caller's handler so the child-node registration is released before delegating.
            delegate = new TransportResponseHandler<>() {
                @Override
                public void handleResponse(T response) {
                    unregisterChildNode.close();
                    handler.handleResponse(response);
                }
                @Override
                public void handleException(TransportException exp) {
                    unregisterChildNode.close();
                    handler.handleException(exp);
                }
                @Override
                public String executor() {
                    return handler.executor();
                }
                @Override
                public T read(StreamInput in) throws IOException {
                    return handler.read(in);
                }
                @Override
                public String toString() {
                    return getClass().getName() + "/[" + action + "]:" + handler.toString();
                }
            };
        } else {
            delegate = handler;
        }
        asyncSender.sendRequest(connection, action, request, options, delegate);
    } catch (final Exception ex) {
        // the caller might not handle this so we invoke the handler
        final TransportException te;
        if (ex instanceof TransportException) {
            te = (TransportException) ex;
        } else {
            te = new TransportException("failure to send", ex);
        }
        handler.handleException(te);
    }
}
/**
 * Returns either a real transport connection or a local node connection if we are using the local node optimization.
 *
 * @throws NodeNotConnectedException if the given node is not connected
 */
public Transport.Connection getConnection(DiscoveryNode node) {
    return isLocalNode(node) ? localNodeConnection : connectionManager.getConnection(node);
}
/**
 * Sends a request that is registered as a child of {@code parentTask}, resolving a
 * connection to the given node first. If the node is not connected, the
 * {@link NodeNotConnectedException} is delivered to the handler rather than thrown.
 */
public final <T extends TransportResponse> void sendChildRequest(final DiscoveryNode node, final String action,
                                                                 final TransportRequest request, final Task parentTask,
                                                                 final TransportRequestOptions options,
                                                                 final TransportResponseHandler<T> handler) {
    final Transport.Connection connection;
    try {
        connection = getConnection(node);
    } catch (final NodeNotConnectedException ex) {
        // the caller might not handle this so we invoke the handler
        handler.handleException(ex);
        return;
    }
    sendChildRequest(connection, action, request, parentTask, options, handler);
}

/** Child-request variant using the default (empty) request options. */
public <T extends TransportResponse> void sendChildRequest(final Transport.Connection connection, final String action,
                                                           final TransportRequest request, final Task parentTask,
                                                           final TransportResponseHandler<T> handler) {
    sendChildRequest(connection, action, request, parentTask, TransportRequestOptions.EMPTY, handler);
}

/**
 * Sends a child request on the given connection: tags the request with the parent
 * task id (owned by the local node) before sending so the task manager can associate it.
 */
public <T extends TransportResponse> void sendChildRequest(final Transport.Connection connection, final String action,
                                                           final TransportRequest request, final Task parentTask,
                                                           final TransportRequestOptions options,
                                                           final TransportResponseHandler<T> handler) {
    request.setParentTask(localNode.getId(), parentTask.getId());
    sendRequest(connection, action, request, options, handler);
}
/**
 * Core send path: registers a response context under a fresh request id, arms an
 * optional timeout, and hands the request to the connection. On any synchronous
 * send failure the registered context is removed and its handler notified
 * asynchronously with a {@link SendRequestTransportException}.
 */
private <T extends TransportResponse> void sendRequestInternal(final Transport.Connection connection, final String action,
                                                               final TransportRequest request,
                                                               final TransportRequestOptions options,
                                                               TransportResponseHandler<T> handler) {
    if (connection == null) {
        throw new IllegalStateException("can't send request to a null connection");
    }
    DiscoveryNode node = connection.getNode();
    // Capture the current thread context so the response handler runs with the sender's context restored.
    Supplier<ThreadContext.StoredContext> storedContextSupplier = threadPool.getThreadContext().newRestorableContext(true);
    ContextRestoreResponseHandler<T> responseHandler = new ContextRestoreResponseHandler<>(storedContextSupplier, handler);
    // TODO we can probably fold this entire request ID dance into connection.sendRequest but it will be a bigger refactoring
    final long requestId = responseHandlers.add(new Transport.ResponseContext<>(responseHandler, connection, action));
    final TimeoutHandler timeoutHandler;
    if (options.timeout() != null) {
        timeoutHandler = new TimeoutHandler(requestId, connection.getNode(), action);
        responseHandler.setTimeoutHandler(timeoutHandler);
    } else {
        timeoutHandler = null;
    }
    try {
        if (lifecycle.stoppedOrClosed()) {
            /*
             * If we are not started the exception handling will remove the request holder again and calls the handler to notify the
             * caller. It will only notify if toStop hasn't done the work yet.
             */
            throw new NodeClosedException(localNode);
        }
        if (timeoutHandler != null) {
            assert options.timeout() != null;
            timeoutHandler.scheduleTimeout(options.timeout());
        }
        connection.sendRequest(requestId, action, request, options); // local node optimization happens upstream
    } catch (final Exception e) {
        // usually happen either because we failed to connect to the node
        // or because we failed serializing the message
        final Transport.ResponseContext<? extends TransportResponse> contextToNotify = responseHandlers.remove(requestId);
        // If holderToNotify == null then handler has already been taken care of.
        if (contextToNotify != null) {
            if (timeoutHandler != null) {
                timeoutHandler.cancel();
            }
            // callback that an exception happened, but on a different thread since we don't
            // want handlers to worry about stack overflows. In the special case of running into a closing node we run on the current
            // thread on a best effort basis though.
            final SendRequestTransportException sendRequestException = new SendRequestTransportException(node, action, e);
            final String executor = lifecycle.stoppedOrClosed() ? ThreadPool.Names.SAME : ThreadPool.Names.GENERIC;
            threadPool.executor(executor).execute(new AbstractRunnable() {
                @Override
                public void onRejection(Exception e) {
                    // if we get rejected during node shutdown we don't wanna bubble it up
                    logger.debug(
                        () -> new ParameterizedMessage(
                            "failed to notify response handler on rejection, action: {}",
                            contextToNotify.action()),
                        e);
                }
                @Override
                public void onFailure(Exception e) {
                    logger.warn(
                        () -> new ParameterizedMessage(
                            "failed to notify response handler on exception, action: {}",
                            contextToNotify.action()),
                        e);
                }
                @Override
                protected void doRun() throws Exception {
                    contextToNotify.handler().handleException(sendRequestException);
                }
            });
        } else {
            logger.debug("Exception while sending request, handler likely already notified due to timeout", e);
        }
    }
}
/**
 * Executes a request addressed to the local node without going over the wire:
 * fires the sent/received notifications, looks up the registered handler for the
 * action, and processes the request either inline (SAME executor) or on the
 * handler's executor. Failures are routed back through the direct channel.
 */
private void sendLocalRequest(long requestId, final String action, final TransportRequest request, TransportRequestOptions options) {
    final DirectResponseChannel channel = new DirectResponseChannel(localNode, action, requestId, this, threadPool);
    try {
        // Mirror the remote path's bookkeeping so tracers/listeners see the same events.
        onRequestSent(localNode, requestId, action, request, options);
        onRequestReceived(requestId, action);
        final RequestHandlerRegistry reg = getRequestHandler(action);
        if (reg == null) {
            throw new ActionNotFoundTransportException("Action [" + action + "] not found");
        }
        final String executor = reg.getExecutor();
        if (ThreadPool.Names.SAME.equals(executor)) {
            //noinspection unchecked
            reg.processMessageReceived(request, channel);
        } else {
            threadPool.executor(executor).execute(new AbstractRunnable() {
                @Override
                protected void doRun() throws Exception {
                    //noinspection unchecked
                    reg.processMessageReceived(request, channel);
                }
                @Override
                public boolean isForceExecution() {
                    return reg.isForceExecution();
                }
                @Override
                public void onFailure(Exception e) {
                    // Deliver the failure through the channel so the waiting handler is notified.
                    try {
                        channel.sendResponse(e);
                    } catch (Exception inner) {
                        inner.addSuppressed(e);
                        logger.warn(() -> new ParameterizedMessage(
                            "failed to notify channel of error message for action [{}]", action), inner);
                    }
                }
                @Override
                public String toString() {
                    return "processing of [" + requestId + "][" + action + "]: " + request;
                }
            });
        }
    } catch (Exception e) {
        try {
            channel.sendResponse(e);
        } catch (Exception inner) {
            inner.addSuppressed(e);
            logger.warn(
                () -> new ParameterizedMessage(
                    "failed to notify channel of error message for action [{}]", action), inner);
        }
    }
}
/** Returns true if the action should be traced per this service's include/exclude patterns. */
private boolean shouldTraceAction(String action) {
    return shouldTraceAction(action, tracerLogInclude, tracerLogExclude);
}
/**
 * Returns true if {@code action} should be traced: it must match the include
 * patterns (when any are configured) and must not match any exclude pattern.
 *
 * @param action  the action name to test
 * @param include simple-match patterns that an action must match when non-empty
 * @param exclude simple-match patterns that reject an action when matched
 */
public static boolean shouldTraceAction(String action, String[] include, String[] exclude) {
    // Written with `== false` to match the convention used throughout this class.
    if (include.length > 0 && Regex.simpleMatch(include, action) == false) {
        return false;
    }
    return exclude.length == 0 || Regex.simpleMatch(exclude, action) == false;
}
/**
 * Resolves the given address string to transport addresses via the underlying transport.
 *
 * @throws UnknownHostException if the address cannot be resolved
 */
public TransportAddress[] addressesFromString(String address) throws UnknownHostException {
    return transport.addressesFromString(address);
}
/**
 * A set of all valid action prefixes. Registered action names are expected to
 * start with one of these; see {@code validateActionName}, which currently only
 * warns (not throws) on violations.
 */
public static final Set<String> VALID_ACTION_PREFIXES = Set.of(
    "indices:admin",
    "indices:monitor",
    "indices:data/write",
    "indices:data/read",
    "indices:internal",
    "cluster:admin",
    "cluster:monitor",
    "cluster:internal",
    "internal:");
/**
 * Warns when an action name does not start with one of {@link #VALID_ACTION_PREFIXES}.
 */
private void validateActionName(String actionName) {
    // TODO: turn this into a hard validation (throw) once a backwards-compatibility /
    // deprecation layer exists for pre-existing non-conforming action names.
    if (isValidActionName(actionName) == false) {
        final String message = "invalid action name [" + actionName + "] must start with one of: "
            + TransportService.VALID_ACTION_PREFIXES;
        logger.warn(message);
    }
}
/**
 * Returns <code>true</code> iff the action name starts with a valid prefix.
 *
 * @see #VALID_ACTION_PREFIXES
 */
public static boolean isValidActionName(String actionName) {
    return VALID_ACTION_PREFIXES.stream().anyMatch(actionName::startsWith);
}
/**
 * Registers a new request handler
 *
 * @param action        The action the request handler is associated with
 * @param requestReader a callable to be used construct new instances for streaming
 * @param executor      The executor the request handling will be executed on
 * @param handler       The handler itself that implements the request handling
 */
public <Request extends TransportRequest> void registerRequestHandler(String action, String executor,
                                                                      Writeable.Reader<Request> requestReader,
                                                                      TransportRequestHandler<Request> handler) {
    // Delegate to the full overload with the defaults this method previously inlined:
    // forceExecution=false, canTripCircuitBreaker=true.
    registerRequestHandler(action, executor, false, true, requestReader, handler);
}
/**
 * Registers a new request handler
 *
 * @param action                The action the request handler is associated with
 * @param requestReader         The request class that will be used to construct new instances for streaming
 * @param executor              The executor the request handling will be executed on
 * @param forceExecution        Force execution on the executor queue and never reject it
 * @param canTripCircuitBreaker Check the request size and raise an exception in case the limit is breached.
 * @param handler               The handler itself that implements the request handling
 */
public <Request extends TransportRequest> void registerRequestHandler(String action,
                                                                      String executor, boolean forceExecution,
                                                                      boolean canTripCircuitBreaker,
                                                                      Writeable.Reader<Request> requestReader,
                                                                      TransportRequestHandler<Request> handler) {
    validateActionName(action);
    // Give the interceptor a chance to wrap the handler (e.g. for security or tracing).
    handler = interceptor.interceptHandler(action, executor, forceExecution, handler);
    RequestHandlerRegistry<Request> reg = new RequestHandlerRegistry<>(
        action, requestReader, taskManager, handler, executor, forceExecution, canTripCircuitBreaker);
    transport.registerRequestHandler(reg);
}
/**
 * called by the {@link Transport} implementation when an incoming request arrives but before
 * any parsing of it has happened (with the exception of the requestId and action)
 */
@Override
public void onRequestReceived(long requestId, String action) {
    // Reject requests until the service has been started and accepts incoming traffic.
    if (handleIncomingRequests.get() == false) {
        throw new IllegalStateException("transport not ready yet to handle incoming requests");
    }
    if (tracerLog.isTraceEnabled() && shouldTraceAction(action)) {
        tracerLog.trace("[{}][{}] received request", requestId, action);
    }
    messageListener.onRequestReceived(requestId, action);
}

/** called by the {@link Transport} implementation once a request has been sent */
@Override
public void onRequestSent(DiscoveryNode node, long requestId, String action, TransportRequest request,
                          TransportRequestOptions options) {
    if (tracerLog.isTraceEnabled() && shouldTraceAction(action)) {
        tracerLog.trace("[{}][{}] sent to [{}] (timeout: [{}])", requestId, action, node, options.timeout());
    }
    messageListener.onRequestSent(node, requestId, action, request, options);
}

/**
 * Called when a response arrives. A null holder means no handler was found for the
 * request id — typically because the request already timed out — so we check the
 * timeout bookkeeping instead of tracing.
 */
@Override
public void onResponseReceived(long requestId, Transport.ResponseContext holder) {
    if (holder == null) {
        checkForTimeout(requestId);
    } else if (tracerLog.isTraceEnabled() && shouldTraceAction(holder.action())) {
        tracerLog.trace("[{}][{}] received response from [{}]", requestId, holder.action(), holder.connection().getNode());
    }
    messageListener.onResponseReceived(requestId, holder);
}

/** called by the {@link Transport} implementation once a response was sent to calling node */
@Override
public void onResponseSent(long requestId, String action, TransportResponse response) {
    if (tracerLog.isTraceEnabled() && shouldTraceAction(action)) {
        tracerLog.trace("[{}][{}] sent response", requestId, action);
    }
    messageListener.onResponseSent(requestId, action, response);
}

/** called by the {@link Transport} implementation after an exception was sent as a response to an incoming request */
@Override
public void onResponseSent(long requestId, String action, Exception e) {
    if (tracerLog.isTraceEnabled() && shouldTraceAction(action)) {
        tracerLog.trace(() -> new ParameterizedMessage("[{}][{}] sent error response", requestId, action), e);
    }
    messageListener.onResponseSent(requestId, action, e);
}
/** Returns the registered handler for the given action, or null if none is registered. */
public RequestHandlerRegistry<? extends TransportRequest> getRequestHandler(String action) {
    return transport.getRequestHandlers().getHandler(action);
}
/**
 * Called when a response arrives for a request id that no longer has a handler:
 * correlates it with the timeout bookkeeping (if the request timed out earlier)
 * and logs/traces accordingly.
 */
private void checkForTimeout(long requestId) {
    // lets see if its in the timeout holder, but sync on mutex to make sure any ongoing timeout handling has finished
    final DiscoveryNode sourceNode;
    final String action;
    assert responseHandlers.contains(requestId) == false;
    TimeoutInfoHolder timeoutInfoHolder = timeoutInfoHandlers.remove(requestId);
    if (timeoutInfoHolder != null) {
        long time = threadPool.relativeTimeInMillis();
        logger.warn("Received response for a request that has timed out, sent [{}ms] ago, timed out [{}ms] ago, " +
            "action [{}], node [{}], id [{}]", time - timeoutInfoHolder.sentTime(), time - timeoutInfoHolder.timeoutTime(),
            timeoutInfoHolder.action(), timeoutInfoHolder.node(), requestId);
        action = timeoutInfoHolder.action();
        sourceNode = timeoutInfoHolder.node();
    } else {
        // No timeout record either: the response cannot be correlated with any known request.
        logger.warn("Transport response handler not found of id [{}]", requestId);
        action = null;
        sourceNode = null;
    }
    // call tracer out of lock
    if (tracerLog.isTraceEnabled() == false) {
        return;
    }
    if (action == null) {
        assert sourceNode == null;
        tracerLog.trace("[{}] received response but can't resolve it to a request", requestId);
    } else if (shouldTraceAction(action)) {
        tracerLog.trace("[{}][{}] received response from [{}]", requestId, action, sourceNode);
    }
}
/**
 * When a connection closes, prunes every pending response context bound to that
 * connection and notifies each handler with a {@link NodeDisconnectedException}
 * on a separate thread (to avoid stack overflows in handler chains).
 */
@Override
public void onConnectionClosed(Transport.Connection connection) {
    try {
        List<Transport.ResponseContext<? extends TransportResponse>> pruned =
            responseHandlers.prune(h -> h.connection().getCacheKey().equals(connection.getCacheKey()));
        // callback that an exception happened, but on a different thread since we don't
        // want handlers to worry about stack overflows
        getExecutorService().execute(new Runnable() {
            @Override
            public void run() {
                for (Transport.ResponseContext holderToNotify : pruned) {
                    holderToNotify.handler().handleException(
                        new NodeDisconnectedException(connection.getNode(), holderToNotify.action()));
                }
            }
            @Override
            public String toString() {
                return "onConnectionClosed(" + connection.getNode() + ")";
            }
        });
    } catch (EsRejectedExecutionException ex) {
        // Best effort during shutdown: the executor may already reject new work.
        logger.debug("Rejected execution on onConnectionClosed", ex);
    }
}
/**
 * Scheduled task that fires when a request's timeout elapses: it records timeout
 * info, removes the pending response context, and notifies the handler with a
 * {@link ReceiveTimeoutTransportException}.
 */
final class TimeoutHandler implements Runnable {

    private final long requestId;
    // Send time captured at construction, used to compute the elapsed duration on timeout.
    private final long sentTime = threadPool.relativeTimeInMillis();
    private final String action;
    private final DiscoveryNode node;
    volatile Scheduler.Cancellable cancellable;

    TimeoutHandler(long requestId, DiscoveryNode node, String action) {
        this.requestId = requestId;
        this.node = node;
        this.action = action;
    }

    @Override
    public void run() {
        // Only act if the request is still pending; a completed request has already
        // been removed from responseHandlers.
        if (responseHandlers.contains(requestId)) {
            long timeoutTime = threadPool.relativeTimeInMillis();
            timeoutInfoHandlers.put(requestId, new TimeoutInfoHolder(node, action, sentTime, timeoutTime));
            // now that we have the information visible via timeoutInfoHandlers, we try to remove the request id
            final Transport.ResponseContext<? extends TransportResponse> holder = responseHandlers.remove(requestId);
            if (holder != null) {
                assert holder.action().equals(action);
                assert holder.connection().getNode().equals(node);
                holder.handler().handleException(
                    new ReceiveTimeoutTransportException(holder.connection().getNode(), holder.action(),
                        "request_id [" + requestId + "] timed out after [" + (timeoutTime - sentTime) + "ms]"));
            } else {
                // response was processed, remove timeout info.
                timeoutInfoHandlers.remove(requestId);
            }
        }
    }

    /**
     * cancels timeout handling. this is a best effort only to avoid running it. remove the requestId from {@link #responseHandlers}
     * to make sure this doesn't run.
     */
    public void cancel() {
        assert responseHandlers.contains(requestId) == false :
            "cancel must be called after the requestId [" + requestId + "] has been removed from clientHandlers";
        if (cancellable != null) {
            cancellable.cancel();
        }
    }

    @Override
    public String toString() {
        return "timeout handler for [" + requestId + "][" + action + "]";
    }

    // Arms the timeout on the generic pool; the Cancellable allows best-effort cancellation.
    private void scheduleTimeout(TimeValue timeout) {
        this.cancellable = threadPool.schedule(this, timeout, ThreadPool.Names.GENERIC);
    }
}
/**
 * Immutable record of a timed-out request: which node/action it targeted and
 * when it was sent and timed out (relative-time millis), kept so a late response
 * can still be correlated and logged.
 */
static class TimeoutInfoHolder {

    private final DiscoveryNode node;
    private final String action;
    private final long sentTime;
    private final long timeoutTime;

    TimeoutInfoHolder(DiscoveryNode node, String action, long sentTime, long timeoutTime) {
        this.node = node;
        this.action = action;
        this.sentTime = sentTime;
        this.timeoutTime = timeoutTime;
    }

    public DiscoveryNode node() {
        return node;
    }

    public String action() {
        return action;
    }

    public long sentTime() {
        return sentTime;
    }

    public long timeoutTime() {
        return timeoutTime;
    }
}
/**
 * This handler wrapper ensures that the response thread executes with the correct thread context. Before any of the handle methods
 * are invoked we restore the context. It also cancels the associated timeout handler (if any)
 * once a response or exception arrives.
 */
public static final class ContextRestoreResponseHandler<T extends TransportResponse> implements TransportResponseHandler<T> {

    private final TransportResponseHandler<T> delegate;
    private final Supplier<ThreadContext.StoredContext> contextSupplier;
    // Set after construction via setTimeoutHandler; volatile because it is written by the
    // sending thread and read by the response thread.
    private volatile TimeoutHandler handler;

    public ContextRestoreResponseHandler(Supplier<ThreadContext.StoredContext> contextSupplier, TransportResponseHandler<T> delegate) {
        this.delegate = delegate;
        this.contextSupplier = contextSupplier;
    }

    @Override
    public T read(StreamInput in) throws IOException {
        return delegate.read(in);
    }

    @Override
    public void handleResponse(T response) {
        cancelTimeoutHandler();
        try (ThreadContext.StoredContext ignore = contextSupplier.get()) {
            delegate.handleResponse(response);
        }
    }

    @Override
    public void handleException(TransportException exp) {
        cancelTimeoutHandler();
        try (ThreadContext.StoredContext ignore = contextSupplier.get()) {
            delegate.handleException(exp);
        }
    }

    @Override
    public String executor() {
        return delegate.executor();
    }

    @Override
    public String toString() {
        return getClass().getName() + "/" + delegate.toString();
    }

    void setTimeoutHandler(TimeoutHandler handler) {
        this.handler = handler;
    }

    // Shared cancellation logic; reads the volatile field once to avoid acting on two
    // different observations of it.
    private void cancelTimeoutHandler() {
        final TimeoutHandler timeoutHandler = handler;
        if (timeoutHandler != null) {
            timeoutHandler.cancel();
        }
    }
}
/**
 * Channel used for requests addressed to the local node: instead of serializing
 * over the wire, it looks up the pending response handler by request id and
 * invokes it directly, on the handler's executor when that is not SAME.
 */
static class DirectResponseChannel implements TransportChannel {

    final DiscoveryNode localNode;
    private final String action;
    private final long requestId;
    final TransportService service;
    final ThreadPool threadPool;

    DirectResponseChannel(DiscoveryNode localNode, String action, long requestId, TransportService service, ThreadPool threadPool) {
        this.localNode = localNode;
        this.action = action;
        this.requestId = requestId;
        this.service = service;
        this.threadPool = threadPool;
    }

    @Override
    public String getProfileName() {
        return DIRECT_RESPONSE_PROFILE;
    }

    @Override
    public void sendResponse(TransportResponse response) throws IOException {
        service.onResponseSent(requestId, action, response);
        final TransportResponseHandler handler = service.responseHandlers.onResponseReceived(requestId, service);
        // ignore if its null, the service logs it
        if (handler != null) {
            final String executor = handler.executor();
            if (ThreadPool.Names.SAME.equals(executor)) {
                processResponse(handler, response);
            } else {
                threadPool.executor(executor).execute(new Runnable() {
                    @Override
                    public void run() {
                        processResponse(handler, response);
                    }
                    @Override
                    public String toString() {
                        return "delivery of response to [" + requestId + "][" + action + "]: " + response;
                    }
                });
            }
        }
    }

    @SuppressWarnings("unchecked")
    protected void processResponse(TransportResponseHandler handler, TransportResponse response) {
        try {
            handler.handleResponse(response);
        } catch (Exception e) {
            // A handler failure is reported back through the exception path, wrapped as remote.
            processException(handler, wrapInRemote(new ResponseHandlerFailureTransportException(e)));
        }
    }

    @Override
    public void sendResponse(Exception exception) throws IOException {
        service.onResponseSent(requestId, action, exception);
        final TransportResponseHandler handler = service.responseHandlers.onResponseReceived(requestId, service);
        // ignore if its null, the service logs it
        if (handler != null) {
            final RemoteTransportException rtx = wrapInRemote(exception);
            final String executor = handler.executor();
            if (ThreadPool.Names.SAME.equals(executor)) {
                processException(handler, rtx);
            } else {
                // Use the cached executor name rather than calling handler.executor() a second
                // time, mirroring the success path in sendResponse(TransportResponse).
                threadPool.executor(executor).execute(new Runnable() {
                    @Override
                    public void run() {
                        processException(handler, rtx);
                    }
                    @Override
                    public String toString() {
                        return "delivery of failure response to [" + requestId + "][" + action + "]: " + exception;
                    }
                });
            }
        }
    }

    // Wraps a local failure as a RemoteTransportException (idempotent for already-wrapped ones)
    // so local handlers see the same exception shape as for remote responses.
    protected RemoteTransportException wrapInRemote(Exception e) {
        if (e instanceof RemoteTransportException) {
            return (RemoteTransportException) e;
        }
        return new RemoteTransportException(localNode.getName(), localNode.getAddress(), action, e);
    }

    protected void processException(final TransportResponseHandler handler, final RemoteTransportException rtx) {
        try {
            handler.handleException(rtx);
        } catch (Exception e) {
            logger.error(
                () -> new ParameterizedMessage(
                    "failed to handle exception for action [{}], handler [{}]", action, handler), e);
        }
    }

    @Override
    public String getChannelType() {
        return "direct";
    }

    @Override
    public Version getVersion() {
        return localNode.getVersion();
    }
}
/**
 * Returns the internal thread pool
 */
public ThreadPool getThreadPool() {
    return threadPool;
}

/**
 * Returns true if the given node is this service's local node.
 *
 * @throws NullPointerException if {@code discoveryNode} is null
 */
private boolean isLocalNode(DiscoveryNode discoveryNode) {
    return Objects.requireNonNull(discoveryNode, "discovery node must not be null").equals(localNode);
}
/**
 * Fan-out listener that forwards every transport message event to all registered
 * listeners. Backed by a CopyOnWriteArrayList so registration is safe while
 * events are being dispatched.
 */
private static final class DelegatingTransportMessageListener implements TransportMessageListener {

    private final List<TransportMessageListener> listeners = new CopyOnWriteArrayList<>();

    @Override
    public void onRequestReceived(long requestId, String action) {
        listeners.forEach(listener -> listener.onRequestReceived(requestId, action));
    }

    @Override
    public void onResponseSent(long requestId, String action, TransportResponse response) {
        listeners.forEach(listener -> listener.onResponseSent(requestId, action, response));
    }

    @Override
    public void onResponseSent(long requestId, String action, Exception error) {
        listeners.forEach(listener -> listener.onResponseSent(requestId, action, error));
    }

    @Override
    public void onRequestSent(DiscoveryNode node, long requestId, String action, TransportRequest request,
                              TransportRequestOptions finalOptions) {
        listeners.forEach(listener -> listener.onRequestSent(node, requestId, action, request, finalOptions));
    }

    @Override
    public void onResponseReceived(long requestId, Transport.ResponseContext holder) {
        listeners.forEach(listener -> listener.onResponseReceived(requestId, holder));
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.dispatcher.runner;
import org.apache.flink.core.testutils.OneShotLatch;
import org.apache.flink.runtime.clusterframework.ApplicationStatus;
import org.apache.flink.runtime.concurrent.FutureUtils;
import org.apache.flink.runtime.dispatcher.DispatcherId;
import org.apache.flink.runtime.leaderelection.TestingLeaderElectionService;
import org.apache.flink.runtime.util.LeaderConnectionInfo;
import org.apache.flink.runtime.util.TestingFatalErrorHandler;
import org.apache.flink.runtime.webmonitor.TestingDispatcherGateway;
import org.apache.flink.util.TestLogger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
/** Tests for the {@link DefaultDispatcherRunner}. */
public class DefaultDispatcherRunnerTest extends TestLogger {
// Test fixtures, re-created for every test by setup() and torn down by teardown().
private TestingLeaderElectionService testingLeaderElectionService;
private TestingFatalErrorHandler testingFatalErrorHandler;
private TestingDispatcherLeaderProcessFactory testingDispatcherLeaderProcessFactory;

/** Creates fresh leader-election, fatal-error, and leader-process-factory fixtures. */
@Before
public void setup() {
    testingLeaderElectionService = new TestingLeaderElectionService();
    testingFatalErrorHandler = new TestingFatalErrorHandler();
    testingDispatcherLeaderProcessFactory =
        TestingDispatcherLeaderProcessFactory.defaultValue();
}

/**
 * Stops the leader election service and rethrows any fatal error captured during
 * the test, so a swallowed fatal error fails the test here.
 */
@After
public void teardown() throws Exception {
    if (testingLeaderElectionService != null) {
        testingLeaderElectionService.stop();
        testingLeaderElectionService = null;
    }
    if (testingFatalErrorHandler != null) {
        testingFatalErrorHandler.rethrowError();
        testingFatalErrorHandler = null;
    }
}
/**
 * Closing the runner must not spuriously complete the shut-down future:
 * after closeAsync() terminates, getShutDownFuture() is still pending.
 */
@Test
public void closeAsync_doesNotCompleteUncompletedShutDownFuture() throws Exception {
    final DispatcherRunner dispatcherRunner = createDispatcherRunner();

    final CompletableFuture<Void> terminationFuture = dispatcherRunner.closeAsync();
    terminationFuture.get();

    final CompletableFuture<ApplicationStatus> shutDownFuture =
        dispatcherRunner.getShutDownFuture();
    assertThat(shutDownFuture.isDone(), is(false));
}
/**
 * While the runner is the leader, a shut-down request coming from the dispatcher
 * leader process is forwarded to the runner's shut-down future.
 */
@Test
public void getShutDownFuture_whileRunning_forwardsDispatcherLeaderProcessShutDownRequest()
        throws Exception {
    final UUID leaderSessionId = UUID.randomUUID();
    final CompletableFuture<ApplicationStatus> shutDownFuture = new CompletableFuture<>();
    final TestingDispatcherLeaderProcess testingDispatcherLeaderProcess =
        TestingDispatcherLeaderProcess.newBuilder(leaderSessionId)
            .setShutDownFuture(shutDownFuture)
            .build();
    testingDispatcherLeaderProcessFactory =
        TestingDispatcherLeaderProcessFactory.from(testingDispatcherLeaderProcess);
    try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
        // Grant leadership so the leader process above becomes active.
        testingLeaderElectionService.isLeader(leaderSessionId);

        final CompletableFuture<ApplicationStatus> dispatcherShutDownFuture =
            dispatcherRunner.getShutDownFuture();
        assertFalse(dispatcherShutDownFuture.isDone());

        // Completing the process's shut-down future must propagate to the runner's future.
        final ApplicationStatus finalApplicationStatus = ApplicationStatus.UNKNOWN;
        shutDownFuture.complete(finalApplicationStatus);
        assertThat(dispatcherShutDownFuture.get(), is(finalApplicationStatus));
    }
}
/**
 * After the runner has been closed, a late shut-down request from the dispatcher
 * leader process must be ignored: the runner's shut-down future stays pending.
 */
@Test
public void getShutDownFuture_afterClose_ignoresDispatcherLeaderProcessShutDownRequest()
        throws Exception {
    final UUID leaderSessionId = UUID.randomUUID();
    final CompletableFuture<ApplicationStatus> shutDownFuture = new CompletableFuture<>();
    final TestingDispatcherLeaderProcess testingDispatcherLeaderProcess =
        TestingDispatcherLeaderProcess.newBuilder(leaderSessionId)
            .setShutDownFuture(shutDownFuture)
            .build();
    testingDispatcherLeaderProcessFactory =
        TestingDispatcherLeaderProcessFactory.from(testingDispatcherLeaderProcess);
    try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
        testingLeaderElectionService.isLeader(leaderSessionId);

        final CompletableFuture<ApplicationStatus> dispatcherShutDownFuture =
            dispatcherRunner.getShutDownFuture();
        assertFalse(dispatcherShutDownFuture.isDone());

        // Close first, then complete the process's shut-down future.
        dispatcherRunner.closeAsync();

        final ApplicationStatus finalApplicationStatus = ApplicationStatus.UNKNOWN;
        shutDownFuture.complete(finalApplicationStatus);
        try {
            // Short bounded wait: the future must NOT complete.
            dispatcherShutDownFuture.get(10L, TimeUnit.MILLISECONDS);
            fail(
                "The dispatcher runner should no longer react to the dispatcher leader process's shut down request if it has been terminated.");
        } catch (TimeoutException expected) {
        }
    }
}
    /**
     * A shut-down request coming from a superseded (old) dispatcher leader process
     * must not complete the runner's shut-down future once a new leader session has
     * taken over.
     */
    @Test
    public void getShutDownFuture_newLeader_ignoresOldDispatcherLeaderProcessShutDownRequest()
            throws Exception {
        final UUID firstLeaderSessionId = UUID.randomUUID();
        final UUID secondLeaderSessionId = UUID.randomUUID();
        final CompletableFuture<ApplicationStatus> shutDownFuture = new CompletableFuture<>();
        final TestingDispatcherLeaderProcess firstTestingDispatcherLeaderProcess =
                TestingDispatcherLeaderProcess.newBuilder(firstLeaderSessionId)
                        .setShutDownFuture(shutDownFuture)
                        .build();
        final TestingDispatcherLeaderProcess secondTestingDispatcherLeaderProcess =
                TestingDispatcherLeaderProcess.newBuilder(secondLeaderSessionId).build();
        testingDispatcherLeaderProcessFactory =
                TestingDispatcherLeaderProcessFactory.from(
                        firstTestingDispatcherLeaderProcess, secondTestingDispatcherLeaderProcess);
        try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
            testingLeaderElectionService.isLeader(firstLeaderSessionId);
            final CompletableFuture<ApplicationStatus> dispatcherShutDownFuture =
                    dispatcherRunner.getShutDownFuture();
            assertFalse(dispatcherShutDownFuture.isDone());
            // leadership moves on to the second session ...
            testingLeaderElectionService.isLeader(secondLeaderSessionId);
            final ApplicationStatus finalApplicationStatus = ApplicationStatus.UNKNOWN;
            // ... so the old process's late shut-down request must be ignored
            shutDownFuture.complete(finalApplicationStatus);
            assertFalse(dispatcherShutDownFuture.isDone());
        }
    }
    /**
     * Revoking leadership must stop the currently running dispatcher leader process.
     */
    @Test
    public void revokeLeadership_withExistingLeader_stopsLeaderProcess() throws Exception {
        final UUID leaderSessionId = UUID.randomUUID();
        final OneShotLatch startLatch = new OneShotLatch();
        final OneShotLatch stopLatch = new OneShotLatch();
        testingDispatcherLeaderProcessFactory =
                TestingDispatcherLeaderProcessFactory.from(
                        TestingDispatcherLeaderProcess.newBuilder(leaderSessionId)
                                .setStartConsumer(ignored -> startLatch.trigger())
                                .setCloseAsyncSupplier(
                                        () -> {
                                            stopLatch.trigger();
                                            return FutureUtils.completedVoidFuture();
                                        })
                                .build());
        try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
            testingLeaderElectionService.isLeader(leaderSessionId);
            // wait until the leader process has been started
            startLatch.await();
            testingLeaderElectionService.notLeader();
            // verify that the leader gets stopped
            stopLatch.await();
        }
    }
    /**
     * A newly granted dispatcher leader process must not be started before the
     * process of the previous leader session has fully terminated.
     */
    @Test
    public void grantLeadership_withExistingLeader_waitsForTerminationOfFirstLeader()
            throws Exception {
        final UUID firstLeaderSessionId = UUID.randomUUID();
        final UUID secondLeaderSessionId = UUID.randomUUID();
        final StartStopDispatcherLeaderProcess firstTestingDispatcherLeaderProcess =
                StartStopDispatcherLeaderProcess.create(firstLeaderSessionId);
        final StartStopDispatcherLeaderProcess secondTestingDispatcherLeaderProcess =
                StartStopDispatcherLeaderProcess.create(secondLeaderSessionId);
        testingDispatcherLeaderProcessFactory =
                TestingDispatcherLeaderProcessFactory.from(
                        firstTestingDispatcherLeaderProcess.asTestingDispatcherLeaderProcess(),
                        secondTestingDispatcherLeaderProcess.asTestingDispatcherLeaderProcess());
        try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
            testingLeaderElectionService.isLeader(firstLeaderSessionId);
            assertThat(firstTestingDispatcherLeaderProcess.isStarted(), is(true));
            testingLeaderElectionService.isLeader(secondLeaderSessionId);
            // the second process must wait for the first one to terminate
            assertThat(secondTestingDispatcherLeaderProcess.isStarted(), is(false));
            firstTestingDispatcherLeaderProcess.terminateProcess();
            assertThat(secondTestingDispatcherLeaderProcess.isStarted(), is(true));
            secondTestingDispatcherLeaderProcess
                    .terminateProcess(); // make the dispatcherRunner terminate
        }
    }
    /**
     * Granting leadership to a valid (still current) leader must confirm the
     * leader session at the election service.
     */
    @Test
    public void grantLeadership_validLeader_confirmsLeaderSession() throws Exception {
        final UUID leaderSessionId = UUID.randomUUID();
        try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
            testingLeaderElectionService.isLeader(leaderSessionId);
            final CompletableFuture<LeaderConnectionInfo> confirmationFuture =
                    testingLeaderElectionService.getConfirmationFuture();
            final LeaderConnectionInfo leaderConnectionInfo = confirmationFuture.get();
            assertThat(leaderConnectionInfo.getLeaderSessionId(), is(leaderSessionId));
        }
    }
    /**
     * A leader session confirmation that arrives only after leadership was already
     * revoked must not be forwarded to the election service.
     */
    @Test
    public void grantLeadership_oldLeader_doesNotConfirmLeaderSession() throws Exception {
        final UUID leaderSessionId = UUID.randomUUID();
        final CompletableFuture<String> contenderConfirmationFuture = new CompletableFuture<>();
        final TestingDispatcherLeaderProcess testingDispatcherLeaderProcess =
                TestingDispatcherLeaderProcess.newBuilder(leaderSessionId)
                        .setConfirmLeaderSessionFuture(contenderConfirmationFuture)
                        .build();
        testingDispatcherLeaderProcessFactory =
                TestingDispatcherLeaderProcessFactory.from(testingDispatcherLeaderProcess);
        try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
            testingLeaderElectionService.isLeader(leaderSessionId);
            testingLeaderElectionService.notLeader();
            // complete the confirmation future after losing the leadership
            contenderConfirmationFuture.complete("leader address");
            final CompletableFuture<LeaderConnectionInfo> leaderElectionConfirmationFuture =
                    testingLeaderElectionService.getConfirmationFuture();
            try {
                leaderElectionConfirmationFuture.get(5L, TimeUnit.MILLISECONDS);
                fail("No valid leader should exist.");
            } catch (TimeoutException expected) {
                // expected: the stale confirmation must never reach the service
            }
        }
    }
    /**
     * With several rapid leadership changes, the last granted leader process must
     * only start once every previously granted process has terminated.
     */
    @Test
    public void
            grantLeadership_multipleLeaderChanges_lastDispatcherLeaderProcessWaitsForOthersToTerminateBeforeItStarts()
                    throws Exception {
        final UUID firstLeaderSession = UUID.randomUUID();
        final UUID secondLeaderSession = UUID.randomUUID();
        final UUID thirdLeaderSession = UUID.randomUUID();
        final CompletableFuture<Void> firstDispatcherLeaderProcessTerminationFuture =
                new CompletableFuture<>();
        final TestingDispatcherLeaderProcess firstDispatcherLeaderProcess =
                TestingDispatcherLeaderProcess.newBuilder(firstLeaderSession)
                        .setCloseAsyncSupplier(() -> firstDispatcherLeaderProcessTerminationFuture)
                        .build();
        final CompletableFuture<Void> secondDispatcherLeaderProcessTerminationFuture =
                new CompletableFuture<>();
        final TestingDispatcherLeaderProcess secondDispatcherLeaderProcess =
                TestingDispatcherLeaderProcess.newBuilder(secondLeaderSession)
                        .setCloseAsyncSupplier(() -> secondDispatcherLeaderProcessTerminationFuture)
                        .build();
        final CompletableFuture<Void> thirdDispatcherLeaderProcessHasBeenStartedFuture =
                new CompletableFuture<>();
        final TestingDispatcherLeaderProcess thirdDispatcherLeaderProcess =
                TestingDispatcherLeaderProcess.newBuilder(thirdLeaderSession)
                        .setStartConsumer(
                                thirdDispatcherLeaderProcessHasBeenStartedFuture::complete)
                        .build();
        testingDispatcherLeaderProcessFactory =
                TestingDispatcherLeaderProcessFactory.from(
                        firstDispatcherLeaderProcess,
                        secondDispatcherLeaderProcess,
                        thirdDispatcherLeaderProcess);
        final DispatcherRunner dispatcherRunner = createDispatcherRunner();
        try {
            // three leadership changes in a row; only the first two processes block on close
            testingLeaderElectionService.isLeader(firstLeaderSession);
            testingLeaderElectionService.isLeader(secondLeaderSession);
            testingLeaderElectionService.isLeader(thirdLeaderSession);
            firstDispatcherLeaderProcessTerminationFuture.complete(null);
            // with only one of two predecessors terminated, the third must still wait
            assertThat(thirdDispatcherLeaderProcessHasBeenStartedFuture.isDone(), is(false));
            secondDispatcherLeaderProcessTerminationFuture.complete(null);
            assertThat(thirdDispatcherLeaderProcessHasBeenStartedFuture.isDone(), is(true));
        } finally {
            // completing again is a harmless no-op; it guarantees close() can finish
            // even if an assertion above failed before the futures were completed
            firstDispatcherLeaderProcessTerminationFuture.complete(null);
            secondDispatcherLeaderProcessTerminationFuture.complete(null);
            dispatcherRunner.close();
        }
    }
private static final class StartStopDispatcherLeaderProcess {
private final TestingDispatcherLeaderProcess testingDispatcherLeaderProcess;
private final CompletableFuture<Void> startFuture;
private final CompletableFuture<Void> terminationFuture;
private StartStopDispatcherLeaderProcess(
TestingDispatcherLeaderProcess testingDispatcherLeaderProcess,
CompletableFuture<Void> startFuture,
CompletableFuture<Void> terminationFuture) {
this.testingDispatcherLeaderProcess = testingDispatcherLeaderProcess;
this.startFuture = startFuture;
this.terminationFuture = terminationFuture;
}
private TestingDispatcherLeaderProcess asTestingDispatcherLeaderProcess() {
return testingDispatcherLeaderProcess;
}
private boolean isStarted() {
return startFuture.isDone();
}
private void terminateProcess() {
terminationFuture.complete(null);
}
private static StartStopDispatcherLeaderProcess create(UUID leaderSessionId) {
final CompletableFuture<Void> processStartFuture = new CompletableFuture<>();
final CompletableFuture<Void> processTerminationFuture = new CompletableFuture<>();
final TestingDispatcherLeaderProcess dispatcherLeaderProcess =
TestingDispatcherLeaderProcess.newBuilder(leaderSessionId)
.setStartConsumer(processStartFuture::complete)
.setCloseAsyncSupplier(() -> processTerminationFuture)
.build();
return new StartStopDispatcherLeaderProcess(
dispatcherLeaderProcess, processStartFuture, processTerminationFuture);
}
}
    /** Builds a testing gateway fenced with the given leader session id. */
    // NOTE(review): not referenced by any test visible in this part of the file —
    // confirm it is still needed before removing.
    private TestingDispatcherGateway createDispatcherGateway(UUID leaderSessionId) {
        return new TestingDispatcherGateway.Builder()
                .setFencingToken(DispatcherId.fromUuid(leaderSessionId))
                .build();
    }
    /** Creates a {@code DefaultDispatcherRunner} wired to this class's test fixtures. */
    private DispatcherRunner createDispatcherRunner() throws Exception {
        return DefaultDispatcherRunner.create(
                testingLeaderElectionService,
                testingFatalErrorHandler,
                testingDispatcherLeaderProcessFactory);
    }
}
| |
/*
* Copyright (c) 2016 by Rafael Angel Aznar Aparici (rafaaznar at gmail dot com)
*
* sisane-server: Helps you to develop easily AJAX web applications
* by copying and modifying this Java Server.
*
* Sources at https://github.com/rafaelaznar/sisane-server
*
* sisane-server is distributed under the MIT License (MIT)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package net.daw.data.implementation;
import net.daw.data.publicinterface.DataInterface;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import net.daw.helper.statics.Log4j;
public class MysqlData implements DataInterface {

    // Pooled connection handed in by the caller; the pool owns its lifecycle,
    // so this class never closes it.
    Connection connection = null;

    public MysqlData(Connection pooledConnection) {
        connection = pooledConnection;
    }

    /**
     * Executes a DML statement (INSERT/UPDATE/DELETE).
     *
     * @param strSQL the complete SQL statement to execute
     * @return the number of affected rows
     * @throws Exception if execution fails (the SQLException is attached as cause)
     */
    @Override
    public int executeUpdateSQL(String strSQL) throws Exception {
        // try-with-resources guarantees the statement is closed on every path
        try (PreparedStatement oPreparedStatement =
                connection.prepareStatement(strSQL, Statement.RETURN_GENERATED_KEYS)) {
            // executeUpdate() never returns -1 (it reports 0 affected rows), so the
            // previous "-1 means error" check was dead code and has been removed
            return oPreparedStatement.executeUpdate();
        } catch (SQLException ex) {
            Log4j.errorLog(this.getClass().getName() + ":" + (ex.getStackTrace()[0]).getMethodName(), ex);
            // preserve the original cause instead of throwing a bare Exception
            throw new Exception(ex);
        }
    }

    /**
     * Executes an INSERT and returns the auto-generated key.
     *
     * @param strSQL the complete INSERT statement
     * @return the generated id of the inserted row
     * @throws Exception if execution fails or no generated key is available
     */
    @Override
    public int executeInsertSQL(String strSQL) throws Exception {
        try (PreparedStatement oPreparedStatement =
                connection.prepareStatement(strSQL, Statement.RETURN_GENERATED_KEYS)) {
            oPreparedStatement.executeUpdate();
            try (ResultSet oResultSet = oPreparedStatement.getGeneratedKeys()) {
                if (oResultSet.next()) {
                    return oResultSet.getInt(1);
                }
                Log4j.errorLog(this.getClass().getName() + ":" + "executeInsertSQL error");
                throw new SQLException("no generated key returned");
            }
        } catch (SQLException ex) {
            Log4j.errorLog(this.getClass().getName() + ":" + (ex.getStackTrace()[0]).getMethodName(), ex);
            throw new Exception(ex);
        }
    }

    /**
     * Deletes the row with the given id from the given table.
     *
     * @param intId the id of the row to delete
     * @param strTabla the table name; must come from trusted code (it cannot be
     *        bound as a statement parameter)
     * @return the number of deleted rows
     * @throws Exception if the deletion fails
     */
    @Override
    public int removeOne(int intId, String strTabla) throws Exception {
        String strSQL = "DELETE FROM " + strTabla + " WHERE id = ?";
        try (PreparedStatement oPreparedStatement = connection.prepareStatement(strSQL)) {
            oPreparedStatement.setInt(1, intId);
            return oPreparedStatement.executeUpdate();
        } catch (SQLException ex) {
            Log4j.errorLog(this.getClass().getName() + ":" + (ex.getStackTrace()[0]).getMethodName(), ex);
            throw new Exception(ex);
        }
    }

    /**
     * Looks up the id of the first row whose column {@code strCampo} equals
     * {@code strValor}.
     *
     * @param strTabla the table name (trusted code only; cannot be parameterized)
     * @param strCampo the column name (trusted code only; cannot be parameterized)
     * @param strValor the value to match; bound as a statement parameter
     * @return the id of the matching row
     * @throws Exception if no row matches or the query fails
     */
    @Override
    public String getId(String strTabla, String strCampo, String strValor) throws Exception {
        // SECURITY FIX: strValor was previously concatenated into the SQL text,
        // which allowed SQL injection; it is now bound as a parameter.
        String strSQL = "SELECT id FROM " + strTabla + " WHERE " + strCampo + " = ?";
        try (PreparedStatement oPreparedStatement = connection.prepareStatement(strSQL)) {
            oPreparedStatement.setString(1, strValor);
            try (ResultSet oResultSet = oPreparedStatement.executeQuery()) {
                if (oResultSet.next()) {
                    return oResultSet.getString("id");
                }
                Log4j.errorLog(this.getClass().getName() + ":" + "getId error");
                throw new SQLException("no matching row");
            }
        } catch (SQLException ex) {
            Log4j.errorLog(this.getClass().getName() + ":" + (ex.getStackTrace()[0]).getMethodName(), ex);
            throw new Exception(ex);
        }
    }

    /**
     * Returns the value of column {@code strCampo} for the row with the given id,
     * splicing an "AND id=..." filter into the base query right after its "1=1"
     * marker.
     *
     * @param strSqlSelectDataOrigin base SELECT; assumed to contain the literal
     *        "1=1" — TODO confirm all callers guarantee this
     * @param strCampo the column whose value is returned
     * @param id the row id to filter on
     * @return the column value of the matching row
     * @throws Exception if no row matches or the query fails
     */
    @Override
    public String getOne(String strSqlSelectDataOrigin, String strCampo, int id) throws Exception {
        int intSplice = strSqlSelectDataOrigin.indexOf("1=1") + 3;
        String strSQL = strSqlSelectDataOrigin.substring(0, intSplice)
                + " AND id=" + id + " "
                + strSqlSelectDataOrigin.substring(intSplice);
        // try-with-resources fixes the old finally block, which dereferenced the
        // statement without a null check and could throw NullPointerException
        // when prepareStatement itself had failed
        try (PreparedStatement oPreparedStatement = connection.prepareStatement(strSQL);
                ResultSet oResultSet = oPreparedStatement.executeQuery()) {
            if (oResultSet.next()) {
                return oResultSet.getString(strCampo);
            }
            Log4j.errorLog(this.getClass().getName() + ":" + "getOne error");
            throw new SQLException("no matching row");
        } catch (Exception ex) {
            Log4j.errorLog(this.getClass().getName() + ":" + (ex.getStackTrace()[0]).getMethodName(), ex);
            throw new Exception(ex);
        }
    }

    /**
     * Counts the rows returned by the given base query by rewriting its SELECT
     * clause to COUNT(*).
     *
     * @param strSqlSelectDataOrigin base SELECT; must contain a FROM clause
     * @return the row count (0 if the query yields no result row)
     * @throws Exception if the query fails
     */
    @Override
    public Long getCount(String strSqlSelectDataOrigin) throws Exception {
        Long longResult = 0L;
        String strNewSqlDataSource = "SELECT COUNT(*) "
                + strSqlSelectDataOrigin.substring(strSqlSelectDataOrigin.toLowerCase().indexOf("from"));
        // the original called createStatement() twice and leaked the first Statement
        try (Statement oStatement = connection.createStatement();
                ResultSet oResultSet = oStatement.executeQuery(strNewSqlDataSource)) {
            while (oResultSet.next()) {
                longResult = oResultSet.getLong("COUNT(*)");
            }
        } catch (SQLException ex) {
            Log4j.errorLog(this.getClass().getName() + ":" + (ex.getStackTrace()[0]).getMethodName(), ex);
            throw new Exception(ex);
        }
        return longResult;
    }

    /**
     * Executes the given SELECT and returns its open ResultSet.
     *
     * <p>NOTE(review): on success the backing Statement stays open because closing
     * it would also close the returned ResultSet. Callers should release it via
     * {@code resultSet.getStatement().close()} when done — confirm callers do so.
     *
     * @param strSqlSelectDataOrigin the SELECT to execute
     * @return the open ResultSet; caller is responsible for closing it
     * @throws Exception if the query fails
     */
    @Override
    public ResultSet getAllSQL(String strSqlSelectDataOrigin) throws Exception {
        Statement oStatement = null;
        try {
            oStatement = connection.createStatement();
            return oStatement.executeQuery(strSqlSelectDataOrigin);
        } catch (Exception ex) {
            // on failure there is no ResultSet to hand out, so the Statement
            // can and must be closed here to avoid a leak
            if (oStatement != null) {
                try {
                    oStatement.close();
                } catch (SQLException ignored) {
                    // best-effort cleanup; the original failure is rethrown below
                }
            }
            Log4j.errorLog(this.getClass().getName() + ":" + (ex.getStackTrace()[0]).getMethodName(), ex);
            throw new Exception(ex);
        }
    }

    /**
     * Empties the given table, temporarily disabling foreign key checks so the
     * TRUNCATE cannot be rejected by referential constraints.
     *
     * @param strTabla the table to truncate (trusted code only)
     * @return always 0, kept for compatibility with the previous implementation
     * @throws Exception if the batch fails
     */
    public int truncateTable(String strTabla) throws Exception {
        // try-with-resources replaces the previously leaked Statement
        try (Statement oStatement = connection.createStatement()) {
            oStatement.addBatch("SET FOREIGN_KEY_CHECKS = 0");
            oStatement.addBatch("TRUNCATE TABLE " + strTabla);
            oStatement.addBatch("SET FOREIGN_KEY_CHECKS = 1");
            oStatement.executeBatch();
        } catch (SQLException ex) {
            Log4j.errorLog(this.getClass().getName() + ":" + (ex.getStackTrace()[0]).getMethodName(), ex);
            throw new Exception(ex);
        }
        return 0;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.jmeter.timers;
import java.io.Serializable;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.jmeter.testbeans.TestBean;
import org.apache.jmeter.testelement.AbstractTestElement;
import org.apache.jmeter.testelement.TestStateListener;
import org.apache.jmeter.testelement.ThreadListener;
import org.apache.jmeter.threads.JMeterContextService;
import org.slf4j.LoggerFactory;
import org.slf4j.Logger;
/**
* The purpose of the SyncTimer is to block threads until X number of threads
* have been blocked, and then they are all released at once. A SyncTimer can
* thus create large instant loads at various points of the test plan.
*
*/
public class SyncTimer extends AbstractTestElement implements Timer, Serializable, TestBean, TestStateListener, ThreadListener {
    private static final Logger log = LoggerFactory.getLogger(SyncTimer.class);

    private static final String SYNC_TIMER_GROUP_SIZE = "SyncTimer.groupSize";
    private static final String SYNC_TIMER_TIME_OUT_MS = "SyncTimer.timeoutInMs";

    private static final long serialVersionUID = 2;

    // Shared across the cloned per-thread timer instances; see clone() below.
    private transient BarrierWrapper barrier;

    /**
     * Wrapper to {@link CyclicBarrier} to allow lazy init of CyclicBarrier when SyncTimer is configured with 0
     */
    private static class BarrierWrapper implements Cloneable {

        private CyclicBarrier barrier;

        public BarrierWrapper() {
            this.barrier = null;
        }

        /**
         * @param parties Number of parties
         */
        public BarrierWrapper(int parties) {
            this.barrier = new CyclicBarrier(parties);
        }

        /**
         * Synchronized is required to ensure CyclicBarrier is initialized only once per Thread Group
         * @param parties Number of parties
         */
        public synchronized void setup(int parties) {
            if(this.barrier== null) {
                this.barrier = new CyclicBarrier(parties);
            }
        }

        /**
         * Wait until all threads called await on this timer
         *
         * @return The arrival index of the current thread
         * @throws InterruptedException
         *             when interrupted while waiting, or the interrupted status
         *             is set on entering this method
         * @throws BrokenBarrierException
         *             if the barrier is reset while waiting or broken on
         *             entering or while waiting
         * @see java.util.concurrent.CyclicBarrier#await()
         */
        public int await() throws InterruptedException, BrokenBarrierException{
            return barrier.await();
        }

        /**
         * Wait until all threads called await on this timer
         *
         * @param timeout
         *            The timeout in <code>timeUnit</code> units
         * @param timeUnit
         *            The time unit for the <code>timeout</code>
         * @return The arrival index of the current thread
         * @throws InterruptedException
         *             when interrupted while waiting, or the interrupted status
         *             is set on entering this method
         * @throws BrokenBarrierException
         *             if the barrier is reset while waiting or broken on
         *             entering or while waiting
         * @throws TimeoutException
         *             if the specified time elapses
         * @see java.util.concurrent.CyclicBarrier#await()
         */
        public int await(long timeout, TimeUnit timeUnit) throws InterruptedException, BrokenBarrierException, TimeoutException {
            return barrier.await(timeout, timeUnit);
        }

        /**
         * @see java.util.concurrent.CyclicBarrier#reset()
         */
        public void reset() {
            barrier.reset();
        }

        /**
         * @see java.lang.Object#clone()
         */
        @Override
        protected Object clone() {
            BarrierWrapper barrierWrapper= null;
            try {
                barrierWrapper = (BarrierWrapper) super.clone();
                // intentionally share the underlying CyclicBarrier so all clones
                // synchronize on the same instance
                barrierWrapper.barrier = this.barrier;
            } catch (CloneNotSupportedException e) {
                //Cannot happen
            }
            return barrierWrapper;
        }
    }

    // Ensure transient object is created by the server
    private Object readResolve(){
        createBarrier();
        return this;
    }

    /**
     * @return Returns the numThreads.
     */
    public int getGroupSize() {
        return this.getPropertyAsInt(SYNC_TIMER_GROUP_SIZE);
    }

    /**
     * @param numThreads
     *            The numThreads to set.
     */
    public void setGroupSize(int numThreads) {
        this.setProperty(SYNC_TIMER_GROUP_SIZE, numThreads);
    }

    /**
     * Blocks the calling thread on the shared barrier until the configured number
     * of threads has arrived (optionally bounded by the configured timeout), then
     * releases all of them at once. Always returns 0: the synchronization itself
     * is the "delay".
     *
     * {@inheritDoc}
     */
    @Override
    public long delay() {
        if(getGroupSize()>=0) {
            int arrival = 0;
            long timeoutInMs = this.getTimeoutInMs();
            try {
                if(timeoutInMs==0) {
                    arrival = this.barrier.await();
                } else if(timeoutInMs > 0){
                    arrival = this.barrier.await(timeoutInMs, TimeUnit.MILLISECONDS);
                } else {
                    throw new IllegalArgumentException("Negative value for timeout:"+timeoutInMs+" in Synchronizing Timer "+getName());
                }
            } catch (InterruptedException | BrokenBarrierException e) {
                return 0;
            } catch (TimeoutException e) {
                // parameterized logging instead of eager string concatenation
                log.warn("SyncTimer {} timed out waiting for users after: {}ms", getName(), getTimeoutInMs());
                return 0;
            } finally {
                // arrival index 0 marks the last thread through the barrier (per the
                // CyclicBarrier contract); it is also the value left by the exception
                // paths above, so the barrier is reset for the next release cycle
                // and any still-waiting threads are released.
                if(arrival == 0) {
                    barrier.reset();
                }
            }
        }
        return 0;
    }

    /**
     * We have to control the cloning process because we need some cross-thread
     * communication if our synctimers are to be able to determine when to block
     * and when to release.
     */
    @Override
    public Object clone() {
        SyncTimer newTimer = (SyncTimer) super.clone();
        // all per-thread clones share the same barrier wrapper
        newTimer.barrier = barrier;
        return newTimer;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void testEnded() {
        this.testEnded(null);
    }

    /**
     * Re-creates the barrier so a subsequent run starts from a clean state.
     */
    @Override
    public void testEnded(String host) {
        createBarrier();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void testStarted() {
        testStarted(null);
    }

    /**
     * Re-creates the barrier so the run starts from a clean state.
     */
    @Override
    public void testStarted(String host) {
        createBarrier();
    }

    /**
     * Creates the barrier wrapper; when group size is 0 the CyclicBarrier itself
     * is created lazily in {@link #threadStarted()} using the thread group size.
     */
    private void createBarrier() {
        if(getGroupSize() == 0) {
            // Lazy init
            this.barrier = new BarrierWrapper();
        } else {
            this.barrier = new BarrierWrapper(getGroupSize());
        }
    }

    @Override
    public void threadStarted() {
        if(getGroupSize() == 0) {
            int numThreadsInGroup = JMeterContextService.getContext().getThreadGroup().getNumThreads();
            // Unique Barrier creation ensured by synchronized setup
            this.barrier.setup(numThreadsInGroup);
        }
    }

    @Override
    public void threadFinished() {
        // NOOP
    }

    /**
     * @return the timeoutInMs
     */
    public long getTimeoutInMs() {
        return this.getPropertyAsLong(SYNC_TIMER_TIME_OUT_MS);
    }

    /**
     * @param timeoutInMs the timeoutInMs to set
     */
    public void setTimeoutInMs(long timeoutInMs) {
        this.setProperty(SYNC_TIMER_TIME_OUT_MS, timeoutInMs);
    }
}
| |
/**
*
* Copyright 2003-2007 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.disco.packet;
import org.jivesoftware.smack.packet.IQ;
import org.jivesoftware.smack.util.StringUtils;
import org.jivesoftware.smack.util.TypedCloneable;
import org.jivesoftware.smack.util.XmlStringBuilder;
import org.jxmpp.util.XmppStringUtils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
/**
* A DiscoverInfo IQ packet, which is used by XMPP clients to request and receive information
* to/from other XMPP entities.<p>
*
* The received information may contain one or more identities of the requested XMPP entity, and
* a list of supported features by the requested XMPP entity.
*
* @author Gaston Dombiak
*/
public class DiscoverInfo extends IQ implements TypedCloneable<DiscoverInfo> {
    public static final String ELEMENT = QUERY_ELEMENT;
    public static final String NAMESPACE = "http://jabber.org/protocol/disco#info";
    // Parallel structures: the lists preserve insertion order for the XML output,
    // while the sets provide O(1) membership lookups (featuresSet for duplicate
    // detection, identitiesSet keyed by category/type for hasIdentity()).
    private final List<Feature> features = new LinkedList<Feature>();
    private final Set<Feature> featuresSet = new HashSet<Feature>();
    private final List<Identity> identities = new LinkedList<Identity>();
    private final Set<String> identitiesSet = new HashSet<String>();
    // optional 'node' attribute of the disco#info query
    private String node;
    // latched to true by addFeature() on the first duplicate
    private boolean containsDuplicateFeatures;
    public DiscoverInfo() {
        super(ELEMENT, NAMESPACE);
    }
    /**
     * Copy constructor: deep-copies the node, features and identities of the
     * given DiscoverInfo.
     *
     * @param d the DiscoverInfo to copy
     */
    public DiscoverInfo(DiscoverInfo d) {
        super(d);
        // Set node
        setNode(d.getNode());
        // Copy features
        for (Feature f : d.features) {
            addFeature(f.clone());
        }
        // Copy identities
        for (Identity i : d.identities) {
            addIdentity(i.clone());
        }
    }
    /**
     * Adds a new feature to the discovered information.
     *
     * @param feature the discovered feature
     * @return true if the feature did not already exist.
     */
    public boolean addFeature(String feature) {
        // convenience overload; delegates to addFeature(Feature)
        return addFeature(new Feature(feature));
    }
/**
* Adds a collection of features to the packet. Does noting if featuresToAdd is null.
*
* @param featuresToAdd
*/
public void addFeatures(Collection<String> featuresToAdd) {
if (featuresToAdd == null) return;
for (String feature : featuresToAdd) {
addFeature(feature);
}
}
public boolean addFeature(Feature feature) {
features.add(feature);
boolean featureIsNew = featuresSet.add(feature);
if (!featureIsNew) {
containsDuplicateFeatures = true;
}
return featureIsNew;
}
    /**
     * Returns the discovered features of an XMPP entity.
     *
     * @return an unmodifiable list of the discovered features of an XMPP entity
     */
    public List<Feature> getFeatures() {
        return Collections.unmodifiableList(features);
    }
    /**
     * Adds a new identity of the requested entity to the discovered information.
     *
     * @param identity the discovered entity's identity
     */
    public void addIdentity(Identity identity) {
        identities.add(identity);
        // track the category/type key for fast hasIdentity() lookups
        identitiesSet.add(identity.getKey());
    }
/**
* Adds identities to the DiscoverInfo stanza.
*
* @param identitiesToAdd
*/
public void addIdentities(Collection<Identity> identitiesToAdd) {
if (identitiesToAdd == null) return;
for (Identity identity : identitiesToAdd) {
addIdentity(identity);
}
}
    /**
     * Returns the discovered identities of an XMPP entity.
     *
     * @return an unmodifiable list of the discovered identities
     */
    public List<Identity> getIdentities() {
        return Collections.unmodifiableList(identities);
    }
/**
* Returns true if this DiscoverInfo contains at least one Identity of the given category and type.
*
* @param category the category to look for.
* @param type the type to look for.
* @return true if this DiscoverInfo contains a Identity of the given category and type.
*/
public boolean hasIdentity(String category, String type) {
String key = XmppStringUtils.generateKey(category, type);
return identitiesSet.contains(key);
}
/**
* Returns all Identities of the given category and type of this DiscoverInfo.
*
* @param category category the category to look for.
* @param type type the type to look for.
* @return a list of Identites with the given category and type.
*/
public List<Identity> getIdentities(String category, String type) {
List<Identity> res = new ArrayList<Identity>(identities.size());
for (Identity identity : identities) {
if (identity.getCategory().equals(category) && identity.getType().equals(type)) {
res.add(identity);
}
}
return res;
}
    /**
     * Returns the node attribute that supplements the 'jid' attribute. A node is merely
     * something that is associated with a JID and for which the JID can provide information.<p>
     *
     * Node attributes SHOULD be used only when trying to provide or query information which
     * is not directly addressable.
     *
     * @return the node attribute that supplements the 'jid' attribute, or null if unset
     */
    public String getNode() {
        return node;
    }
    /**
     * Sets the node attribute that supplements the 'jid' attribute. A node is merely
     * something that is associated with a JID and for which the JID can provide information.<p>
     *
     * Node attributes SHOULD be used only when trying to provide or query information which
     * is not directly addressable.
     *
     * @param node the node attribute that supplements the 'jid' attribute
     */
    public void setNode(String node) {
        this.node = node;
    }
/**
* Returns true if the specified feature is part of the discovered information.
*
* @param feature the feature to check
* @return true if the requestes feature has been discovered
*/
public boolean containsFeature(String feature) {
return features.contains(new Feature(feature));
}
    @Override
    protected IQChildElementXmlStringBuilder getIQChildElementBuilder(IQChildElementXmlStringBuilder xml) {
        // emits <query xmlns='...disco#info' node='...'> followed by all
        // identities and features in insertion order
        xml.optAttribute("node", getNode());
        xml.rightAngleBracket();
        for (Identity identity : identities) {
            xml.append(identity.toXML());
        }
        for (Feature feature : features) {
            xml.append(feature.toXML());
        }
        return xml;
    }
/**
* Test if a DiscoverInfo response contains duplicate identities.
*
* @return true if duplicate identities where found, otherwise false
*/
public boolean containsDuplicateIdentities() {
List<Identity> checkedIdentities = new LinkedList<Identity>();
for (Identity i : identities) {
for (Identity i2 : checkedIdentities) {
if (i.equals(i2))
return true;
}
checkedIdentities.add(i);
}
return false;
}
    /**
     * Test if a DiscoverInfo response contains duplicate features.
     *
     * @return true if duplicate features were found, otherwise false
     */
    public boolean containsDuplicateFeatures() {
        // flag is latched by addFeature() the first time a duplicate is added
        return containsDuplicateFeatures;
    }
    /** Deep copy via the copy constructor. */
    @Override
    public DiscoverInfo clone() {
        return new DiscoverInfo(this);
    }
/**
* Represents the identity of a given XMPP entity. An entity may have many identities but all
* the identities SHOULD have the same name.<p>
*
* Refer to <a href="http://www.jabber.org/registrar/disco-categories.html">Jabber::Registrar</a>
* in order to get the official registry of values for the <i>category</i> and <i>type</i>
* attributes.
*
*/
public static class Identity implements Comparable<Identity>, TypedCloneable<Identity> {
private final String category;
private final String type;
private final String key;
private final String name;
private final String lang; // 'xml:lang;
public Identity(Identity identity) {
this.category = identity.category;
this.type = identity.type;
this.key = identity.type;
this.name = identity.name;
this.lang = identity.lang;
}
/**
* Creates a new identity for an XMPP entity.
*
* @param category the entity's category (required as per XEP-30).
* @param type the entity's type (required as per XEP-30).
*/
public Identity(String category, String type) {
this(category, type, null, null);
}
/**
* Creates a new identity for an XMPP entity.
* 'category' and 'type' are required by
* <a href="http://xmpp.org/extensions/xep-0030.html#schemas">XEP-30 XML Schemas</a>
*
* @param category the entity's category (required as per XEP-30).
* @param name the entity's name.
* @param type the entity's type (required as per XEP-30).
*/
public Identity(String category, String name, String type) {
this(category, type, name, null);
}
/**
* Creates a new identity for an XMPP entity.
* 'category' and 'type' are required by
* <a href="http://xmpp.org/extensions/xep-0030.html#schemas">XEP-30 XML Schemas</a>
*
* @param category the entity's category (required as per XEP-30).
* @param type the entity's type (required as per XEP-30).
* @param name the entity's name.
* @param lang the entity's lang.
*/
public Identity(String category, String type, String name, String lang) {
this.category = StringUtils.requireNotNullOrEmpty(category, "category cannot be null");
this.type = StringUtils.requireNotNullOrEmpty(type, "type cannot be null");
this.key = XmppStringUtils.generateKey(category, type);
this.name = name;
this.lang = lang;
}
/**
* Returns the entity's category. To get the official registry of values for the
* 'category' attribute refer to <a href="http://www.jabber.org/registrar/disco-categories.html">Jabber::Registrar</a>
*
* @return the entity's category.
*/
public String getCategory() {
return category;
}
/**
* Returns the identity's name.
*
* @return the identity's name.
*/
public String getName() {
return name;
}
/**
* Returns the entity's type. To get the official registry of values for the
* 'type' attribute refer to <a href="http://www.jabber.org/registrar/disco-categories.html">Jabber::Registrar</a>
*
* @return the entity's type.
*/
public String getType() {
return type;
}
/**
* Returns the identities natural language if one is set.
*
* @return the value of xml:lang of this Identity
*/
public String getLanguage() {
return lang;
}
private String getKey() {
return key;
}
/**
* Returns true if this identity is of the given category and type.
*
* @param category the category.
* @param type the type.
* @return true if this identity is of the given category and type.
*/
public boolean isOfCategoryAndType(String category, String type) {
return this.category.equals(category) && this.type.equals(type);
}
public XmlStringBuilder toXML() {
XmlStringBuilder xml = new XmlStringBuilder();
xml.halfOpenElement("identity");
xml.xmllangAttribute(lang);
xml.attribute("category", category);
xml.optAttribute("name", name);
xml.optAttribute("type", type);
xml.closeEmptyElement();
return xml;
}
/**
* Check equality for Identity for category, type, lang and name
* in that order as defined by
* <a href="http://xmpp.org/extensions/xep-0115.html#ver-proc">XEP-0015 5.4 Processing Method (Step 3.3)</a>.
*
*/
public boolean equals(Object obj) {
if (obj == null)
return false;
if (obj == this)
return true;
if (obj.getClass() != getClass())
return false;
DiscoverInfo.Identity other = (DiscoverInfo.Identity) obj;
if (!this.key.equals(other.key))
return false;
String otherLang = other.lang == null ? "" : other.lang;
String thisLang = lang == null ? "" : lang;
if (!otherLang.equals(thisLang))
return false;
String otherName = other.name == null ? "" : other.name;
String thisName = name == null ? "" : other.name;
if (!thisName.equals(otherName))
return false;
return true;
}
@Override
public int hashCode() {
int result = 1;
result = 37 * result + key.hashCode();
result = 37 * result + (lang == null ? 0 : lang.hashCode());
result = 37 * result + (name == null ? 0 : name.hashCode());
return result;
}
/**
* Compares this identity with another one. The comparison order is: Category, Type, Lang.
* If all three are identical the other Identity is considered equal. Name is not used for
* comparison, as defined by XEP-0115
*
* @param other
* @return a negative integer, zero, or a positive integer as this object is less than,
* equal to, or greater than the specified object.
*/
public int compareTo(DiscoverInfo.Identity other) {
String otherLang = other.lang == null ? "" : other.lang;
String thisLang = lang == null ? "" : lang;
// This can be removed once the deprecated constructor is removed.
String otherType = other.type == null ? "" : other.type;
String thisType = type == null ? "" : type;
if (category.equals(other.category)) {
if (thisType.equals(otherType)) {
if (thisLang.equals(otherLang)) {
// Don't compare on name, XEP-30 says that name SHOULD
// be equals for all identities of an entity
return 0;
} else {
return thisLang.compareTo(otherLang);
}
} else {
return thisType.compareTo(otherType);
}
} else {
return category.compareTo(other.category);
}
}
@Override
public Identity clone() {
return new Identity(this);
}
}
/**
* Represents the features offered by the item. This information helps requestors determine
* what actions are possible with regard to this item (registration, search, join, etc.)
* as well as specific feature types of interest, if any (e.g., for the purpose of feature
* negotiation).
*/
public static class Feature implements TypedCloneable<Feature> {
private final String variable;
public Feature(Feature feature) {
this.variable = feature.variable;
}
/**
* Creates a new feature offered by an XMPP entity or item.
*
* @param variable the feature's variable.
*/
public Feature(String variable) {
this.variable = StringUtils.requireNotNullOrEmpty(variable, "variable cannot be null");
}
/**
* Returns the feature's variable.
*
* @return the feature's variable.
*/
public String getVar() {
return variable;
}
public XmlStringBuilder toXML() {
XmlStringBuilder xml = new XmlStringBuilder();
xml.halfOpenElement("feature");
xml.attribute("var", variable);
xml.closeEmptyElement();
return xml;
}
public boolean equals(Object obj) {
if (obj == null)
return false;
if (obj == this)
return true;
if (obj.getClass() != getClass())
return false;
DiscoverInfo.Feature other = (DiscoverInfo.Feature) obj;
return variable.equals(other.variable);
}
@Override
public int hashCode() {
return variable.hashCode();
}
@Override
public Feature clone() {
return new Feature(this);
}
}
}
| |
/**
*
*/
package org.gprom.jdbc.jna;
import java.util.Arrays;
import java.util.List;
import com.sun.jna.Callback;
import com.sun.jna.Library;
import com.sun.jna.Native;
import com.sun.jna.NativeLong;
import com.sun.jna.Pointer;
import com.sun.jna.StringArray;
import com.sun.jna.Structure;
import com.sun.jna.Union;
import com.sun.jna.ptr.IntByReference;
import com.sun.jna.ptr.PointerByReference;
/**
 * JNA binding for the native "gprom" library. Declares the C entry points for
 * initialization and shutdown, option handling, query rewriting, logging and
 * exception callbacks, metadata lookup plugin registration, plus the integer
 * constant sets that mirror the native enum definitions.
 *
 * @author lord_pretzel
 */
public interface GProM_JNA extends Library {

    // Singleton instance of the native library, loaded once at class-load time.
    GProM_JNA INSTANCE = (GProM_JNA) Native.loadLibrary("gprom",
                                                        GProM_JNA.class);

    // library methods
    // initialization and shutdown of the native library
    public void gprom_init();
    public void gprom_readOptions(int argc, String[] args);
    public void gprom_readOptionAndInit(int argc, String[] args);
    public void gprom_configFromOptions();
    public void gprom_reconfPlugins();
    public void gprom_shutdown();

    // rewrite methods
    public Pointer gprom_rewriteQuery (String query);

    // configuration interface: typed getters/setters keyed by option name
    public String gprom_getStringOption (String name);
    public int gprom_getIntOption (String name);
    public boolean gprom_getBoolOption (String name);
    public double gprom_getFloatOption (String name);
    public void gprom_setStringOption (String name, String value);
    public void gprom_setIntOption(String name, int value);
    public void gprom_setBoolOption(String name, boolean value);
    public void gprom_setFloatOption(String name, double value);
    public String gprom_getOptionType(String key);
    public boolean gprom_optionExists(String key);

    // logging callback interface
    interface GProMLoggerCallbackFunction extends Callback {
        void invoke(String message, String fileInfo, int line, int logLevel);
    }

    // exception callback interface
    interface GProMExceptionCallbackFunction extends Callback {
        int invoke(String message, String file, int line, int severity);
    }

    public void gprom_registerExceptionCallbackFunction (GProMExceptionCallbackFunction callback);
    public void gprom_registerLoggerCallbackFunction (GProMLoggerCallbackFunction callback);
    public void gprom_setMaxLogLevel (int maxLevel);

    // metadata lookup callback interface; the plugin struct is declared in the
    // separate GProMMetadataLookupPlugin class.
    void gprom_registerMetadataLookupPlugin(GProMMetadataLookupPlugin plugin);

    /** Mirrors the native NodeTag enum identifying GProM AST/operator node types. */
    public static interface GProMNodeTag {
        public static final int GProM_T_Invalid = 0;
        public static final int GProM_T_Node = 1;
        /* collection types */
        public static final int GProM_T_List = 2;
        public static final int GProM_T_IntList = 3;
        public static final int GProM_T_Set = 4;
        public static final int GProM_T_HashMap = 5;
        public static final int GProM_T_Vector = 6;
        /* options */
        public static final int GProM_T_KeyValue = 7;
        /* expression nodes */
        public static final int GProM_T_Constant = 8;
        public static final int GProM_T_AttributeReference = 9;
        public static final int GProM_T_SQLParameter = 10;
        public static final int GProM_T_FunctionCall = 11;
        public static final int GProM_T_Operator = 12;
        public static final int GProM_T_CaseExpr = 13;
        public static final int GProM_T_CaseWhen = 14;
        public static final int GProM_T_IsNullExpr = 15;
        public static final int GProM_T_WindowBound = 16;
        public static final int GProM_T_WindowFrame = 17;
        public static final int GProM_T_WindowDef = 18;
        public static final int GProM_T_WindowFunction = 19;
        public static final int GProM_T_RowNumExpr = 20;
        public static final int GProM_T_OrderExpr = 21;
        public static final int GProM_T_CastExpr = 22;
        /* query block model nodes */
        public static final int GProM_T_SetQuery = 23;
        public static final int GProM_T_ProvenanceStmt = 24;
        public static final int GProM_T_ProvenanceTransactionInfo = 25;
        public static final int GProM_T_QueryBlock = 26;
        public static final int GProM_T_SelectItem = 27;
        public static final int GProM_T_FromItem = 28;
        public static final int GProM_T_FromProvInfo = 29;
        public static final int GProM_T_FromTableRef = 30;
        public static final int GProM_T_FromSubquery = 31;
        public static final int GProM_T_FromJoinExpr = 32;
        public static final int GProM_T_DistinctClause = 33;
        public static final int GProM_T_NestedSubquery = 34;
        public static final int GProM_T_Insert = 35;
        public static final int GProM_T_Delete = 36;
        public static final int GProM_T_Update = 37;
        public static final int GProM_T_TransactionStmt = 38;
        public static final int GProM_T_WithStmt = 39;
        public static final int GProM_T_DDLStatement = 40;
        public static final int GProM_T_UtilityStatement = 41;
        /* query operator model nodes */
        public static final int GProM_T_Schema = 42;
        public static final int GProM_T_AttributeDef = 43;
        public static final int GProM_T_QueryOperator = 44;
        public static final int GProM_T_SelectionOperator = 45;
        public static final int GProM_T_ProjectionOperator = 46;
        public static final int GProM_T_JoinOperator = 47;
        public static final int GProM_T_AggregationOperator = 48;
        public static final int GProM_T_ProvenanceComputation = 49;
        public static final int GProM_T_TableAccessOperator = 50;
        public static final int GProM_T_SetOperator = 51;
        public static final int GProM_T_DuplicateRemoval = 52;
        public static final int GProM_T_ConstRelOperator = 53;
        public static final int GProM_T_NestingOperator = 54;
        public static final int GProM_T_WindowOperator = 55;
        public static final int GProM_T_OrderOperator = 56;
        /* datalog model nodes */
        public static final int GProM_T_DLNode = 57;
        public static final int GProM_T_DLAtom = 58;
        public static final int GProM_T_DLVar = 59;
        public static final int GProM_T_DLRule = 60;
        public static final int GProM_T_DLProgram = 61;
        public static final int GProM_T_DLComparison = 62;
        public static final int GProM_T_DLDomain = 63;
        /* Json Table Node */
        public static final int GProM_T_FromJsonTable = 64;
        public static final int GProM_T_JsonTableOperator = 65;
        public static final int GProM_T_JsonColInfoItem = 66;
        public static final int GProM_T_JsonPath = 67;
        /* relation */
        public static final int GProM_T_Relation = 68;
        /* rpq */
        public static final int GProM_T_Regex = 69;
        public static final int GProM_T_RPQQuery = 70;
        /* ddl */
        public static final int GProM_T_CreateTable = 71;
        public static final int GProM_T_AlterTable = 72;
    }

    /** Mirrors the native DataType enum. */
    public static interface GProMDataType {
        public static final int GProM_DT_INT = 0;
        public static final int GProM_DT_LONG = 1;
        public static final int GProM_DT_STRING = 2;
        public static final int GProM_DT_FLOAT = 3;
        public static final int GProM_DT_BOOL = 4;
        public static final int GProM_DT_VARCHAR2 = 5;
    }

    /** Mirrors the native JoinType enum. */
    public static interface GProMJoinType {
        public static final int GProM_JOIN_INNER = 0;
        public static final int GProM_JOIN_CROSS = 1;
        public static final int GProM_JOIN_LEFT_OUTER = 2;
        public static final int GProM_JOIN_RIGHT_OUTER = 3;
        public static final int GProM_JOIN_FULL_OUTER = 4;
    }

    /** Mirrors the native SetOpType enum. */
    public static interface GProMSetOpType{
        public static final int GProM_SETOP_UNION = 0;
        public static final int GProM_SETOP_INTERSECTION = 1;
        public static final int GProM_SETOP_DIFFERENCE = 2;
    }

    /** Mirrors the native ProvenanceType enum. */
    public static interface GProMProvenanceType {
        public static final int GProM_PROV_PI_CS = 0;
        public static final int GProM_PROV_TRANSFORMATION = 1;
        public static final int GProM_PROV_NONE = 2;/* for reenactment of bag semantics only */
    }

    /* what type of database operation(s) a provenance computation is for */
    public static interface GProMProvenanceInputType {
        public static final int GProM_PROV_INPUT_QUERY = 0;
        public static final int GProM_PROV_INPUT_UPDATE = 1;
        public static final int GProM_PROV_INPUT_UPDATE_SEQUENCE = 2;
        public static final int GProM_PROV_INPUT_REENACT = 3;
        public static final int GProM_PROV_INPUT_REENACT_WITH_TIMES = 4;
        public static final int GProM_PROV_INPUT_TRANSACTION = 5;
        public static final int GProM_PROV_INPUT_TIME_INTERVAL = 6;
    }

    /** Mirrors the native IsolationLevel enum. */
    public static interface GProMIsolationLevel {
        public static final int GProM_ISOLATION_SERIALIZABLE = 0;
        public static final int GProM_ISOLATION_READ_COMMITTED = 1;
        public static final int GProM_ISOLATION_READ_ONLY = 2;
    }

    /** Mirrors the native NestingExprType enum. */
    public static interface GProMNestingExprType {
        public static final int GProM_NESTQ_EXISTS = 0;
        public static final int GProM_NESTQ_ANY = 1;
        public static final int GProM_NESTQ_ALL = 2;
        public static final int GProM_NESTQ_UNIQUE = 3;
        public static final int GProM_NESTQ_SCALAR = 4;
    }

    /** Mirrors the native WindowBoundType enum. */
    public static interface GProMWindowBoundType {
        public static final int GProM_WINBOUND_UNBOUND_PREC = 0;
        public static final int GProM_WINBOUND_CURRENT_ROW = 1;
        public static final int GProM_WINBOUND_EXPR_PREC = 2;
        public static final int GProM_WINBOUND_EXPR_FOLLOW = 3;
    }

    /** Mirrors the native WinFrameType enum. */
    public static interface GProMWinFrameType {
        public static final int GProM_WINFRAME_ROWS = 0;
        public static final int GProM_WINFRAME_RANGE = 1;
    }

    /** Mirrors the native SortOrder enum. */
    public static interface GProMSortOrder {
        public static final int GProM_SORT_ASC = 0;
        public static final int GProM_SORT_DESC = 1;
    }

    /** Mirrors the native SortNullOrder enum. */
    public static interface GProMSortNullOrder {
        public static final int GProM_SORT_NULLS_FIRST = 0;
        public static final int GProM_SORT_NULLS_LAST = 1;
    }

    // sentinel values used by the native library for "not set" positions
    public static final int GProM_INVALID_PARAM = (int)-1;
    public static final int GProM_INVALID_ATTR = (int)-1;
    public static final int GProM_INVALID_FROM_ITEM = (int)-1;

    public Pointer gprom_rewriteQueryToOperatorModel (String query);
    public Pointer gprom_provRewriteOperator(Pointer nodeFromMimir);
}
| |
package com.bitdubai.fermat_cry_plugin.layer.crypto_network.bitcoin.developer.bitdubai.version_1.structure;
import com.bitdubai.fermat_api.DealsWithPluginIdentity;
import com.bitdubai.fermat_api.FermatException;
import com.bitdubai.fermat_api.layer.all_definition.enums.Plugins;
import com.bitdubai.fermat_api.layer.dmp_world.Agent;
import com.bitdubai.fermat_api.layer.dmp_world.wallet.exceptions.CantStartAgentException;
import com.bitdubai.fermat_api.layer.osa_android.file_system.DealsWithPluginFileSystem;
import com.bitdubai.fermat_api.layer.osa_android.file_system.PluginFileSystem;
import com.bitdubai.fermat_api.layer.osa_android.logger_system.DealsWithLogger;
import com.bitdubai.fermat_api.layer.osa_android.logger_system.LogManager;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.DealsWithErrors;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.ErrorManager;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.UnexpectedPlatformExceptionSeverity;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.UnexpectedPluginExceptionSeverity;
import com.bitdubai.fermat_cry_api.layer.crypto_network.bitcoin.BitcoinManager;
import com.bitdubai.fermat_cry_api.layer.crypto_network.bitcoin.exceptions.CantConnectToBitcoinNetwork;
import com.bitdubai.fermat_cry_plugin.layer.crypto_network.bitcoin.developer.bitdubai.version_1.BitcoinCryptoNetworkPluginRoot;
import com.bitdubai.fermat_cry_plugin.layer.crypto_network.bitcoin.developer.bitdubai.version_1.exceptions.CantCreateBlockStoreFileException;
import org.bitcoinj.core.NetworkParameters;
import org.bitcoinj.core.PeerAddress;
import org.bitcoinj.core.PeerGroup;
import org.bitcoinj.core.Wallet;
import org.bitcoinj.net.discovery.DnsDiscovery;
import org.bitcoinj.params.RegTestParams;
import java.net.InetSocketAddress;
import java.util.UUID;
/**
* Created by rodrigo on 08/06/15.
*/
public class BitcoinCryptoNetworkMonitoringAgent implements Agent, BitcoinManager, DealsWithErrors, DealsWithLogger, DealsWithPluginFileSystem, DealsWithPluginIdentity {
    /**
     * BitcoinCryptoNetworkMonitoringAgent member variables
     */
    private BitcoinEventListeners myListeners;
    private NetworkParameters networkParameters;
    private StoredBlockChain storedBlockChain;
    private PeerGroup peers;
    private Wallet wallet;
    private String userPublicKey;
    /**
     * Agent interface member variables
     */
    private Thread agentThread;
    private MonitorAgent monitorAgent;
    /**
     * DealsWithErrors interface member variables
     */
    private ErrorManager errorManager;
    /**
     * DealsWithLogger interface member variable
     */
    private LogManager logManager;
    /**
     * DealsWithPluginFileSystem interface member variable
     */
    private PluginFileSystem pluginFileSystem;
    /**
     * DealsWithPluginIdentity interface member variable
     */
    private UUID pluginId;
    /**
     * constructor
     * @param wallet the BitcoinJ wallet that will be used to store the transactions and specify which
     *               addresses to monitor
     * @param userPublicKey the user ID that we are calling the connection for.
     */
    public BitcoinCryptoNetworkMonitoringAgent(Wallet wallet, String userPublicKey){
        this.wallet = wallet;
        this.userPublicKey = userPublicKey;
        this.networkParameters = BitcoinNetworkConfiguration.getNetworkConfiguration();
        peers = null;
    }
    /**
     * DealsWithErrors interface implementation
     *
     * @param errorManager the platform error manager used to report failures
     */
    @Override
    public void setErrorManager(ErrorManager errorManager) {
        this.errorManager = errorManager;
    }
    /**
     * DealsWithPluginFileSystem interface implementation
     *
     * @param pluginFileSystem file system abstraction used for block store persistence
     */
    @Override
    public void setPluginFileSystem(PluginFileSystem pluginFileSystem) {
        this.pluginFileSystem = pluginFileSystem;
    }
    /**
     * DealsWithPluginIdentity interface implementation
     *
     * @param pluginId identity of this plugin instance
     */
    @Override
    public void setPluginId(UUID pluginId) {
        this.pluginId = pluginId;
    }
    /**
     * DealsWithLogger interface implementation
     *
     * @param logManager platform log manager
     */
    @Override
    public void setLogManager(LogManager logManager) {
        this.logManager = logManager;
    }
    /**
     * Agent interface implementation. Starts the background thread that connects
     * to the Bitcoin network and downloads the block chain.
     *
     * @throws CantStartAgentException
     */
    @Override
    public void start() throws CantStartAgentException {
        /**
         * I start the thread that will launch the class that connects to bitcoin.
         */
        monitorAgent = new MonitorAgent();
        this.agentThread = new Thread(monitorAgent);
        this.agentThread.start();
    }
    /**
     * Agent interface implementation. Stops the bitcoin monitoring agent if it
     * is running.
     */
    @Override
    public void stop() {
        // Bug fix: guard against NPE when stop() is called before configurePeers()
        // has initialized the peer group (the constructor sets peers to null).
        if (peers != null && peers.isRunning())
            peers.stop();
    }
    /**
     * return the amount of connected peers. Warning, may change as soon as this is executed.
     *
     * @return number of currently connected peers, or 0 when the group is not running
     */
    public int getConnectedPeers(){
        if (peers != null && peers.isRunning())
            return peers.numConnectedPeers();
        else
            return 0;
    }
    /**
     * Used by the Vault when we want to send bitcoins.
     *
     * @return the peer group; may be null before configurePeers() has run
     */
    public PeerGroup getPeers() {
        return peers;
    }
    /**
     * return true if the service is running. It doesn't mean we are connected. We might be without
     * internet access but the service still running.
     *
     * @return true when the peer group service is running
     */
    public boolean isRunning() {
        if (peers == null)
            return false;
        else
            return peers.isRunning();
    }
    /**
     * Creates and configures the persistent block chain backing the wallet.
     *
     * @throws CantCreateBlockStoreFileException when the block store cannot be created
     */
    public void configureBlockChain() throws CantCreateBlockStoreFileException {
        /**
         * I prepare the block chain object
         */
        try {
            storedBlockChain = new StoredBlockChain(wallet, userPublicKey);
            storedBlockChain.setPluginId(pluginId);
            storedBlockChain.setPluginFileSystem(pluginFileSystem);
            storedBlockChain.setErrorManager(errorManager);
            storedBlockChain.createBlockChain();
        } catch (Exception exception) {
            errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_BITCOIN_CRYPTO_NETWORK, UnexpectedPluginExceptionSeverity.DISABLES_THIS_PLUGIN, FermatException.wrapException(exception));
            throw new CantCreateBlockStoreFileException(CantCreateBlockStoreFileException.DEFAULT_MESSAGE, FermatException.wrapException(exception), null, null);
        }
    }
    /**
     * Builds the peer group and defines the peers that will be connected to:
     * a local server on RegTest, or DNS discovery otherwise.
     */
    public void configurePeers() {
        /**
         * I define the peers information that I will be connecting to.
         */
        try {
            storedBlockChain.getBlockChain().addWallet(wallet);
            peers = new PeerGroup(this.networkParameters, storedBlockChain.getBlockChain());
            peers.addWallet(wallet);
            peers.setUserAgent(BitcoinManager.FERMAT_AGENT_NAME, BitcoinManager.FERMAT_AGENT_VERSION);
            peers.setUseLocalhostPeerWhenPossible(true);
            /**
             * If we are using RegTest network, we will connect to local server
             */
            if (networkParameters == RegTestParams.get()) {
                InetSocketAddress inetSocketAddress = new InetSocketAddress(REGTEST_SERVER_ADDRESS, REGTEST_SERVER_PORT);
                PeerAddress peerAddress = new PeerAddress(inetSocketAddress);
                peers.addAddress(peerAddress);
                logManager.log(BitcoinCryptoNetworkPluginRoot.getLogLevelByClass(this.getClass().getName()), "CryptoNetwork information: Using RegTest. Connecting to " + inetSocketAddress.toString(), null, null);
            } else
            /**
             * If it is not RegTest, then I will get the Peers by DNSDiscovery
             */ {
                logManager.log(BitcoinCryptoNetworkPluginRoot.getLogLevelByClass(this.getClass().getName()), "CryptoNetwork information: Using " + networkParameters.toString() + " network.", null, null);
                peers.addPeerDiscovery(new DnsDiscovery(this.networkParameters));
            }
            myListeners = new BitcoinEventListeners();
            myListeners.setLogManager(this.logManager);
            peers.addEventListener(myListeners);
        } catch (Exception exception) {
            errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_BITCOIN_CRYPTO_NETWORK, UnexpectedPluginExceptionSeverity.DISABLES_THIS_PLUGIN, exception);
        }
    }
    /**
     * private class executed by the start of the Agent.
     */
    private class MonitorAgent implements Runnable {
        @Override
        public void run() {
            try {
                doTheMainTask();
            } catch (CantConnectToBitcoinNetwork cantConnectToBitcoinNetwork) {
                errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_BITCOIN_CRYPTO_NETWORK, UnexpectedPluginExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_PLUGIN, cantConnectToBitcoinNetwork);
            }
        }
        /**
         * triggers the connection to peers, the download (or update) of the block chain
         * and the listening to incoming transactions.
         */
        private void doTheMainTask() throws CantConnectToBitcoinNetwork {
            try {
                peers.start();
                peers.downloadBlockChain();
            } catch (Exception exception) {
                throw new CantConnectToBitcoinNetwork("Couldn't connect to Bitcoin Network.", exception, "", "Error executing Agent.");
            }
        }
    }
}
| |
package com.ianhanniballake.localstorage;
import android.annotation.TargetApi;
import android.content.res.AssetFileDescriptor;
import android.database.Cursor;
import android.database.MatrixCursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Point;
import android.os.Build;
import android.os.CancellationSignal;
import android.os.Environment;
import android.os.ParcelFileDescriptor;
import android.provider.DocumentsContract.Document;
import android.provider.DocumentsContract.Root;
import android.provider.DocumentsProvider;
import android.util.Log;
import android.webkit.MimeTypeMap;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import com.whatdoyouwanttodo.R;
@TargetApi(Build.VERSION_CODES.KITKAT)
public class LocalStorageProvider extends DocumentsProvider {
    // Content-provider authority string under which this provider is addressed.
    public static final String AUTHORITY = "com.ianhanniballake.localstorage.documents";
    /**
     * Default root projection: everything but Root.COLUMN_MIME_TYPES
     */
    private final static String[] DEFAULT_ROOT_PROJECTION = new String[] {
            Root.COLUMN_ROOT_ID,
            Root.COLUMN_FLAGS, Root.COLUMN_TITLE, Root.COLUMN_DOCUMENT_ID, Root.COLUMN_ICON,
            Root.COLUMN_AVAILABLE_BYTES
    };
    /**
     * Default document projection: everything but Document.COLUMN_ICON and
     * Document.COLUMN_SUMMARY
     */
    private final static String[] DEFAULT_DOCUMENT_PROJECTION = new String[] {
            Document.COLUMN_DOCUMENT_ID,
            Document.COLUMN_DISPLAY_NAME, Document.COLUMN_FLAGS, Document.COLUMN_MIME_TYPE,
            Document.COLUMN_SIZE,
            Document.COLUMN_LAST_MODIFIED
    };
@Override
public Cursor queryRoots(final String[] projection) throws FileNotFoundException {
// Create a cursor with either the requested fields, or the default
// projection if "projection" is null.
final MatrixCursor result = new MatrixCursor(projection != null ? projection
: DEFAULT_ROOT_PROJECTION);
// Add Home directory
File homeDir = Environment.getExternalStorageDirectory();
final MatrixCursor.RowBuilder row = result.newRow();
// These columns are required
row.add(Root.COLUMN_ROOT_ID, homeDir.getAbsolutePath());
row.add(Root.COLUMN_DOCUMENT_ID, homeDir.getAbsolutePath());
row.add(Root.COLUMN_TITLE, getContext().getString(R.string.internal_storage));
row.add(Root.COLUMN_FLAGS, Root.FLAG_LOCAL_ONLY | Root.FLAG_SUPPORTS_CREATE);
row.add(Root.COLUMN_ICON, R.drawable.ic_provider);
// These columns are optional
row.add(Root.COLUMN_AVAILABLE_BYTES, homeDir.getFreeSpace());
// Root.COLUMN_MIME_TYPE is another optional column and useful if you
// have multiple roots with different
// types of mime types (roots that don't match the requested mime type
// are automatically hidden)
return result;
}
@Override
public String createDocument(final String parentDocumentId, final String mimeType,
final String displayName) throws FileNotFoundException {
File newFile = new File(parentDocumentId, displayName);
try {
newFile.createNewFile();
return newFile.getAbsolutePath();
} catch (IOException e) {
Log.e(LocalStorageProvider.class.getSimpleName(), "Error creating new file " + newFile);
}
return null;
}
@Override
public AssetFileDescriptor openDocumentThumbnail(final String documentId, final Point sizeHint,
final CancellationSignal signal) throws FileNotFoundException {
// Assume documentId points to an image file. Build a thumbnail no
// larger than twice the sizeHint
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeFile(documentId, options);
final int targetHeight = 2 * sizeHint.y;
final int targetWidth = 2 * sizeHint.x;
final int height = options.outHeight;
final int width = options.outWidth;
options.inSampleSize = 1;
if (height > targetHeight || width > targetWidth) {
final int halfHeight = height / 2;
final int halfWidth = width / 2;
// Calculate the largest inSampleSize value that is a power of 2 and
// keeps both
// height and width larger than the requested height and width.
while ((halfHeight / options.inSampleSize) > targetHeight
|| (halfWidth / options.inSampleSize) > targetWidth) {
options.inSampleSize *= 2;
}
}
options.inJustDecodeBounds = false;
Bitmap bitmap = BitmapFactory.decodeFile(documentId, options);
// Write out the thumbnail to a temporary file
File tempFile = null;
FileOutputStream out = null;
try {
tempFile = File.createTempFile("thumbnail", null, getContext().getCacheDir());
out = new FileOutputStream(tempFile);
bitmap.compress(Bitmap.CompressFormat.PNG, 90, out);
} catch (IOException e) {
Log.e(LocalStorageProvider.class.getSimpleName(), "Error writing thumbnail", e);
return null;
} finally {
if (out != null)
try {
out.close();
} catch (IOException e) {
Log.e(LocalStorageProvider.class.getSimpleName(), "Error closing thumbnail", e);
}
}
// It appears the Storage Framework UI caches these results quite
// aggressively so there is little reason to
// write your own caching layer beyond what you need to return a single
// AssetFileDescriptor
return new AssetFileDescriptor(ParcelFileDescriptor.open(tempFile,
ParcelFileDescriptor.MODE_READ_ONLY), 0,
AssetFileDescriptor.UNKNOWN_LENGTH);
}
@Override
public Cursor queryChildDocuments(final String parentDocumentId, final String[] projection,
final String sortOrder) throws FileNotFoundException {
// Create a cursor with either the requested fields, or the default
// projection if "projection" is null.
final MatrixCursor result = new MatrixCursor(projection != null ? projection
: DEFAULT_DOCUMENT_PROJECTION);
final File parent = new File(parentDocumentId);
for (File file : parent.listFiles()) {
// Don't show hidden files/folders
if (!file.getName().startsWith(".")) {
// Adds the file's display name, MIME type, size, and so on.
includeFile(result, file);
}
}
return result;
}
@Override
public Cursor queryDocument(final String documentId, final String[] projection)
throws FileNotFoundException {
// Create a cursor with either the requested fields, or the default
// projection if "projection" is null.
final MatrixCursor result = new MatrixCursor(projection != null ? projection
: DEFAULT_DOCUMENT_PROJECTION);
includeFile(result, new File(documentId));
return result;
}
private void includeFile(final MatrixCursor result, final File file)
throws FileNotFoundException {
final MatrixCursor.RowBuilder row = result.newRow();
// These columns are required
row.add(Document.COLUMN_DOCUMENT_ID, file.getAbsolutePath());
row.add(Document.COLUMN_DISPLAY_NAME, file.getName());
String mimeType = getDocumentType(file.getAbsolutePath());
row.add(Document.COLUMN_MIME_TYPE, mimeType);
int flags = file.canWrite() ? Document.FLAG_SUPPORTS_DELETE | Document.FLAG_SUPPORTS_WRITE
: 0;
// We only show thumbnails for image files - expect a call to
// openDocumentThumbnail for each file that has
// this flag set
if (mimeType.startsWith("image/"))
flags |= Document.FLAG_SUPPORTS_THUMBNAIL;
row.add(Document.COLUMN_FLAGS, flags);
// COLUMN_SIZE is required, but can be null
row.add(Document.COLUMN_SIZE, file.length());
// These columns are optional
row.add(Document.COLUMN_LAST_MODIFIED, file.lastModified());
// Document.COLUMN_ICON can be a resource id identifying a custom icon.
// The system provides default icons
// based on mime type
// Document.COLUMN_SUMMARY is optional additional information about the
// file
}
@Override
public String getDocumentType(final String documentId) throws FileNotFoundException {
File file = new File(documentId);
if (file.isDirectory())
return Document.MIME_TYPE_DIR;
// From FileProvider.getType(Uri)
final int lastDot = file.getName().lastIndexOf('.');
if (lastDot >= 0) {
final String extension = file.getName().substring(lastDot + 1);
final String mime = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension);
if (mime != null) {
return mime;
}
}
return "application/octet-stream";
}
@Override
public void deleteDocument(final String documentId) throws FileNotFoundException {
new File(documentId).delete();
}
@Override
public ParcelFileDescriptor openDocument(final String documentId, final String mode,
final CancellationSignal signal) throws FileNotFoundException {
File file = new File(documentId);
final boolean isWrite = (mode.indexOf('w') != -1);
if (isWrite) {
return ParcelFileDescriptor.open(file, ParcelFileDescriptor.MODE_READ_WRITE);
} else {
return ParcelFileDescriptor.open(file, ParcelFileDescriptor.MODE_READ_ONLY);
}
}
@Override
public boolean onCreate() {
return true;
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.