gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.cmdline;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ComparisonChain;
import com.google.devtools.build.lib.util.StringCanonicalizer;
import com.google.devtools.build.lib.util.StringUtilities;
import com.google.devtools.build.lib.vfs.Canonicalizer;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamException;
import java.io.Serializable;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import javax.annotation.concurrent.Immutable;
/**
* Uniquely identifies a package, given a repository name and a package's path fragment.
*
* <p>The repository the build is happening in is the <i>default workspace</i>, and is identified
* by the workspace name "". Other repositories can be named in the WORKSPACE file. These
* workspaces are prefixed by {@literal @}.</p>
*/
@Immutable
public final class PackageIdentifier implements Comparable<PackageIdentifier>, Serializable {

  /**
   * A human-readable name for the repository.
   */
  public static final class RepositoryName {
    /**
     * Interning cache: each distinct valid name maps to a single canonical RepositoryName
     * instance. Weak values let unused entries be garbage-collected.
     */
    private static final LoadingCache<String, RepositoryName> repositoryNameCache =
        CacheBuilder.newBuilder()
            .weakValues()
            .build(
                new CacheLoader<String, RepositoryName> () {
                  @Override
                  public RepositoryName load(String name) throws TargetParsingException {
                    String errorMessage = validate(name);
                    if (errorMessage != null) {
                      errorMessage = "invalid repository name '"
                          + StringUtilities.sanitizeControlChars(name) + "': " + errorMessage;
                      throw new TargetParsingException(errorMessage);
                    }
                    return new RepositoryName(StringCanonicalizer.intern(name));
                  }
                });

    /**
     * Makes sure that name is a valid repository name and creates a new RepositoryName using it.
     *
     * @throws TargetParsingException if the name is invalid.
     */
    public static RepositoryName create(String name) throws TargetParsingException {
      try {
        return repositoryNameCache.get(name);
      } catch (ExecutionException e) {
        // The loader only throws TargetParsingException; surface that directly. Anything
        // else indicates an unexpected failure inside the cache machinery.
        Throwables.propagateIfInstanceOf(e.getCause(), TargetParsingException.class);
        throw new IllegalStateException("Failed to create RepositoryName from " + name, e);
      }
    }

    /** Either "" (the default repository) or a string of the form "@name". */
    private final String name;

    private RepositoryName(String name) {
      this.name = name;
    }

    /**
     * Performs validity checking. Returns null on success, an error message otherwise.
     */
    private static String validate(String name) {
      // The empty string denotes the default repository and is always valid.
      if (name.isEmpty()) {
        return null;
      }

      if (!name.startsWith("@")) {
        return "workspace name must start with '@'";
      }

      // "@" isn't a valid workspace name.
      if (name.length() == 1) {
        return "empty workspace name";
      }

      // Check for any character outside of [/0-9A-Za-z_.-]. Try to evaluate the
      // conditional quickly (by looking in decreasing order of character class
      // likelihood).
      if (name.startsWith("@/") || name.endsWith("/")) {
        return "workspace names cannot start nor end with '/'";
      } else if (name.contains("//")) {
        return "workspace names cannot contain multiple '/'s in a row";
      }

      // Index 0 is the leading '@', which was already checked above, so start at 1.
      for (int i = name.length() - 1; i >= 1; --i) {
        char c = name.charAt(i);
        if ((c < 'a' || c > 'z') && c != '_' && c != '-' && c != '/' && c != '.'
            && (c < '0' || c > '9') && (c < 'A' || c > 'Z')) {
          return "workspace names may contain only A-Z, a-z, 0-9, '-', '_', '.', and '/'";
        }
      }
      return null;
    }

    /**
     * Returns the repository name without the leading "{@literal @}". For the default repository,
     * returns "".
     */
    public String strippedName() {
      if (name.isEmpty()) {
        return name;
      }
      return name.substring(1);
    }

    /**
     * Returns if this is the default repository, that is, {@link #name} is "".
     */
    public boolean isDefault() {
      return name.isEmpty();
    }

    /**
     * Returns the repository name, with leading "{@literal @}" (or "" for the default repository).
     */
    // TODO(bazel-team): Use this over toString()- easier to track its usage.
    public String getName() {
      return name;
    }

    /**
     * Returns the repository name, with leading "{@literal @}" (or "" for the default repository).
     */
    @Override
    public String toString() {
      return name;
    }

    @Override
    public boolean equals(Object object) {
      if (this == object) {
        return true;
      }
      if (!(object instanceof RepositoryName)) {
        return false;
      }
      return name.equals(((RepositoryName) object).name);
    }

    @Override
    public int hashCode() {
      return name.hashCode();
    }
  }

  public static final String DEFAULT_REPOSITORY = "";
  public static final RepositoryName DEFAULT_REPOSITORY_NAME;

  static {
    try {
      DEFAULT_REPOSITORY_NAME = RepositoryName.create(DEFAULT_REPOSITORY);
    } catch (TargetParsingException e) {
      // "" is always a valid repository name, so this cannot happen in practice.
      throw new IllegalStateException(e);
    }
  }

  /**
   * Helper for serializing PackageIdentifiers.
   *
   * <p>PackageIdentifier's field should be final, but then it couldn't be deserialized. This
   * allows the fields to be deserialized and copied into a new PackageIdentifier.</p>
   */
  private static final class SerializationProxy implements Serializable {
    PackageIdentifier packageId;

    public SerializationProxy(PackageIdentifier packageId) {
      this.packageId = packageId;
    }

    private void writeObject(ObjectOutputStream out) throws IOException {
      // Write the repository as its string form; it is re-validated on read.
      out.writeObject(packageId.repository.toString());
      out.writeObject(packageId.pkgName);
    }

    private void readObject(ObjectInputStream in)
        throws IOException, ClassNotFoundException {
      try {
        packageId = new PackageIdentifier((String) in.readObject(), (PathFragment) in.readObject());
      } catch (TargetParsingException e) {
        // Chain the cause so the original validation failure isn't lost; this happens
        // during deserialization, not serialization.
        throw new IOException("Error deserializing package identifier: " + e.getMessage(), e);
      }
    }

    @SuppressWarnings("unused")
    private void readObjectNoData() throws ObjectStreamException {
    }

    private Object readResolve() {
      return packageId;
    }
  }

  // Temporary factory for identifiers without explicit repositories.
  // TODO(bazel-team): remove all usages of this.
  public static PackageIdentifier createInDefaultRepo(String name) {
    return createInDefaultRepo(new PathFragment(name));
  }

  public static PackageIdentifier createInDefaultRepo(PathFragment name) {
    try {
      return new PackageIdentifier(DEFAULT_REPOSITORY, name);
    } catch (TargetParsingException e) {
      // DEFAULT_REPOSITORY is always valid, so a failure here is a caller bug.
      throw new IllegalArgumentException("could not create package identifier for " + name
          + ": " + e.getMessage());
    }
  }

  /**
   * The identifier for this repository. This is either "" or prefixed with an "@",
   * e.g., "@myrepo".
   */
  private final RepositoryName repository;

  /** The name of the package. Canonical (i.e. x.equals(y) <=> x==y). */
  private final PathFragment pkgName;

  public PackageIdentifier(String repository, PathFragment pkgName) throws TargetParsingException {
    this(RepositoryName.create(repository), pkgName);
  }

  public PackageIdentifier(RepositoryName repository, PathFragment pkgName) {
    Preconditions.checkNotNull(repository);
    Preconditions.checkNotNull(pkgName);
    this.repository = repository;
    // Normalize then intern so that canonical identity equality holds for pkgName.
    this.pkgName = Canonicalizer.fragments().intern(pkgName.normalize());
  }

  /**
   * Parses a string of the form "[@repo]//package" or "package" into a PackageIdentifier,
   * validating both the repository and the package name.
   *
   * @throws TargetParsingException if either part fails validation.
   */
  public static PackageIdentifier parse(String input) throws TargetParsingException {
    String repo;
    String packageName;
    int packageStartPos = input.indexOf("//");
    if (packageStartPos > 0) {
      // "@repo//package" form.
      repo = input.substring(0, packageStartPos);
      packageName = input.substring(packageStartPos + 2);
    } else if (packageStartPos == 0) {
      // "//package" form: default repository.
      repo = PackageIdentifier.DEFAULT_REPOSITORY;
      packageName = input.substring(2);
    } else {
      // Bare "package" form: default repository.
      repo = PackageIdentifier.DEFAULT_REPOSITORY;
      packageName = input;
    }

    String error = RepositoryName.validate(repo);
    if (error != null) {
      throw new TargetParsingException(error);
    }

    error = LabelValidator.validatePackageName(packageName);
    if (error != null) {
      throw new TargetParsingException(error);
    }

    return new PackageIdentifier(repo, new PathFragment(packageName));
  }

  private Object writeReplace() {
    return new SerializationProxy(this);
  }

  private void readObject(ObjectInputStream in) throws IOException {
    throw new IOException("Serialization is allowed only by proxy");
  }

  @SuppressWarnings("unused")
  private void readObjectNoData() throws ObjectStreamException {
  }

  public RepositoryName getRepository() {
    return repository;
  }

  public PathFragment getPackageFragment() {
    return pkgName;
  }

  /**
   * Returns a relative path that should be unique across all remote repositories and packages,
   * based on the repository and package names.
   */
  public PathFragment getPathFragment() {
    return repository.isDefault() ? pkgName
        : new PathFragment("external").getRelative(repository.strippedName())
            .getRelative(pkgName);
  }

  /**
   * Returns the name of this package.
   *
   * <p>There are certain places that expect the path fragment as the package name ('foo/bar') as a
   * package identifier. This isn't specific enough for packages in other repositories, so their
   * stringified version is '@baz//foo/bar'.</p>
   */
  @Override
  public String toString() {
    return (repository.isDefault() ? "" : repository + "//") + pkgName;
  }

  @Override
  public boolean equals(Object object) {
    if (this == object) {
      return true;
    }
    if (!(object instanceof PackageIdentifier)) {
      return false;
    }
    PackageIdentifier that = (PackageIdentifier) object;
    return pkgName.equals(that.pkgName) && repository.equals(that.repository);
  }

  @Override
  public int hashCode() {
    return Objects.hash(repository, pkgName);
  }

  @Override
  public int compareTo(PackageIdentifier that) {
    // Order by repository first, then by package path.
    return ComparisonChain.start()
        .compare(repository.toString(), that.repository.toString())
        .compare(pkgName, that.pkgName)
        .result();
  }
}
| |
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.extractor.ts;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.extractor.ExtractorOutput;
import com.google.android.exoplayer2.extractor.MpegAudioHeader;
import com.google.android.exoplayer2.extractor.TrackOutput;
import com.google.android.exoplayer2.extractor.ts.TsPayloadReader.TrackIdGenerator;
import com.google.android.exoplayer2.util.ParsableByteArray;
/**
* Parses a continuous MPEG Audio byte stream and extracts individual frames.
*/
/* package */ final class MpegAudioReader implements ElementaryStreamReader {

  // State machine: scan for the 0xFF/0xEx sync pattern, then read the remaining
  // header bytes, then copy out the frame payload.
  private static final int STATE_FINDING_HEADER = 0;
  private static final int STATE_READING_HEADER = 1;
  private static final int STATE_READING_FRAME = 2;

  private static final int HEADER_SIZE = 4;

  /** Scratch buffer holding the (up to four) header bytes read so far. */
  private final ParsableByteArray headerScratch;
  private final MpegAudioHeader header;
  private final String language;

  private String formatId;
  private TrackOutput output;

  private int state;
  private int frameBytesRead;
  private boolean hasOutputFormat;

  // Used when finding the frame header.
  private boolean lastByteWasFF;

  // Parsed from the frame header.
  private long frameDurationUs;
  private int frameSize;

  // The timestamp to attach to the next sample in the current packet.
  private long timeUs;

  public MpegAudioReader() {
    this(null);
  }

  public MpegAudioReader(String language) {
    state = STATE_FINDING_HEADER;
    // The first byte of an MPEG Audio frame header is always 0xFF, so it is pre-filled
    // and only bytes 1-3 are ever written into the scratch by the reader.
    headerScratch = new ParsableByteArray(HEADER_SIZE);
    headerScratch.data[0] = (byte) 0xFF;
    header = new MpegAudioHeader();
    this.language = language;
  }

  @Override
  public void seek() {
    // Discard partial header/frame progress and restart the sync search.
    state = STATE_FINDING_HEADER;
    frameBytesRead = 0;
    lastByteWasFF = false;
  }

  @Override
  public void createTracks(ExtractorOutput extractorOutput, TrackIdGenerator idGenerator) {
    idGenerator.generateNewId();
    formatId = idGenerator.getFormatId();
    output = extractorOutput.track(idGenerator.getTrackId(), C.TRACK_TYPE_AUDIO);
  }

  @Override
  public void packetStarted(long pesTimeUs, boolean dataAlignmentIndicator) {
    timeUs = pesTimeUs;
  }

  @Override
  public void consume(ParsableByteArray data) {
    // Each helper consumes some input and may advance the state; loop until the
    // input is exhausted.
    while (data.bytesLeft() > 0) {
      switch (state) {
        case STATE_FINDING_HEADER:
          findHeader(data);
          break;
        case STATE_READING_HEADER:
          readHeaderRemainder(data);
          break;
        case STATE_READING_FRAME:
          readFrameRemainder(data);
          break;
      }
    }
  }

  @Override
  public void packetFinished() {
    // Do nothing.
  }

  /**
   * Attempts to locate the start of the next frame header.
   * <p>
   * If a frame header is located then the state is changed to {@link #STATE_READING_HEADER}, the
   * first two bytes of the header are written into {@link #headerScratch}, and the position of the
   * source is advanced to the byte that immediately follows these two bytes.
   * <p>
   * If a frame header is not located then the position of the source is advanced to the limit, and
   * the method should be called again with the next source to continue the search.
   *
   * @param source The source from which to read.
   */
  private void findHeader(ParsableByteArray source) {
    byte[] data = source.data;
    int startOffset = source.getPosition();
    int endOffset = source.limit();
    for (int i = startOffset; i < endOffset; i++) {
      boolean byteIsFF = (data[i] & 0xFF) == 0xFF;
      // Sync found when the previous byte was 0xFF and this byte's top three bits are set.
      // lastByteWasFF persists across calls so a sync word split across packets is found.
      boolean found = lastByteWasFF && (data[i] & 0xE0) == 0xE0;
      lastByteWasFF = byteIsFF;
      if (found) {
        source.setPosition(i + 1);
        // Reset lastByteWasFF for next time.
        lastByteWasFF = false;
        headerScratch.data[1] = data[i];
        frameBytesRead = 2;
        state = STATE_READING_HEADER;
        return;
      }
    }
    source.setPosition(endOffset);
  }

  /**
   * Attempts to read the remaining two bytes of the frame header.
   * <p>
   * If a frame header is read in full then the state is changed to {@link #STATE_READING_FRAME},
   * the media format is output if this has not previously occurred, the four header bytes are
   * output as sample data, and the position of the source is advanced to the byte that immediately
   * follows the header.
   * <p>
   * If a frame header is read in full but cannot be parsed then the state is changed to
   * {@link #STATE_READING_HEADER}.
   * <p>
   * If a frame header is not read in full then the position of the source is advanced to the limit,
   * and the method should be called again with the next source to continue the read.
   *
   * @param source The source from which to read.
   */
  private void readHeaderRemainder(ParsableByteArray source) {
    int bytesToRead = Math.min(source.bytesLeft(), HEADER_SIZE - frameBytesRead);
    source.readBytes(headerScratch.data, frameBytesRead, bytesToRead);
    frameBytesRead += bytesToRead;
    if (frameBytesRead < HEADER_SIZE) {
      // We haven't read the whole header yet.
      return;
    }

    headerScratch.setPosition(0);
    boolean parsedHeader = MpegAudioHeader.populateHeader(headerScratch.readInt(), header);
    if (!parsedHeader) {
      // We thought we'd located a frame header, but we hadn't.
      frameBytesRead = 0;
      state = STATE_READING_HEADER;
      return;
    }

    frameSize = header.frameSize;
    if (!hasOutputFormat) {
      frameDurationUs = (C.MICROS_PER_SECOND * header.samplesPerFrame) / header.sampleRate;
      Format format = Format.createAudioSampleFormat(formatId, header.mimeType, null,
          Format.NO_VALUE, MpegAudioHeader.MAX_FRAME_SIZE_BYTES, header.channels, header.sampleRate,
          null, null, 0, language);
      output.format(format);
      hasOutputFormat = true;
    }

    headerScratch.setPosition(0);
    output.sampleData(headerScratch, HEADER_SIZE);
    state = STATE_READING_FRAME;
  }

  /**
   * Attempts to read the remainder of the frame.
   * <p>
   * If a frame is read in full then true is returned. The frame will have been output, and the
   * position of the source will have been advanced to the byte that immediately follows the end of
   * the frame.
   * <p>
   * If a frame is not read in full then the position of the source will have been advanced to the
   * limit, and the method should be called again with the next source to continue the read.
   *
   * @param source The source from which to read.
   */
  private void readFrameRemainder(ParsableByteArray source) {
    int bytesToRead = Math.min(source.bytesLeft(), frameSize - frameBytesRead);
    output.sampleData(source, bytesToRead);
    frameBytesRead += bytesToRead;
    if (frameBytesRead < frameSize) {
      // We haven't read the whole of the frame yet.
      return;
    }

    output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, frameSize, 0, null);
    timeUs += frameDurationUs;
    frameBytesRead = 0;
    state = STATE_FINDING_HEADER;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.mina;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.charset.Charset;
import java.util.List;
import java.util.concurrent.ExecutorService;
import org.apache.camel.CamelException;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Processor;
import org.apache.camel.support.DefaultConsumer;
import org.apache.camel.support.ExchangeHelper;
import org.apache.camel.util.IOHelper;
import org.apache.mina.core.filterchain.DefaultIoFilterChainBuilder;
import org.apache.mina.core.filterchain.IoFilter;
import org.apache.mina.core.future.CloseFuture;
import org.apache.mina.core.future.ConnectFuture;
import org.apache.mina.core.service.IoAcceptor;
import org.apache.mina.core.service.IoConnector;
import org.apache.mina.core.service.IoHandlerAdapter;
import org.apache.mina.core.service.IoService;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.filter.codec.ProtocolCodecFactory;
import org.apache.mina.filter.codec.ProtocolCodecFilter;
import org.apache.mina.filter.codec.serialization.ObjectSerializationCodecFactory;
import org.apache.mina.filter.codec.textline.LineDelimiter;
import org.apache.mina.filter.executor.ExecutorFilter;
import org.apache.mina.filter.executor.OrderedThreadPoolExecutor;
import org.apache.mina.filter.executor.UnorderedThreadPoolExecutor;
import org.apache.mina.filter.logging.LoggingFilter;
import org.apache.mina.filter.ssl.SslFilter;
import org.apache.mina.transport.socket.nio.NioDatagramAcceptor;
import org.apache.mina.transport.socket.nio.NioSocketAcceptor;
import org.apache.mina.transport.socket.nio.NioSocketConnector;
import org.apache.mina.transport.vmpipe.VmPipeAcceptor;
import org.apache.mina.transport.vmpipe.VmPipeAddress;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A {@link org.apache.camel.Consumer Consumer} implementation for Apache MINA.
*/
public class MinaConsumer extends DefaultConsumer {
    private static final Logger LOG = LoggerFactory.getLogger(MinaConsumer.class);

    // Client mode: an active session/connector. Server mode: an acceptor bound to address.
    private IoSession session;
    private IoConnector connector;
    private SocketAddress address;
    private IoAcceptor acceptor;
    private final MinaConfiguration configuration;
    // Thread pool backing the MINA ExecutorFilter; shut down in doShutdown().
    private ExecutorService workerPool;

    /**
     * Creates the consumer and sets up the transport (tcp client/server, datagram, or vm)
     * selected by the endpoint's configured protocol.
     *
     * NOTE(review): an unrecognized protocol leaves both acceptor and connector null,
     * which would fail later in doStart() — presumably the endpoint validates the
     * protocol beforehand; confirm against MinaEndpoint.
     */
    public MinaConsumer(final MinaEndpoint endpoint, Processor processor) throws Exception {
        super(endpoint, processor);
        this.configuration = endpoint.getConfiguration();
        //
        // All mina endpoints are InOut. The endpoints are asynchronous.
        // Endpoints can send "n" messages and receive "m" messages.
        //
        this.getEndpoint().setExchangePattern(ExchangePattern.InOut);
        String protocol = configuration.getProtocol();
        if (protocol.equals("tcp")) {
            if (configuration.isClientMode()) {
                setupClientSocketProtocol(protocol, configuration);
            } else {
                setupSocketProtocol(protocol, configuration);
            }
        } else if (configuration.isDatagramProtocol()) {
            setupDatagramProtocol(protocol, configuration);
        } else if (protocol.equals("vm")) {
            setupVmProtocol(protocol, configuration);
        }
    }

    /**
     * Starts consuming: in tcp client mode connects to the remote server, otherwise
     * binds the acceptor to the configured address.
     */
    @Override
    protected void doStart() throws Exception {
        super.doStart();
        if (configuration.isClientMode() && configuration.getProtocol().equals("tcp")) {
            connector.setHandler(new ReceiveHandler());
            ConnectFuture future = connector.connect(address);
            // Block until the connection attempt completes.
            future.awaitUninterruptibly();
            session = future.getSession();
            LOG.info("Connected to server address: {} using connector: {} timeout: {} millis.", address, connector, configuration.getTimeout());
        } else {
            acceptor.setHandler(new ReceiveHandler());
            acceptor.bind(address);
            LOG.info("Bound to server address: {} using acceptor: {}", address, acceptor);
        }
    }

    /**
     * Stops consuming: closes the client session and disposes the connector, or
     * unbinds the acceptor (the acceptor itself is disposed in doShutdown()).
     */
    @Override
    protected void doStop() throws Exception {
        if (configuration.isClientMode() && configuration.getProtocol().equals("tcp")) {
            LOG.info("Disconnect from server address: {} using connector: {}", address, connector);
            if (session != null) {
                CloseFuture closeFuture = session.closeNow();
                closeFuture.awaitUninterruptibly();
            }
            connector.dispose(true);
        } else {
            LOG.info("Unbinding from server address: {} using acceptor: {}", address, acceptor);
            if (address instanceof InetSocketAddress) {
                // need to check if the address is IPV4 all network address
                if ("0.0.0.0".equals(((InetSocketAddress)address).getAddress().getHostAddress())) {
                    // Unbind every concrete local address the wildcard bind resolved to.
                    LOG.info("Unbind the server address {}", acceptor.getLocalAddresses());
                    acceptor.unbind(acceptor.getLocalAddresses());
                } else {
                    acceptor.unbind(address);
                }
            } else {
                acceptor.unbind(address);
            }
        }
        super.doStop();
    }

    /** Releases the worker pool and the acceptor once the consumer is shut down for good. */
    @Override
    protected void doShutdown() throws Exception {
        if (workerPool != null) {
            workerPool.shutdown();
        }
        if (acceptor != null) {
            acceptor.dispose(true);
        }
        super.doShutdown();
    }

    // Implementation methods
    //-------------------------------------------------------------------------

    /**
     * Configures an in-JVM (vm pipe) acceptor. SSL is not supported on this transport
     * and is only warned about.
     */
    protected void setupVmProtocol(String uri, MinaConfiguration configuration) {
        boolean minaLogger = configuration.isMinaLogger();
        List<IoFilter> filters = configuration.getFilters();
        address = new VmPipeAddress(configuration.getPort());
        acceptor = new VmPipeAcceptor();
        // acceptor connectorConfig
        configureCodecFactory("MinaConsumer", acceptor, configuration);
        if (minaLogger) {
            acceptor.getFilterChain().addLast("logger", new LoggingFilter());
        }
        appendIoFiltersToChain(filters, acceptor.getFilterChain());
        if (configuration.getSslContextParameters() != null) {
            LOG.warn("Using vm protocol"
                    + ", but an SSLContextParameters instance was provided. SSLContextParameters is only supported on the TCP protocol.");
        }
    }

    /**
     * Configures a TCP server socket acceptor with codec, thread pool, optional logging,
     * custom filters, and optional SSL (server mode).
     */
    protected void setupSocketProtocol(String uri, MinaConfiguration configuration) throws Exception {
        LOG.debug("createSocketEndpoint");
        boolean minaLogger = configuration.isMinaLogger();
        List<IoFilter> filters = configuration.getFilters();
        address = new InetSocketAddress(configuration.getHost(), configuration.getPort());

        // Size the NIO processor count relative to available cores.
        final int processorCount = Runtime.getRuntime().availableProcessors() + 1;
        acceptor = new NioSocketAcceptor(processorCount);

        // acceptor connectorConfig
        configureCodecFactory("MinaConsumer", acceptor, configuration);
        ((NioSocketAcceptor) acceptor).setReuseAddress(true);
        acceptor.setCloseOnDeactivation(true);

        // Ordered executor preserves per-session event ordering; unordered maximizes throughput.
        if (configuration.isOrderedThreadPoolExecutor()) {
            workerPool = new OrderedThreadPoolExecutor(configuration.getMaximumPoolSize());
        } else {
            workerPool = new UnorderedThreadPoolExecutor(configuration.getMaximumPoolSize());
        }
        acceptor.getFilterChain().addLast("threadPool", new ExecutorFilter(workerPool));
        if (minaLogger) {
            acceptor.getFilterChain().addLast("logger", new LoggingFilter());
        }
        appendIoFiltersToChain(filters, acceptor.getFilterChain());
        if (configuration.getSslContextParameters() != null) {
            SslFilter filter = new SslFilter(configuration.getSslContextParameters().createSSLContext(getEndpoint().getCamelContext()), configuration.isAutoStartTls());
            filter.setUseClientMode(false);
            // SSL must run before the codec/thread-pool filters.
            acceptor.getFilterChain().addFirst("sslFilter", filter);
        }
    }

    /**
     * Configures a TCP client connector (consumer connects out to a server), mirroring
     * the server-mode filter chain but in SSL client mode.
     */
    protected void setupClientSocketProtocol(String uri, MinaConfiguration configuration) throws Exception {
        boolean minaLogger = configuration.isMinaLogger();
        long timeout = configuration.getTimeout();
        List<IoFilter> filters = configuration.getFilters();
        address = new InetSocketAddress(configuration.getHost(), configuration.getPort());
        final int processorCount = Runtime.getRuntime().availableProcessors() + 1;
        connector = new NioSocketConnector(processorCount);
        if (configuration.isOrderedThreadPoolExecutor()) {
            workerPool = new OrderedThreadPoolExecutor(configuration.getMaximumPoolSize());
        } else {
            workerPool = new UnorderedThreadPoolExecutor(configuration.getMaximumPoolSize());
        }
        connector.getFilterChain().addLast("threadPool", new ExecutorFilter(workerPool));
        if (minaLogger) {
            connector.getFilterChain().addLast("logger", new LoggingFilter());
        }
        appendIoFiltersToChain(filters, connector.getFilterChain());
        if (configuration.getSslContextParameters() != null) {
            SslFilter filter = new SslFilter(configuration.getSslContextParameters().createSSLContext(getEndpoint().getCamelContext()), configuration.isAutoStartTls());
            filter.setUseClientMode(true);
            connector.getFilterChain().addFirst("sslFilter", filter);
        }
        configureCodecFactory("MinaConsumer", connector, configuration);
        connector.setConnectTimeoutMillis(timeout);
    }

    /**
     * Installs the user-supplied codec if one is configured, otherwise falls back to the
     * default codec (textline or Java object serialization) when allowed.
     */
    protected void configureCodecFactory(String type, IoService service, MinaConfiguration configuration) {
        if (configuration.getCodec() != null) {
            addCodecFactory(service, configuration.getCodec());
        } else if (configuration.isAllowDefaultCodec()) {
            configureDefaultCodecFactory(type, service, configuration);
        }
    }

    /**
     * Installs the default codec: a textline codec (charset + line delimiter, with optional
     * max line lengths) when textline mode is on, otherwise Java object serialization.
     */
    protected void configureDefaultCodecFactory(String type, IoService service, MinaConfiguration configuration) {
        if (configuration.isTextline()) {
            Charset charset = getEncodingParameter(type, configuration);
            LineDelimiter delimiter = getLineDelimiterParameter(configuration.getTextlineDelimiter());
            MinaTextLineCodecFactory codecFactory = new MinaTextLineCodecFactory(charset, delimiter);
            if (configuration.getEncoderMaxLineLength() > 0) {
                codecFactory.setEncoderMaxLineLength(configuration.getEncoderMaxLineLength());
            }
            if (configuration.getDecoderMaxLineLength() > 0) {
                codecFactory.setDecoderMaxLineLength(configuration.getDecoderMaxLineLength());
            }
            addCodecFactory(service, codecFactory);
            if (LOG.isDebugEnabled()) {
                LOG.debug("{}: Using TextLineCodecFactory: {} using encoding: {} line delimiter: {}({})",
                        new Object[]{type, codecFactory, charset, configuration.getTextlineDelimiter(), delimiter});
                LOG.debug("Encoder maximum line length: {}. Decoder maximum line length: {}",
                        codecFactory.getEncoderMaxLineLength(), codecFactory.getDecoderMaxLineLength());
            }
        } else {
            ObjectSerializationCodecFactory codecFactory = new ObjectSerializationCodecFactory();
            addCodecFactory(service, codecFactory);
            LOG.debug("{}: Using ObjectSerializationCodecFactory: {}", type, codecFactory);
        }
    }

    /**
     * Configures a UDP datagram acceptor with codec, thread pool, optional logging and
     * custom filters. SSL is not supported for datagrams and only warned about.
     */
    protected void setupDatagramProtocol(String uri, MinaConfiguration configuration) {
        boolean minaLogger = configuration.isMinaLogger();
        List<IoFilter> filters = configuration.getFilters();
        address = new InetSocketAddress(configuration.getHost(), configuration.getPort());
        acceptor = new NioDatagramAcceptor();

        // acceptor connectorConfig
        configureDataGramCodecFactory("MinaConsumer", acceptor, configuration);
        acceptor.setCloseOnDeactivation(true);
        // reuse address is default true for datagram
        if (configuration.isOrderedThreadPoolExecutor()) {
            workerPool = new OrderedThreadPoolExecutor(configuration.getMaximumPoolSize());
        } else {
            workerPool = new UnorderedThreadPoolExecutor(configuration.getMaximumPoolSize());
        }
        acceptor.getFilterChain().addLast("threadPool", new ExecutorFilter(workerPool));
        if (minaLogger) {
            acceptor.getFilterChain().addLast("logger", new LoggingFilter());
        }
        appendIoFiltersToChain(filters, acceptor.getFilterChain());
        if (configuration.getSslContextParameters() != null) {
            LOG.warn("Using datagram protocol, " + configuration.getProtocol()
                    + ", but an SSLContextParameters instance was provided. SSLContextParameters is only supported on the TCP protocol.");
        }
    }

    /**
     * For datagrams the entire message is available as a single IoBuffer so lets just pass those around by default
     * and try converting whatever they payload is into IoBuffer unless some custom converter is specified
     */
    protected void configureDataGramCodecFactory(final String type, final IoService service, final MinaConfiguration configuration) {
        ProtocolCodecFactory codecFactory = configuration.getCodec();
        if (codecFactory == null) {
            codecFactory = new MinaUdpProtocolCodecFactory(this.getEndpoint().getCamelContext());
            if (LOG.isDebugEnabled()) {
                LOG.debug("{}: Using CodecFactory: {}", type, codecFactory);
            }
        }
        addCodecFactory(service, codecFactory);
    }

    /** Adds the given codec factory as the "codec" filter on the service's chain. */
    private void addCodecFactory(IoService service, ProtocolCodecFactory codecFactory) {
        service.getFilterChain().addLast("codec", new ProtocolCodecFilter(codecFactory));
    }

    /** Maps the Camel delimiter enum to MINA's LineDelimiter; null means DEFAULT. */
    private static LineDelimiter getLineDelimiterParameter(MinaTextLineDelimiter delimiter) {
        if (delimiter == null) {
            return LineDelimiter.DEFAULT;
        }

        switch (delimiter) {
        case DEFAULT:
            return LineDelimiter.DEFAULT;
        case AUTO:
            return LineDelimiter.AUTO;
        case UNIX:
            return LineDelimiter.UNIX;
        case WINDOWS:
            return LineDelimiter.WINDOWS;
        case MAC:
            return LineDelimiter.MAC;
        default:
            throw new IllegalArgumentException("Unknown textline delimiter: " + delimiter);
        }
    }

    /**
     * Resolves the configured encoding (defaulting to the platform charset and writing
     * that default back into the configuration), validating it is supported.
     *
     * @throws IllegalArgumentException if the encoding is not supported by the JVM.
     */
    private Charset getEncodingParameter(String type, MinaConfiguration configuration) {
        String encoding = configuration.getEncoding();
        if (encoding == null) {
            encoding = Charset.defaultCharset().name();
            // set in on configuration so its updated
            configuration.setEncoding(encoding);
            LOG.debug("{}: No encoding parameter using default charset: {}", type, encoding);
        }
        if (!Charset.isSupported(encoding)) {
            throw new IllegalArgumentException("The encoding: " + encoding + " is not supported");
        }

        return Charset.forName(encoding);
    }

    /** Appends each configured custom IoFilter to the chain, keyed by its class name. */
    private void appendIoFiltersToChain(List<IoFilter> filters, DefaultIoFilterChainBuilder filterChain) {
        if (filters != null && filters.size() > 0) {
            for (IoFilter ioFilter : filters) {
                filterChain.addLast(ioFilter.getClass().getCanonicalName(), ioFilter);
            }
        }
    }

    @Override
    public MinaEndpoint getEndpoint() {
        return (MinaEndpoint) super.getEndpoint();
    }

    public IoAcceptor getAcceptor() {
        return acceptor;
    }

    public void setAcceptor(IoAcceptor acceptor) {
        this.acceptor = acceptor;
    }

    /**
     * Handles consuming messages and replying if the exchange is out capable.
     */
    private final class ReceiveHandler extends IoHandlerAdapter {

        @Override
        public void exceptionCaught(IoSession session, Throwable cause) throws Exception {
            if (cause instanceof IOException) {
                LOG.debug("IOExceptions are automatically handled by MINA");
                return;
            }
            // close invalid session
            if (session != null) {
                LOG.warn("Closing session as an exception was thrown from MINA");
                session.closeNow();
            }

            // must wrap and rethrow since cause can be of Throwable and we must only throw Exception
            throw new CamelException(cause);
        }

        @Override
        public void messageReceived(IoSession session, Object object) throws Exception {
            // log what we received
            if (LOG.isDebugEnabled()) {
                Object in = object;
                if (in instanceof byte[]) {
                    // byte arrays is not readable so convert to string
                    in = getEndpoint().getCamelContext().getTypeConverter().convertTo(String.class, in);
                }
                LOG.debug("Received body: {}", in);
            }

            Exchange exchange = getEndpoint().createExchange(session, object);
            //Set the exchange charset property for converting
            if (getEndpoint().getConfiguration().getCharsetName() != null) {
                exchange.setProperty(Exchange.CHARSET_NAME, IOHelper.normalizeCharset(getEndpoint().getConfiguration().getCharsetName()));
            }

            // Route the exchange; processing errors are delegated to the exception handler
            // and we still attempt to write a reply below.
            try {
                getProcessor().process(exchange);
            } catch (Throwable e) {
                getExceptionHandler().handleException(e);
            }

            //
            // If there's a response to send, send it.
            //
            boolean disconnect = getEndpoint().getConfiguration().isDisconnect();
            Object response = null;
            if (exchange.hasOut()) {
                response = MinaPayloadHelper.getOut(getEndpoint(), exchange);
            } else {
                response = MinaPayloadHelper.getIn(getEndpoint(), exchange);
            }

            // On failure (unless transferring the whole exchange), reply with the
            // exception itself, or the fault body when there is no exception.
            boolean failed = exchange.isFailed();
            if (failed && !getEndpoint().getConfiguration().isTransferExchange()) {
                if (exchange.getException() != null) {
                    response = exchange.getException();
                } else {
                    // failed and no exception, must be a fault
                    response = exchange.getOut().getBody();
                }
            }

            if (response != null) {
                LOG.debug("Writing body: {}", response);
                MinaHelper.writeBody(session, response, exchange, configuration.getWriteTimeout());
            } else {
                LOG.debug("Writing no response");
                // nothing to write back, so close the session when done
                disconnect = Boolean.TRUE;
            }

            // should session be closed after complete?
            Boolean close;
            if (ExchangeHelper.isOutCapable(exchange)) {
                close = exchange.getOut().getHeader(MinaConstants.MINA_CLOSE_SESSION_WHEN_COMPLETE, Boolean.class);
            } else {
                close = exchange.getIn().getHeader(MinaConstants.MINA_CLOSE_SESSION_WHEN_COMPLETE, Boolean.class);
            }

            // should we disconnect, the header can override the configuration
            if (close != null) {
                disconnect = close;
            }
            if (disconnect) {
                LOG.debug("Closing session when complete at address: {}", address);
                session.closeNow();
            }
        }
    }
}
| |
/*
* Copyright (c) 2010-2014 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.testing.sanity;
import com.evolveum.midpoint.xml.ns._public.common.api_types_3.ObjectDeltaOperationListType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.CredentialsType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectDeltaOperationType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.PasswordType;
import com.evolveum.prism.xml.ns._public.types_3.ChangeTypeType;
import com.evolveum.prism.xml.ns._public.types_3.ItemPathType;
import com.evolveum.prism.xml.ns._public.types_3.ObjectDeltaType;
import com.evolveum.prism.xml.ns._public.types_3.PolyStringType;
import com.evolveum.prism.xml.ns._public.types_3.ProtectedStringType;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.Validate;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
/**
 * Client-side helpers for building and parsing the XML/JAXB structures used when
 * talking to the midPoint model web service: common QName constants, credential and
 * password construction, DOM parsing, and delta-result inspection.
 *
 * @author Radovan Semancik
 */
public class ModelClientUtil {

    // XML constants
    public static final String NS_COMMON = "http://midpoint.evolveum.com/xml/ns/public/common/common-3";
    public static final QName COMMON_PATH = new QName(NS_COMMON, "path");
    public static final QName COMMON_VALUE = new QName(NS_COMMON, "value");
    public static final QName COMMON_GIVEN_NAME = new QName(NS_COMMON, "givenName");
    public static final QName COMMON_ASSIGNMENT = new QName(NS_COMMON, "assignment");
    public static final String NS_TYPES = "http://prism.evolveum.com/xml/ns/public/types-3";
    private static final QName TYPES_POLYSTRING_ORIG = new QName(NS_TYPES, "orig");
    public static final QName TYPES_CLEAR_VALUE = new QName(NS_TYPES, "clearValue");

    // NOTE(review): DocumentBuilder is not thread-safe; this shared static instance
    // assumes single-threaded use — confirm before calling parseElement/getDocument
    // from multiple threads.
    private static final DocumentBuilder domDocumentBuilder;

    static {
        try {
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            factory.setNamespaceAware(true);
            // NOTE(review): DTDs/external entities are not disabled here, so parsing
            // untrusted XML would be XXE-prone; inputs are assumed to be trusted test data.
            domDocumentBuilder = factory.newDocumentBuilder();
        } catch (ParserConfigurationException ex) {
            // chain the cause so the underlying parser problem is not lost
            throw new IllegalStateException("Error creating XML document " + ex.getMessage(), ex);
        }
    }

    /**
     * Creates a JAXB context covering all midPoint/prism schema packages used by the client.
     *
     * @throws JAXBException if the context cannot be created
     */
    public static JAXBContext instantiateJaxbContext() throws JAXBException {
        return JAXBContext.newInstance("com.evolveum.midpoint.xml.ns._public.common.api_types_3:" +
                "com.evolveum.midpoint.xml.ns._public.common.common_3:" +
                "com.evolveum.midpoint.xml.ns._public.common.fault_3:" +
                "com.evolveum.midpoint.xml.ns._public.connector.icf_1.connector_schema_3:" +
                "com.evolveum.midpoint.xml.ns._public.connector.icf_1.resource_schema_3:" +
                "com.evolveum.midpoint.xml.ns._public.resource.capabilities_3:" +
                "com.evolveum.prism.xml.ns._public.annotation_3:" +
                "com.evolveum.prism.xml.ns._public.query_3:" +
                "com.evolveum.prism.xml.ns._public.types_3:" +
                "org.w3._2000._09.xmldsig:" +
                "org.w3._2001._04.xmlenc");
    }

    /**
     * Creates a {@code path} element whose text is the given item path with the
     * common namespace declared as the default namespace.
     */
    public static Element createPathElement(String stringPath, Document doc) {
        String pathDeclaration = "declare default namespace '" + NS_COMMON + "'; " + stringPath;
        return createTextElement(COMMON_PATH, pathDeclaration, doc);
    }

    /** Creates an {@link ItemPathType} for the given path, defaulting to the common namespace. */
    public static ItemPathType createItemPathType(String stringPath) {
        String pathDeclaration = "declare default namespace '" + NS_COMMON + "'; " + stringPath;
        return new ItemPathType(pathDeclaration);
    }

    /**
     * Creates a {@link PolyStringType} with the given orig value.
     *
     * @param doc unused; kept only for signature compatibility with existing callers
     */
    public static PolyStringType createPolyStringType(String string, Document doc) {
        PolyStringType polyStringType = new PolyStringType();
        polyStringType.setOrig(string);
        return polyStringType;
    }

    /** Creates a DOM element with the given qualified name and text content. */
    public static Element createTextElement(QName qname, String value, Document doc) {
        Element element = doc.createElementNS(qname.getNamespaceURI(), qname.getLocalPart());
        element.setTextContent(value);
        return element;
    }

    /** Creates credentials carrying the given clear-text password. */
    public static CredentialsType createPasswordCredentials(String password) {
        CredentialsType credentialsType = new CredentialsType();
        credentialsType.setPassword(createPasswordType(password));
        return credentialsType;
    }

    /** Creates a {@link PasswordType} wrapping the given clear-text password. */
    public static PasswordType createPasswordType(String password) {
        PasswordType passwordType = new PasswordType();
        passwordType.setValue(createProtectedString(password));
        return passwordType;
    }

    /** Creates a {@link ProtectedStringType} holding the given clear (unencrypted) value. */
    public static ProtectedStringType createProtectedString(String clearValue) {
        ProtectedStringType protectedString = new ProtectedStringType();
        protectedString.setClearValue(clearValue);
        return protectedString;
    }

    /** Wraps a value in a {@link JAXBElement} under the given element name. */
    @SuppressWarnings("unchecked") // value.getClass() is the runtime class of T
    public static <T> JAXBElement<T> toJaxbElement(QName name, T value) {
        return new JAXBElement<>(name, (Class<T>) value.getClass(), value);
    }

    /**
     * Creates a new empty DOM document.
     *
     * @deprecated misspelled name kept for backward compatibility; use {@link #getDocument()}.
     */
    @Deprecated
    public static Document getDocumnent() {
        return getDocument();
    }

    /** Creates a new empty DOM document. */
    public static Document getDocument() {
        return domDocumentBuilder.newDocument();
    }

    /** Returns the type URI for the given object type, e.g. {@code ...common-3#UserType}. */
    public static String getTypeUri(Class<? extends ObjectType> type) {
        return NS_COMMON + "#" + type.getSimpleName();
    }

    /** Returns the type QName for the given object type in the common namespace. */
    public static QName getTypeQName(Class<? extends ObjectType> type) {
        return new QName(NS_COMMON, type.getSimpleName());
    }

    /**
     * Parses the given XML string (decoded as UTF-8) and returns its root element.
     *
     * @throws SAXException if the string is not well-formed XML
     * @throws IOException on read errors
     */
    public static Element parseElement(String stringXml) throws SAXException, IOException {
        Document document = domDocumentBuilder.parse(IOUtils.toInputStream(stringXml, "utf-8"));
        return getFirstChildElement(document);
    }

    /** Returns the first ELEMENT_NODE child of the given node, or null if there is none. */
    public static Element getFirstChildElement(Node parent) {
        if (parent == null || parent.getChildNodes() == null) {
            return null;
        }
        NodeList nodes = parent.getChildNodes();
        for (int i = 0; i < nodes.getLength(); i++) {
            Node child = nodes.item(i);
            if (child.getNodeType() == Node.ELEMENT_NODE) {
                return (Element) child;
            }
        }
        return null;
    }

    /**
     * Retrieves OID created by model Web Service from the returned list of ObjectDeltaOperations.
     *
     * @param operationListType result of the model web service executeChanges call
     * @param originalDelta original request used to find corresponding ObjectDeltaOperationType instance. Must be of ADD type.
     * @return OID if found, null otherwise
     * @throws IllegalArgumentException if the original delta is not an ADD or has no object-to-add
     *
     * PRELIMINARY IMPLEMENTATION. Currently the first returned ADD delta with the same object type as original delta is returned.
     */
    public static String getOidFromDeltaOperationList(ObjectDeltaOperationListType operationListType, ObjectDeltaType originalDelta) {
        Validate.notNull(operationListType);
        Validate.notNull(originalDelta);
        if (originalDelta.getChangeType() != ChangeTypeType.ADD) {
            throw new IllegalArgumentException("Original delta is not of ADD type");
        }
        if (originalDelta.getObjectToAdd() == null) {
            throw new IllegalArgumentException("Original delta contains no object-to-be-added");
        }
        for (ObjectDeltaOperationType operationType : operationListType.getDeltaOperation()) {
            ObjectDeltaType objectDeltaType = operationType.getObjectDelta();
            if (objectDeltaType.getChangeType() == ChangeTypeType.ADD &&
                    objectDeltaType.getObjectToAdd() != null) {
                ObjectType objectAdded = (ObjectType) objectDeltaType.getObjectToAdd();
                // match by object class only — see "preliminary implementation" note above
                if (objectAdded.getClass().equals(originalDelta.getObjectToAdd().getClass())) {
                    return objectAdded.getOid();
                }
            }
        }
        return null;
    }
}
| |
package com.bitdubai.fermat_wpd_plugin.layer.sub_app_module.wallet_store.developer.bitdubai.version_1.structure;
import com.bitdubai.fermat_api.FermatException;
import com.bitdubai.fermat_api.layer.all_definition.enums.Languages;
import com.bitdubai.fermat_api.layer.all_definition.enums.WalletCategory;
import com.bitdubai.fermat_api.layer.all_definition.enums.WalletType;
import com.bitdubai.fermat_api.layer.all_definition.resources_structure.enums.ScreenSize;
import com.bitdubai.fermat_api.layer.all_definition.util.Version;
import com.bitdubai.fermat_api.layer.dmp_identity.translator.interfaces.TranslatorIdentity;
import com.bitdubai.fermat_wpd_api.layer.wpd_middleware.wallet_manager.exceptions.CantFindProcessException;
import com.bitdubai.fermat_wpd_api.layer.wpd_middleware.wallet_manager.exceptions.CantInstallWalletException;
import com.bitdubai.fermat_wpd_api.layer.wpd_middleware.wallet_manager.interfaces.DealsWithWalletManager;
import com.bitdubai.fermat_wpd_api.layer.wpd_middleware.wallet_manager.interfaces.WalletInstallationProcess;
import com.bitdubai.fermat_wpd_api.layer.wpd_middleware.wallet_manager.interfaces.WalletManagerManager;
import com.bitdubai.fermat_wpd_api.layer.wpd_middleware.wallet_store.enums.CatalogItems;
import com.bitdubai.fermat_wpd_api.layer.wpd_middleware.wallet_store.enums.InstallationStatus;
import com.bitdubai.fermat_wpd_api.layer.wpd_middleware.wallet_store.exceptions.CantGetItemInformationException;
import com.bitdubai.fermat_wpd_api.layer.wpd_middleware.wallet_store.exceptions.CantSetInstallationStatusException;
import com.bitdubai.fermat_wpd_api.layer.wpd_middleware.wallet_store.interfaces.DealsWithWalletStoreMiddleware;
import com.bitdubai.fermat_wpd_api.layer.wpd_middleware.wallet_store.interfaces.WalletStoreManager;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.exceptions.CantGetRefinedCatalogException;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.exceptions.CantGetSkinVideoPreviewException;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.exceptions.CantGetWalletsFromCatalogueException;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.exceptions.CantStartInstallationException;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.exceptions.CantStartLanguageInstallationException;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.exceptions.CantStartSkinInstallationException;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.exceptions.CantStartUninstallLanguageException;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.exceptions.CantStartUninstallSkinException;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.exceptions.CantStartUninstallWalletException;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.exceptions.DatailedInformationNotFoundException;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.interfaces.WalletCatalogueFilter;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.interfaces.WalletStoreCatalogue;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.interfaces.WalletStoreCatalogueItem;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.interfaces.WalletStoreDetailedCatalogItem;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.interfaces.WalletStoreLanguage;
import com.bitdubai.fermat_wpd_api.layer.wpd_sub_app_module.wallet_store.interfaces.WalletStoreSkin;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.exceptions.CantGetCatalogItemException;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.exceptions.CantGetDesignerException;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.exceptions.CantGetDeveloperException;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.exceptions.CantGetLanguageException;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.exceptions.CantGetLanguagesException;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.exceptions.CantGetSkinException;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.exceptions.CantGetSkinsException;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.exceptions.CantGetWalletDetailsException;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.exceptions.CantGetWalletIconException;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.exceptions.CantGetWalletsCatalogException;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.interfaces.CatalogItem;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.interfaces.DealsWithWalletStoreNetworkService;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.interfaces.DetailedCatalogItem;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.interfaces.Language;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.interfaces.Skin;
import com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.interfaces.WalletCatalog;
import com.bitdubai.fermat_api.layer.osa_android.logger_system.DealsWithLogger;
import com.bitdubai.fermat_api.layer.osa_android.logger_system.LogManager;
import com.bitdubai.fermat_api.layer.pip_Identity.developer.interfaces.DeveloperIdentity;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.DealsWithErrors;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.ErrorManager;
import com.bitdubai.fermat_pip_api.layer.pip_user.device_user.interfaces.DealsWithDeviceUser;
import com.bitdubai.fermat_pip_api.layer.pip_user.device_user.interfaces.DeviceUserManager;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.logging.Logger;
/**
* Created by rodrigo on 7/29/15.
*/
public class WalletStoreModuleManager implements DealsWithErrors, DealsWithDeviceUser, DealsWithLogger, DealsWithWalletManager, DealsWithWalletStoreMiddleware, DealsWithWalletStoreNetworkService {
/**
* DealsWithErrors interface member variables
*/
ErrorManager errorManager;
/**
* DealsWithDeviceUser interface variables and implementation
*/
DeviceUserManager deviceUserManager;
/**
 * Stores the device user manager injected by the platform.
 */
@Override
public void setDeviceUserManager(DeviceUserManager deviceUserManager) {
    this.deviceUserManager = deviceUserManager;
}
/**
 * DealsWithLogger interface member variable
 */
LogManager logManager;
/**
* DealsWithWalletStoreMiddleware interface member variable
*/
WalletStoreManager walletStoreManagerMiddleware;
/**
* DealsWithWalletManager interface variable and implementation
*/
WalletManagerManager walletManagerManager;
/**
 * Stores the wallet manager injected by the platform and logs the injected
 * instance for debugging.
 */
@Override
public void setWalletManagerManager(WalletManagerManager walletManagerManager) {
    this.walletManagerManager = walletManagerManager;
    Logger.getGlobal().info("MAP_SETWMM:" + this.walletManagerManager);
}
/**
* DealsWithWalletStoreNetworkService interface member variable
*/
com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.interfaces.WalletStoreManager walletStoreManagerNetworkService;
/**
 * Constructor.
 *
 * @param errorManager platform error manager for reporting unexpected errors
 * @param logManager platform log manager
 * @param walletStoreManagerMiddleware middleware manager used to read/write installation status
 * @param walletStoreManagerNetworkService network-service manager used to fetch catalog items
 */
public WalletStoreModuleManager(ErrorManager errorManager, LogManager logManager, WalletStoreManager walletStoreManagerMiddleware, com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.interfaces.WalletStoreManager walletStoreManagerNetworkService) {
    this.errorManager = errorManager;
    this.logManager = logManager;
    this.walletStoreManagerMiddleware = walletStoreManagerMiddleware;
    this.walletStoreManagerNetworkService = walletStoreManagerNetworkService;
}
/**
 * DealsWithErrors interface implementation: stores the injected error manager.
 */
@Override
public void setErrorManager(ErrorManager errorManager) {
    this.errorManager = errorManager;
}
/**
 * DealsWithLogger interface implementation: stores the injected log manager.
 */
@Override
public void setLogManager(LogManager logManager) {
    this.logManager = logManager;
}
/**
 * DealsWithWalletStoreMiddleware interface implementation: stores the middleware-layer manager.
 */
@Override
public void setWalletStoreManager(WalletStoreManager walletStoreManager) {
    this.walletStoreManagerMiddleware = walletStoreManager;
}
/**
 * DealsWithWalletStoreNetworkService interface implementation: stores the
 * network-service-layer manager (same method name, different parameter type).
 */
@Override
public void setWalletStoreManager(com.bitdubai.fermat_wpd_api.layer.wpd_network_service.wallet_store.interfaces.WalletStoreManager walletStoreManager) {
    this.walletStoreManagerNetworkService = walletStoreManager;
}
/** Asks the middleware for the installation status of the given wallet catalog item. */
private InstallationStatus getWalletInstallationStatus(CatalogItem catalogItem) throws CantGetItemInformationException {
    return walletStoreManagerMiddleware.getInstallationStatus(CatalogItems.WALLET, catalogItem.getId());
}
/** Asks the middleware for the installation status of the given skin. */
private InstallationStatus getSkinInstallationStatus(UUID skinId) throws CantGetItemInformationException {
    return walletStoreManagerMiddleware.getInstallationStatus(CatalogItems.SKIN, skinId);
}
/** Asks the middleware for the installation status of the given language pack. */
private InstallationStatus getLanguageInstallationStatus(UUID languageId) throws CantGetItemInformationException {
    return walletStoreManagerMiddleware.getInstallationStatus(CatalogItems.LANGUAGE, languageId);
}
/**
 * Adapts a network-service {@link CatalogItem} into a module-level
 * {@link WalletStoreCatalogueItem}, attaching the given installation status.
 * Every other accessor simply delegates to the wrapped item.
 */
private WalletStoreCatalogueItem getWalletCatalogueItem(final CatalogItem catalogItem, final InstallationStatus installationStatus) {
    return new WalletStoreCatalogueItem() {
        @Override
        public InstallationStatus getInstallationStatus() {
            return installationStatus;
        }
        @Override
        public WalletStoreDetailedCatalogItem getWalletDetailedCatalogItem() throws DatailedInformationNotFoundException {
            // not implemented in this wrapper; callers receive null (kept as-is for compatibility)
            return null;
        }
        @Override
        public UUID getId() {
            return catalogItem.getId();
        }
        @Override
        public String getName() {
            return catalogItem.getName();
        }
        @Override
        public WalletCategory getCategory() {
            return catalogItem.getCategory();
        }
        @Override
        public String getDescription() {
            return catalogItem.getDescription();
        }
        @Override
        public int getDefaultSizeInBytes() {
            return catalogItem.getDefaultSizeInBytes();
        }
        @Override
        public byte[] getIcon() throws CantGetWalletIconException {
            return catalogItem.getIcon();
        }
        @Override
        public URL getpublisherWebsiteUrl() {
            return catalogItem.getpublisherWebsiteUrl();
        }
        @Override
        public DetailedCatalogItem getDetailedCatalogItemImpl() throws CantGetWalletDetailsException {
            return catalogItem.getDetailedCatalogItemImpl();
        }
    };
}
/** Fetches the designer identity from the wallet store network service. */
private com.bitdubai.fermat_api.layer.dmp_identity.designer.interfaces.DesignerIdentity getDesigner(UUID designerId) throws CantGetDesignerException {
    return walletStoreManagerNetworkService.getDesigner(designerId);
}
/**
 * Adapts a network-service {@link Skin} into a module-level {@link WalletStoreSkin},
 * attaching the given installation status. Every other accessor delegates to the
 * wrapped skin.
 */
private WalletStoreSkin getWalletStoreSkin(final Skin skin, final InstallationStatus installationStatus) {
    return new WalletStoreSkin() {
        @Override
        public InstallationStatus getInstallationStatus() {
            return installationStatus;
        }
        @Override
        public UUID getSkinId() {
            return skin.getSkinId();
        }
        @Override
        public String getSkinName() {
            return skin.getSkinName();
        }
        @Override
        public UUID getWalletId() {
            return skin.getWalletId();
        }
        @Override
        public Version getVersion() {
            return skin.getVersion();
        }
        @Override
        public Version getInitialWalletVersion() {
            return skin.getInitialWalletVersion();
        }
        @Override
        public Version getFinalWalletVersion() {
            return skin.getFinalWalletVersion();
        }
        @Override
        public byte[] getPresentationImage() throws CantGetWalletIconException {
            return skin.getPresentationImage();
        }
        @Override
        public List<byte[]> getPreviewImageList() throws CantGetWalletIconException {
            return skin.getPreviewImageList();
        }
        @Override
        public boolean hasVideoPreview() {
            return skin.hasVideoPreview();
        }
        @Override
        public List<URL> getVideoPreviews() throws CantGetSkinVideoPreviewException {
            return skin.getVideoPreviews();
        }
        @Override
        public long getSkinSizeInBytes() {
            return skin.getSkinSizeInBytes();
        }
        @Override
        public com.bitdubai.fermat_api.layer.dmp_identity.designer.interfaces.DesignerIdentity getDesigner() {
            return skin.getDesigner();
        }
        @Override
        public boolean isDefault() {
            return skin.isDefault();
        }
        @Override
        public ScreenSize getScreenSize() {
            return skin.getScreenSize();
        }
    };
}
/**
 * Adapts a network-service {@link Language} into a module-level
 * {@link WalletStoreLanguage}, attaching the given installation status. Every other
 * accessor delegates to the wrapped language.
 */
private WalletStoreLanguage getWalletStoreLanguage(final Language language, final InstallationStatus installationStatus) {
    return new WalletStoreLanguage() {
        @Override
        public InstallationStatus getInstallationStatus() {
            return installationStatus;
        }
        @Override
        public UUID getLanguageId() {
            return language.getLanguageId();
        }
        @Override
        public UUID getWalletId() {
            return language.getWalletId();
        }
        @Override
        public Languages getLanguageName() {
            return language.getLanguageName();
        }
        @Override
        public String getLanguageLabel() {
            return language.getLanguageLabel();
        }
        @Override
        public int getLanguagePackageSizeInBytes() {
            return language.getLanguagePackageSizeInBytes();
        }
        @Override
        public Version getVersion() {
            return language.getVersion();
        }
        @Override
        public Version getInitialWalletVersion() {
            return language.getInitialWalletVersion();
        }
        @Override
        public Version getFinalWalletVersion() {
            return language.getFinalWalletVersion();
        }
        @Override
        public TranslatorIdentity getTranslator() {
            return language.getTranslator();
        }
        @Override
        public boolean isDefault() {
            return language.isDefault();
        }
    };
}
/**
 * Adapts a network-service {@link DetailedCatalogItem} into a module-level
 * {@link WalletStoreDetailedCatalogItem}; every accessor delegates to the wrapped item.
 */
private WalletStoreDetailedCatalogItem getWalletStoreDetailedCatalogItem(final DetailedCatalogItem detailedCatalogItem) throws CantGetDeveloperException {
    return new WalletStoreDetailedCatalogItem() {
        @Override
        public Language getDefaultLanguage() throws CantGetLanguageException {
            return detailedCatalogItem.getDefaultLanguage();
        }
        @Override
        public List<Language> getLanguages() throws CantGetLanguagesException {
            return detailedCatalogItem.getLanguages();
        }
        @Override
        public Skin getDefaultSkin() throws CantGetSkinException {
            return detailedCatalogItem.getDefaultSkin();
        }
        @Override
        public List<Skin> getSkins() throws CantGetSkinsException {
            return detailedCatalogItem.getSkins();
        }
        @Override
        public Version getVersion() {
            return detailedCatalogItem.getVersion();
        }
        @Override
        public Version getPlatformInitialVersion() {
            return detailedCatalogItem.getPlatformInitialVersion();
        }
        @Override
        public Version getPlatformFinalVersion() {
            return detailedCatalogItem.getPlatformFinalVersion();
        }
        @Override
        public DeveloperIdentity getDeveloper() {
            return detailedCatalogItem.getDeveloper();
        }
        @Override
        public com.bitdubai.fermat_api.layer.dmp_identity.designer.interfaces.DesignerIdentity getDesigner() {
            return detailedCatalogItem.getDesigner();
        }
    };
}
/** Fetches the developer identity from the wallet store network service. */
private DeveloperIdentity getDeveloper(UUID developerId) throws CantGetDeveloperException {
    return walletStoreManagerNetworkService.getDeveloper(developerId);
}
/**
 * Installs the given language pack: marks the language and its wallet as INSTALLING,
 * performs the installation through the wallet manager, then marks both INSTALLED.
 * On failure both statuses are rolled back to NOT_INSTALLED.
 *
 * @param walletCatalogueId catalog id of the wallet the language belongs to
 * @param languageId id of the language pack to install
 * @throws CantStartLanguageInstallationException if the installation (or the rollback) fails
 */
public void installLanguage(UUID walletCatalogueId, UUID languageId) throws CantStartLanguageInstallationException {
    try {
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.LANGUAGE, languageId, InstallationStatus.INSTALLING);
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.WALLET, walletCatalogueId, InstallationStatus.INSTALLING);
        Language installingLanguage = getWalletLanguageFromWalletCatalogueId(walletCatalogueId);
        walletManagerManager.installLanguage(walletCatalogueId, languageId, installingLanguage.getLanguageName(), installingLanguage.getLanguageLabel(), installingLanguage.getVersion());
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.LANGUAGE, languageId, InstallationStatus.INSTALLED);
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.WALLET, walletCatalogueId, InstallationStatus.INSTALLED);
    } catch (Exception exception) {
        try {
            // roll back both statuses so a failed install is not left half-flagged
            walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.LANGUAGE, languageId, InstallationStatus.NOT_INSTALLED);
            walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.WALLET, walletCatalogueId, InstallationStatus.NOT_INSTALLED);
        } catch (CantSetInstallationStatusException e) {
            // previously the original failure was discarded here; keep it as the cause
            // and attach the rollback failure as suppressed so neither is lost
            exception.addSuppressed(e);
            throw new CantStartLanguageInstallationException(CantStartLanguageInstallationException.DEFAULT_MESSAGE, exception, null, null);
        }
        throw new CantStartLanguageInstallationException(CantStartLanguageInstallationException.DEFAULT_MESSAGE, exception, null, null);
    }
}
/**
 * Installs the given skin: marks the skin and its wallet as INSTALLING, performs the
 * installation through the wallet manager, then marks both INSTALLED. On failure
 * both statuses are rolled back to NOT_INSTALLED.
 *
 * @param walletCatalogueId catalog id of the wallet the skin belongs to
 * @param skinId id of the skin to install
 * @throws CantStartSkinInstallationException if the installation (or the rollback) fails
 */
public void installSkin(UUID walletCatalogueId, UUID skinId) throws CantStartSkinInstallationException {
    try {
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.SKIN, skinId, InstallationStatus.INSTALLING);
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.WALLET, walletCatalogueId, InstallationStatus.INSTALLING);
        Skin installingSkin = getWalletSkinFromWalletCatalogueId(walletCatalogueId);
        //We send null preview for now.
        walletManagerManager.installSkin(walletCatalogueId, skinId, installingSkin.getSkinName(), null, installingSkin.getVersion());
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.SKIN, skinId, InstallationStatus.INSTALLED);
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.WALLET, walletCatalogueId, InstallationStatus.INSTALLED);
    } catch (Exception exception) {
        try {
            // roll back both statuses so a failed install is not left half-flagged
            walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.SKIN, skinId, InstallationStatus.NOT_INSTALLED);
            walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.WALLET, walletCatalogueId, InstallationStatus.NOT_INSTALLED);
        } catch (CantSetInstallationStatusException e) {
            // previously the original failure was discarded here; keep it as the cause
            // and attach the rollback failure as suppressed so neither is lost
            exception.addSuppressed(e);
            throw new CantStartSkinInstallationException(CantStartSkinInstallationException.DEFAULT_MESSAGE, exception, null, null);
        }
        throw new CantStartSkinInstallationException(CantStartSkinInstallationException.DEFAULT_MESSAGE, exception, null, null);
    }
}
/**
 * Resolves the default language of the wallet identified by the given catalog id.
 *
 * @param walletCatalogueId catalog id of the wallet
 * @return the default language from the wallet's detailed catalog item
 * @throws CantGetLanguageException if the detailed catalog item cannot be fetched
 */
private Language getWalletLanguageFromWalletCatalogueId(UUID walletCatalogueId) throws CantGetLanguageException {
    try {
        DetailedCatalogItem detailedCatalogItem = walletStoreManagerNetworkService.getDetailedCatalogItem(walletCatalogueId);
        Logger.getGlobal().info("MAP_detailedCatalogItem:" + detailedCatalogItem);
        return detailedCatalogItem.getDefaultLanguage();
    } catch (CantGetCatalogItemException exception) {
        // use this exception's own default message (previously CantGetCatalogItemException's
        // message was used by copy-paste)
        throw new CantGetLanguageException(CantGetLanguageException.DEFAULT_MESSAGE, exception, "Cannot get the wallet language", "Please, check the cause");
    }
}
/**
 * Resolves the default skin of the wallet identified by the given catalog id.
 *
 * @param walletCatalogueId catalog id of the wallet
 * @return the default skin from the wallet's detailed catalog item
 * @throws CantGetSkinException if the detailed catalog item cannot be fetched
 */
private Skin getWalletSkinFromWalletCatalogueId(UUID walletCatalogueId) throws CantGetSkinException {
    try {
        // declare inside the try; the redundant null pre-initialization served no purpose
        DetailedCatalogItem detailedCatalogItem = walletStoreManagerNetworkService.getDetailedCatalogItem(walletCatalogueId);
        return detailedCatalogItem.getDefaultSkin();
    } catch (CantGetCatalogItemException exception) {
        // use this exception's own default message (previously CantGetCatalogItemException's
        // message was used by copy-paste)
        throw new CantGetSkinException(CantGetSkinException.DEFAULT_MESSAGE, exception, "Cannot get the wallet Skin", "Please, check the cause");
    }
}
/**
 * Returns the developer alias from the detailed catalog item, or the literal
 * "DefaultDeveloperAlias" when the item has no developer set.
 */
private String checkDeveloperAlias(DetailedCatalogItem detailedCatalogItem) {
    if (detailedCatalogItem.getDeveloper() == null) {
        return "DefaultDeveloperAlias";
    }
    return detailedCatalogItem.getDeveloper().getAlias();
}
/**
 * Falls back to {@code AMERICAN_ENGLISH} when the given language is null
 * (workaround until the publisher supplies a language name).
 */
private Languages checkLanguages(Languages languages) {
    return languages == null ? Languages.AMERICAN_ENGLISH : languages;
}
/**
 * Starts the installation of the given wallet from the wallet store catalog.
 *
 * <p>Marks the catalog item as {@code INSTALLING}, fetches the catalog item and
 * its detailed information from the network service, resolves the wallet's skin
 * and language, and hands everything to the wallet manager's installation
 * process. Missing publisher data (developer, language) is replaced with
 * defaults — see {@link #checkDeveloperAlias} and {@link #checkLanguages}.</p>
 *
 * @param walletCategory    category of the wallet to install
 * @param skinId            id of the skin to install with the wallet
 * @param languageId        id of the language to install with the wallet
 * @param walletCatalogueId catalog id of the wallet to install
 * @param version           version of the wallet to install
 * @throws CantStartInstallationException if any step of the installation
 *         start-up fails; the original failure is preserved as the cause
 */
public void installWallet(WalletCategory walletCategory, UUID skinId, UUID languageId, UUID walletCatalogueId, Version version) throws CantStartInstallationException {
    try {
        Logger LOG = Logger.getGlobal();
        LOG.info("MAP_CATALOGUE:" + walletCatalogueId);
        LOG.info("MAP_WMNS:" + walletStoreManagerNetworkService);
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.WALLET, walletCatalogueId, InstallationStatus.INSTALLING);
        CatalogItem catalogItem = walletStoreManagerNetworkService.getCatalogItem(walletCatalogueId);
        DetailedCatalogItem detailedCatalogItem = walletStoreManagerNetworkService.getDetailedCatalogItem(walletCatalogueId);
        LOG.info("MAP_WMM:" + walletManagerManager);
        WalletInstallationProcess walletInstallationProcess = walletManagerManager.installWallet(walletCategory, walletCatalogueId.toString());
        LOG.info("MAP_DUM:" + deviceUserManager);
        //DeviceUser deviceUser = deviceUserManager.getLoggedInDeviceUser();
        Skin skin = getWalletSkinFromWalletCatalogueId(walletCatalogueId);
        Language language = getWalletLanguageFromWalletCatalogueId(walletCatalogueId);
        //TODO: when we fix the publisher, delete this, please.
        Languages languageName = checkLanguages(language.getLanguageName());
        String developerAlias = checkDeveloperAlias(detailedCatalogItem);
        /*
        For now, we'll pass null to the walletPrivateKey, walletIconName, skinPreview method arguments
        TODO: Get the real values for this null objects.
        */
        LOG.info("MAP_STORE_MODULE:" + walletInstallationProcess);
        LOG.info("MAP_NAME:" + catalogItem.getName());
        LOG.info("MAP_ID:" + catalogItem.getId());
        LOG.info("MAP_VERSION:" + detailedCatalogItem.getVersion());
        LOG.info("MAP_SCREENS:" + skin.getScreenSize().getCode());
        LOG.info("MAP_SKIN_VERSION:" + skin.getVersion());
        LOG.info("MAP_SKIN_NAME:" + skin.getSkinName());
        LOG.info("MAP_LANGUAGE_VERSION:" + language.getVersion());
        LOG.info("MAP_LANGUAGE_NAME:" + language.getLanguageName());
        LOG.info("MAP_FIX_LANGUAGE_NAME:" + languageName);
        LOG.info("MAP_LANGUAGE_LABEL:" + language.getLanguageLabel());
        LOG.info("MAP_DEVELOPER_ALIAS:" + developerAlias);
        LOG.info("MAP_VERSION:" + version);
        walletInstallationProcess.startInstallation(WalletType.NICHE, catalogItem.getName(),
                catalogItem.getId().toString(), null, /*deviceUser.getPublicKey()*/"testPublicKey", null,
                walletCatalogueId, detailedCatalogItem.getVersion(), skin.getScreenSize().getCode(),
                skinId, skin.getVersion(), skin.getSkinName(), null, languageId,
                language.getVersion(), /*language.getLanguageName()*/languageName, language.getLanguageLabel(),
                /*detailedCatalogItem.getDeveloper().getAlias()*/developerAlias, version.toString()/*"1.0.0"*/);
    } catch (CantSetInstallationStatusException exception) {
        // Fixed typo in the user-facing context string ("instalation").
        throw new CantStartInstallationException(CantSetInstallationStatusException.DEFAULT_MESSAGE, exception, "Cannot set the installation status", "Please, check the cause");
    } catch (CantGetCatalogItemException exception) {
        throw new CantStartInstallationException(CantGetCatalogItemException.DEFAULT_MESSAGE, exception, "Cannot get the catalog items", "Please, check the cause");
    } catch (CantFindProcessException exception) {
        throw new CantStartInstallationException(CantFindProcessException.DEFAULT_MESSAGE, exception, "Cannot get the WalletInstallationProcess", "Please, check the cause");
    } /*catch (CantGetLoggedInDeviceUserException exception) {
        throw new CantStartInstallationException(CantGetLoggedInDeviceUserException.DEFAULT_MESSAGE, exception, "Cannot get the Device user", "Please, check the cause");
    }*/ catch (CantGetSkinException exception) {
        throw new CantStartInstallationException(CantGetSkinException.DEFAULT_MESSAGE, exception, "Cannot get the wallet Skin", "Please, check the cause");
    } catch (CantGetLanguageException exception) {
        throw new CantStartInstallationException(CantGetLanguageException.DEFAULT_MESSAGE, exception, "Cannot get the wallet language", "Please, check the cause");
    } catch (CantInstallWalletException exception) {
        // Use the wrapped exception's own DEFAULT_MESSAGE, consistent with every other catch above.
        throw new CantStartInstallationException(CantInstallWalletException.DEFAULT_MESSAGE, exception, "Trying to install a new wallet", "Please, check the cause");
    } catch (Exception exception) {
        throw new CantStartInstallationException(CantStartInstallationException.DEFAULT_MESSAGE, FermatException.wrapException(exception), "Trying to install a new wallet", "Please, check the cause");
    }
}
/**
 * Uninstalls the specified language and marks both the language and its
 * owning wallet as uninstalled.
 *
 * <p>On any failure, both statuses are rolled back to
 * {@code NOT_UNINSTALLED} so the catalog does not stay stuck in
 * {@code UNINSTALLING}.</p>
 *
 * @param walletCatalogueId catalog id of the wallet the language belongs to
 * @param languageId        id of the language to uninstall
 * @throws CantStartUninstallLanguageException if the uninstall or the status
 *         bookkeeping fails; the root cause is preserved
 */
public void uninstallLanguage(UUID walletCatalogueId, UUID languageId) throws CantStartUninstallLanguageException {
    try {
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.LANGUAGE, languageId, InstallationStatus.UNINSTALLING);
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.WALLET, walletCatalogueId, InstallationStatus.UNINSTALLING);
        walletManagerManager.uninstallLanguage(walletCatalogueId, languageId);
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.LANGUAGE, languageId, InstallationStatus.UNINSTALLED);
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.WALLET, walletCatalogueId, InstallationStatus.UNINSTALLED);
    } catch (Exception exception) {
        try {
            walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.LANGUAGE, languageId, InstallationStatus.NOT_UNINSTALLED);
            walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.WALLET, walletCatalogueId, InstallationStatus.NOT_UNINSTALLED);
        } catch (CantSetInstallationStatusException e) {
            // Keep the original failure visible instead of discarding it.
            e.addSuppressed(exception);
            throw new CantStartUninstallLanguageException(CantStartUninstallLanguageException.DEFAULT_MESSAGE, e, null, null);
        }
        throw new CantStartUninstallLanguageException(CantStartUninstallLanguageException.DEFAULT_MESSAGE, exception, null, null);
    }
}
/**
 * Uninstalls the specified skin and marks both the skin and its owning
 * wallet as uninstalled.
 *
 * <p>On any failure, both statuses are rolled back to
 * {@code NOT_UNINSTALLED} so the catalog does not stay stuck in
 * {@code UNINSTALLING}.</p>
 *
 * @param walletCatalogueId catalog id of the wallet the skin belongs to
 * @param skinId            id of the skin to uninstall
 * @throws CantStartUninstallSkinException if the uninstall or the status
 *         bookkeeping fails; the root cause is preserved
 */
public void uninstallSkin(UUID walletCatalogueId, UUID skinId) throws CantStartUninstallSkinException {
    try {
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.SKIN, skinId, InstallationStatus.UNINSTALLING);
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.WALLET, walletCatalogueId, InstallationStatus.UNINSTALLING);
        walletManagerManager.uninstallSkin(walletCatalogueId, skinId);
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.SKIN, skinId, InstallationStatus.UNINSTALLED);
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.WALLET, walletCatalogueId, InstallationStatus.UNINSTALLED);
    } catch (Exception exception) {
        try {
            walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.SKIN, skinId, InstallationStatus.NOT_UNINSTALLED);
            walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.WALLET, walletCatalogueId, InstallationStatus.NOT_UNINSTALLED);
        } catch (CantSetInstallationStatusException e) {
            // Keep the original failure visible instead of discarding it.
            e.addSuppressed(exception);
            throw new CantStartUninstallSkinException(CantStartUninstallSkinException.DEFAULT_MESSAGE, e, null, null);
        }
        throw new CantStartUninstallSkinException(CantStartUninstallSkinException.DEFAULT_MESSAGE, exception, null, null);
    }
}
/**
 * Marks the specified wallet as being uninstalled.
 *
 * <p>NOTE(review): unlike {@link #uninstallSkin} and
 * {@link #uninstallLanguage}, this only flips the status to
 * {@code UNINSTALLING} and never invokes the wallet manager nor marks the
 * item {@code UNINSTALLED} — presumably another component completes the
 * uninstall; confirm against the callers.</p>
 *
 * @param walletCatalogueId catalog id of the wallet to uninstall
 * @throws CantStartUninstallWalletException if the status cannot be updated
 */
public void uninstallWallet(UUID walletCatalogueId) throws CantStartUninstallWalletException {
    try {
        walletStoreManagerMiddleware.setInstallationStatus(CatalogItems.WALLET, walletCatalogueId, InstallationStatus.UNINSTALLING);
    } catch (Exception cause) {
        throw new CantStartUninstallWalletException(CantStartUninstallWalletException.DEFAULT_MESSAGE, cause, null, null);
    }
}
/**
 * Returns the Wallet Store catalog, with the installation status of each
 * catalog item resolved locally (items whose status cannot be determined are
 * reported as {@code NOT_INSTALLED}).
 *
 * @return a snapshot {@link WalletStoreCatalogue}; its filter methods are
 *         accepted but have no effect, and paging arguments are ignored
 * @throws CantGetRefinedCatalogException if the network catalog cannot be
 *         fetched or refined
 */
public WalletStoreCatalogue getCatalogue() throws CantGetRefinedCatalogException {
    try {
        final List<WalletStoreCatalogueItem> walletStoreCatalogueItemList = new ArrayList<>();
        WalletCatalog walletCatalog = walletStoreManagerNetworkService.getWalletCatalogue();
        for (CatalogItem catalogItem : walletCatalog.getWalletCatalog(0, 0)) {
            InstallationStatus installationStatus;
            try {
                installationStatus = getWalletInstallationStatus(catalogItem);
            } catch (Exception e) {
                // Best effort: treat an unresolvable status as "not installed"
                // rather than failing the whole catalog.
                installationStatus = InstallationStatus.NOT_INSTALLED;
            }
            walletStoreCatalogueItemList.add(getWalletCatalogueItem(catalogItem, installationStatus));
        }
        // Removed the System.out.println debug residue that printed the
        // anonymous catalogue object's identity hash.
        return new WalletStoreCatalogue() {
            @Override
            public List<WalletStoreCatalogueItem> getWalletCatalogue(int offset, int top) throws CantGetWalletsFromCatalogueException {
                return walletStoreCatalogueItemList;
            }
            @Override
            public void addFilter(WalletCatalogueFilter walletFilter) {
            }
            @Override
            public void clearFilters() {
            }
        };
    } catch (Exception exception) {
        throw new CantGetRefinedCatalogException(CantGetRefinedCatalogException.DEFAULT_MESSAGE, exception, null, null);
    }
}
/**
 * Fetches the detailed catalog information for the given wallet and maps it
 * to a {@link WalletStoreDetailedCatalogItem}.
 *
 * @param walletCatalogId catalog id of the wallet to look up
 * @return the detailed catalog item for the wallet
 * @throws CantGetWalletsCatalogException if the item cannot be fetched or
 *         mapped; the original failure is preserved as the cause
 */
public WalletStoreDetailedCatalogItem getCatalogItemDetails(UUID walletCatalogId) throws CantGetWalletsCatalogException {
    try {
        DetailedCatalogItem detailedItem = walletStoreManagerNetworkService.getDetailedCatalogItem(walletCatalogId);
        return getWalletStoreDetailedCatalogItem(detailedItem);
    } catch (Exception exception) {
        throw new CantGetWalletsCatalogException(CantGetWalletsCatalogException.DEFAULT_MESSAGE, exception, null, null);
    }
}
}
| |
package com.apexmob.skink.listeners;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.apexmob.skink.MockNodeListener;
import com.apexmob.skink.NodeListener;
import com.apexmob.skink.ParsingTest;
import com.apexmob.skink.StartElement;
import com.apexmob.skink.Text;
/**
 * Unit tests for {@code FilteringNodeListener}: verifies that start/end
 * element and text events are propagated to one or more downstream listeners
 * according to the include/exclude decision and the active
 * {@code PropagationPolicy}.
 */
public class FilteringNodeListenerTest extends ParsingTest {

    MockNodeListener mock1 = null;
    MockNodeListener mock2 = null;
    /** Filter wrapping mock1 only. */
    MockFilteringNodeListener listener = null;
    /** Filter wrapping both mock1 and mock2. */
    MockFilteringNodeListener listener2 = null;

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    @Before
    public void setUp() {
        mock1 = new MockNodeListener();
        mock2 = new MockNodeListener();
        listener = new MockFilteringNodeListener(mock1);
        List<NodeListener> mocks = new ArrayList<NodeListener>();
        mocks.add(mock1);
        mocks.add(mock2);
        listener2 = new MockFilteringNodeListener(mocks);
    }

    @After
    public void tearDown() {
        mock1 = null;
        mock2 = null;
        listener = null;
        listener2 = null;
    }

    @Test
    public void testSingleElementIncludeWithOneListener_PolicyDefault() {
        listener.includeElement = true;
        listener.onStartElement(buildStartElement("<div>"));
        listener.onText(buildText("test"));
        listener.onEndElement(buildEndElement("</div>"));
        assertEquals(1, mock1.getStartElementCount());
        assertEquals(1, mock1.getEndElementCount());
        assertEquals(1, mock1.getTextCount());
    }

    @Test
    public void testSingleElementIncludeWithOneListener_PolicyAll() {
        listener.setPropagationPolicy(PropagationPolicy.All);
        listener.includeElement = true;
        listener.onStartElement(buildStartElement("<div>"));
        listener.onText(buildText("test"));
        listener.onEndElement(buildEndElement("</div>"));
        assertEquals(1, mock1.getStartElementCount());
        assertEquals(1, mock1.getEndElementCount());
        assertEquals(1, mock1.getTextCount());
    }

    @Test
    public void testSingleElementIncludeWithOneListener_PolicyTagsOnly() {
        listener.setPropagationPolicy(PropagationPolicy.ElementsOnly);
        listener.includeElement = true;
        listener.onStartElement(buildStartElement("<div>"));
        listener.onText(buildText("test"));
        listener.onEndElement(buildEndElement("</div>"));
        assertEquals(1, mock1.getStartElementCount());
        assertEquals(1, mock1.getEndElementCount());
        assertEquals(0, mock1.getTextCount());
    }

    @Test
    public void testSingleElementIncludeWithOneListener_PolicyParentTagOnly() {
        listener.setPropagationPolicy(PropagationPolicy.ParentElementOnly);
        listener.includeElement = true;
        listener.onStartElement(buildStartElement("<div>"));
        listener.onText(buildText("test"));
        listener.onEndElement(buildEndElement("</div>"));
        assertEquals(1, mock1.getStartElementCount());
        assertEquals(1, mock1.getEndElementCount());
        assertEquals(0, mock1.getTextCount());
    }

    // Renamed from the typo'd "testSingleElementExcludeWithOneListenert".
    @Test
    public void testSingleElementExcludeWithOneListener() {
        listener.includeElement = false;
        listener.onStartElement(buildStartElement("<div>"));
        listener.onText(buildText("test"));
        listener.onEndElement(buildEndElement("</div>"));
        assertEquals(0, mock1.getStartElementCount());
        assertEquals(0, mock1.getEndElementCount());
        assertEquals(0, mock1.getTextCount());
    }

    @Test
    public void testMultipleElementsIncludeParentWithOneListener_PolicyDefault() {
        listener.includeElement = true;
        listener.onStartElement(buildStartElement("<div>"));
        listener.onText(buildText("one"));
        listener.onStartElement(buildStartElement("<span>"));
        listener.onText(buildText("two"));
        listener.onEndElement(buildEndElement("</span>"));
        listener.onText(buildText("three"));
        listener.onEndElement(buildEndElement("</div>"));
        assertEquals(2, mock1.getStartElementCount());
        assertEquals(2, mock1.getEndElementCount());
        assertEquals(3, mock1.getTextCount());
    }

    @Test
    public void testMultipleElementsIncludeParentWithOneListener_PolicyAll() {
        listener.setPropagationPolicy(PropagationPolicy.All);
        listener.includeElement = true;
        listener.onStartElement(buildStartElement("<div>"));
        listener.onText(buildText("one"));
        listener.onStartElement(buildStartElement("<span>"));
        listener.onText(buildText("two"));
        listener.onEndElement(buildEndElement("</span>"));
        listener.onText(buildText("three"));
        listener.onEndElement(buildEndElement("</div>"));
        assertEquals(2, mock1.getStartElementCount());
        assertEquals(2, mock1.getEndElementCount());
        assertEquals(3, mock1.getTextCount());
    }

    @Test
    public void testMultipleElementsIncludeParentWithOneListener_PolicyTagsOnly() {
        listener.setPropagationPolicy(PropagationPolicy.ElementsOnly);
        listener.includeElement = true;
        listener.onStartElement(buildStartElement("<div>"));
        listener.onText(buildText("one"));
        listener.onStartElement(buildStartElement("<span>"));
        listener.onText(buildText("two"));
        listener.onEndElement(buildEndElement("</span>"));
        listener.onText(buildText("three"));
        listener.onEndElement(buildEndElement("</div>"));
        assertEquals(2, mock1.getStartElementCount());
        assertEquals(2, mock1.getEndElementCount());
        assertEquals(0, mock1.getTextCount());
    }

    @Test
    public void testMultipleElementsIncludeParentWithOneListener_PolicyParentTagOnly() {
        listener.setPropagationPolicy(PropagationPolicy.ParentElementOnly);
        listener.includeElement = true;
        listener.onStartElement(buildStartElement("<div>"));
        listener.onText(buildText("one"));
        listener.onStartElement(buildStartElement("<span>"));
        listener.onText(buildText("two"));
        listener.onEndElement(buildEndElement("</span>"));
        listener.onText(buildText("three"));
        listener.onEndElement(buildEndElement("</div>"));
        assertEquals(1, mock1.getStartElementCount());
        assertEquals(1, mock1.getEndElementCount());
        assertEquals(0, mock1.getTextCount());
        assertStartElement(mock1, 0, "div");
        assertEndElement(mock1, 1, "div");
    }

    @Test
    public void testMultipleElementsIncludeChildWithOneListener() {
        listener.includeElement = false;
        listener.onStartElement(buildStartElement("<div>"));
        listener.includeElement = true;
        listener.onStartElement(buildStartElement("<span>"));
        listener.onEndElement(buildEndElement("</span>"));
        listener.onEndElement(buildEndElement("</div>"));
        assertEquals(1, mock1.getStartElementCount());
        assertEquals(1, mock1.getEndElementCount());
        assertEquals(0, mock1.getTextCount());
        assertEquals("span", mock1.getFirstStartElementName());
        assertEquals("span", mock1.getFirstEndElementName());
    }

    @Test
    public void testSingleElementIncludeWithTwoListeners() {
        listener2.includeElement = true;
        listener2.onStartElement(buildStartElement("<div>"));
        listener2.onText(buildText("test"));
        listener2.onEndElement(buildEndElement("</div>"));
        assertEquals(1, mock1.getStartElementCount());
        assertEquals(1, mock1.getEndElementCount());
        assertEquals(1, mock1.getTextCount());
        assertEquals(1, mock2.getStartElementCount());
        assertEquals(1, mock2.getEndElementCount());
        assertEquals(1, mock2.getTextCount());
    }

    @Test
    public void testSingleElementExcludeWithTwoListeners() {
        listener2.includeElement = false;
        listener2.onStartElement(buildStartElement("<div>"));
        listener2.onText(buildText("test"));
        listener2.onEndElement(buildEndElement("</div>"));
        assertEquals(0, mock1.getStartElementCount());
        assertEquals(0, mock1.getEndElementCount());
        assertEquals(0, mock1.getTextCount());
        assertEquals(0, mock2.getStartElementCount());
        assertEquals(0, mock2.getEndElementCount());
        assertEquals(0, mock2.getTextCount());
    }

    @Test
    public void testMultipleElementsIncludeParentWithTwoListeners() {
        listener2.includeElement = true;
        listener2.onStartElement(buildStartElement("<div>"));
        listener2.onStartElement(buildStartElement("<span>"));
        listener2.onEndElement(buildEndElement("</span>"));
        listener2.onEndElement(buildEndElement("</div>"));
        assertEquals(2, mock1.getStartElementCount());
        assertEquals(2, mock1.getEndElementCount());
        assertEquals(0, mock1.getTextCount());
        assertEquals(2, mock2.getStartElementCount());
        assertEquals(2, mock2.getEndElementCount());
        assertEquals(0, mock2.getTextCount());
    }

    @Test
    public void testMultipleElementsIncludeChildWithTwoListeners() {
        listener2.includeElement = false;
        listener2.onStartElement(buildStartElement("<div>"));
        listener2.includeElement = true;
        listener2.onStartElement(buildStartElement("<span>"));
        listener2.onEndElement(buildEndElement("</span>"));
        listener2.onEndElement(buildEndElement("</div>"));
        assertEquals(1, mock1.getStartElementCount());
        assertEquals(1, mock1.getEndElementCount());
        assertEquals(0, mock1.getTextCount());
        assertEquals("span", mock1.getFirstStartElementName());
        assertEquals("span", mock1.getFirstEndElementName());
        assertEquals(1, mock2.getStartElementCount());
        assertEquals(1, mock2.getEndElementCount());
        assertEquals(0, mock2.getTextCount());
        assertEquals("span", mock2.getFirstStartElementName());
        assertEquals("span", mock2.getFirstEndElementName());
    }

    @Test
    public void includeTextListensForText() {
        listener.includeText = true;
        listener.onStartElement(buildStartElement("<div>"));
        listener.onText(buildText("test"));
        // NOTE(review): feeds an end tag through onStartElement — probably
        // intended onEndElement(buildEndElement("</div>")); kept as-is since
        // the assertions only inspect text counts. Confirm and fix separately.
        listener.onStartElement(buildStartElement("</div>"));
        assertEquals(1, mock1.getTextCount());
        assertEquals("test", mock1.getFirstText());
    }

    @Test
    public void excludeTextDoesNotListenForText() {
        listener.includeText = false;
        listener.onStartElement(buildStartElement("<div>"));
        listener.onText(buildText("test"));
        // NOTE(review): same suspicious onStartElement("</div>") as above.
        listener.onStartElement(buildStartElement("</div>"));
        assertEquals(0, mock1.getTextCount());
    }

    @Test
    public void throwsIllegalArgumentExceptionWhenPropagationPolicyIsNull() {
        thrown.expect(IllegalArgumentException.class);
        listener.setPropagationPolicy(null);
    }

    @Test
    public void defaultPropagationIsAll() {
        assertSame(PropagationPolicy.All, listener.getPropagationPolicy());
    }

    @Test
    public void canGetPropagationPolicy() {
        listener.setPropagationPolicy(PropagationPolicy.ElementsOnly);
        assertSame(PropagationPolicy.ElementsOnly, listener.getPropagationPolicy());
    }

    @Test
    public void throwsIllegalStateExceptionWhenStateIsInconsistentFromPolicyChanges() {
        thrown.expect(IllegalStateException.class);
        listener.onStartElement(buildStartElement("<div>"));
        listener.setPropagationPolicy(PropagationPolicy.ParentElementOnly);
        listener.onEndElement(buildEndElement("</div>"));
    }

    @Test
    public void throwsIllegalStateExceptionWhenEndElementNameDoesNotMatchStartElementName() {
        thrown.expect(IllegalStateException.class);
        listener.setPropagationPolicy(PropagationPolicy.ParentElementOnly);
        listener.onStartElement(buildStartElement("<div>"));
        listener.onEndElement(buildEndElement("</span>"));
    }

    /**
     * Test double whose include decisions are controlled directly through
     * public flags instead of real filtering logic.
     */
    public static class MockFilteringNodeListener extends FilteringNodeListener {
        /** Decision returned by {@link #include(StartElement)}. */
        public boolean includeElement = true;
        /** Decision returned by {@link #include(Text)}. */
        public boolean includeText = true;

        public MockFilteringNodeListener(NodeListener listener) {
            super(listener);
        }

        public MockFilteringNodeListener(Collection<NodeListener> listeners) {
            super(listeners);
        }

        @Override
        protected boolean include(Text text) {
            return includeText;
        }

        @Override
        protected boolean include(StartElement start) {
            return includeElement;
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionListenerResponseHandler;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.QueryFetchSearchResult;
import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult;
import org.elasticsearch.search.fetch.ShardFetchRequest;
import org.elasticsearch.search.fetch.ShardFetchSearchRequest;
import org.elasticsearch.search.internal.InternalScrollSearchRequest;
import org.elasticsearch.search.internal.ShardSearchTransportRequest;
import org.elasticsearch.search.query.QuerySearchRequest;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.query.QuerySearchResultProvider;
import org.elasticsearch.search.query.ScrollQuerySearchResult;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
/**
* An encapsulation of {@link org.elasticsearch.search.SearchService} operations exposed through
* transport.
*/
public class SearchTransportService extends AbstractComponent {
// Transport action names for each sub-operation of a distributed search.
// The bracketed suffix distinguishes the phase under the top-level
// "indices:data/read/search" action.
public static final String FREE_CONTEXT_SCROLL_ACTION_NAME = "indices:data/read/search[free_context/scroll]";
public static final String FREE_CONTEXT_ACTION_NAME = "indices:data/read/search[free_context]";
public static final String CLEAR_SCROLL_CONTEXTS_ACTION_NAME = "indices:data/read/search[clear_scroll_contexts]";
public static final String DFS_ACTION_NAME = "indices:data/read/search[phase/dfs]";
public static final String QUERY_ACTION_NAME = "indices:data/read/search[phase/query]";
public static final String QUERY_ID_ACTION_NAME = "indices:data/read/search[phase/query/id]";
public static final String QUERY_SCROLL_ACTION_NAME = "indices:data/read/search[phase/query/scroll]";
public static final String QUERY_FETCH_ACTION_NAME = "indices:data/read/search[phase/query+fetch]";
public static final String QUERY_QUERY_FETCH_ACTION_NAME = "indices:data/read/search[phase/query/query+fetch]";
public static final String QUERY_FETCH_SCROLL_ACTION_NAME = "indices:data/read/search[phase/query+fetch/scroll]";
public static final String FETCH_ID_SCROLL_ACTION_NAME = "indices:data/read/search[phase/fetch/id/scroll]";
public static final String FETCH_ID_ACTION_NAME = "indices:data/read/search[phase/fetch/id]";
// Outbound transport layer used to send phase requests to other nodes.
private final TransportService transportService;
// Local search service that executes the phases received over transport.
private final SearchService searchService;
/**
 * Wires the search service to the transport layer: registers one request
 * handler per search phase action so this node can serve remote requests,
 * and keeps references for sending outbound phase requests.
 *
 * <p>Context-freeing and scroll-clearing handlers run on the SAME thread
 * (they are cheap bookkeeping); all query/fetch phase handlers run on the
 * SEARCH thread pool.</p>
 */
@Inject
public SearchTransportService(Settings settings, TransportService transportService, SearchService searchService) {
super(settings);
this.transportService = transportService;
this.searchService = searchService;
transportService.registerRequestHandler(FREE_CONTEXT_SCROLL_ACTION_NAME, ScrollFreeContextRequest::new, ThreadPool.Names.SAME,
new FreeContextTransportHandler<>());
transportService.registerRequestHandler(FREE_CONTEXT_ACTION_NAME, SearchFreeContextRequest::new, ThreadPool.Names.SAME,
new FreeContextTransportHandler<>());
transportService.registerRequestHandler(CLEAR_SCROLL_CONTEXTS_ACTION_NAME, ClearScrollContextsRequest::new, ThreadPool.Names.SAME,
new ClearScrollContextsTransportHandler());
transportService.registerRequestHandler(DFS_ACTION_NAME, ShardSearchTransportRequest::new, ThreadPool.Names.SEARCH,
new SearchDfsTransportHandler());
transportService.registerRequestHandler(QUERY_ACTION_NAME, ShardSearchTransportRequest::new, ThreadPool.Names.SEARCH,
new SearchQueryTransportHandler());
transportService.registerRequestHandler(QUERY_ID_ACTION_NAME, QuerySearchRequest::new, ThreadPool.Names.SEARCH,
new SearchQueryByIdTransportHandler());
transportService.registerRequestHandler(QUERY_SCROLL_ACTION_NAME, InternalScrollSearchRequest::new, ThreadPool.Names.SEARCH,
new SearchQueryScrollTransportHandler());
transportService.registerRequestHandler(QUERY_FETCH_ACTION_NAME, ShardSearchTransportRequest::new, ThreadPool.Names.SEARCH,
new SearchQueryFetchTransportHandler());
transportService.registerRequestHandler(QUERY_QUERY_FETCH_ACTION_NAME, QuerySearchRequest::new, ThreadPool.Names.SEARCH,
new SearchQueryQueryFetchTransportHandler());
transportService.registerRequestHandler(QUERY_FETCH_SCROLL_ACTION_NAME, InternalScrollSearchRequest::new, ThreadPool.Names.SEARCH,
new SearchQueryFetchScrollTransportHandler());
transportService.registerRequestHandler(FETCH_ID_SCROLL_ACTION_NAME, ShardFetchRequest::new, ThreadPool.Names.SEARCH,
new FetchByIdTransportHandler<>());
transportService.registerRequestHandler(FETCH_ID_ACTION_NAME, ShardFetchSearchRequest::new, ThreadPool.Names.SEARCH,
new FetchByIdTransportHandler<>());
}
/**
 * Fire-and-forget release of a search context on the given node; whether
 * the context was actually freed (and any failure) is deliberately ignored.
 */
public void sendFreeContext(DiscoveryNode node, final long contextId, SearchRequest request) {
    ActionListener<SearchFreeContextResponse> ignoreOutcome = new ActionListener<SearchFreeContextResponse>() {
        @Override
        public void onResponse(SearchFreeContextResponse response) {
            // no need to respond if it was freed or not
        }
        @Override
        public void onFailure(Throwable e) {
        }
    };
    transportService.sendRequest(node, FREE_CONTEXT_ACTION_NAME, new SearchFreeContextRequest(request, contextId),
            new ActionListenerResponseHandler<SearchFreeContextResponse>(ignoreOutcome) {
                @Override
                public SearchFreeContextResponse newInstance() {
                    return new SearchFreeContextResponse();
                }
            });
}
/**
 * Releases a scroll search context on the given node and reports the
 * outcome to {@code listener}.
 */
public void sendFreeContext(DiscoveryNode node, long contextId, final ActionListener<SearchFreeContextResponse> listener) {
    ActionListenerResponseHandler<SearchFreeContextResponse> handler =
            new ActionListenerResponseHandler<SearchFreeContextResponse>(listener) {
                @Override
                public SearchFreeContextResponse newInstance() {
                    return new SearchFreeContextResponse();
                }
            };
    transportService.sendRequest(node, FREE_CONTEXT_SCROLL_ACTION_NAME, new ScrollFreeContextRequest(contextId), handler);
}
/**
 * Asks the given node to drop every scroll search context it holds; an
 * empty response is delivered to {@code listener} on completion.
 */
public void sendClearAllScrollContexts(DiscoveryNode node, final ActionListener<TransportResponse> listener) {
    ActionListenerResponseHandler<TransportResponse> handler =
            new ActionListenerResponseHandler<TransportResponse>(listener) {
                @Override
                public TransportResponse newInstance() {
                    return TransportResponse.Empty.INSTANCE;
                }
            };
    transportService.sendRequest(node, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, new ClearScrollContextsRequest(), handler);
}
/**
 * Executes the DFS phase of {@code request} on the given node and delivers
 * the {@link DfsSearchResult} to {@code listener}.
 */
public void sendExecuteDfs(DiscoveryNode node, final ShardSearchTransportRequest request,
                           final ActionListener<DfsSearchResult> listener) {
    ActionListenerResponseHandler<DfsSearchResult> handler =
            new ActionListenerResponseHandler<DfsSearchResult>(listener) {
                @Override
                public DfsSearchResult newInstance() {
                    return new DfsSearchResult();
                }
            };
    transportService.sendRequest(node, DFS_ACTION_NAME, request, handler);
}
/**
 * Executes the query phase of {@code request} on the given node and
 * delivers the result to {@code listener}.
 */
public void sendExecuteQuery(DiscoveryNode node, final ShardSearchTransportRequest request,
                             final ActionListener<QuerySearchResultProvider> listener) {
    ActionListenerResponseHandler<QuerySearchResultProvider> handler =
            new ActionListenerResponseHandler<QuerySearchResultProvider>(listener) {
                @Override
                public QuerySearchResult newInstance() {
                    // Covariant return: the wire type is always a concrete QuerySearchResult.
                    return new QuerySearchResult();
                }
            };
    transportService.sendRequest(node, QUERY_ACTION_NAME, request, handler);
}
/**
 * Executes the query phase for an existing context id (after DFS) on the
 * given node and delivers the result to {@code listener}.
 */
public void sendExecuteQuery(DiscoveryNode node, final QuerySearchRequest request, final ActionListener<QuerySearchResult> listener) {
    ActionListenerResponseHandler<QuerySearchResult> handler =
            new ActionListenerResponseHandler<QuerySearchResult>(listener) {
                @Override
                public QuerySearchResult newInstance() {
                    return new QuerySearchResult();
                }
            };
    transportService.sendRequest(node, QUERY_ID_ACTION_NAME, request, handler);
}
/**
 * Executes the query phase of a scroll continuation on the given node and
 * delivers the result to {@code listener}.
 */
public void sendExecuteQuery(DiscoveryNode node, final InternalScrollSearchRequest request,
                             final ActionListener<ScrollQuerySearchResult> listener) {
    ActionListenerResponseHandler<ScrollQuerySearchResult> handler =
            new ActionListenerResponseHandler<ScrollQuerySearchResult>(listener) {
                @Override
                public ScrollQuerySearchResult newInstance() {
                    return new ScrollQuerySearchResult();
                }
            };
    transportService.sendRequest(node, QUERY_SCROLL_ACTION_NAME, request, handler);
}
/**
 * Executes a combined query+fetch phase on the given node and delivers the
 * result to {@code listener}.
 */
public void sendExecuteFetch(DiscoveryNode node, final ShardSearchTransportRequest request,
                             final ActionListener<QueryFetchSearchResult> listener) {
    ActionListenerResponseHandler<QueryFetchSearchResult> handler =
            new ActionListenerResponseHandler<QueryFetchSearchResult>(listener) {
                @Override
                public QueryFetchSearchResult newInstance() {
                    return new QueryFetchSearchResult();
                }
            };
    transportService.sendRequest(node, QUERY_FETCH_ACTION_NAME, request, handler);
}
/**
 * Executes a query+fetch phase for an existing context id (after DFS) on
 * the given node and delivers the result to {@code listener}.
 */
public void sendExecuteFetch(DiscoveryNode node, final QuerySearchRequest request,
                             final ActionListener<QueryFetchSearchResult> listener) {
    ActionListenerResponseHandler<QueryFetchSearchResult> handler =
            new ActionListenerResponseHandler<QueryFetchSearchResult>(listener) {
                @Override
                public QueryFetchSearchResult newInstance() {
                    return new QueryFetchSearchResult();
                }
            };
    transportService.sendRequest(node, QUERY_QUERY_FETCH_ACTION_NAME, request, handler);
}
/**
 * Executes a query+fetch phase for a scroll continuation on the given node
 * and delivers the result to {@code listener}.
 */
public void sendExecuteFetch(DiscoveryNode node, final InternalScrollSearchRequest request,
                             final ActionListener<ScrollQueryFetchSearchResult> listener) {
    ActionListenerResponseHandler<ScrollQueryFetchSearchResult> handler =
            new ActionListenerResponseHandler<ScrollQueryFetchSearchResult>(listener) {
                @Override
                public ScrollQueryFetchSearchResult newInstance() {
                    return new ScrollQueryFetchSearchResult();
                }
            };
    transportService.sendRequest(node, QUERY_FETCH_SCROLL_ACTION_NAME, request, handler);
}
/**
 * Executes the fetch phase for a regular (non-scroll) search; delegates to
 * the shared fetch sender with the non-scroll action name.
 */
public void sendExecuteFetch(DiscoveryNode node, final ShardFetchSearchRequest request,
final ActionListener<FetchSearchResult> listener) {
sendExecuteFetch(node, FETCH_ID_ACTION_NAME, request, listener);
}
/**
 * Executes the fetch phase for a scroll continuation; delegates to the
 * shared fetch sender with the scroll action name.
 */
public void sendExecuteFetchScroll(DiscoveryNode node, final ShardFetchRequest request,
final ActionListener<FetchSearchResult> listener) {
sendExecuteFetch(node, FETCH_ID_SCROLL_ACTION_NAME, request, listener);
}
/**
 * Shared implementation for the fetch phase senders: dispatches
 * {@code request} under the given {@code action} name and delivers the
 * {@link FetchSearchResult} to {@code listener}.
 */
private void sendExecuteFetch(DiscoveryNode node, String action, final ShardFetchRequest request,
                              final ActionListener<FetchSearchResult> listener) {
    ActionListenerResponseHandler<FetchSearchResult> handler =
            new ActionListenerResponseHandler<FetchSearchResult>(listener) {
                @Override
                public FetchSearchResult newInstance() {
                    return new FetchSearchResult();
                }
            };
    transportService.sendRequest(node, action, request, handler);
}
/**
 * Transport request asking a node to free the search context identified by
 * {@code id}. The no-arg constructor exists for wire deserialization.
 */
static class ScrollFreeContextRequest extends TransportRequest {
// Id of the search context to free.
private long id;
ScrollFreeContextRequest() {
}
ScrollFreeContextRequest(long id) {
this.id = id;
}
/** Returns the id of the context to free. */
public long id() {
return this.id;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
id = in.readLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeLong(id);
}
}
/**
 * Free-context request for a regular search, additionally carrying the
 * indices the original {@link SearchRequest} targeted so interceptors can
 * resolve which indices the operation relates to.
 */
static class SearchFreeContextRequest extends ScrollFreeContextRequest implements IndicesRequest {
// Indices/options of the originating search; null until deserialized.
private OriginalIndices originalIndices;
public SearchFreeContextRequest() {
}
SearchFreeContextRequest(SearchRequest request, long id) {
super(id);
this.originalIndices = new OriginalIndices(request);
}
@Override
public String[] indices() {
if (originalIndices == null) {
return null;
}
return originalIndices.indices();
}
@Override
public IndicesOptions indicesOptions() {
if (originalIndices == null) {
return null;
}
return originalIndices.indicesOptions();
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
originalIndices = OriginalIndices.readOriginalIndices(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
OriginalIndices.writeOriginalIndices(originalIndices, out);
}
}
/**
 * Response to a free-context request, reporting whether the context was
 * actually found and freed. The no-arg constructor exists for wire
 * deserialization.
 */
public static class SearchFreeContextResponse extends TransportResponse {
// True if the context existed and was released.
private boolean freed;
SearchFreeContextResponse() {
}
SearchFreeContextResponse(boolean freed) {
this.freed = freed;
}
/** Returns true if the context was found and freed. */
public boolean isFreed() {
return freed;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
freed = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(freed);
}
}
/**
 * Transport handler that frees the search context named by the incoming request on the local
 * search service and replies with whether a context was actually released.
 */
class FreeContextTransportHandler<FreeContextRequest extends ScrollFreeContextRequest>
        implements TransportRequestHandler<FreeContextRequest> {
    @Override
    public void messageReceived(FreeContextRequest request, TransportChannel channel) throws Exception {
        channel.sendResponse(new SearchFreeContextResponse(searchService.freeContext(request.id())));
    }
}
/**
 * Marker request asking a node to free all of its scroll contexts; carries no payload.
 */
static class ClearScrollContextsRequest extends TransportRequest {
}
/**
 * Transport handler that releases every scroll context held by this node's search service
 * and acknowledges with an empty response.
 */
class ClearScrollContextsTransportHandler implements TransportRequestHandler<ClearScrollContextsRequest> {
    @Override
    public void messageReceived(ClearScrollContextsRequest request, TransportChannel channel) throws Exception {
        searchService.freeAllScrollContexts();
        channel.sendResponse(TransportResponse.Empty.INSTANCE);
    }
}
/**
 * Transport handler that runs the DFS phase of a shard-level search request on the local
 * search service and streams the {@link DfsSearchResult} back over the channel.
 */
class SearchDfsTransportHandler implements TransportRequestHandler<ShardSearchTransportRequest> {
    @Override
    public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel) throws Exception {
        channel.sendResponse(searchService.executeDfsPhase(request));
    }
}
/**
 * Transport handler that runs the query phase of a shard-level search request on the local
 * search service and streams the {@link QuerySearchResultProvider} back over the channel.
 */
class SearchQueryTransportHandler implements TransportRequestHandler<ShardSearchTransportRequest> {
    @Override
    public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel) throws Exception {
        channel.sendResponse(searchService.executeQueryPhase(request));
    }
}
/**
 * Transport handler that runs the query phase against an already existing search context
 * (identified by the {@link QuerySearchRequest}) and streams the result back.
 */
class SearchQueryByIdTransportHandler implements TransportRequestHandler<QuerySearchRequest> {
    @Override
    public void messageReceived(QuerySearchRequest request, TransportChannel channel) throws Exception {
        channel.sendResponse(searchService.executeQueryPhase(request));
    }
}
/**
 * Transport handler that runs the query phase of a scroll request on the local search
 * service and streams the {@link ScrollQuerySearchResult} back over the channel.
 */
class SearchQueryScrollTransportHandler implements TransportRequestHandler<InternalScrollSearchRequest> {
    @Override
    public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel) throws Exception {
        channel.sendResponse(searchService.executeQueryPhase(request));
    }
}
/**
 * Transport handler that runs the combined query-then-fetch phase of a shard-level search
 * request on the local search service and streams the {@link QueryFetchSearchResult} back.
 */
class SearchQueryFetchTransportHandler implements TransportRequestHandler<ShardSearchTransportRequest> {
    @Override
    public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel) throws Exception {
        channel.sendResponse(searchService.executeFetchPhase(request));
    }
}
/**
 * Transport handler that runs the combined query-and-fetch phase against an already existing
 * search context (identified by the {@link QuerySearchRequest}) and streams the result back.
 */
class SearchQueryQueryFetchTransportHandler implements TransportRequestHandler<QuerySearchRequest> {
    @Override
    public void messageReceived(QuerySearchRequest request, TransportChannel channel) throws Exception {
        channel.sendResponse(searchService.executeFetchPhase(request));
    }
}
/**
 * Transport handler that executes the fetch phase for a previously created search context,
 * identified by the incoming {@link ShardFetchRequest}, and streams the result back.
 */
class FetchByIdTransportHandler<Request extends ShardFetchRequest> implements TransportRequestHandler<Request> {
    @Override
    public void messageReceived(Request request, TransportChannel channel) throws Exception {
        channel.sendResponse(searchService.executeFetchPhase(request));
    }
}
/**
 * Transport handler that runs the combined query-and-fetch phase of a scroll request on the
 * local search service and streams the {@link ScrollQueryFetchSearchResult} back.
 */
class SearchQueryFetchScrollTransportHandler implements TransportRequestHandler<InternalScrollSearchRequest> {
    @Override
    public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel) throws Exception {
        channel.sendResponse(searchService.executeFetchPhase(request));
    }
}
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.flex.forks.batik.util;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Iterator;
/**
 * Protocol Handler for the 'data' protocol.
 * RFC: 2397
 * http://www.ietf.org/rfc/rfc2397.txt
 *
 * @author <a href="mailto:deweese@apache.org">Thomas DeWeese</a>
 * @version $Id: ParsedURLDataProtocolHandler.java 578680 2007-09-24 07:20:03Z cam $
 */
public class ParsedURLDataProtocolHandler
    extends AbstractParsedURLProtocolHandler {
    /** Name of the protocol this handler registers for. */
    static final String DATA_PROTOCOL = "data";
    /** Content-encoding token indicating a base64 payload. */
    static final String BASE64 = "base64";
    /** Media-type parameter name carrying the character set. */
    static final String CHARSET = "charset";

    public ParsedURLDataProtocolHandler() {
        super(DATA_PROTOCOL);
    }

    /**
     * Parses a data URL in the context of a base URL. Data URLs have
     * no relative form, so the base URL is ignored.
     */
    public ParsedURLData parseURL(ParsedURL baseURL, String urlStr) {
        // No relative form...
        return parseURL(urlStr);
    }

    /**
     * Parses a URL of the form
     * {@code data:[<mediatype>][;base64],<data>[#<fragment>]} (RFC 2397).
     * The media type (stored in {@code host}) may carry parameters such
     * as {@code charset} and a trailing content encoding like
     * {@code ;base64}.
     */
    public ParsedURLData parseURL(String urlStr) {
        DataParsedURLData ret = new DataParsedURLData();

        int pidx = 0, idx;
        int len = urlStr.length();

        // Pull fragment id off first...
        idx = urlStr.indexOf('#');
        ret.ref = null;
        if (idx != -1) {
            if (idx + 1 < len) {
                ret.ref = urlStr.substring(idx + 1);
            }
            urlStr = urlStr.substring(0, idx);
            len = urlStr.length();
        }

        idx = urlStr.indexOf(':');
        if (idx != -1) {
            // May have a protocol spec...
            ret.protocol = urlStr.substring(pidx, idx);
            if (ret.protocol.indexOf('/') == -1) {
                pidx = idx + 1;
            } else {
                // Got a slash in protocol probably means
                // no protocol given, (host and port?)
                ret.protocol = null;
                pidx = 0;
            }
        }

        // Everything between the protocol and the first ',' is the
        // media type / encoding specification.
        idx = urlStr.indexOf(',', pidx);
        if ((idx != -1) && (idx != pidx)) {
            ret.host = urlStr.substring(pidx, idx);
            pidx = idx + 1;

            int aidx = ret.host.lastIndexOf(';');
            // NOTE(review): 'aidx == ret.host.length()' can never be true
            // since lastIndexOf returns at most length-1; the condition is
            // kept for fidelity with the historical behavior.
            if ((aidx == -1) || (aidx == ret.host.length())) {
                ret.contentType = ret.host;
            } else {
                String enc = ret.host.substring(aidx + 1);
                idx = enc.indexOf('=');
                if (idx == -1) {
                    // Last ';'-separated token has no '=', so it is a
                    // content encoding (e.g. ";base64"), not a parameter.
                    ret.contentEncoding = enc;
                    ret.contentType = ret.host.substring(0, aidx);
                } else {
                    ret.contentType = ret.host;
                }

                // If there's a charset parameter, pull it out.
                aidx = 0;
                idx = ret.contentType.indexOf(';', aidx);
                if (idx != -1) {
                    aidx = idx + 1;
                    while (aidx < ret.contentType.length()) {
                        idx = ret.contentType.indexOf(';', aidx);
                        if (idx == -1) idx = ret.contentType.length();
                        String param = ret.contentType.substring(aidx, idx);
                        int eqIdx = param.indexOf('=');
                        if ((eqIdx != -1) &&
                            (CHARSET.equals(param.substring(0, eqIdx))))
                            ret.charset = param.substring(eqIdx + 1);
                        aidx = idx + 1;
                    }
                }
            }
        }

        if (pidx < urlStr.length()) {
            ret.path = urlStr.substring(pidx);
        }

        return ret;
    }

    /**
     * Overrides some of the methods to support data protocol weirdness
     */
    static class DataParsedURLData extends ParsedURLData {
        /** Charset parameter of the media type, if present. */
        String charset;

        /** A data URL is complete as soon as it has a payload. */
        public boolean complete() {
            return path != null;
        }

        /** Returns the "protocol + media type" prefix, e.g. {@code data:text/plain,}. */
        public String getPortStr() {
            String portStr = "data:";
            if (host != null) {
                portStr += host;
            }
            portStr += ",";
            return portStr;
        }

        public String toString() {
            String ret = getPortStr();
            if (path != null) {
                ret += path;
            }
            if (ref != null) {
                ret += '#' + ref;
            }
            return ret;
        }

        /**
         * Returns the content type if available. This is only available
         * for some protocols.
         */
        public String getContentType(String userAgent) {
            return contentType;
        }

        /**
         * Returns the content encoding if available. This is only available
         * for some protocols.
         */
        public String getContentEncoding(String userAgent) {
            return contentEncoding;
        }

        /**
         * Opens the payload: percent-decodes it and, if the encoding is
         * base64, additionally wraps it in a base64 decoder.
         */
        protected InputStream openStreamInternal
            (String userAgent, Iterator mimeTypes, Iterator encodingTypes)
            throws IOException {
            stream = decode(path);
            if (BASE64.equals(contentEncoding)) {
                stream = new Base64DecodeStream(stream);
            }
            return stream;
        }

        /**
         * Percent-decodes the given string into a stream of raw bytes.
         * Only valid hexadecimal digits ([0-9a-fA-F]) are accepted after
         * '%' (fixed: the previous code accepted any letter a-z/A-Z,
         * producing garbage bytes for escapes like "%zz"); an invalid
         * escape consumes its characters without emitting a byte.
         */
        public static InputStream decode(String s) {
            int len = s.length();
            byte[] data = new byte[len]; // decoded output is never longer than the input
            int j = 0;
            for (int i = 0; i < len; i++) {
                char c = s.charAt(i);
                switch (c) {
                default: data[j++] = (byte) c; break;
                case '%': {
                    if (i + 2 < len) {
                        i += 2;
                        byte b;
                        char c1 = s.charAt(i - 1);
                        if      (c1 >= '0' && c1 <= '9') b = (byte) (c1 - '0');
                        else if (c1 >= 'a' && c1 <= 'f') b = (byte) (c1 - 'a' + 10);
                        else if (c1 >= 'A' && c1 <= 'F') b = (byte) (c1 - 'A' + 10);
                        else break; // not a hex digit: drop the escape
                        b *= 16;
                        char c2 = s.charAt(i);
                        if      (c2 >= '0' && c2 <= '9') b += (byte) (c2 - '0');
                        else if (c2 >= 'a' && c2 <= 'f') b += (byte) (c2 - 'a' + 10);
                        else if (c2 >= 'A' && c2 <= 'F') b += (byte) (c2 - 'A' + 10);
                        else break; // not a hex digit: drop the escape
                        data[j++] = b;
                    }
                }
                    break;
                }
            }
            return new ByteArrayInputStream(data, 0, j);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.core;
import static org.apache.beam.sdk.testing.PCollectionViewTesting.materializeValuesFor;
import static org.hamcrest.Matchers.contains;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import java.util.List;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.View;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.FixedWindows;
import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.transforms.windowing.Window;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Unit tests for {@link SideInputHandler}. */
@RunWith(JUnit4.class)
public class SideInputHandlerTest {

    /** Window size (millis) used for {@link #view1}. */
    private static final long WINDOW_MSECS_1 = 100;
    /** Window size (millis) used for {@link #view2}. */
    private static final long WINDOW_MSECS_2 = 500;

    private PCollectionView<Iterable<String>> view1;
    private PCollectionView<Iterable<String>> view2;

    @Before
    public void setUp() {
        PCollection<String> pc = Pipeline.create().apply(Create.of("1"));
        view1 =
            pc.apply(Window.into(FixedWindows.of(new Duration(WINDOW_MSECS_1))))
                .apply(View.asIterable());
        view2 =
            pc.apply(Window.into(FixedWindows.of(new Duration(WINDOW_MSECS_2))))
                .apply(View.asIterable());
    }

    /**
     * Creates a {@link SideInputHandler} over the given views, backed by fresh in-memory state.
     * Extracted to avoid repeating the same construction boilerplate in every test.
     */
    @SafeVarargs
    private static SideInputHandler createHandler(PCollectionView<?>... views) {
        return new SideInputHandler(
            ImmutableList.copyOf(views), InMemoryStateInternals.<Void>forKey(null));
    }

    @Test
    public void testIsEmpty() {
        SideInputHandler sideInputHandler = createHandler(view1);
        assertFalse(sideInputHandler.isEmpty());

        // a handler without any views reports itself as empty
        SideInputHandler emptySideInputHandler = createHandler();
        assertTrue(emptySideInputHandler.isEmpty());
    }

    @Test
    public void testContains() {
        SideInputHandler sideInputHandler = createHandler(view1);
        assertTrue(sideInputHandler.contains(view1));
        assertFalse(sideInputHandler.contains(view2));
    }

    @Test
    public void testIsReady() {
        SideInputHandler sideInputHandler = createHandler(view1, view2);

        IntervalWindow firstWindow = new IntervalWindow(new Instant(0), new Instant(WINDOW_MSECS_1));
        IntervalWindow secondWindow = new IntervalWindow(new Instant(0), new Instant(WINDOW_MSECS_2));

        // side input should not yet be ready
        assertFalse(sideInputHandler.isReady(view1, firstWindow));

        // add a value for view1
        sideInputHandler.addSideInputValue(
            view1,
            valuesInWindow(
                materializeValuesFor(View.asIterable(), "Hello"), new Instant(0), firstWindow));

        // now side input should be ready
        assertTrue(sideInputHandler.isReady(view1, firstWindow));

        // second window input should still not be ready
        assertFalse(sideInputHandler.isReady(view1, secondWindow));
    }

    @Test
    public void testNewInputReplacesPreviousInput() {
        // new input should completely replace old input
        // the creation of the Iterable that has the side input
        // contents happens upstream. this is also where
        // accumulation/discarding is decided.
        SideInputHandler sideInputHandler = createHandler(view1);

        IntervalWindow window = new IntervalWindow(new Instant(0), new Instant(WINDOW_MSECS_1));

        // add a first value for view1
        sideInputHandler.addSideInputValue(
            view1,
            valuesInWindow(materializeValuesFor(View.asIterable(), "Hello"), new Instant(0), window));
        assertThat(sideInputHandler.get(view1, window), contains("Hello"));

        // subsequent values should replace existing values
        sideInputHandler.addSideInputValue(
            view1,
            valuesInWindow(
                materializeValuesFor(View.asIterable(), "Ciao", "Buongiorno"), new Instant(0), window));
        assertThat(sideInputHandler.get(view1, window), contains("Ciao", "Buongiorno"));
    }

    @Test
    public void testMultipleWindows() {
        SideInputHandler sideInputHandler = createHandler(view1);

        // two windows that we'll later use for adding elements/retrieving side input
        IntervalWindow firstWindow = new IntervalWindow(new Instant(0), new Instant(WINDOW_MSECS_1));
        IntervalWindow secondWindow =
            new IntervalWindow(new Instant(1000), new Instant(1000 + WINDOW_MSECS_2));

        // add a first value for view1 in the first window
        sideInputHandler.addSideInputValue(
            view1,
            valuesInWindow(
                materializeValuesFor(View.asIterable(), "Hello"), new Instant(0), firstWindow));
        assertThat(sideInputHandler.get(view1, firstWindow), contains("Hello"));

        // add something for second window of view1
        sideInputHandler.addSideInputValue(
            view1,
            valuesInWindow(
                materializeValuesFor(View.asIterable(), "Arrivederci"), new Instant(0), secondWindow));
        assertThat(sideInputHandler.get(view1, secondWindow), contains("Arrivederci"));

        // contents for first window should be unaffected
        assertThat(sideInputHandler.get(view1, firstWindow), contains("Hello"));
    }

    @Test
    public void testMultipleSideInputs() {
        SideInputHandler sideInputHandler = createHandler(view1, view2);

        // two windows that we'll later use for adding elements/retrieving side input
        IntervalWindow firstWindow = new IntervalWindow(new Instant(0), new Instant(WINDOW_MSECS_1));

        // add value for view1 in the first window
        sideInputHandler.addSideInputValue(
            view1,
            valuesInWindow(
                materializeValuesFor(View.asIterable(), "Hello"), new Instant(0), firstWindow));
        assertThat(sideInputHandler.get(view1, firstWindow), contains("Hello"));

        // view2 should not have any data
        assertFalse(sideInputHandler.isReady(view2, firstWindow));

        // also add some data for view2
        sideInputHandler.addSideInputValue(
            view2,
            valuesInWindow(
                materializeValuesFor(View.asIterable(), "Salut"), new Instant(0), firstWindow));
        assertTrue(sideInputHandler.isReady(view2, firstWindow));
        assertThat(sideInputHandler.get(view2, firstWindow), contains("Salut"));

        // view1 should not be affected by that
        assertThat(sideInputHandler.get(view1, firstWindow), contains("Hello"));
    }

    /** Wraps the given materialized values into a {@link WindowedValue} in the given window. */
    @SuppressWarnings({"unchecked", "rawtypes"})
    private static WindowedValue<Iterable<?>> valuesInWindow(
        List<Object> values, Instant timestamp, BoundedWindow window) {
        return (WindowedValue) WindowedValue.of(values, timestamp, window, PaneInfo.NO_FIRING);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FilteredTermsEnum;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.*;
import org.apache.lucene.util.packed.GrowableWriter;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PagedGrowableWriter;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.BytesValues;
import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
/**
 * Simple class to build document ID <-> ordinal mapping. Note: Ordinals are
 * <tt>1</tt> based monotonically increasing positive integers. <tt>0</tt>
 * denotes the missing value in this context.
 */
public final class OrdinalsBuilder implements Closeable {
    /**
     * Default acceptable overhead ratio. {@link OrdinalsBuilder} memory usage is mostly transient so it is likely a better trade-off to
     * trade memory for speed in order to resize less often.
     */
    public static final float DEFAULT_ACCEPTABLE_OVERHEAD_RATIO = PackedInts.FAST;
    /**
     * The following structure is used to store ordinals. The idea is to store ords on levels of increasing sizes. Level 0 stores
     * 1 value and 1 pointer to level 1. Level 1 stores 2 values and 1 pointer to level 2, ..., Level n stores 2**n values and
     * 1 pointer to level n+1. If at some point an ordinal or a pointer has 0 as a value, this means that there are no remaining
     * values. On the first level, ordinals.get(docId) is the first ordinal for docId or 0 if the document has no ordinals. On
     * subsequent levels, the first 2^level slots are reserved and all have 0 as a value.
     * <pre>
     * Example for an index of 3 docs (O=ordinal, P = pointer)
     * Level 0:
     * ordinals [1] [4] [2]
     * nextLevelSlices 2 0 1
     * Level 1:
     * ordinals [0 0] [2 0] [3 4]
     * nextLevelSlices 0 0 1
     * Level 2:
     * ordinals [0 0 0 0] [5 0 0 0]
     * nextLevelSlices 0 0
     * </pre>
     * On level 0, all documents have an ordinal: 0 has 1, 1 has 4 and 2 has 2 as a first ordinal, this means that we need to read
     * nextLevelEntries to get the index of their ordinals on the next level. The entry for document 1 is 0, meaning that we have
     * already read all its ordinals. On the contrary 0 and 2 have more ordinals which are stored at indices 2 and 1. Let's continue
     * with document 2: it has 2 more ordinals on level 1: 3 and 4 and its next level index is 1 meaning that there are remaining
     * ordinals on the next level. On level 2 at index 1, we can read [5 0 0 0] meaning that 5 is an ordinal as well, but the
     * fact that it is followed by zeros means that there are no more ordinals. In the end, document 2 has 2, 3, 4 and 5 as ordinals.
     * <p/>
     * In addition to these structures, there is another array which stores the current position (level + slice + offset in the slice)
     * in order to be able to append data in constant time.
     */
    private static class OrdinalsStore {
        private static final int PAGE_SIZE = 1 << 12;
        /**
         * Number of slots at <code>level</code>
         */
        private static int numSlots(int level) {
            return 1 << level;
        }
        /**
         * Bit mask selecting the offset within a slice at <code>level</code> (numSlots(level) - 1).
         */
        private static int slotsMask(int level) {
            return numSlots(level) - 1;
        }
        /**
         * Encode the position for the given level and offset. The idea is to encode the level using unary coding in the lower bits and
         * then the offset in the higher bits.
         */
        private static long position(int level, long offset) {
            assert level >= 1;
            return (1 << (level - 1)) | (offset << level);
        }
        /**
         * Decode the level from an encoded position.
         */
        private static int level(long position) {
            return 1 + Long.numberOfTrailingZeros(position);
        }
        /**
         * Decode the offset from the position.
         */
        private static long offset(long position, int level) {
            return position >>> level;
        }
        /**
         * Get the ID of the slice given an offset.
         */
        private static long sliceID(int level, long offset) {
            return offset >>> level;
        }
        /**
         * Compute the first offset of the given slice.
         */
        private static long startOffset(int level, long slice) {
            return slice << level;
        }
        /**
         * Compute the number of ordinals stored for a value given its current position.
         */
        private static int numOrdinals(int level, long offset) {
            return (1 << level) + (int) (offset & slotsMask(level));
        }
        // Current position (encoded level + offset, see position()) per document
        private PagedGrowableWriter positions;
        // First level (0) of ordinals and pointers to the next level
        private final GrowableWriter firstOrdinals;
        private PagedGrowableWriter firstNextLevelSlices;
        // Ordinals and pointers for other levels +1
        private final PagedGrowableWriter[] ordinals;
        private final PagedGrowableWriter[] nextLevelSlices;
        // Number of allocated slices per level (slice 0 is reserved on every level)
        private final int[] sizes;
        private final int startBitsPerValue;
        private final float acceptableOverheadRatio;
        OrdinalsStore(int maxDoc, int startBitsPerValue, float acceptableOverheadRatio) {
            this.startBitsPerValue = startBitsPerValue;
            this.acceptableOverheadRatio = acceptableOverheadRatio;
            positions = new PagedGrowableWriter(maxDoc, PAGE_SIZE, startBitsPerValue, acceptableOverheadRatio);
            firstOrdinals = new GrowableWriter(startBitsPerValue, maxDoc, acceptableOverheadRatio);
            // over allocate in order to never worry about the array sizes, 24 entries would allow to store several millions of ordinals per doc...
            ordinals = new PagedGrowableWriter[24];
            nextLevelSlices = new PagedGrowableWriter[24];
            sizes = new int[24];
            Arrays.fill(sizes, 1); // reserve the 1st slice on every level
        }
        /**
         * Allocate a new slice and return its ID.
         */
        private long newSlice(int level) {
            final long newSlice = sizes[level]++;
            // Lazily allocate ordinals
            if (ordinals[level] == null) {
                ordinals[level] = new PagedGrowableWriter(8L * numSlots(level), PAGE_SIZE, startBitsPerValue, acceptableOverheadRatio);
            } else {
                ordinals[level] = ordinals[level].grow(sizes[level] * numSlots(level));
                if (nextLevelSlices[level] != null) {
                    nextLevelSlices[level] = nextLevelSlices[level].grow(sizes[level]);
                }
            }
            return newSlice;
        }
        /**
         * Appends an ordinal (0-based) to the given document and returns how many
         * ordinals the document now has. Internally ordinals are stored +1 so that
         * 0 can denote "no value".
         */
        public int addOrdinal(int docID, long ordinal) {
            final long position = positions.get(docID);
            if (position == 0L) { // on the first level
                // 0 or 1 ordinal
                if (firstOrdinals.get(docID) == 0L) {
                    firstOrdinals.set(docID, ordinal + 1);
                    return 1;
                } else {
                    final long newSlice = newSlice(1);
                    if (firstNextLevelSlices == null) {
                        firstNextLevelSlices = new PagedGrowableWriter(firstOrdinals.size(), PAGE_SIZE, 3, acceptableOverheadRatio);
                    }
                    firstNextLevelSlices.set(docID, newSlice);
                    final long offset = startOffset(1, newSlice);
                    ordinals[1].set(offset, ordinal + 1);
                    positions.set(docID, position(1, offset)); // current position is on the 1st level and not allocated yet
                    return 2;
                }
            } else {
                int level = level(position);
                long offset = offset(position, level);
                assert offset != 0L;
                if (((offset + 1) & slotsMask(level)) == 0L) {
                    // reached the end of the slice, allocate a new one on the next level
                    final long newSlice = newSlice(level + 1);
                    if (nextLevelSlices[level] == null) {
                        nextLevelSlices[level] = new PagedGrowableWriter(sizes[level], PAGE_SIZE, 1, acceptableOverheadRatio);
                    }
                    nextLevelSlices[level].set(sliceID(level, offset), newSlice);
                    ++level;
                    offset = startOffset(level, newSlice);
                    assert (offset & slotsMask(level)) == 0L;
                } else {
                    // just go to the next slot
                    ++offset;
                }
                ordinals[level].set(offset, ordinal + 1);
                final long newPosition = position(level, offset);
                positions.set(docID, newPosition);
                return numOrdinals(level, offset);
            }
        }
        /**
         * Appends all (0-based) ordinals of the given document to <code>ords</code>,
         * walking the level structure until a 0 ordinal or 0 slice pointer is found.
         */
        public void appendOrdinals(int docID, LongsRef ords) {
            // First level
            final long firstOrd = firstOrdinals.get(docID);
            if (firstOrd == 0L) {
                return;
            }
            ords.longs = ArrayUtil.grow(ords.longs, ords.offset + ords.length + 1);
            ords.longs[ords.offset + ords.length++] = firstOrd - 1;
            if (firstNextLevelSlices == null) {
                return;
            }
            long sliceID = firstNextLevelSlices.get(docID);
            if (sliceID == 0L) {
                return;
            }
            // Other levels
            for (int level = 1; ; ++level) {
                final int numSlots = numSlots(level);
                ords.longs = ArrayUtil.grow(ords.longs, ords.offset + ords.length + numSlots);
                final long offset = startOffset(level, sliceID);
                for (int j = 0; j < numSlots; ++j) {
                    final long ord = ordinals[level].get(offset + j);
                    if (ord == 0L) {
                        return;
                    }
                    ords.longs[ords.offset + ords.length++] = ord - 1;
                }
                if (nextLevelSlices[level] == null) {
                    return;
                }
                sliceID = nextLevelSlices[level].get(sliceID);
                if (sliceID == 0L) {
                    return;
                }
            }
        }
    }
    // Total number of documents this builder can index
    private final int maxDoc;
    // Last ordinal handed out by nextOrdinal(); starts one below the minimum valid ordinal
    private long currentOrd = BytesValues.WithOrdinals.MIN_ORDINAL - 1;
    // Number of documents with at least one value
    private int numDocsWithValue = 0;
    // Number of documents with two or more values
    private int numMultiValuedDocs = 0;
    // Total number of (doc, ordinal) pairs added
    private int totalNumOrds = 0;
    // Backing store; nulled out by close()
    private OrdinalsStore ordinals;
    // Shared scratch buffer returned by docOrds()
    private final LongsRef spare;
    /**
     * Creates a builder for <code>maxDoc</code> documents. If <code>numTerms</code> is
     * non-negative it is used to size the initial bits per packed value.
     */
    public OrdinalsBuilder(long numTerms, int maxDoc, float acceptableOverheadRatio) throws IOException {
        this.maxDoc = maxDoc;
        int startBitsPerValue = 8;
        if (numTerms >= 0) {
            startBitsPerValue = PackedInts.bitsRequired(numTerms);
        }
        ordinals = new OrdinalsStore(maxDoc, startBitsPerValue, acceptableOverheadRatio);
        spare = new LongsRef();
    }
    /**
     * Creates a builder with an unknown number of terms.
     */
    public OrdinalsBuilder(int maxDoc, float acceptableOverheadRatio) throws IOException {
        this(-1, maxDoc, acceptableOverheadRatio);
    }
    /**
     * Creates a builder with the {@link #DEFAULT_ACCEPTABLE_OVERHEAD_RATIO}.
     */
    public OrdinalsBuilder(int maxDoc) throws IOException {
        this(maxDoc, DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
    }
    /**
     * Returns a shared {@link LongsRef} instance for the given doc ID holding all ordinals associated with it.
     */
    public LongsRef docOrds(int docID) {
        spare.offset = spare.length = 0;
        ordinals.appendOrdinals(docID, spare);
        return spare;
    }
    /**
     * Return a {@link PackedInts.Reader} instance mapping every doc ID to its first ordinal + 1 if it exists and 0 otherwise.
     */
    public PackedInts.Reader getFirstOrdinals() {
        return ordinals.firstOrdinals;
    }
    /**
     * Advances the {@link OrdinalsBuilder} to the next ordinal and
     * return the current ordinal.
     */
    public long nextOrdinal() {
        return ++currentOrd;
    }
    /**
     * Returns the current ordinal or <tt>0</tt> if this builder has not been advanced via
     * {@link #nextOrdinal()}.
     */
    public long currentOrdinal() {
        return currentOrd;
    }
    /**
     * Associates the given document id with the current ordinal.
     */
    public OrdinalsBuilder addDoc(int doc) {
        totalNumOrds++;
        final int numValues = ordinals.addOrdinal(doc, currentOrd);
        if (numValues == 1) {
            ++numDocsWithValue;
        } else if (numValues == 2) {
            ++numMultiValuedDocs;
        }
        return this;
    }
    /**
     * Returns <code>true</code> iff this builder contains a document ID that is associated with more than one ordinal. Otherwise <code>false</code>;
     */
    public boolean isMultiValued() {
        return numMultiValuedDocs > 0;
    }
    /**
     * Returns the distinct number of document IDs with one or more values.
     */
    public int getNumDocsWithValue() {
        return numDocsWithValue;
    }
    /**
     * Returns the distinct number of document IDs associated with exactly one value.
     */
    public int getNumSingleValuedDocs() {
        return numDocsWithValue - numMultiValuedDocs;
    }
    /**
     * Returns the distinct number of document IDs associated with two or more values.
     */
    public int getNumMultiValuesDocs() {
        return numMultiValuedDocs;
    }
    /**
     * Returns the number of document ID to ordinal pairs in this builder.
     */
    public int getTotalNumOrds() {
        return totalNumOrds;
    }
    /**
     * Returns the number of distinct ordinals in this builder.
     */
    public long getMaxOrd() {
        return currentOrd + 1;
    }
    /**
     * Builds a {@link FixedBitSet} with a bit set for each document that has one or more ordinals associated with it.
     * If every document has an ordinal associated with it this method returns <code>null</code>.
     */
    public FixedBitSet buildDocsWithValuesSet() {
        if (numDocsWithValue == maxDoc) {
            return null;
        }
        final FixedBitSet bitSet = new FixedBitSet(maxDoc);
        for (int docID = 0; docID < maxDoc; ++docID) {
            if (ordinals.firstOrdinals.get(docID) != 0) {
                bitSet.set(docID);
            }
        }
        return bitSet;
    }
    /**
     * Builds an {@link Ordinals} instance from the builder's current state.
     */
    public Ordinals build(Settings settings) {
        final float acceptableOverheadRatio = settings.getAsFloat("acceptable_overhead_ratio", PackedInts.FASTEST);
        if (numMultiValuedDocs > 0 || MultiOrdinals.significantlySmallerThanSinglePackedOrdinals(maxDoc, numDocsWithValue, getMaxOrd(), acceptableOverheadRatio)) {
            // MultiOrdinals can be smaller than SinglePackedOrdinals for sparse fields
            return new MultiOrdinals(this, acceptableOverheadRatio);
        } else {
            return new SinglePackedOrdinals(this, acceptableOverheadRatio);
        }
    }
    /**
     * Returns the maximum document ID this builder can associate with an ordinal
     */
    public int maxDoc() {
        return maxDoc;
    }
    /**
     * A {@link TermsEnum} that iterates only full precision prefix coded 64 bit values.
     *
     * @see #buildFromTerms(TermsEnum, Bits)
     */
    public static TermsEnum wrapNumeric64Bit(TermsEnum termsEnum) {
        return new FilteredTermsEnum(termsEnum, false) {
            @Override
            protected AcceptStatus accept(BytesRef term) throws IOException {
                // we stop accepting terms once we moved across the prefix codec terms - redundant values!
                return NumericUtils.getPrefixCodedLongShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END;
            }
        };
    }
    /**
     * A {@link TermsEnum} that iterates only full precision prefix coded 32 bit values.
     *
     * @see #buildFromTerms(TermsEnum, Bits)
     */
    public static TermsEnum wrapNumeric32Bit(TermsEnum termsEnum) {
        return new FilteredTermsEnum(termsEnum, false) {
            @Override
            protected AcceptStatus accept(BytesRef term) throws IOException {
                // we stop accepting terms once we moved across the prefix codec terms - redundant values!
                return NumericUtils.getPrefixCodedIntShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END;
            }
        };
    }
    /**
     * This method iterates all terms in the given {@link TermsEnum} and
     * associates each terms ordinal with the terms documents. The caller must
     * exhaust the returned {@link BytesRefIterator} which returns all values
     * where the first returned value is associated with the ordinal <tt>1</tt>
     * etc.
     * <p>
     * If the {@link TermsEnum} contains prefix coded numerical values the terms
     * enum should be wrapped with either {@link #wrapNumeric32Bit(TermsEnum)}
     * or {@link #wrapNumeric64Bit(TermsEnum)} depending on its precision. If
     * the {@link TermsEnum} is not wrapped the returned
     * {@link BytesRefIterator} will contain partial precision terms rather than
     * only full-precision terms.
     * </p>
     */
    public BytesRefIterator buildFromTerms(final TermsEnum termsEnum) throws IOException {
        return new BytesRefIterator() {
            private DocsEnum docsEnum = null;
            @Override
            public BytesRef next() throws IOException {
                BytesRef ref;
                if ((ref = termsEnum.next()) != null) {
                    docsEnum = termsEnum.docs(null, docsEnum, DocsEnum.FLAG_NONE);
                    // one new ordinal per term; every doc of this term gets it
                    nextOrdinal();
                    int docId;
                    while ((docId = docsEnum.nextDoc()) != DocsEnum.NO_MORE_DOCS) {
                        addDoc(docId);
                    }
                }
                return ref;
            }
            @Override
            public Comparator<BytesRef> getComparator() {
                return termsEnum.getComparator();
            }
        };
    }
    /**
     * Closes this builder and release all resources.
     */
    @Override
    public void close() throws IOException {
        // drop the backing store so it can be garbage collected
        ordinals = null;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.app;
import static org.junit.Assert.fail;
import java.security.PrivilegedExceptionAction;
import java.util.Iterator;
import java.util.List;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillJobRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskRequest;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.Phase;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
import org.apache.hadoop.mapreduce.v2.app.client.MRClientService;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.junit.Test;
public class TestMRClientService {
private static RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
@Test
public void test() throws Exception {
MRAppWithClientService app = new MRAppWithClientService(1, 0, false);
Configuration conf = new Configuration();
Job job = app.submit(conf);
app.waitForState(job, JobState.RUNNING);
Assert.assertEquals("Num tasks not correct", 1, job.getTasks().size());
Iterator<Task> it = job.getTasks().values().iterator();
Task task = it.next();
app.waitForState(task, TaskState.RUNNING);
TaskAttempt attempt = task.getAttempts().values().iterator().next();
app.waitForState(attempt, TaskAttemptState.RUNNING);
// send the diagnostic
String diagnostic1 = "Diagnostic1";
String diagnostic2 = "Diagnostic2";
app.getContext().getEventHandler().handle(
new TaskAttemptDiagnosticsUpdateEvent(attempt.getID(), diagnostic1));
// send the status update
TaskAttemptStatus taskAttemptStatus = new TaskAttemptStatus();
taskAttemptStatus.id = attempt.getID();
taskAttemptStatus.progress = 0.5f;
taskAttemptStatus.stateString = "RUNNING";
taskAttemptStatus.taskState = TaskAttemptState.RUNNING;
taskAttemptStatus.phase = Phase.MAP;
// send the status update
app.getContext().getEventHandler().handle(
new TaskAttemptStatusUpdateEvent(attempt.getID(), taskAttemptStatus));
//verify that all object are fully populated by invoking RPCs.
YarnRPC rpc = YarnRPC.create(conf);
MRClientProtocol proxy =
(MRClientProtocol) rpc.getProxy(MRClientProtocol.class,
app.clientService.getBindAddress(), conf);
GetCountersRequest gcRequest =
recordFactory.newRecordInstance(GetCountersRequest.class);
gcRequest.setJobId(job.getID());
Assert.assertNotNull("Counters is null",
proxy.getCounters(gcRequest).getCounters());
GetJobReportRequest gjrRequest =
recordFactory.newRecordInstance(GetJobReportRequest.class);
gjrRequest.setJobId(job.getID());
JobReport jr = proxy.getJobReport(gjrRequest).getJobReport();
verifyJobReport(jr);
GetTaskAttemptCompletionEventsRequest gtaceRequest =
recordFactory.newRecordInstance(GetTaskAttemptCompletionEventsRequest.class);
gtaceRequest.setJobId(job.getID());
gtaceRequest.setFromEventId(0);
gtaceRequest.setMaxEvents(10);
Assert.assertNotNull("TaskCompletionEvents is null",
proxy.getTaskAttemptCompletionEvents(gtaceRequest).getCompletionEventList());
GetDiagnosticsRequest gdRequest =
recordFactory.newRecordInstance(GetDiagnosticsRequest.class);
gdRequest.setTaskAttemptId(attempt.getID());
Assert.assertNotNull("Diagnostics is null",
proxy.getDiagnostics(gdRequest).getDiagnosticsList());
GetTaskAttemptReportRequest gtarRequest =
recordFactory.newRecordInstance(GetTaskAttemptReportRequest.class);
gtarRequest.setTaskAttemptId(attempt.getID());
TaskAttemptReport tar =
proxy.getTaskAttemptReport(gtarRequest).getTaskAttemptReport();
verifyTaskAttemptReport(tar);
GetTaskReportRequest gtrRequest =
recordFactory.newRecordInstance(GetTaskReportRequest.class);
gtrRequest.setTaskId(task.getID());
Assert.assertNotNull("TaskReport is null",
proxy.getTaskReport(gtrRequest).getTaskReport());
GetTaskReportsRequest gtreportsRequest =
recordFactory.newRecordInstance(GetTaskReportsRequest.class);
gtreportsRequest.setJobId(job.getID());
gtreportsRequest.setTaskType(TaskType.MAP);
Assert.assertNotNull("TaskReports for map is null",
proxy.getTaskReports(gtreportsRequest).getTaskReportList());
gtreportsRequest =
recordFactory.newRecordInstance(GetTaskReportsRequest.class);
gtreportsRequest.setJobId(job.getID());
gtreportsRequest.setTaskType(TaskType.REDUCE);
Assert.assertNotNull("TaskReports for reduce is null",
proxy.getTaskReports(gtreportsRequest).getTaskReportList());
List<String> diag = proxy.getDiagnostics(gdRequest).getDiagnosticsList();
Assert.assertEquals("Num diagnostics not correct", 1 , diag.size());
Assert.assertEquals("Diag 1 not correct",
diagnostic1, diag.get(0).toString());
TaskReport taskReport = proxy.getTaskReport(gtrRequest).getTaskReport();
Assert.assertEquals("Num diagnostics not correct", 1,
taskReport.getDiagnosticsCount());
//send the done signal to the task
app.getContext().getEventHandler().handle(
new TaskAttemptEvent(
task.getAttempts().values().iterator().next().getID(),
TaskAttemptEventType.TA_DONE));
app.waitForState(job, JobState.SUCCEEDED);
}
@Test
public void testViewAclOnlyCannotModify() throws Exception {
final MRAppWithClientService app = new MRAppWithClientService(1, 0, false);
final Configuration conf = new Configuration();
conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
conf.set(MRJobConfig.JOB_ACL_VIEW_JOB, "viewonlyuser");
Job job = app.submit(conf);
app.waitForState(job, JobState.RUNNING);
Assert.assertEquals("Num tasks not correct", 1, job.getTasks().size());
Iterator<Task> it = job.getTasks().values().iterator();
Task task = it.next();
app.waitForState(task, TaskState.RUNNING);
TaskAttempt attempt = task.getAttempts().values().iterator().next();
app.waitForState(attempt, TaskAttemptState.RUNNING);
UserGroupInformation viewOnlyUser =
UserGroupInformation.createUserForTesting(
"viewonlyuser", new String[] {});
Assert.assertTrue("viewonlyuser cannot view job",
job.checkAccess(viewOnlyUser, JobACL.VIEW_JOB));
Assert.assertFalse("viewonlyuser can modify job",
job.checkAccess(viewOnlyUser, JobACL.MODIFY_JOB));
MRClientProtocol client = viewOnlyUser.doAs(
new PrivilegedExceptionAction<MRClientProtocol>() {
@Override
public MRClientProtocol run() throws Exception {
YarnRPC rpc = YarnRPC.create(conf);
return (MRClientProtocol) rpc.getProxy(MRClientProtocol.class,
app.clientService.getBindAddress(), conf);
}
});
KillJobRequest killJobRequest = recordFactory.newRecordInstance(
KillJobRequest.class);
killJobRequest.setJobId(app.getJobId());
try {
client.killJob(killJobRequest);
fail("viewonlyuser killed job");
} catch (AccessControlException e) {
// pass
}
KillTaskRequest killTaskRequest = recordFactory.newRecordInstance(
KillTaskRequest.class);
killTaskRequest.setTaskId(task.getID());
try {
client.killTask(killTaskRequest);
fail("viewonlyuser killed task");
} catch (AccessControlException e) {
// pass
}
KillTaskAttemptRequest killTaskAttemptRequest =
recordFactory.newRecordInstance(KillTaskAttemptRequest.class);
killTaskAttemptRequest.setTaskAttemptId(attempt.getID());
try {
client.killTaskAttempt(killTaskAttemptRequest);
fail("viewonlyuser killed task attempt");
} catch (AccessControlException e) {
// pass
}
FailTaskAttemptRequest failTaskAttemptRequest =
recordFactory.newRecordInstance(FailTaskAttemptRequest.class);
failTaskAttemptRequest.setTaskAttemptId(attempt.getID());
try {
client.failTaskAttempt(failTaskAttemptRequest);
fail("viewonlyuser killed task attempt");
} catch (AccessControlException e) {
// pass
}
}
private void verifyJobReport(JobReport jr) {
Assert.assertNotNull("JobReport is null", jr);
List<AMInfo> amInfos = jr.getAMInfos();
Assert.assertEquals(1, amInfos.size());
Assert.assertEquals(JobState.RUNNING, jr.getJobState());
AMInfo amInfo = amInfos.get(0);
Assert.assertEquals(MRApp.NM_HOST, amInfo.getNodeManagerHost());
Assert.assertEquals(MRApp.NM_PORT, amInfo.getNodeManagerPort());
Assert.assertEquals(MRApp.NM_HTTP_PORT, amInfo.getNodeManagerHttpPort());
Assert.assertEquals(1, amInfo.getAppAttemptId().getAttemptId());
Assert.assertEquals(1, amInfo.getContainerId().getApplicationAttemptId()
.getAttemptId());
Assert.assertTrue(amInfo.getStartTime() > 0);
Assert.assertEquals(false, jr.isUber());
}
private void verifyTaskAttemptReport(TaskAttemptReport tar) {
Assert.assertEquals(TaskAttemptState.RUNNING, tar.getTaskAttemptState());
Assert.assertNotNull("TaskAttemptReport is null", tar);
Assert.assertEquals(MRApp.NM_HOST, tar.getNodeManagerHost());
Assert.assertEquals(MRApp.NM_PORT, tar.getNodeManagerPort());
Assert.assertEquals(MRApp.NM_HTTP_PORT, tar.getNodeManagerHttpPort());
Assert.assertEquals(1, tar.getContainerId().getApplicationAttemptId()
.getAttemptId());
}
class MRAppWithClientService extends MRApp {
MRClientService clientService = null;
MRAppWithClientService(int maps, int reduces, boolean autoComplete) {
super(maps, reduces, autoComplete, "MRAppWithClientService", true);
}
@Override
protected ClientService createClientService(AppContext context) {
clientService = new MRClientService(context);
return clientService;
}
}
public static void main(String[] args) throws Exception {
TestMRClientService t = new TestMRClientService();
t.test();
}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.api.command.user.firewall;
import java.util.List;
import org.apache.log4j.Logger;
import org.apache.cloudstack.acl.RoleType;
import org.apache.cloudstack.acl.SecurityChecker.AccessType;
import org.apache.cloudstack.api.ACL;
import org.apache.cloudstack.api.APICommand;
import org.apache.cloudstack.api.ApiCommandJobType;
import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.api.ApiErrorCode;
import org.apache.cloudstack.api.BaseAsyncCmd;
import org.apache.cloudstack.api.BaseAsyncCreateCmd;
import org.apache.cloudstack.api.Parameter;
import org.apache.cloudstack.api.ServerApiException;
import org.apache.cloudstack.api.response.FirewallRuleResponse;
import org.apache.cloudstack.api.response.IPAddressResponse;
import org.apache.cloudstack.api.response.NetworkResponse;
import org.apache.cloudstack.api.response.UserVmResponse;
import org.apache.cloudstack.context.CallContext;
import com.cloud.event.EventTypes;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.exception.NetworkRuleConflictException;
import com.cloud.exception.ResourceUnavailableException;
import com.cloud.network.IpAddress;
import com.cloud.network.rules.FirewallRule;
import com.cloud.network.rules.PortForwardingRule;
import com.cloud.user.Account;
import com.cloud.utils.net.Ip;
import com.cloud.utils.net.NetUtils;
import com.cloud.vm.VirtualMachine;
@APICommand(name = "createPortForwardingRule", description = "Creates a port forwarding rule", responseObject = FirewallRuleResponse.class, entityType = {FirewallRule.class,
        VirtualMachine.class, IpAddress.class},
        requestHasSensitiveInfo = false, responseHasSensitiveInfo = false)
public class CreatePortForwardingRuleCmd extends BaseAsyncCreateCmd implements PortForwardingRule {
    public static final Logger s_logger = Logger.getLogger(CreatePortForwardingRuleCmd.class.getName());

    private static final String s_name = "createportforwardingruleresponse";

    // ///////////////////////////////////////////////////
    // ////////////// API parameters /////////////////////
    // ///////////////////////////////////////////////////

    @ACL(accessType = AccessType.OperateEntry)
    @Parameter(name = ApiConstants.IP_ADDRESS_ID,
               type = CommandType.UUID,
               entityType = IPAddressResponse.class,
               required = true,
               description = "the IP address id of the port forwarding rule")
    private Long ipAddressId;

    @Parameter(name = ApiConstants.PRIVATE_START_PORT,
               type = CommandType.INTEGER,
               required = true,
               description = "the starting port of port forwarding rule's private port range")
    private Integer privateStartPort;

    @Parameter(name = ApiConstants.PROTOCOL,
               type = CommandType.STRING,
               required = true,
               description = "the protocol for the port forwarding rule. Valid values are TCP or UDP.")
    private String protocol;

    @Parameter(name = ApiConstants.PRIVATE_END_PORT,
               type = CommandType.INTEGER,
               required = false,
               description = "the ending port of port forwarding rule's private port range")
    private Integer privateEndPort;

    @Parameter(name = ApiConstants.PUBLIC_START_PORT,
               type = CommandType.INTEGER,
               required = true,
               description = "the starting port of port forwarding rule's public port range")
    private Integer publicStartPort;

    // Fixed copy-paste in the description: this is the PUBLIC port range.
    @Parameter(name = ApiConstants.PUBLIC_END_PORT,
               type = CommandType.INTEGER,
               required = false,
               description = "the ending port of port forwarding rule's public port range")
    private Integer publicEndPort;

    @ACL(accessType = AccessType.OperateEntry)
    @Parameter(name = ApiConstants.VIRTUAL_MACHINE_ID,
               type = CommandType.UUID,
               entityType = UserVmResponse.class,
               required = true,
               description = "the ID of the virtual machine for the port forwarding rule")
    private Long virtualMachineId;

    @Parameter(name = ApiConstants.CIDR_LIST, type = CommandType.LIST, collectionType = CommandType.STRING, description = "the cidr list to forward traffic from")
    private List<String> cidrlist;

    @Parameter(name = ApiConstants.OPEN_FIREWALL, type = CommandType.BOOLEAN, description = "if true, firewall rule for source/end public port is automatically created; "
        + "if false - firewall rule has to be created explicitly. If not specified 1) defaulted to false when PF"
        + " rule is being created for VPC guest network 2) in all other cases defaulted to true")
    private Boolean openFirewall;

    @Parameter(name = ApiConstants.NETWORK_ID,
               type = CommandType.UUID,
               entityType = NetworkResponse.class,
               description = "the network of the virtual machine the port forwarding rule will be created for. "
                   + "Required when public IP address is not associated with any guest network yet (VPC case).")
    private Long networkId;

    @Parameter(name = ApiConstants.VM_GUEST_IP,
               type = CommandType.STRING,
               required = false,
               description = "VM guest nic secondary IP address for the port forwarding rule")
    private String vmSecondaryIp;

    @Parameter(name = ApiConstants.FOR_DISPLAY, type = CommandType.BOOLEAN, description = "an optional field, whether to the display the rule to the end user or not", since = "4.4", authorized = {RoleType.Admin})
    private Boolean display;

    // ///////////////////////////////////////////////////
    // ///////////////// Accessors ///////////////////////
    // ///////////////////////////////////////////////////

    public Long getIpAddressId() {
        return ipAddressId;
    }

    /** Returns the VM secondary IP as an {@link Ip}, or null when not given. */
    public Ip getVmSecondaryIp() {
        if (vmSecondaryIp == null) {
            return null;
        }
        return new Ip(vmSecondaryIp);
    }

    @Override
    public String getProtocol() {
        return protocol.trim();
    }

    @Override
    public long getVirtualMachineId() {
        return virtualMachineId;
    }

    /**
     * The cidrList parameter is deprecated for PF rules; passing it is an
     * error rather than a silently ignored value.
     */
    @Override
    public List<String> getSourceCidrList() {
        if (cidrlist != null) {
            throw new InvalidParameterValueException("Parameter cidrList is deprecated; if you need to open firewall "
                + "rule for the specific cidr, please refer to createFirewallRule command");
        }
        return null;
    }

    /**
     * Resolves the effective openFirewall setting: explicit value wins (but is
     * rejected for VPC IPs); otherwise defaults to false for VPC and true
     * elsewhere.
     */
    public Boolean getOpenFirewall() {
        boolean isVpc = getVpcId() != null;
        if (openFirewall != null) {
            if (isVpc && openFirewall) {
                throw new InvalidParameterValueException("Can't have openFirewall=true when IP address belongs to VPC");
            }
            return openFirewall;
        } else {
            if (isVpc) {
                return false;
            }
            return true;
        }
    }

    /** Returns the VPC id of the rule's IP address, or null when not applicable. */
    private Long getVpcId() {
        if (ipAddressId != null) {
            IpAddress ipAddr = _networkService.getIp(ipAddressId);
            if (ipAddr == null || !ipAddr.readyToUse()) {
                throw new InvalidParameterValueException("Unable to create PF rule, invalid IP address id " + ipAddressId);
            } else {
                return ipAddr.getVpcId();
            }
        }
        return null;
    }

    // ///////////////////////////////////////////////////
    // ///////////// API Implementation///////////////////
    // ///////////////////////////////////////////////////

    @Override
    public String getCommandName() {
        return s_name;
    }

    /**
     * Applies the previously created rule (see {@link #create()}); on any
     * failure the rule (and optional firewall rule) is revoked and an API
     * error is raised.
     */
    @Override
    public void execute() throws ResourceUnavailableException {
        CallContext callerContext = CallContext.current();
        boolean success = true;
        PortForwardingRule rule = null;
        try {
            CallContext.current().setEventDetails("Rule Id: " + getEntityId());
            if (getOpenFirewall()) {
                success = success && _firewallService.applyIngressFirewallRules(ipAddressId, callerContext.getCallingAccount());
            }
            success = success && _rulesService.applyPortForwardingRules(ipAddressId, callerContext.getCallingAccount());
            // State is different after the rule is applied, so get new object here
            rule = _entityMgr.findById(PortForwardingRule.class, getEntityId());
            FirewallRuleResponse fwResponse = new FirewallRuleResponse();
            if (rule != null) {
                fwResponse = _responseGenerator.createPortForwardingRuleResponse(rule);
                setResponseObject(fwResponse);
            }
            fwResponse.setResponseName(getCommandName());
        } finally {
            // Roll back on failure; note this also runs (and masks the cause)
            // when the try block threw before the rule was looked up.
            if (!success || rule == null) {
                if (getOpenFirewall()) {
                    _firewallService.revokeRelatedFirewallRule(getEntityId(), true);
                }
                try {
                    _rulesService.revokePortForwardingRule(getEntityId(), true);
                } catch (Exception ex) {
                    //Ignore e.g. failed to apply rules to device error
                }
                throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, "Failed to apply port forwarding rule");
            }
        }
    }

    @Override
    public long getId() {
        throw new UnsupportedOperationException("database id can only provided by VO objects");
    }

    @Override
    public String getXid() {
        // FIXME: We should allow for end user to specify Xid.
        return null;
    }

    @Override
    public String getUuid() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public Long getSourceIpAddressId() {
        return ipAddressId;
    }

    @Override
    public Integer getSourcePortStart() {
        return publicStartPort.intValue();
    }

    @Override
    public Integer getSourcePortEnd() {
        return (publicEndPort == null) ? publicStartPort.intValue() : publicEndPort.intValue();
    }

    @Override
    public Purpose getPurpose() {
        return Purpose.PortForwarding;
    }

    @Override
    public State getState() {
        throw new UnsupportedOperationException("Should never call me to find the state");
    }

    /**
     * Returns the network to create the rule in: the network the IP is already
     * associated with, or the explicit networkId parameter (VPC case).
     *
     * @throws InvalidParameterValueException when the IP cannot be found or no
     *         network can be determined.
     */
    @Override
    public long getNetworkId() {
        IpAddress ip = _entityMgr.findById(IpAddress.class, getIpAddressId());
        // Guard added: findById may return null, which previously surfaced as
        // an NPE instead of a meaningful API error.
        if (ip == null) {
            throw new InvalidParameterValueException("Unable to find ip address by id " + ipAddressId);
        }
        Long ntwkId = null;
        if (ip.getAssociatedWithNetworkId() != null) {
            ntwkId = ip.getAssociatedWithNetworkId();
        } else {
            ntwkId = networkId;
        }
        if (ntwkId == null) {
            throw new InvalidParameterValueException("Unable to create port forwarding rule for the ipAddress id=" + ipAddressId +
                " as ip is not associated with any network and no networkId is passed in");
        }
        return ntwkId;
    }

    @Override
    public long getEntityOwnerId() {
        Account account = CallContext.current().getCallingAccount();
        if (account != null) {
            return account.getId();
        }
        return Account.ACCOUNT_ID_SYSTEM; // no account info given, parent this command to SYSTEM so ERROR events are
        // tracked
    }

    @Override
    public long getDomainId() {
        IpAddress ip = _networkService.getIp(ipAddressId);
        return ip.getDomainId();
    }

    @Override
    public Ip getDestinationIpAddress() {
        return null;
    }

    @Override
    public void setDestinationIpAddress(Ip destinationIpAddress) {
        return;
    }

    @Override
    public int getDestinationPortStart() {
        return privateStartPort.intValue();
    }

    @Override
    public int getDestinationPortEnd() {
        return (privateEndPort == null) ? privateStartPort.intValue() : privateEndPort.intValue();
    }

    /**
     * Validates the parameters and creates the (not yet applied) rule entity;
     * the async {@link #execute()} phase applies it.
     */
    @Override
    public void create() {
        // cidr list parameter is deprecated
        if (cidrlist != null) {
            throw new InvalidParameterValueException(
                "Parameter cidrList is deprecated; if you need to open firewall rule for the specific cidr, please refer to createFirewallRule command");
        }
        Ip privateIp = getVmSecondaryIp();
        if (privateIp != null) {
            if (!NetUtils.isValidIp4(privateIp.toString())) {
                throw new InvalidParameterValueException("Invalid vm ip address");
            }
        }
        try {
            PortForwardingRule result = _rulesService.createPortForwardingRule(this, virtualMachineId, privateIp, getOpenFirewall(), isDisplay());
            setEntityId(result.getId());
            setEntityUuid(result.getUuid());
        } catch (NetworkRuleConflictException ex) {
            s_logger.trace("Network Rule Conflict: ", ex);
            throw new ServerApiException(ApiErrorCode.NETWORK_RULE_CONFLICT_ERROR, ex.getMessage(), ex);
        }
    }

    @Override
    public String getEventType() {
        return EventTypes.EVENT_NET_RULE_ADD;
    }

    @Override
    public String getEventDescription() {
        IpAddress ip = _networkService.getIp(ipAddressId);
        return ("Applying port forwarding rule for Ip: " + ip.getAddress() + " with virtual machine:" + virtualMachineId);
    }

    @Override
    public long getAccountId() {
        IpAddress ip = _networkService.getIp(ipAddressId);
        return ip.getAccountId();
    }

    @Override
    public String getSyncObjType() {
        return BaseAsyncCmd.networkSyncObject;
    }

    @Override
    public Long getSyncObjId() {
        return getIp().getAssociatedWithNetworkId();
    }

    /** Looks up the rule's IP address, failing with a clean API error when absent. */
    private IpAddress getIp() {
        IpAddress ip = _networkService.getIp(ipAddressId);
        if (ip == null) {
            throw new InvalidParameterValueException("Unable to find ip address by id " + ipAddressId);
        }
        return ip;
    }

    @Override
    public Integer getIcmpCode() {
        return null;
    }

    @Override
    public Integer getIcmpType() {
        return null;
    }

    @Override
    public Long getRelated() {
        return null;
    }

    @Override
    public FirewallRuleType getType() {
        return FirewallRuleType.User;
    }

    @Override
    public ApiCommandJobType getInstanceType() {
        return ApiCommandJobType.FirewallRule;
    }

    @Override
    public TrafficType getTrafficType() {
        return null;
    }

    @Override
    public List<String> getDestinationCidrList(){
        return null;
    }

    @Override
    public boolean isDisplay() {
        if (display != null) {
            return display;
        } else {
            return true;
        }
    }

    @Override
    public Class<?> getEntityType() {
        return FirewallRule.class;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.daemon.supervisor;
import static org.apache.storm.utils.Utils.OR;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import org.apache.commons.lang.StringUtils;
import org.apache.storm.Config;
import org.apache.storm.DaemonConfig;
import org.apache.storm.container.ResourceIsolationInterface;
import org.apache.storm.generated.LocalAssignment;
import org.apache.storm.generated.ProfileAction;
import org.apache.storm.generated.ProfileRequest;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.generated.WorkerResources;
import org.apache.storm.utils.ConfigUtils;
import org.apache.storm.utils.LocalState;
import org.apache.storm.utils.ObjectReader;
import org.apache.storm.utils.ServerConfigUtils;
import org.apache.storm.utils.ServerUtils;
import org.apache.storm.utils.SimpleVersion;
import org.apache.storm.utils.Time;
import org.apache.storm.utils.Utils;
import org.apache.storm.utils.VersionInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A container that runs processes on the local box.
*/
public class BasicContainer extends Container {
private static final Logger LOG = LoggerFactory.getLogger(BasicContainer.class);
private static final FilenameFilter jarFilter = (dir, name) -> name.endsWith(".jar");
private static final Joiner CPJ =
Joiner.on(File.pathSeparator).skipNulls();
protected final LocalState _localState;
protected final String _profileCmd;
protected final String _stormHome = System.getProperty("storm.home");
protected volatile boolean _exitedEarly = false;
protected volatile long memoryLimitMB;
protected volatile long memoryLimitExceededStart;
protected final double hardMemoryLimitMultiplier;
protected final long hardMemoryLimitOver;
protected final long lowMemoryThresholdMB;
protected final long mediumMemoryThresholdMb;
protected final long mediumMemoryGracePeriodMs;
private class ProcessExitCallback implements ExitCodeCallback {
private final String _logPrefix;
public ProcessExitCallback(String logPrefix) {
_logPrefix = logPrefix;
}
@Override
public void call(int exitCode) {
LOG.info("{} exited with code: {}", _logPrefix, exitCode);
_exitedEarly = true;
}
}
/**
* Create a new BasicContainer
* @param type the type of container being made.
* @param conf the supervisor config
* @param supervisorId the ID of the supervisor this is a part of.
* @param port the port the container is on. Should be <= 0 if only a partial recovery
* @param assignment the assignment for this container. Should be null if only a partial recovery.
* @param resourceIsolationManager used to isolate resources for a container can be null if no isolation is used.
* @param localState the local state of the supervisor. May be null if partial recovery
* @param workerId the id of the worker to use. Must not be null if doing a partial recovery.
*/
public BasicContainer(ContainerType type, Map<String, Object> conf, String supervisorId, int port,
LocalAssignment assignment, ResourceIsolationInterface resourceIsolationManager,
LocalState localState, String workerId) throws IOException {
this(type, conf, supervisorId, port, assignment, resourceIsolationManager, localState, workerId, null, null, null);
}
    /**
     * Create a new BasicContainer.
     *
     * @param type the type of container being made.
     * @param conf the supervisor config
     * @param supervisorId the ID of the supervisor this is a part of.
     * @param port the port the container is on. Should be <= 0 if only a partial recovery
     * @param assignment the assignment for this container. Should be null if only a partial recovery.
     * @param resourceIsolationManager used to isolate resources for a container can be null if no isolation is used.
     * @param localState the local state of the supervisor. May be null if partial recovery
     * @param workerId the id of the worker to use. Must not be null if doing a partial recovery.
     * @param topoConf the config of the topology (mostly for testing) if null
     * and not a partial recovery the real conf is read.
     * @param ops file system operations (mostly for testing) if null a new one is made
     * @param profileCmd the command to use when profiling (used for testing)
     * @throws IOException on any error
     * @throws ContainerRecoveryException if the Container could not be recovered.
     */
    BasicContainer(ContainerType type, Map<String, Object> conf, String supervisorId, int port,
        LocalAssignment assignment, ResourceIsolationInterface resourceIsolationManager,
        LocalState localState, String workerId, Map<String, Object> topoConf,
        AdvancedFSOps ops, String profileCmd) throws IOException {
        super(type, conf, supervisorId, port, assignment, resourceIsolationManager, workerId, topoConf, ops);
        assert(localState != null);
        _localState = localState;
        if (type.isRecovery() && !type.isOnlyKillable()) {
            // Recovery of a live worker: find the worker id previously approved
            // for this port in the supervisor's local state.
            synchronized (localState) {
                String wid = null;
                Map<String, Integer> workerToPort = localState.getApprovedWorkers();
                for (Map.Entry<String, Integer> entry : workerToPort.entrySet()) {
                    if (port == entry.getValue().intValue()) {
                        wid = entry.getKey();
                    }
                }
                if (wid == null) {
                    throw new ContainerRecoveryException("Could not find worker id for " + port + " " + assignment);
                }
                LOG.info("Recovered Worker {}", wid);
                _workerId = wid;
            }
        } else if (_workerId == null){
            // Fresh launch: allocate and persist a brand-new worker id.
            createNewWorkerId();
        }
        if (profileCmd == null) {
            // Default profiler command lives under $STORM_HOME/bin.
            profileCmd = _stormHome + File.separator + "bin" + File.separator
                + conf.get(DaemonConfig.WORKER_PROFILER_COMMAND);
        }
        _profileCmd = profileCmd;
        hardMemoryLimitMultiplier =
            ObjectReader.getDouble(conf.get(DaemonConfig.STORM_SUPERVISOR_HARD_MEMORY_LIMIT_MULTIPLIER), 2.0);
        hardMemoryLimitOver =
            ObjectReader.getInt(conf.get(DaemonConfig.STORM_SUPERVISOR_HARD_LIMIT_MEMORY_OVERAGE_MB), 0);
        lowMemoryThresholdMB = ObjectReader.getInt(conf.get(DaemonConfig.STORM_SUPERVISOR_LOW_MEMORY_THRESHOLD_MB), 1024);
        mediumMemoryThresholdMb =
            ObjectReader.getInt(conf.get(DaemonConfig.STORM_SUPERVISOR_MEDIUM_MEMORY_THRESHOLD_MB), 1536);
        mediumMemoryGracePeriodMs =
            ObjectReader.getInt(conf.get(DaemonConfig.STORM_SUPERVISOR_MEDIUM_MEMORY_GRACE_PERIOD_MS), 20_000);
        if (assignment != null) {
            // Pre-compute the memory ceiling for this assignment's resources.
            WorkerResources resources = assignment.get_resources();
            memoryLimitMB = calculateMemoryLimit(resources, getMemOnHeap(resources));
        }
    }
/**
* Create a new worker ID for this process and store in in this object and
* in the local state. Never call this if a worker is currently up and running.
* We will lose track of the process.
*/
protected void createNewWorkerId() {
_type.assertFull();
assert(_workerId == null);
synchronized (_localState) {
_workerId = Utils.uuid();
Map<String, Integer> workerToPort = _localState.getApprovedWorkers();
if (workerToPort == null) {
workerToPort = new HashMap<>(1);
}
removeWorkersOn(workerToPort, _port);
workerToPort.put(_workerId, _port);
_localState.setApprovedWorkers(workerToPort);
LOG.info("Created Worker ID {}", _workerId);
}
}
private static void removeWorkersOn(Map<String, Integer> workerToPort, int _port) {
for (Iterator<Entry<String, Integer>> i = workerToPort.entrySet().iterator(); i.hasNext();) {
Entry<String, Integer> found = i.next();
if (_port == found.getValue().intValue()) {
LOG.warn("Deleting worker {} from state", found.getKey());
i.remove();
}
}
}
@Override
public void cleanUpForRestart() throws IOException {
String origWorkerId = _workerId;
super.cleanUpForRestart();
synchronized (_localState) {
Map<String, Integer> workersToPort = _localState.getApprovedWorkers();
workersToPort.remove(origWorkerId);
removeWorkersOn(workersToPort, _port);
_localState.setApprovedWorkers(workersToPort);
LOG.info("Removed Worker ID {}", origWorkerId);
}
}
    /**
     * Relaunch the worker in this container: the container is switched to
     * LAUNCH mode, a brand-new worker id is created and persisted, and the
     * worker process is set up and started.
     */
    @Override
    public void relaunch() throws IOException {
        _type.assertFull();
        //We are launching it now...
        _type = ContainerType.LAUNCH;
        createNewWorkerId();
        setup();
        launch();
    }
    /**
     * @return true if the main worker process exited early ({@code _exitedEarly}
     *     is reset to false each time {@link #launch()} runs).
     */
    @Override
    public boolean didMainProcessExit() {
        return _exitedEarly;
    }
/**
* Run the given command for profiling
*
* @param command
* the command to run
* @param env
* the environment to run the command
* @param logPrefix
* the prefix to include in the logs
* @param targetDir
* the working directory to run the command in
* @return true if it ran successfully, else false
* @throws IOException
* on any error
* @throws InterruptedException
* if interrupted wile waiting for the process to exit.
*/
protected boolean runProfilingCommand(List<String> command, Map<String, String> env, String logPrefix,
File targetDir) throws IOException, InterruptedException {
_type.assertFull();
Process p = ClientSupervisorUtils.launchProcess(command, env, logPrefix, null, targetDir);
int ret = p.waitFor();
return ret == 0;
}
@Override
public boolean runProfiling(ProfileRequest request, boolean stop) throws IOException, InterruptedException {
_type.assertFull();
String targetDir = ConfigUtils.workerArtifactsRoot(_conf, _topologyId, _port);
@SuppressWarnings("unchecked")
Map<String, String> env = (Map<String, String>) _topoConf.get(Config.TOPOLOGY_ENVIRONMENT);
if (env == null) {
env = new HashMap<>();
}
String str = ConfigUtils.workerArtifactsPidPath(_conf, _topologyId, _port);
String workerPid = _ops.slurpString(new File(str)).trim();
ProfileAction profileAction = request.get_action();
String logPrefix = "ProfilerAction process " + _topologyId + ":" + _port + " PROFILER_ACTION: " + profileAction
+ " ";
List<String> command = mkProfileCommand(profileAction, stop, workerPid, targetDir);
File targetFile = new File(targetDir);
if (command.size() > 0) {
return runProfilingCommand(command, env, logPrefix, targetFile);
}
LOG.warn("PROFILING REQUEST NOT SUPPORTED {} IGNORED...", request);
return true;
}
/**
* Get the command to run when doing profiling
* @param action the profiling action to perform
* @param stop if this is meant to stop the profiling or start it
* @param workerPid the PID of the process to profile
* @param targetDir the current working directory of the worker process
* @return the command to run for profiling.
*/
private List<String> mkProfileCommand(ProfileAction action, boolean stop, String workerPid, String targetDir) {
switch(action) {
case JMAP_DUMP:
return jmapDumpCmd(workerPid, targetDir);
case JSTACK_DUMP:
return jstackDumpCmd(workerPid, targetDir);
case JPROFILE_DUMP:
return jprofileDump(workerPid, targetDir);
case JVM_RESTART:
return jprofileJvmRestart(workerPid);
case JPROFILE_STOP:
if (stop) {
return jprofileStop(workerPid, targetDir);
}
return jprofileStart(workerPid);
default:
return Lists.newArrayList();
}
}
private List<String> jmapDumpCmd(String pid, String targetDir) {
return Lists.newArrayList(_profileCmd, pid, "jmap", targetDir);
}
private List<String> jstackDumpCmd(String pid, String targetDir) {
return Lists.newArrayList(_profileCmd, pid, "jstack", targetDir);
}
private List<String> jprofileStart(String pid) {
return Lists.newArrayList(_profileCmd, pid, "start");
}
private List<String> jprofileStop(String pid, String targetDir) {
return Lists.newArrayList(_profileCmd, pid, "stop", targetDir);
}
private List<String> jprofileDump(String pid, String targetDir) {
return Lists.newArrayList(_profileCmd, pid, "dump", targetDir);
}
private List<String> jprofileJvmRestart(String pid) {
return Lists.newArrayList(_profileCmd, pid, "kill");
}
/**
* Compute the java.library.path that should be used for the worker.
* This helps it to load JNI libraries that are packaged in the uber jar.
* @param stormRoot the root directory of the worker process
* @param conf the config for the supervisor.
* @return the java.library.path/LD_LIBRARY_PATH to use so native libraries load correctly.
*/
protected String javaLibraryPath(String stormRoot, Map<String, Object> conf) {
String resourceRoot = stormRoot + File.separator + ServerConfigUtils.RESOURCES_SUBDIR;
String os = System.getProperty("os.name").replaceAll("\\s+", "_");
String arch = System.getProperty("os.arch");
String archResourceRoot = resourceRoot + File.separator + os + "-" + arch;
String ret = CPJ.join(archResourceRoot, resourceRoot,
conf.get(DaemonConfig.JAVA_LIBRARY_PATH));
return ret;
}
/**
* Returns a path with a wildcard as the final element, so that the JVM will expand
* that to all JARs in the directory.
* @param dir the directory to which a wildcard will be appended
* @return the path with wildcard ("*") suffix
*/
protected String getWildcardDir(File dir) {
return dir.toString() + File.separator + "*";
}
protected List<String> frameworkClasspath(SimpleVersion topoVersion) {
File stormWorkerLibDir = new File(_stormHome, "lib-worker");
String topoConfDir =
System.getenv("STORM_CONF_DIR") != null ?
System.getenv("STORM_CONF_DIR") :
new File(_stormHome, "conf").getAbsolutePath();
File stormExtlibDir = new File(_stormHome, "extlib");
String extcp = System.getenv("STORM_EXT_CLASSPATH");
List<String> pathElements = new LinkedList<>();
pathElements.add(getWildcardDir(stormWorkerLibDir));
pathElements.add(getWildcardDir(stormExtlibDir));
pathElements.add(extcp);
pathElements.add(topoConfDir);
NavigableMap<SimpleVersion, List<String>> classpaths = Utils.getConfiguredClasspathVersions(_conf, pathElements);
return Utils.getCompatibleVersion(classpaths, topoVersion, "classpath", pathElements);
}
protected String getWorkerMain(SimpleVersion topoVersion) {
String defaultWorkerGuess = "org.apache.storm.daemon.worker.Worker";
if (topoVersion.getMajor() == 0) {
//Prior to the org.apache change
defaultWorkerGuess = "backtype.storm.daemon.worker";
} else if (topoVersion.getMajor() == 1) {
//Have not moved to a java worker yet
defaultWorkerGuess = "org.apache.storm.daemon.worker";
}
NavigableMap<SimpleVersion,String> mains = Utils.getConfiguredWorkerMainVersions(_conf);
return Utils.getCompatibleVersion(mains, topoVersion, "worker main class", defaultWorkerGuess);
}
protected String getWorkerLogWriter(SimpleVersion topoVersion) {
String defaultGuess = "org.apache.storm.LogWriter";
if (topoVersion.getMajor() == 0) {
//Prior to the org.apache change
defaultGuess = "backtype.storm.LogWriter";
}
NavigableMap<SimpleVersion,String> mains = Utils.getConfiguredWorkerLogWriterVersions(_conf);
return Utils.getCompatibleVersion(mains, topoVersion, "worker log writer class", defaultGuess);
}
@SuppressWarnings("unchecked")
private List<String> asStringList(Object o) {
if (o instanceof String) {
return Arrays.asList((String)o);
} else if (o instanceof List) {
return (List<String>)o;
}
return Collections.EMPTY_LIST;
}
/**
* Compute the classpath for the worker process
* @param stormJar the topology jar
* @param dependencyLocations any dependencies from the topology
* @param topoVersion the version of the storm framework to use
* @return the full classpath
*/
protected String getWorkerClassPath(String stormJar, List<String> dependencyLocations, SimpleVersion topoVersion) {
List<String> workercp = new ArrayList<>();
workercp.addAll(asStringList(_topoConf.get(Config.TOPOLOGY_CLASSPATH_BEGINNING)));
workercp.addAll(frameworkClasspath(topoVersion));
workercp.add(stormJar);
workercp.addAll(dependencyLocations);
workercp.addAll(asStringList(_topoConf.get(Config.TOPOLOGY_CLASSPATH)));
return CPJ.join(workercp);
}
private String substituteChildOptsInternal(String string, int memOnheap) {
if (StringUtils.isNotBlank(string)) {
String p = String.valueOf(_port);
string = string.replace("%ID%", p);
string = string.replace("%WORKER-ID%", _workerId);
string = string.replace("%TOPOLOGY-ID%", _topologyId);
string = string.replace("%WORKER-PORT%", p);
if (memOnheap > 0) {
string = string.replace("%HEAP-MEM%", String.valueOf(memOnheap));
}
if (memoryLimitMB > 0) {
string = string.replace("%LIMIT-MEM%", String.valueOf(memoryLimitMB));
}
}
return string;
}
    /**
     * Substitute worker placeholders in a childopts value without a known
     * on-heap memory size (%HEAP-MEM% is left untouched).
     */
    protected List<String> substituteChildopts(Object value) {
        return substituteChildopts(value, -1);
    }
protected List<String> substituteChildopts(Object value, int memOnheap) {
List<String> rets = new ArrayList<>();
if (value instanceof String) {
String string = substituteChildOptsInternal((String) value, memOnheap);
if (StringUtils.isNotBlank(string)) {
String[] strings = string.split("\\s+");
for (String s: strings) {
if (StringUtils.isNotBlank(s)) {
rets.add(s);
}
}
}
} else if (value instanceof List) {
@SuppressWarnings("unchecked")
List<String> objects = (List<String>) value;
for (String object : objects) {
String str = substituteChildOptsInternal(object, memOnheap);
if (StringUtils.isNotBlank(str)) {
rets.add(str);
}
}
}
return rets;
}
    /**
     * Launch the worker process (non-blocking).
     *
     * @param command
     *            the command to run
     * @param env
     *            the environment to run the command
     * @param logPrefix
     *            the prefix to include in the logs
     * @param processExitCallback
     *            a callback for when the process exits
     * @param targetDir
     *            the working directory to run the command in
     * @throws IOException
     *            on any error
     */
    protected void launchWorkerProcess(List<String> command, Map<String, String> env, String logPrefix,
            ExitCodeCallback processExitCallback, File targetDir) throws IOException {
        // When resource isolation is enabled, it may wrap/rewrite the command
        // (e.g. to launch inside a cgroup).
        if (_resourceIsolationManager != null) {
            command = _resourceIsolationManager.getLaunchCommand(_workerId, command);
        }
        ClientSupervisorUtils.launchProcess(command, env, logPrefix, processExitCallback, targetDir);
    }
private String getWorkerLoggingConfigFile() {
String log4jConfigurationDir = (String) (_conf.get(DaemonConfig.STORM_LOG4J2_CONF_DIR));
if (StringUtils.isNotBlank(log4jConfigurationDir)) {
if (!ServerUtils.isAbsolutePath(log4jConfigurationDir)) {
log4jConfigurationDir = _stormHome + File.separator + log4jConfigurationDir;
}
} else {
log4jConfigurationDir = _stormHome + File.separator + "log4j2";
}
if (ServerUtils.IS_ON_WINDOWS && !log4jConfigurationDir.startsWith("file:")) {
log4jConfigurationDir = "file:///" + log4jConfigurationDir;
}
return log4jConfigurationDir + File.separator + "worker.xml";
}
private static class TopologyMetaData {
private boolean _dataCached = false;
private List<String> _depLocs = null;
private String _stormVersion = null;
private final Map<String, Object> _conf;
private final String _topologyId;
private final AdvancedFSOps _ops;
private final String _stormRoot;
public TopologyMetaData(final Map<String, Object> conf, final String topologyId, final AdvancedFSOps ops, final String stormRoot) {
_conf = conf;
_topologyId = topologyId;
_ops = ops;
_stormRoot = stormRoot;
}
public String toString() {
List<String> data;
String stormVersion;
synchronized(this) {
data = _depLocs;
stormVersion = _stormVersion;
}
return "META for " + _topologyId +" DEP_LOCS => " + data + " STORM_VERSION => " + stormVersion;
}
private synchronized void readData() throws IOException {
final StormTopology stormTopology = ConfigUtils.readSupervisorTopology(_conf, _topologyId, _ops);
final List<String> dependencyLocations = new ArrayList<>();
if (stormTopology.get_dependency_jars() != null) {
for (String dependency : stormTopology.get_dependency_jars()) {
dependencyLocations.add(new File(_stormRoot, dependency).getAbsolutePath());
}
}
if (stormTopology.get_dependency_artifacts() != null) {
for (String dependency : stormTopology.get_dependency_artifacts()) {
dependencyLocations.add(new File(_stormRoot, dependency).getAbsolutePath());
}
}
_depLocs = dependencyLocations;
_stormVersion = stormTopology.get_storm_version();
_dataCached = true;
}
public synchronized List<String> getDepLocs() throws IOException {
if (!_dataCached) {
readData();
}
return _depLocs;
}
public synchronized String getStormVersion() throws IOException {
if (!_dataCached) {
readData();
}
return _stormVersion;
}
}
static class TopoMetaLRUCache {
public final int _maxSize = 100; //We could make this configurable in the future...
@SuppressWarnings("serial")
private LinkedHashMap<String, TopologyMetaData> _cache = new LinkedHashMap<String, TopologyMetaData>() {
@Override
protected boolean removeEldestEntry(Map.Entry<String,TopologyMetaData> eldest) {
return (size() > _maxSize);
}
};
public synchronized TopologyMetaData get(final Map<String, Object> conf, final String topologyId, final AdvancedFSOps ops, String stormRoot) {
//Only go off of the topology id for now.
TopologyMetaData dl = _cache.get(topologyId);
if (dl == null) {
_cache.putIfAbsent(topologyId, new TopologyMetaData(conf, topologyId, ops, stormRoot));
dl = _cache.get(topologyId);
}
return dl;
}
public synchronized void clear() {
_cache.clear();
}
}
    /** Process-wide cache of topology metadata, shared by all containers. */
    static final TopoMetaLRUCache TOPO_META_CACHE = new TopoMetaLRUCache();
    /**
     * Get the (cached) absolute locations of a topology's dependency
     * jars/artifacts.
     * @throws IOException on any error reading the topology.
     */
    public static List<String> getDependencyLocationsFor(final Map<String, Object> conf, final String topologyId, final AdvancedFSOps ops, String stormRoot) throws IOException {
        return TOPO_META_CACHE.get(conf, topologyId, ops, stormRoot).getDepLocs();
    }
    /**
     * Get the (cached) storm version a topology was built with (may be null).
     * @throws IOException on any error reading the topology.
     */
    public static String getStormVersionFor(final Map<String, Object> conf, final String topologyId, final AdvancedFSOps ops, String stormRoot) throws IOException {
        return TOPO_META_CACHE.get(conf, topologyId, ops, stormRoot).getStormVersion();
    }
/**
* Get parameters for the class path of the worker process. Also used by the
* log Writer
* @param stormRoot the root dist dir for the topology
* @return the classpath for the topology as command line arguments.
* @throws IOException on any error.
*/
private List<String> getClassPathParams(final String stormRoot, final SimpleVersion topoVersion) throws IOException {
final String stormJar = ConfigUtils.supervisorStormJarPath(stormRoot);
final List<String> dependencyLocations = getDependencyLocationsFor(_conf, _topologyId, _ops, stormRoot);
final String workerClassPath = getWorkerClassPath(stormJar, dependencyLocations, topoVersion);
List<String> classPathParams = new ArrayList<>();
classPathParams.add("-cp");
classPathParams.add(workerClassPath);
return classPathParams;
}
/**
* Get a set of java properties that are common to both the log writer and the worker processes.
* These are mostly system properties that are used by logging.
* @return a list of command line options
*/
private List<String> getCommonParams() {
final String workersArtifacts = ConfigUtils.workerArtifactsRoot(_conf);
String stormLogDir = ConfigUtils.getLogDir();
String log4jConfigurationFile = getWorkerLoggingConfigFile();
List<String> commonParams = new ArrayList<>();
commonParams.add("-Dlogging.sensitivity=" + OR((String) _topoConf.get(Config.TOPOLOGY_LOGGING_SENSITIVITY), "S3"));
commonParams.add("-Dlogfile.name=worker.log");
commonParams.add("-Dstorm.home=" + OR(_stormHome, ""));
commonParams.add("-Dworkers.artifacts=" + workersArtifacts);
commonParams.add("-Dstorm.id=" + _topologyId);
commonParams.add("-Dworker.id=" + _workerId);
commonParams.add("-Dworker.port=" + _port);
commonParams.add("-Dstorm.log.dir=" + stormLogDir);
commonParams.add("-Dlog4j.configurationFile=" + log4jConfigurationFile);
commonParams.add("-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector");
commonParams.add("-Dstorm.local.dir=" + _conf.get(Config.STORM_LOCAL_DIR));
if (memoryLimitMB > 0) {
commonParams.add("-Dworker.memory_limit_mb="+ memoryLimitMB);
}
return commonParams;
}
private int getMemOnHeap(WorkerResources resources) {
int memOnheap = 0;
if (resources != null && resources.is_set_mem_on_heap() &&
resources.get_mem_on_heap() > 0) {
memOnheap = (int) Math.ceil(resources.get_mem_on_heap());
} else {
// set the default heap memory size for supervisor-test
memOnheap = ObjectReader.getInt(_topoConf.get(Config.WORKER_HEAP_MEMORY_MB), 768);
}
return memOnheap;
}
private List<String> getWorkerProfilerChildOpts(int memOnheap) {
List<String> workerProfilerChildopts = new ArrayList<>();
if (ObjectReader.getBoolean(_conf.get(DaemonConfig.WORKER_PROFILER_ENABLED), false)) {
workerProfilerChildopts = substituteChildopts(_conf.get(DaemonConfig.WORKER_PROFILER_CHILDOPTS), memOnheap);
}
return workerProfilerChildopts;
}
protected String javaCmd(String cmd) {
String ret = null;
String javaHome = System.getenv().get("JAVA_HOME");
if (StringUtils.isNotBlank(javaHome)) {
ret = javaHome + File.separator + "bin" + File.separator + cmd;
} else {
ret = cmd;
}
return ret;
}
    /**
     * Create the command to launch the worker process. When a log writer class
     * is configured for the topology's version, the command launches the log
     * writer first, which in turn launches the actual worker JVM.
     * @param memOnheap the on heap memory for the worker
     * @param stormRoot the root dist dir for the topology
     * @param jlp java library path for the topology
     * @return the command to run
     * @throws IOException on any error.
     */
    private List<String> mkLaunchCommand(final int memOnheap, final String stormRoot,
                                         final String jlp) throws IOException {
        final String javaCmd = javaCmd("java");
        final String stormOptions = ConfigUtils.concatIfNotNull(System.getProperty("storm.options"));
        final String topoConfFile = ConfigUtils.concatIfNotNull(System.getProperty("storm.conf.file"));
        final String workerTmpDir = ConfigUtils.workerTmpRoot(_conf, _workerId);

        // Fall back to the configured default version when the topology does
        // not declare the storm version it was built with.
        String topoVersionString = getStormVersionFor(_conf, _topologyId, _ops, stormRoot);
        if (topoVersionString == null) {
            topoVersionString = (String)_conf.getOrDefault(Config.SUPERVISOR_WORKER_DEFAULT_VERSION, VersionInfo.getVersion());
        }
        final SimpleVersion topoVersion = new SimpleVersion(topoVersionString);

        List<String> classPathParams = getClassPathParams(stormRoot, topoVersion);
        List<String> commonParams = getCommonParams();

        List<String> commandList = new ArrayList<>();
        String logWriter = getWorkerLogWriter(topoVersion);
        if (logWriter != null) {
            //Log Writer Command...
            commandList.add(javaCmd);
            commandList.addAll(classPathParams);
            commandList.addAll(substituteChildopts(_topoConf.get(Config.TOPOLOGY_WORKER_LOGWRITER_CHILDOPTS)));
            commandList.addAll(commonParams);
            commandList.add(logWriter); //The LogWriter in turn launches the actual worker.
        }

        //Worker Command...
        commandList.add(javaCmd);
        commandList.add("-server");
        commandList.addAll(commonParams);
        // Childopts are layered: cluster-wide, then topology, then GC opts
        // (topology GC opts win over cluster GC opts), then profiler opts.
        commandList.addAll(substituteChildopts(_conf.get(Config.WORKER_CHILDOPTS), memOnheap));
        commandList.addAll(substituteChildopts(_topoConf.get(Config.TOPOLOGY_WORKER_CHILDOPTS), memOnheap));
        commandList.addAll(substituteChildopts(Utils.OR(
                _topoConf.get(Config.TOPOLOGY_WORKER_GC_CHILDOPTS),
                _conf.get(Config.WORKER_GC_CHILDOPTS)), memOnheap));
        commandList.addAll(getWorkerProfilerChildOpts(memOnheap));
        commandList.add("-Djava.library.path=" + jlp);
        commandList.add("-Dstorm.conf.file=" + topoConfFile);
        commandList.add("-Dstorm.options=" + stormOptions);
        commandList.add("-Djava.io.tmpdir=" + workerTmpDir);
        commandList.addAll(classPathParams);
        commandList.add(getWorkerMain(topoVersion));
        // Positional args consumed by the worker main class.
        commandList.add(_topologyId);
        commandList.add(_supervisorId);
        commandList.add(String.valueOf(_port));
        commandList.add(_workerId);

        return commandList;
    }
@Override
public boolean isMemoryLimitViolated(LocalAssignment withUpdatedLimits) throws IOException {
if (super.isMemoryLimitViolated(withUpdatedLimits)) {
return true;
}
if (_resourceIsolationManager != null) {
// In the short term the goal is to not shoot anyone unless we really need to.
// The on heap should limit the memory usage in most cases to a reasonable amount
// If someone is using way more than they requested this is a bug and we should
// not allow it
long usageMb;
long memoryLimitMb;
long hardMemoryLimitOver;
String typeOfCheck;
if (withUpdatedLimits.is_set_total_node_shared()) {
//We need to do enforcement on a topology level, not a single worker level...
// Because in for cgroups each page in shared memory goes to the worker that touched it
// first. We may need to make this more plugable in the future and let the resource
// isolation manager tell us what to do
usageMb = getTotalTopologyMemoryUsed();
memoryLimitMb = getTotalTopologyMemoryReserved(withUpdatedLimits);
hardMemoryLimitOver = this.hardMemoryLimitOver * getTotalWorkersForThisTopology();
typeOfCheck = "TOPOLOGY " + _topologyId;
} else {
usageMb = getMemoryUsageMb();
memoryLimitMb = this.memoryLimitMB;
hardMemoryLimitOver = this.hardMemoryLimitOver;
typeOfCheck = "WORKER " + _workerId;
}
LOG.debug(
"Enforcing memory usage for {} with usage of {} out of {} total and a hard limit of {}",
typeOfCheck,
usageMb,
memoryLimitMb,
hardMemoryLimitOver);
if (usageMb <= 0) {
//Looks like usage might not be supported
return false;
}
long hardLimitMb = Math.max((long)(memoryLimitMb * hardMemoryLimitMultiplier), memoryLimitMb + hardMemoryLimitOver);
if (usageMb > hardLimitMb) {
LOG.warn(
"{} is using {} MB > adjusted hard limit {} MB", typeOfCheck, usageMb, hardLimitMb);
return true;
}
if (usageMb > memoryLimitMb) {
//For others using too much it is really a question of how much memory is free in the system
// to be use. If we cannot calculate it assume that it is bad
long systemFreeMemoryMb = 0;
try {
systemFreeMemoryMb = _resourceIsolationManager.getSystemFreeMemoryMb();
} catch (IOException e) {
LOG.warn("Error trying to calculate free memory on the system {}", e);
}
LOG.debug("SYSTEM MEMORY FREE {} MB", systemFreeMemoryMb);
//If the system is low on memory we cannot be kind and need to shoot something
if (systemFreeMemoryMb <= lowMemoryThresholdMB) {
LOG.warn(
"{} is using {} MB > memory limit {} MB and system is low on memory {} free",
typeOfCheck,
usageMb,
memoryLimitMb,
systemFreeMemoryMb);
return true;
}
//If the system still has some free memory give them a grace period to
// drop back down.
if (systemFreeMemoryMb < mediumMemoryThresholdMb) {
if (memoryLimitExceededStart < 0) {
memoryLimitExceededStart = Time.currentTimeMillis();
} else {
long timeInViolation = Time.currentTimeMillis() - memoryLimitExceededStart;
if (timeInViolation > mediumMemoryGracePeriodMs) {
LOG.warn(
"{} is using {} MB > memory limit {} MB for {} seconds",
typeOfCheck,
usageMb,
memoryLimitMb,
timeInViolation / 1000);
return true;
}
}
} else {
//Otherwise don't bother them
LOG.debug("{} is using {} MB > memory limit {} MB", typeOfCheck, usageMb, memoryLimitMb);
memoryLimitExceededStart = -1;
}
} else {
memoryLimitExceededStart = -1;
}
}
return false;
}
@Override
public long getMemoryUsageMb() {
try {
long ret = 0;
if (_resourceIsolationManager != null) {
long usageBytes = _resourceIsolationManager.getMemoryUsage(_workerId);
if (usageBytes >= 0) {
ret = usageBytes / 1024 / 1024;
}
}
return ret;
} catch (IOException e) {
LOG.warn("Error trying to calculate worker memory usage {}", e);
return 0;
}
}
    /**
     * @return the memory limit (MB) computed for this worker via
     *     calculateMemoryLimit(); 0 until an assignment has been processed.
     */
    @Override
    public long getMemoryReservationMb() {
        return memoryLimitMB;
    }
private long calculateMemoryLimit(final WorkerResources resources, final int memOnHeap) {
long ret = memOnHeap;
if (_resourceIsolationManager != null) {
final int memoffheap = (int) Math.ceil(resources.get_mem_off_heap());
final int extraMem =
(int)
(Math.ceil(
ObjectReader.getDouble(
_conf.get(DaemonConfig.STORM_SUPERVISOR_MEMORY_LIMIT_TOLERANCE_MARGIN_MB),
0.0)));
ret += memoffheap + extraMem;
}
return ret;
}
    /**
     * Launch the worker process for this container's assignment (non-blocking).
     * Computes the memory limit, builds the worker environment (including
     * LD_LIBRARY_PATH), reserves resources with the isolation manager when
     * enabled, and starts the process with an exit callback.
     *
     * @throws IOException on any error.
     */
    @Override
    public void launch() throws IOException {
        _type.assertFull();
        LOG.info("Launching worker with assignment {} for this supervisor {} on port {} with id {}", _assignment,
                _supervisorId, _port, _workerId);
        String logPrefix = "Worker Process " + _workerId;
        ProcessExitCallback processExitCallback = new ProcessExitCallback(logPrefix);
        _exitedEarly = false;

        final WorkerResources resources = _assignment.get_resources();
        final int memOnHeap = getMemOnHeap(resources);
        memoryLimitMB = calculateMemoryLimit(resources, memOnHeap);
        final String stormRoot = ConfigUtils.supervisorStormDistRoot(_conf, _topologyId);
        String jlp = javaLibraryPath(stormRoot, _conf);

        Map<String, String> topEnvironment = new HashMap<String, String>();
        @SuppressWarnings("unchecked")
        Map<String, String> environment = (Map<String, String>) _topoConf.get(Config.TOPOLOGY_ENVIRONMENT);
        if (environment != null) {
            topEnvironment.putAll(environment);
        }

        // Prepend any user-provided LD_LIBRARY_PATH to the computed jlp.
        String ld_library_path = topEnvironment.get("LD_LIBRARY_PATH");
        if (ld_library_path != null) {
            jlp = jlp + System.getProperty("path.separator") + ld_library_path;
        }
        topEnvironment.put("LD_LIBRARY_PATH", jlp);

        if (_resourceIsolationManager != null) {
            final int cpu = (int) Math.ceil(resources.get_cpu());
            //Save the memory limit so we can enforce it less strictly
            _resourceIsolationManager.reserveResourcesForWorker(_workerId, (int) memoryLimitMB, cpu);
        }

        List<String> commandList = mkLaunchCommand(memOnHeap, stormRoot, jlp);

        LOG.info("Launching worker with command: {}. ", ServerUtils.shellCmd(commandList));

        String workerDir = ConfigUtils.workerRoot(_conf, _workerId);

        launchWorkerProcess(commandList, topEnvironment, logPrefix, processExitCallback, new File(workerDir));
    }
}
| |
// ============================================================================
// Copyright (C) 2006-2018 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// https://github.com/Talend/data-prep/blob/master/LICENSE
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.dataprep.preparation.service;
import static org.slf4j.LoggerFactory.getLogger;
import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE;
import static org.springframework.http.MediaType.TEXT_PLAIN_VALUE;
import static org.springframework.web.bind.annotation.RequestMethod.DELETE;
import static org.springframework.web.bind.annotation.RequestMethod.GET;
import static org.springframework.web.bind.annotation.RequestMethod.HEAD;
import static org.springframework.web.bind.annotation.RequestMethod.POST;
import static org.springframework.web.bind.annotation.RequestMethod.PUT;
import java.util.List;
import java.util.stream.Stream;
import javax.validation.Valid;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.talend.dataprep.api.dataset.RowMetadata;
import org.talend.dataprep.api.folder.Folder;
import org.talend.dataprep.api.preparation.Action;
import org.talend.dataprep.api.preparation.AppendStep;
import org.talend.dataprep.api.preparation.Preparation;
import org.talend.dataprep.api.preparation.PreparationDTO;
import org.talend.dataprep.api.preparation.PreparationDetailsDTO;
import org.talend.dataprep.api.preparation.Step;
import org.talend.dataprep.conversions.BeanConversionService;
import org.talend.dataprep.exception.json.JsonErrorCodeDescription;
import org.talend.dataprep.metrics.Timed;
import org.talend.dataprep.util.SortAndOrderHelper.Order;
import org.talend.dataprep.util.SortAndOrderHelper.Sort;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
@RestController
@Api(value = "preparations", basePath = "/preparations", description = "Operations on preparations")
public class PreparationController {
private static final Logger LOGGER = getLogger(PreparationController.class);
@Autowired
private PreparationService preparationService;
@Autowired
private BeanConversionService beanConversionService;
    /**
     * Create a preparation from the http request body.
     *
     * @param preparation the preparation to create (validated).
     * @param folderId where to store the preparation (required).
     * @return the created preparation id.
     */
    @RequestMapping(value = "/preparations", method = POST)
    @ApiOperation(value = "Create a preparation", notes = "Returns the id of the created preparation.")
    @Timed
    public String create(@ApiParam("preparation") @RequestBody @Valid final Preparation preparation,
            @ApiParam(value = "The folderId path to create the entry.") @RequestParam String folderId) {
        return preparationService.create(preparation, folderId);
    }
    /**
     * List all the preparations id.
     *
     * @param name when set, filter preparations by name.
     * @param folderPath when set, filter preparations by folder path.
     * @param path when set, filter preparations by full path
     *        ({@code <folder path>/<preparation name>}).
     * @param sort how the preparation should be sorted (default is 'last modification date').
     * @param order how to apply the sort.
     * @return the preparations id list.
     */
    @RequestMapping(value = "/preparations", method = GET)
    @ApiOperation(value = "List all preparations id",
            notes = "Returns the list of preparations ids the current user is allowed to see. Creation date is always displayed in UTC time zone. See 'preparations/all' to get all details at once.")
    @Timed
    public Stream<String> list(
            @ApiParam(name = "name",
                    value = "Filter preparations by name.") @RequestParam(required = false) String name,
            @ApiParam(name = "folder_path", value = "Filter preparations by folder path.") @RequestParam(
                    required = false, name = "folder_path") String folderPath,
            @ApiParam(name = "path",
                    value = "Filter preparations by full path (<folder path>/<preparation name>).") @RequestParam(
                            required = false, name = "path") String path,
            @ApiParam(value = "Sort key (by name or date).") @RequestParam(
                    defaultValue = "lastModificationDate") Sort sort,
            @ApiParam(value = "Order for sort key (desc or asc).") @RequestParam(defaultValue = "desc") Order order) {
        LOGGER.debug("Get list of preparations (summary).");
        return preparationService.listAll(name, folderPath, path, sort, order).map(PreparationDTO::getId);
    }
    /**
     * List all preparation details.
     *
     * @param name when set, filter preparations by name.
     * @param folderPath when set, filter preparations by folder path.
     * @param path when set, filter preparations by full path
     *        ({@code <folder path>/<preparation name>}).
     * @param sort how to sort the preparations.
     * @param order how to order the sort.
     * @return the preparation details.
     */
    @RequestMapping(value = "/preparations/details", method = GET)
    @ApiOperation(value = "List all preparations",
            notes = "Returns the list of preparations details the current user is allowed to see. Creation date is always displayed in UTC time zone. This operation return all details on the preparations.")
    @Timed
    public Stream<PreparationDTO> listAll(
            @ApiParam(name = "name",
                    value = "Filter preparations by name.") @RequestParam(required = false) String name,
            @ApiParam(name = "folder_path", value = "Filter preparations by folder path.") @RequestParam(
                    required = false, name = "folder_path") String folderPath,
            @ApiParam(name = "path",
                    value = "Filter preparations by full path (<folder path>/<preparation name>).") @RequestParam(
                            required = false, name = "path") String path,
            @ApiParam(value = "Sort key (by name or date).") @RequestParam(
                    defaultValue = "lastModificationDate") Sort sort,
            @ApiParam(value = "Order for sort key (desc or asc).") @RequestParam(defaultValue = "desc") Order order) {
        return preparationService.listAll(name, folderPath, path, sort, order);
    }
    /**
     * List all preparation summaries.
     *
     * @param name when set, filter preparations by name.
     * @param folderPath when set, filter preparations by folder path.
     * @param path when set, filter preparations by full path
     *        ({@code <folder path>/<preparation name>}).
     * @param sort how to sort the preparations.
     * @param order how to order the sort.
     * @return the preparation summaries, sorted by descending last modification date by default.
     */
    @RequestMapping(value = "/preparations/summaries", method = GET)
    @ApiOperation(value = "List all preparations",
            notes = "Returns the list of preparations summaries the current user is allowed to see. Creation date is always displayed in UTC time zone.")
    @Timed
    public Stream<PreparationDTO> listSummary(
            @ApiParam(name = "name",
                    value = "Filter preparations by name.") @RequestParam(required = false) String name,
            @ApiParam(name = "folder_path", value = "Filter preparations by folder path.") @RequestParam(
                    required = false, name = "folder_path") String folderPath,
            @ApiParam(name = "path",
                    value = "Filter preparations by full path (<folder path>/<preparation name>).") @RequestParam(
                            required = false, name = "path") String path,
            @ApiParam(value = "Sort key (by name or date).") @RequestParam(
                    defaultValue = "lastModificationDate") Sort sort,
            @ApiParam(value = "Order for sort key (desc or asc).") @RequestParam(defaultValue = "desc") Order order) {
        LOGGER.debug("Get list of preparations (summary).");
        return preparationService.listSummary(name, folderPath, path, sort, order);
    }
    /**
     * Search preparation entry point.
     * <p>
     * So far at least one search criteria can be processed at a time among the following ones:
     * <ul>
     * <li>dataset id</li>
     * <li>preparation name &amp; exact match</li>
     * <li>folderId path</li>
     * </ul>
     *
     * @param dataSetId to search all preparations based on this dataset id.
     * @param folderId to search all preparations located in this folderId.
     * @param folderPath to search all preparations located under this folder path.
     * @param name to search all preparations that match this name.
     * @param exactMatch if true, the name matching must be exact.
     * @param sort Sort key (by name, creation date or modification date).
     * @param order Order for sort key (desc or asc).
     */
    @RequestMapping(value = "/preparations/search", method = GET)
    @ApiOperation(value = "Search for preparations details",
            notes = "Returns the list of preparations details that match the search criteria.")
    @Timed
    public Stream<PreparationDTO> searchPreparations(
            @RequestParam(required = false) @ApiParam("dataSetId") String dataSetId,
            @RequestParam(required = false) @ApiParam(
                    value = "Id of the folder where to look for preparations") String folderId,
            @RequestParam(required = false) @ApiParam(
                    value = "Path of the folder where to look for preparations") String folderPath,
            @RequestParam(required = false) @ApiParam("name") String name,
            @RequestParam(defaultValue = "true") @ApiParam("exactMatch") boolean exactMatch,
            @RequestParam(defaultValue = "lastModificationDate") @ApiParam(
                    value = "Sort key (by name or date).") Sort sort,
            @RequestParam(defaultValue = "desc") @ApiParam(value = "Order for sort key (desc or asc).") Order order) {
        return preparationService.searchPreparations(dataSetId, folderId, name, exactMatch, folderPath, sort, order);
    }
    /**
     * Copy the given preparation to the given name / folder and returns the new id in the response.
     *
     * @param preparationId the id of the preparation to copy.
     * @param name the name of the copied preparation; if empty, the name is "original-preparation-name Copy".
     * @param destination the folder path where to copy the preparation; if empty, the copy is in the same folder.
     * @return The new preparation id.
     */
    @RequestMapping(value = "/preparations/{id}/copy", method = POST, produces = TEXT_PLAIN_VALUE)
    @ApiOperation(value = "Copy a preparation", produces = TEXT_PLAIN_VALUE,
            notes = "Copy the preparation to the new name / folder and returns the new id.")
    @Timed
    public String copy(
            @PathVariable(value = "id") @ApiParam(name = "id",
                    value = "Id of the preparation to copy") String preparationId,
            @ApiParam(value = "The name of the copied preparation.") @RequestParam(required = false) String name,
            @ApiParam(value = "The folder path to create the copy.") @RequestParam() String destination) {
        return preparationService.copy(preparationId, name, destination);
    }
    /**
     * Move a preparation to another folder.
     *
     * @param preparationId The id of the preparation to move.
     * @param folder The original folder of the preparation.
     * @param destination The new folder of the preparation.
     * @param newName The new preparation name (empty keeps the current name).
     */
    @RequestMapping(value = "/preparations/{id}/move", method = PUT)
    @ApiOperation(value = "Move a preparation", notes = "Move a preparation to an other folder.")
    @Timed
    public void move(
            @PathVariable(value = "id") @ApiParam(name = "id",
                    value = "Id of the preparation to move") String preparationId,
            @ApiParam(value = "The original folder path of the preparation.") @RequestParam String folder,
            @ApiParam(value = "The new folder path of the preparation.") @RequestParam String destination,
            @ApiParam(value = "The new name of the moved dataset.") @RequestParam(defaultValue = "") String newName) {
        preparationService.move(preparationId, folder, destination, newName);
    }
    /**
     * Delete the preparation that matches the given id.
     *
     * @param id the preparation id to delete (a UUID returned by the list operation).
     */
    @RequestMapping(value = "/preparations/{id}", method = RequestMethod.DELETE)
    @ApiOperation(value = "Delete a preparation by id",
            notes = "Delete a preparation content based on provided id. Id should be a UUID returned by the list operation. Not valid or non existing preparation id returns empty content.")
    @Timed
    public void delete(
            @PathVariable(value = "id") @ApiParam(name = "id", value = "Id of the preparation to delete") String id) {
        preparationService.delete(id);
    }
/**
* Update a preparation.
*
* @param id the preparation id to update.
* @param preparation the updated preparation.
* @return the updated preparation id.
*/
@RequestMapping(value = "/preparations/{id}", method = PUT)
@ApiOperation(value = "Create a preparation", notes = "Returns the id of the updated preparation.")
@Timed
public String update(@ApiParam("id") @PathVariable("id") String id,
@RequestBody @ApiParam("preparation") final PreparationDTO preparation) {
return preparationService.update(id, preparation);
}
    /**
     * Update a preparation step's metadata.
     *
     * @param stepId the step to update.
     * @param rowMetadata the row metadata to associate with the step.
     * @return the updated step id (echoes the input id).
     */
    @RequestMapping(value = "/preparations/steps/{stepId}/metadata", method = PUT, produces = TEXT_PLAIN_VALUE,
            consumes = APPLICATION_JSON_VALUE)
    @ApiOperation(value = "Update a preparation steps", notes = "Returns the id of the updated step.")
    @Timed
    public String updateStepMetadata(@ApiParam("stepId") @PathVariable("stepId") String stepId,
            @RequestBody @ApiParam("rowMetadata") final RowMetadata rowMetadata) {
        preparationService.updatePreparationStep(stepId, rowMetadata);
        return stepId;
    }
    /**
     * Delete (invalidate) the metadata associated with the given step.
     *
     * @param stepId the step whose metadata must be invalidated.
     */
    @RequestMapping(value = "/preparations/steps/{stepId}/metadata", method = DELETE)
    @ApiOperation(value = "Deletes the metadata associated with step")
    @Timed
    public void invalidateStepMetadata(@ApiParam("stepId") @PathVariable("stepId") String stepId) {
        preparationService.invalidatePreparationStep(stepId);
    }
/**
* Get a preparation step metadata.
*
* @param stepId the steps to get metadata from.
* @return the row metadata associated with step.
*/
@RequestMapping(value = "/preparations/steps/{stepId}/metadata", method = GET, produces = APPLICATION_JSON_VALUE)
@ApiOperation(value = "Update a preparation steps", notes = "Returns the id of the updated step.")
@Timed
public RowMetadata getStepMetadata(@ApiParam("stepId") @PathVariable("stepId") String stepId) {
return preparationService.getPreparationStep(stepId);
}
    /**
     * Copy the steps from another preparation to this one.
     * <p>
     * This is only allowed if this preparation has no steps.
     *
     * @param id the preparation id to update.
     * @param from the preparation id to copy the steps from.
     */
    @RequestMapping(value = "/preparations/{id}/steps/copy", method = PUT)
    @ApiOperation(value = "Copy the steps from another preparation",
            notes = "Copy the steps from another preparation if this one has no steps.")
    @Timed
    public void copyStepsFrom(@ApiParam(value = "the preparation id to update") @PathVariable("id") String id,
            @ApiParam(value = "the preparation to copy the steps from.") @RequestParam String from) {
        preparationService.copyStepsFrom(id, from);
    }
    /**
     * Return a preparation details.
     *
     * @param id the wanted preparation id.
     * @param stepId optional step id (defaults to "head").
     * @return the preparation details at the given step.
     */
    @RequestMapping(value = "/preparations/{id}/details", method = GET)
    @ApiOperation(value = "Get preparation details", notes = "Return the details of the preparation with provided id.")
    @Timed
    public PreparationDTO getDetails( //
            @ApiParam("id") @PathVariable("id") String id, //
            @ApiParam(value = "stepId", defaultValue = "head") @RequestParam(value = "stepId",
                    defaultValue = "head") String stepId) {
        return preparationService.getPreparationDetails(id, stepId);
    }
    /**
     * Return the full preparation details (includes everything the summary DTO omits).
     *
     * @param id the wanted preparation id.
     * @param stepId optional step id (defaults to "head").
     * @return the full preparation details at the given step.
     */
    @RequestMapping(value = "/preparations/{id}/details/full", method = GET)
    @ApiOperation(value = "Get preparation details", notes = "Return the details of the preparation with provided id.")
    @Timed
    public PreparationDetailsDTO getDetailsFull( //
            @ApiParam("id") @PathVariable("id") String id, //
            @ApiParam(value = "stepId", defaultValue = "head") @RequestParam(value = "stepId",
                    defaultValue = "head") String stepId) {
        return preparationService.getPreparationDetailsFull(id, stepId);
    }
    /**
     * Return a preparation.
     *
     * @param id the wanted preparation id.
     * @return the preparation with the provided id.
     */
    @RequestMapping(value = "/preparations/{id}", method = GET)
    @ApiOperation(value = "Get preparation", notes = "Return the preparation with provided id.")
    @Timed
    public PreparationDTO get(@ApiParam("id") @PathVariable("id") String id) {
        return preparationService.getPreparation(id);
    }
/**
* Return the folder that holds this preparation.
*
* @param id the wanted preparation id.
* @return the folder that holds this preparation.
*/
@RequestMapping(value = "/preparations/{id}/folder", method = GET)
@ApiOperation(value = "Get preparation details", notes = "Return the details of the preparation with provided id.")
@Timed
public Folder searchLocation(@ApiParam(value = "the preparation id") @PathVariable("id") String id) {
return preparationService.searchLocation(id);
}
    /**
     * Return all the step ids of a preparation.
     *
     * @param id the wanted preparation id.
     * @return the ordered list of step ids for the preparation.
     */
    @RequestMapping(value = "/preparations/{id}/steps", method = GET)
    @ApiOperation(value = "Get all preparation steps id",
            notes = "Return the steps of the preparation with provided id.")
    @Timed
    public List<String> getSteps(@ApiParam("id") @PathVariable("id") String id) {
        return preparationService.getSteps(id);
    }
    /**
     * Update a step in a preparation. <b>Strategy</b><br/>
     * The goal here is to rewrite the preparation history from 'the step to modify' (STM) to the head, with STM
     * containing the new action.<br/>
     * <ul>
     * <li>1. Extract the actions from STM (excluded) to the head</li>
     * <li>2. Insert the new actions before the other extracted actions. The actions list contains all the actions from
     * the <b>NEW</b> STM to the head</li>
     * <li>3. Set preparation head to STM's parent, so STM will be excluded</li>
     * <li>4. Append each action (one step is created by action) after the new preparation head</li>
     * </ul>
     *
     * @param preparationId the id of the preparation to modify.
     * @param stepToModifyId the id of the step whose action is replaced.
     * @param newStep the replacement action.
     */
    @RequestMapping(value = "/preparations/{id}/actions/{stepId}", method = PUT)
    @ApiOperation(value = "Updates an action in a preparation", notes = "Modifies an action in preparation's steps.")
    @Timed
    public void updateAction(@PathVariable("id") final String preparationId,
            @PathVariable("stepId") final String stepToModifyId, @RequestBody final AppendStep newStep) {
        preparationService.updateAction(preparationId, stepToModifyId, newStep);
    }
    /**
     * Delete a step in a preparation.<br/>
     * STD : Step To Delete <br/>
     * <br/>
     * <ul>
     * <li>1. Extract the actions from STD (excluded) to the head. The actions list contains all the actions from the
     * STD's child to the head.</li>
     * <li>2. Filter the preparations that apply on a column created by the step to delete. Those steps will be removed
     * too.</li>
     * <li>2bis. Change the actions that apply on columns > STD last created column id. The created columns ids after
     * the STD are shifted.</li>
     * <li>3. Set preparation head to STD's parent, so STD will be excluded</li>
     * <li>4. Append each action after the new preparation head</li>
     * </ul>
     *
     * @param id the id of the preparation to modify.
     * @param stepToDeleteId the id of the step to remove from the preparation.
     */
    @RequestMapping(value = "/preparations/{id}/actions/{stepId}", method = DELETE)
    @ApiOperation(value = "Delete an action in a preparation",
            notes = "Delete a step and all following steps from a preparation")
    @Timed
    public void deleteAction(@PathVariable("id") final String id, @PathVariable("stepId") final String stepToDeleteId) {
        preparationService.deleteAction(id, stepToDeleteId);
    }
    /**
     * Move the preparation head to the specified step.
     *
     * @param preparationId the id of the preparation to modify.
     * @param headId the id of the step that becomes the new head.
     */
    @RequestMapping(value = "/preparations/{id}/head/{headId}", method = PUT)
    @ApiOperation(value = "Move preparation head", notes = "Set head to the specified head id")
    @Timed
    public void setPreparationHead(@PathVariable("id") final String preparationId, //
            @PathVariable("headId") final String headId) {
        preparationService.setPreparationHead(preparationId, headId);
    }
    /**
     * Get all the actions of a preparation at given version.
     *
     * @param id the wanted preparation id.
     * @param version the wanted preparation version.
     * @return the list of actions.
     */
    @RequestMapping(value = "/preparations/{id}/actions/{version}", method = GET)
    @ApiOperation(value = "Get all the actions of a preparation at given version.",
            notes = "Returns the action JSON at version.")
    @Timed
    public List<Action> getVersionedAction(@ApiParam("id") @PathVariable("id") final String id,
            @ApiParam("version") @PathVariable("version") final String version) {
        return preparationService.getVersionedAction(id, version);
    }
    /**
     * List all preparation related error codes.
     *
     * @return all preparation related error code descriptions.
     */
    @RequestMapping(value = "/preparations/errors", method = RequestMethod.GET)
    @ApiOperation(value = "Get all preparation related error codes.",
            notes = "Returns the list of all preparation related error codes.")
    @Timed
    public Iterable<JsonErrorCodeDescription> listErrors() {
        return preparationService.listErrors();
    }
    /**
     * Lock the specified preparation for the current user.
     *
     * @param preparationId the id of the preparation to lock.
     */
    @RequestMapping(value = "/preparations/{preparationId}/lock", method = PUT)
    @ApiOperation(value = "Lock the specified preparation.", notes = "Returns a locked resource.")
    @Timed
    public void lockPreparation(@ApiParam("preparationId") @PathVariable("preparationId") final String preparationId) {
        preparationService.lockPreparation(preparationId);
    }
@RequestMapping(value = "/preparations/{preparationId}/unlock", method = PUT)
@ApiOperation(value = "Unlock the specified preparation.", notes = "Returns a locked resource.")
@Timed
public void
unlockPreparation(@ApiParam("preparationId") @PathVariable("preparationId") final String preparationId) {
preparationService.unlockPreparation(preparationId);
}
    /**
     * Check whether the given dataset is used by at least one preparation.
     * <p>
     * The answer is conveyed through the HTTP status only: 204 (no content) when the dataset is used,
     * 404 (not found) when it is not.
     *
     * @param datasetId the dataset id to check.
     * @return an empty response whose status code carries the answer.
     */
    @RequestMapping(value = "/preparations/use/dataset/{datasetId}", method = HEAD)
    @ApiOperation(value = "Check if dataset is used by a preparation.",
            notes = "Returns no content, the response code is the meaning.")
    @Timed
    public ResponseEntity<Void>
            preparationsThatUseDataset(@ApiParam("datasetId") @PathVariable("datasetId") final String datasetId) {
        if (preparationService.isDatasetUsedInPreparation(datasetId)) {
            return ResponseEntity.noContent().build();
        } else {
            return ResponseEntity.notFound().build();
        }
    }
    /**
     * Move a step within a preparation so that it comes after the specified parent step.
     *
     * @param preparationId the id of the preparation to modify.
     * @param stepId the id of the step to move.
     * @param parentStepId the step that will become the parent of stepId.
     */
    @RequestMapping(value = "/preparations/{id}/steps/{stepId}/order", method = POST)
    @ApiOperation(value = "Moves a step within a preparation after a specified step",
            notes = "Moves a step within a preparation after a specified step.")
    @Timed
    public void moveStep(@PathVariable("id") final String preparationId,
            @ApiParam(value = "The id of the step we want to move.") @PathVariable String stepId,
            @ApiParam(value = "The step that will become the parent of stepId") @RequestParam String parentStepId) {
        preparationService.moveStep(preparationId, stepId, parentStepId);
    }
    /**
     * Append one or more actions at the end of the preparation.
     * <p>
     * Steps are added one by one, in the order they appear in the request body.
     *
     * @param preparationId the id of the preparation to extend.
     * @param steps the actions to append, applied in list order.
     */
    @RequestMapping(value = "/preparations/{id}/actions", method = POST)
    @ApiOperation(value = "Adds an action at the end of preparation.",
            notes = "Does not return any value, client may expect successful operation based on HTTP status code.")
    @Timed
    public void addPreparationAction(
            @ApiParam(name = "id", value = "Preparation id.") @PathVariable(value = "id") final String preparationId,
            @ApiParam("Action to add at end of the preparation.") @RequestBody final List<AppendStep> steps) {
        for (AppendStep step : steps) {
            preparationService.addPreparationAction(preparationId, step);
        }
    }
    /**
     * Retrieve a specific step by its id.
     *
     * @param stepId the id of the wanted step.
     * @return the step converted to its public representation.
     */
    @RequestMapping(value = "/steps/{id}", method = GET)
    @ApiOperation(value = "Retrieve a specific step.", notes = "Just find the step for this ID.")
    @Timed
    public Step getStep(@PathVariable("id") final String stepId) {
        return beanConversionService.convert(preparationService.getStep(stepId), Step.class);
    }
}
| |
// Copyright 2015-2018 The NATS Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package io.nats.client.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.time.Duration;
import java.time.Instant;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.LockSupport;
import org.junit.Test;
import io.nats.client.Connection;
import io.nats.client.Dispatcher;
import io.nats.client.Message;
import io.nats.client.Nats;
import io.nats.client.NatsTestServer;
import io.nats.client.Options;
import io.nats.client.Subscription;
import io.nats.client.TestHandler;
import io.nats.client.ConnectionListener.Events;
public class DrainTests {
@Test
public void testSimpleSubDrain() throws Exception {
try (NatsTestServer ts = new NatsTestServer(false);
Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());
Subscription sub = subCon.subscribe("draintest");
subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server
pubCon.publish("draintest", null);
pubCon.publish("draintest", null); // publish 2
pubCon.flush(Duration.ofSeconds(1));
Message msg = sub.nextMessage(Duration.ofSeconds(1)); // read 1
assertNotNull(msg);
subCon.flush(Duration.ofSeconds(1));
CompletableFuture<Boolean> tracker = sub.drain(Duration.ofSeconds(1));
msg = sub.nextMessage(Duration.ofSeconds(1)); // read the second one, should be there because we drained
assertNotNull(msg);
assertTrue(tracker.get(1, TimeUnit.SECONDS));
assertFalse(sub.isActive());
assertEquals(((NatsConnection) subCon).getConsumerCount(), 0);
}
}
@Test
public void testSimpleDispatchDrain() throws Exception {
try (NatsTestServer ts = new NatsTestServer(false);
Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());
AtomicInteger count = new AtomicInteger();
Dispatcher d = subCon.createDispatcher((msg) -> {
count.incrementAndGet();
try {
Thread.sleep(2000); // go slow so the main app can drain us
} catch (Exception e) {
}
});
d.subscribe("draintest");
d.subscribe("draintest", (msg) -> { count.incrementAndGet(); });
subCon.flush(Duration.ofSeconds(5)); // Get the sub to the server
pubCon.publish("draintest", null);
pubCon.publish("draintest", null);
pubCon.flush(Duration.ofSeconds(1));
subCon.flush(Duration.ofSeconds(1));
// Drain will unsub the dispatcher, only messages that already arrived
// are there
CompletableFuture<Boolean> tracker = d.drain(Duration.ofSeconds(8));
assertTrue(tracker.get(10, TimeUnit.SECONDS)); // wait for the drain to complete
assertEquals(count.get(), 4); // Should get both, two times.
assertFalse(d.isActive());
assertEquals(((NatsConnection) subCon).getConsumerCount(), 0);
}
}
@Test
public void testSimpleConnectionDrain() throws Exception {
try (NatsTestServer ts = new NatsTestServer(false);
Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());
AtomicInteger count = new AtomicInteger();
Dispatcher d = subCon.createDispatcher((msg) -> {
count.incrementAndGet();
try {
Thread.sleep(500); // go slow so the main app can drain us
} catch (Exception e) {
}
});
d.subscribe("draintest");
Subscription sub = subCon.subscribe("draintest");
subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server
pubCon.publish("draintest", null);
pubCon.publish("draintest", null);
pubCon.flush(Duration.ofSeconds(1));
subCon.flush(Duration.ofSeconds(1));
try {
Thread.sleep(500); // give the msgs time to get to subCon
} catch (Exception e) {
}
CompletableFuture<Boolean> tracker = subCon.drain(Duration.ofSeconds(5));
Message msg = sub.nextMessage(Duration.ofSeconds(1));
assertNotNull(msg);
msg = sub.nextMessage(Duration.ofSeconds(1));
assertNotNull(msg);
assertTrue(tracker.get(2, TimeUnit.SECONDS));
assertTrue(((NatsConnection) subCon).isDrained());
assertEquals(count.get(), 2); // Should get both
assertTrue(Connection.Status.CLOSED == subCon.getStatus());
}
}
@Test
public void testConnectionDrainWithZeroTimeout() throws Exception {
try (NatsTestServer ts = new NatsTestServer(false);
Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());
AtomicInteger count = new AtomicInteger();
Dispatcher d = subCon.createDispatcher((msg) -> {
count.incrementAndGet();
try {
Thread.sleep(500); // go slow so the main app can drain us
} catch (Exception e) {
}
});
d.subscribe("draintest");
Subscription sub = subCon.subscribe("draintest");
subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server
pubCon.publish("draintest", null);
pubCon.publish("draintest", null);
pubCon.flush(Duration.ofSeconds(1));
subCon.flush(Duration.ofSeconds(1));
try {
Thread.sleep(500); // give the msgs time to get to subCon
} catch (Exception e) {
}
CompletableFuture<Boolean> tracker = subCon.drain(null);
Message msg = sub.nextMessage(Duration.ofSeconds(1));
assertNotNull(msg);
msg = sub.nextMessage(Duration.ofSeconds(1));
assertNotNull(msg);
assertTrue(tracker.get(2, TimeUnit.SECONDS));
assertTrue(((NatsConnection) subCon).isDrained());
assertEquals(count.get(), 2); // Should get both
assertTrue(Connection.Status.CLOSED == subCon.getStatus());
}
}
@Test
public void testDrainWithZeroTimeout() throws Exception {
try (NatsTestServer ts = new NatsTestServer(false);
Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());
Subscription sub = subCon.subscribe("draintest");
subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server
pubCon.publish("draintest", null);
pubCon.publish("draintest", null); // publish 2
pubCon.flush(Duration.ofSeconds(1));
Message msg = sub.nextMessage(Duration.ofSeconds(1)); // read 1
assertNotNull(msg);
subCon.flush(Duration.ofSeconds(1));
CompletableFuture<Boolean> tracker = sub.drain(Duration.ZERO);
msg = sub.nextMessage(Duration.ofSeconds(1)); // read the second one, should be there because we drained
assertNotNull(msg);
assertTrue(tracker.get(1, TimeUnit.SECONDS));
assertFalse(sub.isActive());
}
}
    /**
     * Verifies that calling subscribe() while the connection is draining throws
     * IllegalStateException (expected via the @Test annotation).
     */
    @Test(expected = IllegalStateException.class)
    public void testSubDuringDrainThrows() throws Exception {
        try (NatsTestServer ts = new NatsTestServer(false);
                Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
                Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
            assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
            assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());
            subCon.subscribe("draintest");
            subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server
            pubCon.publish("draintest", null);
            pubCon.publish("draintest", null);
            pubCon.flush(Duration.ofSeconds(1));
            subCon.flush(Duration.ofSeconds(1));
            // Very long drain timeout: the test should end via the expected exception below.
            CompletableFuture<Boolean> tracker = subCon.drain(Duration.ofSeconds(500));
            // Try to subscribe while we are draining the sub
            subCon.subscribe("another"); // Should throw
            assertTrue(tracker.get(1000, TimeUnit.SECONDS));
        }
    }
    /**
     * Verifies that creating a dispatcher while the connection is draining throws
     * IllegalStateException (expected via the @Test annotation).
     */
    @Test(expected = IllegalStateException.class)
    public void testCreateDispatcherDuringDrainThrows() throws Exception {
        try (NatsTestServer ts = new NatsTestServer(false);
                Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
                Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
            assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
            assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());
            subCon.subscribe("draintest");
            subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server
            pubCon.publish("draintest", null);
            pubCon.publish("draintest", null);
            pubCon.flush(Duration.ofSeconds(1));
            subCon.flush(Duration.ofSeconds(1));
            // Very long drain timeout: the test should end via the expected exception below.
            CompletableFuture<Boolean> tracker = subCon.drain(Duration.ofSeconds(500));
            subCon.createDispatcher((msg) -> {
            });
            assertTrue(tracker.get(1000, TimeUnit.SECONDS));
        }
    }
@Test
public void testUnsubDuringDrainIsNoop() throws Exception {
try (NatsTestServer ts = new NatsTestServer(false);
Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());
AtomicInteger count = new AtomicInteger();
Dispatcher d = subCon.createDispatcher((msg) -> {
count.incrementAndGet();
try {
Thread.sleep(1000); // go slow so the main app can drain us
} catch (Exception e) {
}
});
d.subscribe("draintest");
Subscription sub = subCon.subscribe("draintest");
subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server
pubCon.publish("draintest", null);
pubCon.publish("draintest", null);
pubCon.flush(Duration.ofSeconds(1));
subCon.flush(Duration.ofSeconds(1));
try {
Thread.sleep(500); // give the msgs time to get to subCon
} catch (Exception e) {
}
CompletableFuture<Boolean> tracker = subCon.drain(Duration.ofSeconds(5));
try {
Thread.sleep(1000); // give the drain time to get started
} catch (Exception e) {
}
sub.unsubscribe();
d.unsubscribe("draintest");
Message msg = sub.nextMessage(Duration.ofSeconds(1));
assertNotNull(msg);
msg = sub.nextMessage(Duration.ofSeconds(1));
assertNotNull(msg);
assertTrue(tracker.get(2, TimeUnit.SECONDS));
assertEquals(count.get(), 2); // Should get both
assertTrue(Connection.Status.CLOSED == subCon.getStatus());
}
}
@Test
public void testDrainInMessageHandler() throws Exception {
try (NatsTestServer ts = new NatsTestServer(false);
Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());
AtomicInteger count = new AtomicInteger();
AtomicReference<Dispatcher> dispatcher = new AtomicReference<>();
AtomicReference<CompletableFuture<Boolean>> tracker = new AtomicReference<>();
Dispatcher d = subCon.createDispatcher((msg) -> {
count.incrementAndGet();
tracker.set(dispatcher.get().drain(Duration.ofSeconds(1)));
});
d.subscribe("draintest");
dispatcher.set(d);
subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server
pubCon.publish("draintest", null);
pubCon.publish("draintest", null);
pubCon.flush(Duration.ofSeconds(1));
subCon.flush(Duration.ofSeconds(1));
try {
Thread.sleep(500); // give the msgs time to get to subCon
} catch (Exception e) {
}
assertTrue(tracker.get().get(5, TimeUnit.SECONDS)); // wait for the drain to complete
assertEquals(count.get(), 2); // Should get both
assertFalse(d.isActive());
assertEquals(((NatsConnection) subCon).getConsumerCount(), 0);
}
}
@Test
public void testDrainFutureMatches() throws Exception {
try (NatsTestServer ts = new NatsTestServer(false);
Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());
AtomicInteger count = new AtomicInteger();
Dispatcher d = subCon.createDispatcher((msg) -> {
count.incrementAndGet();
try {
Thread.sleep(500); // go slow so the main app can drain us
} catch (Exception e) {
}
});
d.subscribe("draintest");
Subscription sub = subCon.subscribe("draintest");
subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server
pubCon.publish("draintest", null);
pubCon.publish("draintest", null);
pubCon.flush(Duration.ofSeconds(1));
subCon.flush(Duration.ofSeconds(1));
try {
Thread.sleep(500); // give the msgs time to get to subCon
} catch (Exception e) {
}
CompletableFuture<Boolean> tracker = subCon.drain(Duration.ofSeconds(5));
assertTrue(tracker == sub.drain(Duration.ZERO));
assertTrue(tracker == sub.drain(Duration.ZERO));
assertTrue(tracker == d.drain(Duration.ZERO));
assertTrue(tracker == d.drain(Duration.ZERO));
assertTrue(tracker == subCon.drain(Duration.ZERO));
assertTrue(tracker == subCon.drain(Duration.ZERO));
Message msg = sub.nextMessage(Duration.ofSeconds(1));
assertNotNull(msg);
msg = sub.nextMessage(Duration.ofSeconds(1));
assertNotNull(msg);
assertTrue(tracker.get(2, TimeUnit.SECONDS));
assertEquals(count.get(), 2); // Should get both
assertTrue(Connection.Status.CLOSED == subCon.getStatus());
}
}
    /**
     * Verifies that the first request/reply on a connection (which would lazily create
     * the inbox dispatcher) throws IllegalStateException while the connection is draining
     * (expected via the @Test annotation).
     */
    @Test(expected=IllegalStateException.class)
    public void testFirstTimeRequestReplyDuringDrain() throws Exception {
        try (NatsTestServer ts = new NatsTestServer(false);
                Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
                Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
            assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
            assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());
            Subscription sub = subCon.subscribe("draintest");
            subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server
            Dispatcher d = pubCon.createDispatcher((msg) -> {
                pubCon.publish(msg.getReplyTo(), null);
            });
            d.subscribe("reply");
            pubCon.flush(Duration.ofSeconds(1)); // Get the sub to the server
            pubCon.publish("draintest", null);
            pubCon.publish("draintest", null);
            pubCon.flush(Duration.ofSeconds(1));
            // Very long drain timeout: the test should end via the expected exception below.
            CompletableFuture<Boolean> tracker = subCon.drain(Duration.ofSeconds(500));
            Message msg = sub.nextMessage(Duration.ofSeconds(1)); // read 1
            assertNotNull(msg);
            // First request on this connection: should throw because we are draining.
            CompletableFuture<Message> response = subCon.request("reply", null);
            subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server
            assertNotNull(response.get(200, TimeUnit.SECONDS));
            msg = sub.nextMessage(Duration.ofSeconds(1)); // read 1
            assertNotNull(msg);
            assertTrue(tracker.get(500, TimeUnit.SECONDS)); // wait for the drain to complete
            assertTrue(Connection.Status.CLOSED == subCon.getStatus());
        }
    }
// Expects IllegalStateException: a request/reply exchange works normally
// before the drain starts, but issuing a new request once the connection is
// draining is expected to be rejected.
@Test(expected=IllegalStateException.class)
public void testRequestReplyDuringDrain() throws Exception {
    try (NatsTestServer ts = new NatsTestServer(false);
        Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
        Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
        assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
        assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());

        Subscription sub = subCon.subscribe("draintest");
        subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server

        // Responder on the publishing connection that echoes an empty payload
        // back to the reply subject.
        Dispatcher d = pubCon.createDispatcher((msg) -> {
            pubCon.publish(msg.getReplyTo(), null);
        });
        d.subscribe("reply");
        pubCon.flush(Duration.ofSeconds(1)); // Get the sub to the server

        pubCon.publish("draintest", null);
        pubCon.publish("draintest", null);
        pubCon.flush(Duration.ofSeconds(1));

        // Request before the drain: must succeed within a second.
        CompletableFuture<Message> response = subCon.request("reply", null);
        subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server
        assertNotNull(response.get(1, TimeUnit.SECONDS));

        CompletableFuture<Boolean> tracker = subCon.drain(Duration.ofSeconds(1));
        Message msg = sub.nextMessage(Duration.ofSeconds(1)); // read 1
        assertNotNull(msg);

        // Request during the drain: this is where the exception is expected,
        // so the remaining lines are not reached in the passing case.
        response = subCon.request("reply", null);
        subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server
        assertNotNull(response.get(200, TimeUnit.SECONDS));
        msg = sub.nextMessage(Duration.ofSeconds(1)); // read 1
        assertNotNull(msg);
        assertTrue(tracker.get(500, TimeUnit.SECONDS)); // wait for the drain to complete
        assertTrue(Connection.Status.CLOSED == subCon.getStatus());
    }
}
// Repeatedly hands a queue-group subscription off from a "draining" connection
// to a freshly created "working" connection while a background thread publishes
// messages at a steady pace; the handoffs must not lose any messages.
@Test
public void testQueueHandoffWithDrain() throws Exception {
    try (NatsTestServer ts = new NatsTestServer(false);
        Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
        assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());

        final int total = 5_000;
        final Duration sleepBetweenDrains = Duration.ofMillis(250);
        final Duration sleepBetweenMessages = Duration.ofMillis(1);
        // Allow 5x the theoretical publish time before giving up.
        final Duration testTimeout = Duration.ofMillis(5 * total * sleepBetweenMessages.toMillis());
        final Duration drainTimeout = testTimeout;
        final Duration waitTimeout = drainTimeout.plusSeconds(1); // slack beyond the drain timeout

        AtomicInteger count = new AtomicInteger();
        Instant start = Instant.now();
        Instant now = start;

        Connection working = null;
        NatsDispatcher workingD = null;
        NatsDispatcher drainingD = null;

        // First member of the queue group; it is drained and replaced in the loop.
        Connection draining = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
        assertTrue("Connected Status", Connection.Status.CONNECTED == draining.getStatus());
        drainingD = (NatsDispatcher) draining.createDispatcher((msg) -> {
            count.incrementAndGet();
        }).subscribe("draintest", "queue");
        draining.flush(Duration.ofSeconds(5));

        // Publish all messages from a separate thread while handoffs happen.
        Thread pubThread = new Thread(() -> {
            for (int i = 0; i < total; i++) {
                pubCon.publish("draintest", null);
                try {
                    LockSupport.parkNanos(sleepBetweenMessages.toNanos()); // use a nice steady pace to avoid slow consumers
                } catch (Exception e) {
                }
            }
            try {
                pubCon.flush(Duration.ofSeconds(5));
            } catch (Exception e) {
            }
        });
        pubThread.start();

        // Keep replacing the draining member with a fresh working member until
        // all messages have been counted or the overall timeout elapses.
        while (count.get() < total && Duration.between(start, now).compareTo(testTimeout) < 0) {
            working = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
            assertTrue("Connected Status", Connection.Status.CONNECTED == working.getStatus());
            workingD = (NatsDispatcher) working.createDispatcher((msg) -> {
                count.incrementAndGet();
            }).subscribe("draintest", "queue");
            working.flush(Duration.ofSeconds(5));
            try {
                LockSupport.parkNanos(sleepBetweenDrains.toNanos()); // let them both work a bit
            } catch (Exception e) {
            }
            // Drain the old member; its remaining messages must be processed
            // before the drain future completes.
            CompletableFuture<Boolean> tracker = draining.drain(drainTimeout);
            assertTrue(tracker.get(waitTimeout.toMillis(), TimeUnit.MILLISECONDS)); // wait for the drain to complete
            assertTrue(drainingD.isDrained());
            assertTrue(((NatsConnection) draining).isDrained());
            draining.close(); // no op, but ide wants this for auto-closable
            draining = working;
            drainingD = workingD;
            now = Instant.now();
        }
        draining.close();
        pubThread.join();
        assertEquals(count.get(), total);
    }
}
// Drains a plain subscription that has a large backlog in its pending queue
// and verifies every pending message can still be read while the drain runs.
@Test
public void testDrainWithLotsOfMessages() throws Exception {
    try (NatsTestServer ts = new NatsTestServer(false);
        Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
        Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
        assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
        assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());

        int total = 1000;
        Subscription sub = subCon.subscribe("draintest");
        sub.setPendingLimits(5 * total, -1); // make sure the pending queue can hold the whole backlog
        subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server

        // Sub should cache them in the pending queue
        for (int i = 0; i < total; i++) {
            pubCon.publish("draintest", null);
            try {
                Thread.sleep(1); // use a nice steady pace to avoid slow consumers
            } catch (Exception ignored) {
            }
        }
        try {
            pubCon.flush(Duration.ofSeconds(5));
        } catch (Exception ignored) {
        }

        Message msg = sub.nextMessage(Duration.ofSeconds(1)); // read 1
        assertNotNull(msg);
        subCon.flush(Duration.ofSeconds(1));

        CompletableFuture<Boolean> tracker = sub.drain(Duration.ofSeconds(10));

        for (int i = 1; i < total; i++) { // we read 1 so start there
            // Pending messages must remain readable because we are draining.
            msg = sub.nextMessage(Duration.ofSeconds(1));
            assertNotNull(msg);
        }

        assertTrue(tracker.get(5, TimeUnit.SECONDS));
        assertFalse(sub.isActive());
        // JUnit convention: expected value first, actual second.
        assertEquals(0, ((NatsConnection) subCon).getConsumerCount());
    }
}
// A slow message handler must still be allowed to finish processing when the
// drain timeout is long enough; both messages are handled and the connection
// ends up fully drained and CLOSED.
@Test
public void testSlowAsyncDuringDrainCanFinishIfTime() throws Exception {
    try (NatsTestServer ts = new NatsTestServer(false);
        Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build());
        Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
        assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
        assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());

        AtomicInteger count = new AtomicInteger();
        Dispatcher d = subCon.createDispatcher((msg) -> {
            try {
                Thread.sleep(1500); // go slow so the main app can drain us
            } catch (Exception e) {
                assertNull(e); // the 4s drain window should be long enough; no interruption expected
            }
            if (!Thread.interrupted()) {
                count.incrementAndGet();
            }
        });
        d.subscribe("draintest");
        subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server

        pubCon.publish("draintest", null);
        pubCon.publish("draintest", null);
        pubCon.flush(Duration.ofSeconds(1));
        subCon.flush(Duration.ofSeconds(1));

        try {
            Thread.sleep(500); // give the msgs time to get to subCon
        } catch (Exception ignored) {
        }

        // 2 messages x 1.5s handler < 4s drain timeout, so the drain succeeds.
        CompletableFuture<Boolean> tracker = subCon.drain(Duration.ofSeconds(4));
        assertTrue(tracker.get(10, TimeUnit.SECONDS));
        assertTrue(((NatsConnection) subCon).isDrained());
        // Should get both. JUnit convention: expected value first, actual second.
        assertEquals(2, count.get());
        assertTrue(Connection.Status.CLOSED == subCon.getStatus());
    }
}
// A handler slower than the drain timeout makes the drain fail (future
// completes false, connection not drained) without surfacing any exception
// through the error listener; the connection still ends up CLOSED.
@Test
public void testSlowAsyncDuringDrainCanBeInterrupted() throws Exception {
    TestHandler handler = new TestHandler();
    try (NatsTestServer ts = new NatsTestServer(false);
        Connection subCon = Nats.connect(new Options.Builder().server(ts.getURI()).errorListener(handler).maxReconnects(0).build());
        Connection pubCon = Nats.connect(new Options.Builder().server(ts.getURI()).maxReconnects(0).build())) {
        assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
        assertTrue("Connected Status", Connection.Status.CONNECTED == pubCon.getStatus());

        AtomicInteger count = new AtomicInteger();
        Dispatcher d = subCon.createDispatcher((msg) -> {
            try {
                Thread.sleep(3000); // go slow so the main app can drain us
            } catch (Exception e) {
                assertNull(e);
            }
            if (!Thread.interrupted()) {
                count.incrementAndGet();
            }
        });
        d.subscribe("draintest");
        subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server

        pubCon.publish("draintest", null);
        pubCon.publish("draintest", null);
        pubCon.flush(Duration.ofSeconds(1));
        subCon.flush(Duration.ofSeconds(1));

        try {
            Thread.sleep(500); // give the msgs time to get to subCon
        } catch (Exception e) {
        }

        assertTrue(handler.getExceptionCount() == 0);

        // 2s drain timeout vs. a 3s handler: the drain cannot finish in time.
        CompletableFuture<Boolean> tracker = subCon.drain(Duration.ofSeconds(2));
        assertFalse(tracker.get(10, TimeUnit.SECONDS));
        assertFalse(((NatsConnection) subCon).isDrained());
        assertTrue(handler.getExceptionCount() == 0); // Don't throw during drain from reader
        assertTrue(Connection.Status.CLOSED == subCon.getStatus());
    }
}
// Expects TimeoutException: with the server shut down, the flush performed as
// part of drain() cannot complete, so drain() times out.
@Test(expected=TimeoutException.class)
public void testThrowIfCantFlush() throws Exception {
    TestHandler handler = new TestHandler();
    try (NatsTestServer ts = new NatsTestServer(false);
        Connection subCon = Nats.connect(new Options.Builder().connectionListener(handler).server(ts.getURI()).build())) {
        assertTrue("Connected Status", Connection.Status.CONNECTED == subCon.getStatus());
        subCon.flush(Duration.ofSeconds(1)); // Get the sub to the server

        handler.prepForStatusChange(Events.DISCONNECTED);
        ts.close(); // make the drain flush fail
        handler.waitForStatusChange(2, TimeUnit.SECONDS); // make sure the connection is down

        subCon.drain(Duration.ofSeconds(1)); //should throw
    }
}
// drain() on a connection that has already been closed must fail fast with
// an IllegalStateException.
@Test(expected = IllegalStateException.class)
public void testThrowIfClosing() throws Exception {
    try (NatsTestServer ts = new NatsTestServer(false)) {
        Options options = new Options.Builder().server(ts.getURI()).maxReconnects(0).build();
        try (Connection nc = Nats.connect(options)) {
            assertTrue("Connected Status", Connection.Status.CONNECTED == nc.getStatus());
            nc.close();
            nc.drain(Duration.ofSeconds(1)); // should throw: connection is closed
        }
    }
}
}
| |
/*
* Copyright 2010-2013 The MyBatis Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.spring.annotation;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.fail;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.EnableMapperScanning;
import org.mybatis.spring.mapper.MapperInterface;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.config.ConstructorArgumentValues;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.support.GenericBeanDefinition;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Configuration;
import org.springframework.stereotype.Component;
import com.mockrunner.mock.jdbc.MockDataSource;
/**
* Test for the MapperScannerRegistrar.
* <p>
* This test works fine with Spring 3.1 and 3.2 but with 3.1 the registrar is called twice.
*
* @version $Id$
*/
public final class EnableMapperScanningTest {

    // Fresh context for every test; configured in setupContext() and refreshed
    // by each test via startContext() once its @Configuration class is registered.
    private AnnotationConfigApplicationContext applicationContext;

    @Before
    public void setupContext() {
        applicationContext = new AnnotationConfigApplicationContext();

        // Every configuration below expects a SqlSessionFactory bean with this name.
        setupSqlSessionFactory("sqlSessionFactory");

        // assume support for autowiring fields is added by MapperScannerConfigurer
        // via
        // org.springframework.context.annotation.ClassPathBeanDefinitionScanner.includeAnnotationConfig
    }

    // Refreshes and starts the context; fails if the mandatory beans are missing.
    private void startContext() {
        applicationContext.refresh();
        applicationContext.start();

        // this will throw an exception if the beans cannot be found
        applicationContext.getBean("sqlSessionFactory");
    }

    @After
    public void assertNoMapperClass() {
        // concrete classes should always be ignored by MapperScannerPostProcessor
        assertBeanNotLoaded("mapperClass");

        // the package-info pseudo-class should be ignored too
        assertBeanNotLoaded("package-info");

        // assertBeanNotLoaded("annotatedMapperZeroMethods"); // as of 1.1.0 mappers
        // with no methods are loaded
    }

    @After
    public void destroyContext() {
        applicationContext.destroy();
    }

    @Test
    public void testInterfaceScan() {
        applicationContext.register(AppConfigWithPackageScan.class);

        startContext();

        // all interfaces with methods should be loaded
        applicationContext.getBean("mapperInterface");
        applicationContext.getBean("mapperSubinterface");
        applicationContext.getBean("mapperChildInterface");
        applicationContext.getBean("annotatedMapper");
    }

    @Test
    public void testMarkerInterfaceScan() {
        applicationContext.register(AppConfigWithMarkerInterface.class);

        startContext();

        // only child interfaces should be loaded
        applicationContext.getBean("mapperSubinterface");
        applicationContext.getBean("mapperChildInterface");

        assertBeanNotLoaded("mapperInterface");
        assertBeanNotLoaded("annotatedMapper");
    }

    @Test
    public void testAnnotationScan() {
        applicationContext.register(AppConfigWithAnnotation.class);

        startContext();

        // only annotated mappers should be loaded
        applicationContext.getBean("annotatedMapper");
        applicationContext.getBean("mapperChildInterface");

        assertBeanNotLoaded("mapperInterface");
        assertBeanNotLoaded("mapperSubinterface");
    }

    @Test
    public void testMarkerInterfaceAndAnnotationScan() {
        applicationContext.register(AppConfigWithMarkerInterfaceAndAnnotation.class);

        startContext();

        // everything should be loaded but the marker interface
        applicationContext.getBean("annotatedMapper");
        applicationContext.getBean("mapperSubinterface");
        applicationContext.getBean("mapperChildInterface");

        assertBeanNotLoaded("mapperInterface");
    }

    @Test
    public void testScanWithNameConflict() {
        // Pre-register a plain Object bean under a name the scanner would use.
        GenericBeanDefinition definition = new GenericBeanDefinition();
        definition.setBeanClass(Object.class);
        applicationContext.registerBeanDefinition("mapperInterface", definition);

        applicationContext.register(AppConfigWithPackageScan.class);

        startContext();

        // JUnit convention: expected value first, actual second.
        assertSame("scanner should not overwrite existing bean definition", Object.class, applicationContext.getBean("mapperInterface").getClass());
    }

    // Registers a SqlSessionFactoryBean backed by a mock DataSource under the given name.
    private void setupSqlSessionFactory(String name) {
        GenericBeanDefinition definition = new GenericBeanDefinition();
        definition.setBeanClass(SqlSessionFactoryBean.class);
        definition.getPropertyValues().add("dataSource", new MockDataSource());
        applicationContext.registerBeanDefinition(name, definition);
    }

    // Asserts that no bean with the given name exists in the context.
    private void assertBeanNotLoaded(String name) {
        try {
            applicationContext.getBean(name);
            fail("Spring bean should not be defined for class " + name);
        } catch (NoSuchBeanDefinitionException nsbde) {
            // success
        }
    }

    @Test
    public void testScanWithExplicitSqlSessionTemplate() throws Exception {
        GenericBeanDefinition definition = new GenericBeanDefinition();
        definition.setBeanClass(SqlSessionTemplate.class);
        ConstructorArgumentValues constructorArgs = new ConstructorArgumentValues();
        constructorArgs.addGenericArgumentValue(new RuntimeBeanReference("sqlSessionFactory"));
        definition.setConstructorArgumentValues(constructorArgs);
        applicationContext.registerBeanDefinition("sqlSessionTemplate", definition);

        applicationContext.register(AppConfigWithSqlSessionTemplate.class);

        startContext();

        // all interfaces with methods should be loaded
        applicationContext.getBean("mapperInterface");
        applicationContext.getBean("mapperSubinterface");
        applicationContext.getBean("mapperChildInterface");
        applicationContext.getBean("annotatedMapper");
    }

    @Configuration
    @EnableMapperScanning("org.mybatis.spring.mapper")
    public static class AppConfigWithPackageScan {
    }

    @Configuration
    @EnableMapperScanning(basePackages = "org.mybatis.spring.mapper", markerInterface = MapperInterface.class)
    public static class AppConfigWithMarkerInterface {
    }

    @Configuration
    @EnableMapperScanning(basePackages = "org.mybatis.spring.mapper", annotationClass = Component.class)
    public static class AppConfigWithAnnotation {
    }

    @Configuration
    @EnableMapperScanning(basePackages = "org.mybatis.spring.mapper", annotationClass = Component.class, markerInterface = MapperInterface.class)
    public static class AppConfigWithMarkerInterfaceAndAnnotation {
    }

    @Configuration
    @EnableMapperScanning(basePackages = "org.mybatis.spring.mapper", sqlSessionTemplateBeanName = "sqlSessionTemplate")
    public static class AppConfigWithSqlSessionTemplate {
    }

}
| |
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), available at http://sourceforge.net/projects/dcm4che.
*
* The Initial Developer of the Original Code is
* TIANI Medgraph AG.
* Portions created by the Initial Developer are Copyright (C) 2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Gunter Zeilinger <gunter.zeilinger@tiani.com>
* Franz Willer <franz.willer@gwi-ag.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4chex.archive.hsm.module;
import java.io.File;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;
import org.dcm4chex.archive.common.FileStatus;
import org.dcm4chex.archive.hsm.VerifyTar;
import org.dcm4chex.archive.util.FileUtils;
/**
* @author franz.willer@gmail.com
* @version $Revision: $ $Date: $
* @since Aug 17, 2010
*/
/**
 * HSM module that operates directly on a (mounted) file system: it marks
 * stored files read-only, can set a retention/access time via an external
 * command, and derives the file status from marker files identified by
 * configurable extensions.
 */
public class HSMFileBasedModule extends AbstractHSMModule {

    /** Placeholder in the access time command that is replaced by the file path. */
    private static final String FILE_PARAM = "%f";
    /** Placeholder in the access time command that is replaced by the retention date. */
    private static final String DATE_PARAM = "%d";

    private static final String NEWLINE = System.getProperty("line.separator", "\n");

    // Unit characters accepted in the retention time spec; the index of a unit
    // in this string selects the matching Calendar field from CAL_FIELDS.
    private static final String CAL_FIELD_NAMES = "yMd";
    private static final int[] CAL_FIELDS = new int[]{Calendar.YEAR, Calendar.MONTH, Calendar.DAY_OF_MONTH};

    // Reusable buffer for tar verification.
    private byte[] buf = new byte[8192];

    // retentionTime[0] = amount, retentionTime[1] = index into CAL_FIELDS.
    private int[] retentionTime = new int[2];

    // Tokenized external command used to set the access time; null to disable.
    private String[] accessTimeCmd;

    // If true ('+' prefix in the retention spec), set read-only before running
    // the access time command instead of after.
    private boolean setAccessTimeAfterSetReadonly;

    // Formats the retention date substituted into the access time command.
    // NOTE(review): SimpleDateFormat is not thread-safe; safe only if this
    // module is used single-threaded — confirm with the container setup.
    private SimpleDateFormat df;

    // Maps a marker-file extension to the FileStatus value to report.
    private HashMap<String,Integer> extensionStatusMap = new HashMap<String,Integer>();

    // Status reported when no marker file exists; null means "no status".
    private Integer noStatusFileStatus;

    private boolean checkMD5forStatusChange;

    /** Returns the retention time spec, e.g. "+5y" or "30d". */
    public String getRetentionTime() {
        return (setAccessTimeAfterSetReadonly ? "+" : "" )+String.valueOf(retentionTime[0])+CAL_FIELD_NAMES.charAt(retentionTime[1]);
    }

    /**
     * Parses a retention time spec: optional '+' prefix, an integer amount,
     * and a trailing unit character ('y', 'M' or 'd').
     *
     * @throws IllegalArgumentException if the unit character is invalid
     */
    public void setRetentionTime(String s) {
        int len = s.length();
        setAccessTimeAfterSetReadonly = s.charAt(0) == '+';
        retentionTime[0] = Integer.parseInt(s.substring(setAccessTimeAfterSetReadonly ? 1 : 0, --len));
        int idx = CAL_FIELD_NAMES.indexOf(s.charAt(len));
        if (idx < 0 || idx > 2) {
            throw new IllegalArgumentException("Last character must be 'y', 'M' or 'd'!");
        }
        retentionTime[1] = idx;
    }

    public final String getAccessTimeCmd() {
        return cmd2str(accessTimeCmd);
    }

    public final void setAccessTimeCmd(String cmd) {
        accessTimeCmd = str2cmd(cmd);
    }

    public String getPattern() {
        return df.toPattern();
    }

    public void setPattern(String pattern) {
        df = new SimpleDateFormat(pattern);
    }

    /** Renders the extension-to-status mapping, one "ext=STATUS" entry per line. */
    public String getStatusExtensions() {
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String,Integer> entry : extensionStatusMap.entrySet()) {
            sb.append(entry.getKey()).append("=")
                .append(FileStatus.toString(entry.getValue())).append(NEWLINE);
        }
        sb.append(noStatusFileStatus == null ? NONE : FileStatus.toString(noStatusFileStatus));
        return sb.toString();
    }

    /**
     * Parses the extension-to-status mapping. Tokens of the form "ext=STATUS"
     * populate the map; a bare token sets the fallback status used when no
     * marker file is found (NONE clears it).
     */
    public void setStatusExtensions(String s) {
        StringTokenizer st = new StringTokenizer(s, " \t\r\n;");
        int pos;
        String token;
        while (st.hasMoreTokens()) {
            token = st.nextToken().trim();
            if ((pos = token.indexOf('=')) == -1) {
                noStatusFileStatus = NONE.equals(token) ? null : FileStatus.toInt(token);
            } else {
                extensionStatusMap.put(token.substring(0, pos),
                        FileStatus.toInt(token.substring(++pos)));
            }
        }
    }

    public boolean isCheckMD5forStatusChange() {
        return checkMD5forStatusChange;
    }

    public void setCheckMD5forStatusChange(boolean checkMD5forStatusChange) {
        this.checkMD5forStatusChange = checkMD5forStatusChange;
    }

    @Override
    public File prepareHSMFile(String destPath, String fileID) {
        return FileUtils.toFile(destPath, fileID);
    }

    /**
     * Marks the stored file read-only and, if configured, sets its access
     * time to the computed retention date via the external command. The
     * '+' retention prefix controls whether read-only is set before or
     * after the command runs.
     */
    @Override
    public String storeHSMFile(File file, String destPath, String fileID) throws HSMException {
        if (setAccessTimeAfterSetReadonly)
            file.setReadOnly();
        if (accessTimeCmd != null) {
            String cmd = makeAccessTimeCommand(file.getAbsolutePath(), getRetentionDate());
            doCommand(cmd, null, "Set Access Time of file "+file);
        }
        if (!setAccessTimeAfterSetReadonly)
            file.setReadOnly();
        return fileID;
    }

    // Formats "now + retention amount" using the configured pattern.
    private String getRetentionDate() {
        Calendar c = Calendar.getInstance();
        c.add(CAL_FIELDS[retentionTime[1]], retentionTime[0]);
        return df.format(c.getTime());
    }

    @Override
    public void failedHSMFile(File file, String destPath, String fileID) {
        // Nothing to clean up for file-based storage.
    }

    @Override
    public File fetchHSMFile(String fsID, String path, String filename) throws HSMException {
        // The file is accessed in place; the filename argument is unused here.
        return FileUtils.toFile(fsID, path);
    }

    /**
     * Determines the file status by probing for marker files (filePath plus a
     * configured extension). For tar file systems ("tar:" prefix) the tar can
     * additionally be MD5-verified before reporting the mapped status.
     *
     * @return the mapped status, MD5_CHECK_FAILED on a failed tar verify, or
     *         the configured fallback status when no marker file exists
     */
    @Override
    public Integer queryStatus(String dirPath, String filePath, String userInfo) {
        boolean isTar = dirPath.startsWith("tar:");
        for (Map.Entry<String,Integer> entry : extensionStatusMap.entrySet()) {
            if (FileUtils.toFile(isTar ? dirPath.substring(4) : dirPath, filePath+entry.getKey()).exists()) {
                if (checkMD5forStatusChange) {
                    if (isTar) {
                        try {
                            VerifyTar.verify(FileUtils.toFile(dirPath.substring(4), filePath), buf);
                        } catch (Exception x) {
                            log.error("Verify tar file failed! dirPath:"+dirPath+" filePath:"+filePath, x);
                            return FileStatus.MD5_CHECK_FAILED;
                        }
                    } else {
                        log.info("Check MD5 for Status change ignored. Not a tar filesystem!");
                    }
                }
                return entry.getValue();
            }
        }
        return noStatusFileStatus;
    }

    /**
     * Builds the access time command line by substituting the %d placeholder
     * with the retention date and %f with the file path.
     */
    private String makeAccessTimeCommand(String file, String date) {
        StringBuilder sb = new StringBuilder();
        for (String part : accessTimeCmd) {
            // Compare by value: the command parts are parsed from a configured
            // string via str2cmd(), so identity comparison (==) against the
            // placeholder constants would silently fail to substitute them.
            if (DATE_PARAM.equals(part)) {
                sb.append(date);
            } else if (FILE_PARAM.equals(part)) {
                sb.append(file);
            } else {
                sb.append(part);
            }
        }
        return sb.toString();
    }
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.connector.amazonsns.constants;
/**
* AmazonSNS contains required constants.
*/
public class AmazonSNSConstants {
/**
* Constant for Api Url.
*/
public static final String API_URL = "uri.var.apiUrl";
/**
* Constant for Access key id.
*/
public static final String ACCESS_KEY_ID = "uri.var.accessKeyId";
/**
* Constant for Secret access key.
*/
public static final String SECRET_ACCESS_KEY = "uri.var.secretAccessKey";
/**
* Constant for Next token.
*/
public static final String NEXT_TOKEN = "uri.var.nextToken";
/**
* Constant for topic arn.
*/
public static final String TOPIC_ARN = "uri.var.topicArn";
/**
* Constant for attribute name.
*/
public static final String ATTRIBUTE_NAME = "uri.var.attributeName";
/**
* Constant for attribute value.
*/
public static final String ATTRIBUTE_VALUE = "uri.var.attributeValue";
/**
* Constant for protocol.
*/
public static final String PROTOCOL = "uri.var.protocol";
/**
* Constant for endpoint.
*/
public static final String ENDPOINT = "uri.var.endpoint";
/**
* Constant for subscription arn.
*/
public static final String SUBSCRIPTION_ARN = "uri.var.subscriptionArn";
/**
* Constant for name.
*/
public static final String NAME = "uri.var.name";
/**
* Constant for platform.
*/
public static final String PLATFORM = "uri.var.platform";
/**
* Constant for message.
*/
public static final String MESSAGE = "uri.var.message";
/**
* constants for attributes_entry_key.
*/
public static final String ATTRIBUTES_ENTRY_KEY = "uri.var.attributesEntryKey";
/**
* The Constant API_attributes_entry_key.
*/
public static final String API_ATTRIBUTES_ENTRY_KEY = "Attributes.entry.1.key";
/**
* constants for attributes_entry_key_value.
*/
public static final String ATTRIBUTES_ENTRY_VALUE = "uri.var.attributesEntryValue";
/**
* The Constant API_attributes_entry_key_value.
*/
public static final String API_ATTRIBUTES_ENTRY_VALUE = "Attributes.entry.1.value";
/**
* Constant for message structure.
*/
public static final String MESSAGE_STRUCTURE = "uri.var.messageStructure";
/**
* Constant for subject.
*/
public static final String SUBJECT = "uri.var.subject";
/**
* Constant for message target arn.
*/
public static final String TARGET_ARN = "uri.var.targetArn";
/**
* Constant for token.
*/
public static final String TOKEN = "uri.var.token";
/**
* Constant for token.
*/
public static final String AUTHENTICATE_ON_UNSUBSCRIBE = "uri.var.authenticateOnUnsubscribe";
/**
* Constant for platform application.
*/
public static final String PLATFORM_APPLICATION_ARN = "uri.var.platformApplicationArn";
/**
* Constant for endpoint arn.
*/
public static final String ENDPOINT_ARN = "uri.var.endpointArn";
/**
* Constant for Custom User Data.
*/
public static final String CUSTOM_USER_DATA = "uri.var.customUserData";
/**
* Constant for Action.
*/
public static final String ACTION = "uri.var.action";
/**
* Constant for Label.
*/
public static final String LABEL = "uri.var.label";
/**
* Constant for ActionNameMember.
*/
public static final String ACTION_NAME_MEMBER = "uri.var.actionNameMember";
/**
* Constant for AccountIdMember.
*/
public static final String ACCOUNT_ID_MEMBER = "uri.var.accountIdMember";
/**
* Constant for Version.
*/
public static final String VERSION = "uri.var.version";
/**
* Constant for Signature version.
*/
public static final String SIGNATURE_VERSION = "uri.var.signatureVersion";
/**
* Constant for Signature method.
*/
public static final String SIGNATURE_METHOD = "uri.var.signatureMethod";
/**
* Constant for Timestamp.
*/
public static final String TIMESTAMP = "uri.var.timestamp";
/**
* Constant for next token Api parameter.
*/
public static final String API_NEXT_TOKEN = "NextToken";
/**
* Constant for next token Api parameter.
*/
public static final String API_LABEL = "Label";
/**
* Constant for ActionName api parameter.
*/
public static final String API_ACTION_NAME_MEMBER = "ActionName.member.1";
/**
* Constant for AWSAccountId api parameter.
*/
public static final String API_ACCOUNT_ID_MEMBER = "AWSAccountId.member.1";
/**
* Constant for topic arn api parameter.
*/
public static final String API_TOPIC_ARN = "TopicArn";
/**
* Constant for attribute name.
*/
public static final String API_ATTRIBUTE_NAME = "AttributeName";
/**
* Constant for attribute value.
*/
public static final String API_ATTRIBUTE_VALUE = "AttributeValue";
/**
* Constant for protocol api parameter.
*/
public static final String API_PROTOCOL = "Protocol";
/**
* Constant for endpoint api parameter.
*/
public static final String API_ENDPOINT = "Endpoint";
/**
* Constant for subscription arn api parameter.
*/
public static final String API_SUBSCRIPTION_ARN = "SubscriptionArn";
/**
* Constant for name api parameter.
*/
public static final String API_NAME = "Name";
/**
* Constant for platform api parameter.
*/
public static final String API_PLATFORM = "Platform";
/**
* Constant for message api parameter.
*/
public static final String API_MESSAGE = "Message";
/**
* Constant for message structure api parameter.
*/
public static final String API_MESSAGE_STRUCTURE = "MessageStructure";
/**
* Constant for subject structure api parameter.
*/
public static final String API_SUBJECT = "Subject";
/**
* Constant for message target arn parameter.
*/
public static final String API_TARGET_ARN = "TargetArn";
/**
* Constant for token api parameter.
*/
public static final String API_TOKEN = "Token";
/**
* Constant for authenticate on unsubscribe api parameter.
*/
public static final String API_AUTHENTICATE_ON_UNSUBSCRIBE = "AuthenticateOnUnsubscribe";
/**
* Constant for platform application api parameter.
*/
public static final String API_PLATFORM_APPLICATION_ARN = "PlatformApplicationArn";
/**
* Constant for endpoint arn api parameter.
*/
public static final String API_ENDPOINT_ARN = "EndpointArn";
/**
* Constant for Custom User Data api parameter.
*/
public static final String API_CUSTOM_USER_DATA = "CustomUserData";
/**
* Constant for action Api Action.
*/
public static final String API_ACTION = "Action";
/**
* Constant for version Api version.
*/
public static final String API_VERSION = "Version";
/**
* Constant for signature version Api signature version.
*/
public static final String API_SIGNATURE_VERSION = "SignatureVersion";
/**
* Constant for Signature method Api Signature method.
*/
public static final String API_SIGNATURE_METHOD = "SignatureMethod";
/**
* Constant for Timestamp Api timestamp.
*/
public static final String API_TIMESTAMP = "Timestamp";
/**
* Constant for Signature Api signature.
*/
public static final String API_SIGNATURE = "Signature";
/**
* Constant for GMT.
*/
public static final String GMT = "GMT";
/**
* Constant for Http method.
*/
public static final String HTTP_METHOD = "uri.var.httpMethod";
/**
* Constant for Host.
*/
public static final String HOST = "uri.var.hostName";
/**
* Constant for Http request URI.
*/
public static final String HTTP_REQUEST_URI = "uri.var.httpRequestUri";
/**
* Constant for AWS access key.
*/
public static final String AWS_ACCESS_KEY_ID = "AWSAccessKeyId";
/**
* Constant for POST.
*/
public static final String POST = "POST";
/**
* Constant for New line.
*/
public static final String NEW_LINE = "\n";
/**
* Constant for Comma.
*/
public static final String COMMA = ",";
/**
* Constant for Colon.
*/
public static final String COLON = ":";
/**
* Constant for Forward Slash.
*/
public static final String FORWARD_SLASH = "/";
/**
* Constant for plus sign.
*/
public static final String PLUS = "+";
/**
* Constant for the URL-encoded replacement of plus ("%20").
*/
public static final String URL_ENCODED_PLUS = "%20";
/**
* Constant for URL-encoded tilde ("%7E").
*/
public static final String URL_ENCODED_TILT = "%7E";
/**
* Constant for tilde character.
*/
public static final String TILT = "~";
/**
* Constant for asterisk character.
*/
public static final String ASTERISK = "*";
/**
* Constant for URL-encoded asterisk ("%2A").
*/
public static final String URL_ENCODED_ASTERISK = "%2A";
/**
* Constant for Signature.
*/
public static final String SIGNATURE = "uri.var.signature";
/**
* Constant for Equal.
*/
public static final String EQUAL = "=";
/**
* Constant for Ampersand.
*/
public static final String AMPERSAND = "&";
/**
* Error code constant for Invalid parameters.
*/
public static final String INVALID_PARAMETERS = "Invalid parameters";
/**
* Constant for invalid key error message.
*/
public static final String INVALID_KEY_ERROR = "Invalid key";
/**
* Constant for no-such-algorithm error message.
*/
public static final String NO_SUCH_ALGORITHM_ERROR = "Invalid Algorithm";
/**
* Constant for Illegal State Error.
*/
public static final String ILLEGAL_STATE_ERROR = "Illegal State";
/**
* Constant for Unsupported Encoding Error.
*/
public static final String UNSUPPORTED_ENCORDING_ERROR = "Unsupported Encoding";
/**
* Constant errorCode for InvalidKeyException.
*/
public static final int INVALID_KEY_ERROR_CODE = 700007;
/**
* Constant errorCode for NoSuchAlgorithmException.
*/
public static final int NO_SUCH_ALGORITHM_ERROR_CODE = 700008;
/**
* Constant errorCode for IllegalArgumentException.
*/
public static final int ILLEGAL_ARGUMENT_ERROR_CODE = 700013;
/**
* Constant for Illegal State Error Code.
*/
public static final int ILLEGAL_STATE_ERROR_CODE = 700015;
/**
* Constant errorCode for UnsupportedEncodingException.
*/
public static final int UNSUPPORTED_ENCORDING_ERROR_CODE = 700009;
/**
* Error code constant for generic exception.
*/
public static final int ERROR_CODE_EXCEPTION = 900001;
/**
* Constant for two.
*/
public static final int TWO = 2;
/**
* Constant for API variable for Expires.
*/
public static final String API_EXPIRES = "Expires";
/**
* Constant for Expires.
*/
public static final String EXPIRES = "uri.var.expires";
/**
* Constant for API variable for Security Token.
*/
public static final String API_SECURITY_TOKEN = "SecurityToken";
/**
* Constant for variable for Security Token.
*/
public static final String SECURITY_TOKEN = "uri.var.securityToken";
/**
* Constant for API variable for host.
*/
public static final String API_HOST = "host";
/**
* Constant for Content type.
*/
public static final String CONTENT_TYPE = "uri.var.contentType";
/**
* Constant for API Content Type variable.
*/
public static final String API_CONTENT_TYPE = "Content-type";
/**
* Constant for x-amz-Date.
*/
public static final String AMZ_DATE = "uri.var.amzDate";
/**
* Constant for API variable x-amz-Date.
*/
public static final String API_AMZ_DATE = "x-amz-date";
/**
* Constant for semi colon.
*/
public static final String SEMI_COLON = ";";
/**
* Constant for sha 256.
*/
public static final String SHA_256 = "SHA-256";
/**
* Constant for aws4-hmac-sha256.
*/
public static final String AWS4_HMAC_SHA_256 = "AWS4-HMAC-SHA256";
/**
* Constant for credential.
*/
public static final String CREDENTIAL = "Credential";
/**
* Constant for Region.
*/
public static final String REGION = "uri.var.region";
/**
* Constant for service.
*/
public static final String SERVICE = "uri.var.service";
/**
* Constant for Termination String.
*/
public static final String TERMINATION_STRING = "uri.var.terminationString";
/**
* Constant for Signed Headers.
*/
public static final String SIGNED_HEADERS = "SignedHeaders";
/**
* Constant for Authorization Header.
*/
public static final String AUTHORIZATION_HEADER = "uri.var.authorizationHeader";
/**
* Constant for Connector Error.
*/
public static final String CONNECTOR_ERROR = "Error occured in connector";
/**
* Constant for Date format.
*/
public static final String ISO8601_BASIC_DATE_FORMAT = "yyyyMMdd'T'HHmmss'Z'";
/**
* Constant for Short Date Format.
*/
public static final String SHORT_DATE_FORMAT = "yyyyMMdd";
/**
* Constant for Request Payload.
*/
public static final String REQUEST_PAYLOAD = "uri.var.requestPayload";
/**
* Constant for UTF-8.
*/
public static final String UTF_8 = "UTF-8";
/**
* Constant for UTF8.
*/
public static final String UTF8 = "UTF8";
/**
* Constant for HmacSha256.
*/
public static final String HAMC_SHA_256 = "HmacSHA256";
/**
* Constant for AWS4.
*/
public static final String AWS4 = "AWS4";
/**
* Constant for Hex Array String.
*/
public static final String HEX_ARRAY_STRING = "0123456789ABCDEF";
/**
* Constant for Trim Spaces Regex.
*/
public static final String TRIM_SPACE_REGEX = " ++(?=(?:[^\"]*+\"[^\"]*+\")*+[^\"]*+$)";
/**
* Constant for Space.
*/
public static final String SPACE = " ";
/**
* Constant for AWS Account Numbers.
*/
public static final String AWS_ACCOUNT_NUMBERS = "uri.var.awsAccountNumbers";
/**
* Constant for Action Names.
*/
public static final String ACTION_NAMES = "uri.var.actionNames";
/**
* Constant for Request Entries.
*/
public static final String REQUEST_ENTRIES = "uri.var.requestEntries";
/**
* Constant for Attribute Entries.
*/
public static final String ATTRIBUTE_ENTRIES = "uri.var.attributeEntries";
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.storm.cassandra.executor;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.Statement;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.storm.topology.FailedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Service to asynchronously executes cassandra statements.
*/
public class AsyncExecutor<T> implements Serializable {

    private static final Logger LOG = LoggerFactory.getLogger(AsyncExecutor.class);

    protected Session session;

    protected ExecutorService executorService;

    protected AsyncResultHandler<T> handler;

    /** Number of statements submitted but not yet completed. */
    private final AtomicInteger pending = new AtomicInteger();

    /**
     * Creates a new {@link AsyncExecutor} instance backed by a dedicated single-threaded executor.
     *
     * @param session the cassandra session used to execute statements.
     * @param handler the handler notified when statements succeed or fail.
     */
    protected AsyncExecutor(Session session, AsyncResultHandler<T> handler) {
        this(session, newSingleThreadExecutor(), handler);
    }

    /**
     * Creates a new {@link AsyncExecutor} instance.
     *
     * @param session the cassandra session.
     * @param executorService the executor service responsible to execute handler callbacks.
     * @param handler the handler notified when statements succeed or fail.
     */
    private AsyncExecutor(Session session, ExecutorService executorService, AsyncResultHandler<T> handler) {
        this.session = session;
        this.executorService = executorService;
        this.handler = handler;
    }

    /** Returns a single-threaded executor with named threads for async result handling. */
    protected static ExecutorService newSingleThreadExecutor() {
        return Executors.newSingleThreadExecutor(
                new ThreadFactoryBuilder().setNameFormat("cassandra-async-handler-%d").build());
    }

    /**
     * Asynchronously executes all statements associated to the specified input.
     * The input will be passed to handler#onSuccess once all queries succeed or to
     * handler#onFailure if any one of them fails.
     *
     * @param statements the statements to execute.
     * @param input the input associated with the whole batch of statements.
     * @return one future per statement, in submission order.
     */
    public List<SettableFuture<T>> execAsync(List<Statement> statements, final T input) {
        List<SettableFuture<T>> settableFutures = new ArrayList<>(statements.size());
        for (Statement statement : statements) {
            // Per-statement callbacks are no-ops; overall completion is tracked via allAsList below.
            settableFutures.add(execAsync(statement, input, AsyncResultHandler.NO_OP_HANDLER));
        }
        ListenableFuture<List<T>> allAsList = Futures.allAsList(settableFutures);
        Futures.addCallback(allAsList, new FutureCallback<List<T>>() {
            @Override
            public void onSuccess(List<T> inputs) {
                handler.success(input);
            }

            @Override
            public void onFailure(Throwable t) {
                handler.failure(t, input);
            }
        }, executorService);
        return settableFutures;
    }

    /**
     * Asynchronously executes the specified batch statement. Inputs will be passed to
     * the {@link #handler} once the query succeeds or fails.
     */
    public SettableFuture<T> execAsync(final Statement statement, final T inputs) {
        return execAsync(statement, inputs, handler);
    }

    /**
     * Asynchronously executes the specified batch statement. Inputs will be passed to
     * the given handler once the query succeeds or fails.
     *
     * @param statement the statement to execute.
     * @param inputs the input associated with the statement.
     * @param handler the handler notified of the result.
     * @return a future completed with {@code inputs} on success, or the failure cause.
     */
    public SettableFuture<T> execAsync(final Statement statement, final T inputs, final AsyncResultHandler<T> handler) {
        final SettableFuture<T> settableFuture = SettableFuture.create();
        pending.incrementAndGet();
        ResultSetFuture future = session.executeAsync(statement);
        Futures.addCallback(future, new FutureCallback<ResultSet>() {
            // Decrements the pending counter exactly once per completed statement.
            public void release() {
                pending.decrementAndGet();
            }

            @Override
            public void onSuccess(ResultSet result) {
                release();
                settableFuture.set(inputs);
                handler.success(inputs);
            }

            @Override
            public void onFailure(Throwable t) {
                LOG.error(String.format("Failed to execute statement '%s' ", statement), t);
                release();
                settableFuture.setException(t);
                handler.failure(t, inputs);
            }
        }, executorService);
        return settableFuture;
    }

    /**
     * Asynchronously executes the specified select statements. Results will be passed to the
     * {@link AsyncResultSetHandler} once each query has succeeded or failed.
     *
     * @param statements the statements to execute.
     * @param inputs the inputs associated, by index, with the statements.
     * @param throttle semaphore bounding the number of in-flight queries.
     * @param handler handler invoked with each per-statement result or failure.
     * @return a future completed with all inputs on success, or a {@link MultiFailedException}.
     */
    public SettableFuture<List<T>> execAsync(final List<Statement> statements, final List<T> inputs, Semaphore throttle, final AsyncResultSetHandler<T> handler) {
        final SettableFuture<List<T>> settableFuture = SettableFuture.create();
        if (inputs.size() == 0) {
            // Nothing to execute: complete immediately with an empty result list.
            settableFuture.set(new ArrayList<T>());
            return settableFuture;
        }
        final AsyncContext<T> asyncContext = new AsyncContext<>(inputs, throttle, settableFuture);
        for (int i = 0; i < statements.size(); i++) {
            // Acquire a slot; returns false once a previous statement already failed.
            if (asyncContext.acquire()) {
                try {
                    pending.incrementAndGet();
                    final T input = inputs.get(i);
                    final Statement statement = statements.get(i);
                    ResultSetFuture future = session.executeAsync(statement);
                    Futures.addCallback(future, new FutureCallback<ResultSet>() {
                        @Override
                        public void onSuccess(ResultSet result) {
                            try {
                                handler.success(input, result);
                            } catch (Throwable throwable) {
                                asyncContext.exception(throwable);
                            } finally {
                                pending.decrementAndGet();
                                asyncContext.release();
                            }
                        }

                        @Override
                        public void onFailure(Throwable throwable) {
                            try {
                                handler.failure(throwable, input);
                            } catch (Throwable throwable2) {
                                asyncContext.exception(throwable2);
                            } finally {
                                asyncContext
                                        .exception(throwable)
                                        .release();
                                pending.decrementAndGet();
                                LOG.error(String.format("Failed to execute statement '%s' ", statement), throwable);
                            }
                        }
                    }, executorService);
                } catch (Throwable throwable) {
                    // Submission itself failed: record the error, release the slot and stop submitting.
                    asyncContext.exception(throwable)
                            .release();
                    pending.decrementAndGet();
                    break;
                }
            }
        }
        return settableFuture;
    }

    /**
     * Tracks completion of a batch: counts down a latch per statement, collects exceptions,
     * and completes the shared future once every statement has finished.
     */
    private static class AsyncContext<T> {
        private final List<T> inputs;
        private final SettableFuture<List<T>> future;
        private final AtomicInteger latch;
        private final List<Throwable> exceptions;
        private final Semaphore throttle;

        public AsyncContext(List<T> inputs, Semaphore throttle, SettableFuture<List<T>> settableFuture) {
            this.inputs = inputs;
            this.latch = new AtomicInteger(inputs.size());
            this.throttle = throttle;
            this.exceptions = Collections.synchronizedList(new ArrayList<Throwable>());
            this.future = settableFuture;
        }

        /**
         * Acquires a throttle slot. Returns {@code false} (after counting down the latch and
         * releasing the slot) if a failure was already recorded, so no new requests are started.
         */
        public boolean acquire() {
            throttle.acquireUninterruptibly();
            // Don't start new requests if there is an exception
            if (exceptions.size() > 0) {
                latch.decrementAndGet();
                throttle.release();
                return false;
            }
            return true;
        }

        /** Counts down the latch; the last release completes the future (success or aggregated failure). */
        public AsyncContext release() {
            int remaining = latch.decrementAndGet();
            if (remaining == 0) {
                if (exceptions.size() == 0) {
                    future.set(inputs);
                } else {
                    future.setException(new MultiFailedException(exceptions));
                }
            }
            throttle.release();
            return this;
        }

        /** Records a failure; safe from callback threads because the list is synchronized. */
        public AsyncContext exception(Throwable throwable) {
            this.exceptions.add(throwable);
            return this;
        }
    }

    /**
     * Returns the number of currently executed tasks which are not yet completed.
     */
    public int getPendingTasksSize() {
        return this.pending.intValue();
    }

    /** Shuts down the underlying handler executor if it is still running. */
    public void shutdown() {
        if (!executorService.isShutdown()) {
            LOG.info("shutting down async handler executor");
            this.executorService.shutdownNow();
        }
    }

    /**
     * A {@link FailedException} aggregating the failures of several statements of one batch.
     */
    public static class MultiFailedException extends FailedException {
        private final List<Throwable> exceptions;

        public MultiFailedException(List<Throwable> exceptions) {
            // The first exception becomes the primary cause; AsyncContext only constructs
            // this with a non-empty list (exceptions.size() > 0 checked before).
            super(getMessage(exceptions), exceptions.get(0));
            this.exceptions = exceptions;
        }

        /** Builds a summary message containing up to the first five exception messages. */
        private static String getMessage(List<Throwable> exceptions) {
            int top5 = Math.min(exceptions.size(), 5);
            StringBuilder sb = new StringBuilder();
            sb.append("First ")
                    .append(top5)
                    .append(" exceptions: ")
                    .append(System.lineSeparator());
            for (int i = 0; i < top5; i++) {
                sb.append(exceptions.get(i).getMessage())
                        .append(System.lineSeparator());
            }
            return sb.toString();
        }

        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append(getMessage())
                    .append(System.lineSeparator())
                    .append("Multiple exceptions encountered: ")
                    .append(System.lineSeparator());
            for (Throwable exception : exceptions) {
                sb.append(exception.toString())
                        .append(System.lineSeparator());
            }
            // Bug fix: previously returned super.toString(), silently discarding the
            // aggregated exception details built above.
            return sb.toString();
        }

        public List<Throwable> getExceptions() {
            return exceptions;
        }
    }
}
| |
/*
* Copyright 2014-2022 TNG Technology Consulting GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tngtech.archunit.core.importer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.common.base.Joiner;
import com.google.common.base.Supplier;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.SetMultimap;
import com.tngtech.archunit.Internal;
import com.tngtech.archunit.base.Function;
import com.tngtech.archunit.base.HasDescription;
import com.tngtech.archunit.base.Optional;
import com.tngtech.archunit.core.domain.AccessTarget;
import com.tngtech.archunit.core.domain.AccessTarget.CodeUnitAccessTarget;
import com.tngtech.archunit.core.domain.AccessTarget.ConstructorCallTarget;
import com.tngtech.archunit.core.domain.AccessTarget.ConstructorReferenceTarget;
import com.tngtech.archunit.core.domain.AccessTarget.FieldAccessTarget;
import com.tngtech.archunit.core.domain.AccessTarget.MethodCallTarget;
import com.tngtech.archunit.core.domain.AccessTarget.MethodReferenceTarget;
import com.tngtech.archunit.core.domain.DomainObjectCreationContext;
import com.tngtech.archunit.core.domain.Formatters;
import com.tngtech.archunit.core.domain.InstanceofCheck;
import com.tngtech.archunit.core.domain.JavaAnnotation;
import com.tngtech.archunit.core.domain.JavaClass;
import com.tngtech.archunit.core.domain.JavaClassDescriptor;
import com.tngtech.archunit.core.domain.JavaCodeUnit;
import com.tngtech.archunit.core.domain.JavaConstructor;
import com.tngtech.archunit.core.domain.JavaConstructorCall;
import com.tngtech.archunit.core.domain.JavaConstructorReference;
import com.tngtech.archunit.core.domain.JavaEnumConstant;
import com.tngtech.archunit.core.domain.JavaField;
import com.tngtech.archunit.core.domain.JavaFieldAccess;
import com.tngtech.archunit.core.domain.JavaFieldAccess.AccessType;
import com.tngtech.archunit.core.domain.JavaMember;
import com.tngtech.archunit.core.domain.JavaMethod;
import com.tngtech.archunit.core.domain.JavaMethodCall;
import com.tngtech.archunit.core.domain.JavaMethodReference;
import com.tngtech.archunit.core.domain.JavaModifier;
import com.tngtech.archunit.core.domain.JavaParameter;
import com.tngtech.archunit.core.domain.JavaParameterizedType;
import com.tngtech.archunit.core.domain.JavaStaticInitializer;
import com.tngtech.archunit.core.domain.JavaType;
import com.tngtech.archunit.core.domain.JavaTypeVariable;
import com.tngtech.archunit.core.domain.JavaWildcardType;
import com.tngtech.archunit.core.domain.ReferencedClassObject;
import com.tngtech.archunit.core.domain.Source;
import com.tngtech.archunit.core.domain.ThrowsClause;
import com.tngtech.archunit.core.domain.properties.HasTypeParameters;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Sets.union;
import static com.tngtech.archunit.core.domain.DomainObjectCreationContext.completeTypeVariable;
import static com.tngtech.archunit.core.domain.DomainObjectCreationContext.createGenericArrayType;
import static com.tngtech.archunit.core.domain.DomainObjectCreationContext.createInstanceofCheck;
import static com.tngtech.archunit.core.domain.DomainObjectCreationContext.createReferencedClassObject;
import static com.tngtech.archunit.core.domain.DomainObjectCreationContext.createSource;
import static com.tngtech.archunit.core.domain.DomainObjectCreationContext.createThrowsClause;
import static com.tngtech.archunit.core.domain.DomainObjectCreationContext.createTypeVariable;
import static com.tngtech.archunit.core.domain.DomainObjectCreationContext.createWildcardType;
import static com.tngtech.archunit.core.domain.Formatters.ensureCanonicalArrayTypeName;
import static com.tngtech.archunit.core.domain.JavaConstructor.CONSTRUCTOR_NAME;
import static com.tngtech.archunit.core.domain.properties.HasName.Utils.namesOf;
@Internal
public final class DomainBuilders {
// Private constructor: DomainBuilders is a static holder for builder types and must not be instantiated.
private DomainBuilders() {
}
// Builds all given annotation builders against the supplied owner and indexes the
// resulting annotations by the fully qualified name of their raw annotation type.
static <T extends HasDescription> Map<String, JavaAnnotation<T>> buildAnnotations(T owner, Set<JavaAnnotationBuilder> annotations, ImportedClasses importedClasses) {
    ImmutableMap.Builder<String, JavaAnnotation<T>> byTypeName = ImmutableMap.builder();
    for (JavaAnnotationBuilder builder : annotations) {
        JavaAnnotation<T> annotation = builder.build(owner, importedClasses);
        byTypeName.put(annotation.getRawType().getName(), annotation);
    }
    return byTypeName.build();
}
@Internal
// Builder for a single enum constant; captures the declaring class and the constant's name.
public static final class JavaEnumConstantBuilder {
    private JavaClass declaringClass;
    private String name;

    JavaEnumConstantBuilder() {
    }

    JavaEnumConstantBuilder withDeclaringClass(JavaClass declaringClass) {
        this.declaringClass = declaringClass;
        return this;
    }

    JavaEnumConstantBuilder withName(String name) {
        this.name = name;
        return this;
    }

    // Accessors used by DomainObjectCreationContext while constructing the domain object.
    public String getName() {
        return name;
    }

    public JavaClass getDeclaringClass() {
        return declaringClass;
    }

    JavaEnumConstant build() {
        return DomainObjectCreationContext.createJavaEnumConstant(this);
    }
}
// Base builder for all class members (fields, methods, constructors, initializers).
// Carries the state common to every member: name, descriptor, modifiers, owner.
// Subclasses implement construct(...) to create the concrete domain object.
@Internal
public abstract static class JavaMemberBuilder<OUTPUT, SELF extends JavaMemberBuilder<OUTPUT, SELF>>
implements BuilderWithBuildParameter<JavaClass, OUTPUT> {
private String name;
private String descriptor;
private Set<JavaModifier> modifiers;
private JavaClass owner;
ImportedClasses importedClasses;
// 0 means "no line number recorded yet"; see recordLineNumber(int).
private int firstLineNumber;
private JavaMemberBuilder() {
}
SELF withName(String name) {
this.name = name;
return self();
}
SELF withDescriptor(String descriptor) {
this.descriptor = descriptor;
return self();
}
SELF withModifiers(Set<JavaModifier> modifiers) {
this.modifiers = modifiers;
return self();
}
// Keeps the smallest line number seen so far; 0 is treated as "unset", not as a real line number.
void recordLineNumber(int lineNumber) {
this.firstLineNumber = this.firstLineNumber == 0 ? lineNumber : Math.min(this.firstLineNumber, lineNumber);
}
// Cast is safe by the self-referential type bound SELF extends JavaMemberBuilder<OUTPUT, SELF>.
@SuppressWarnings("unchecked")
SELF self() {
return (SELF) this;
}
abstract OUTPUT construct(SELF self, ImportedClasses importedClasses);
// Resolves a class by fully qualified name from the imported classes.
JavaClass get(String typeName) {
return importedClasses.getOrResolve(typeName);
}
public String getName() {
return name;
}
public String getDescriptor() {
return descriptor;
}
public Set<JavaModifier> getModifiers() {
return modifiers;
}
public JavaClass getOwner() {
return owner;
}
public int getFirstLineNumber() {
return firstLineNumber;
}
@Override
public final OUTPUT build(JavaClass owner, ImportedClasses importedClasses) {
// Stores the build parameters first so that construct(...) and later callbacks can access them.
this.owner = owner;
this.importedClasses = importedClasses;
return construct(self(), importedClasses);
}
}
// Builder for fields: tracks the raw field type and, when present, its generic type signature.
@Internal
public static final class JavaFieldBuilder extends JavaMemberBuilder<JavaField, JavaFieldBuilder> {
// Present only if the field declares a generic (signature-derived) type; otherwise the raw type is used.
private Optional<JavaTypeCreationProcess<JavaField>> genericType;
private JavaClassDescriptor rawType;
JavaFieldBuilder() {
}
JavaFieldBuilder withType(Optional<JavaTypeCreationProcess<JavaField>> genericTypeBuilder, JavaClassDescriptor rawType) {
this.genericType = checkNotNull(genericTypeBuilder);
this.rawType = checkNotNull(rawType);
return self();
}
String getTypeName() {
return rawType.getFullyQualifiedClassName();
}
// Finishes the generic type against all type parameters visible from the owning class,
// or falls back to resolving the raw type when no generic information is available.
public JavaType getType(JavaField field) {
return genericType.isPresent()
? genericType.get().finish(field, allTypeParametersInContextOf(field.getOwner()), importedClasses)
: importedClasses.getOrResolve(rawType.getFullyQualifiedClassName());
}
// Type parameters of the class itself plus those of its enclosing context
// (helpers defined elsewhere in this file).
private static Iterable<JavaTypeVariable<?>> allTypeParametersInContextOf(JavaClass javaClass) {
return FluentIterable.from(getTypeParametersOf(javaClass)).append(allTypeParametersInEnclosingContextOf(javaClass));
}
@Override
JavaField construct(JavaFieldBuilder builder, ImportedClasses importedClasses) {
return DomainObjectCreationContext.createJavaField(builder);
}
}
// Base builder for code units (methods, constructors, static initializers): collects return type,
// parameter types and annotations, type parameters, throws declarations, and facts derived from
// the body (referenced class objects, instanceof checks) while the bytecode is imported.
@Internal
public abstract static class JavaCodeUnitBuilder<OUTPUT, SELF extends JavaCodeUnitBuilder<OUTPUT, SELF>> extends JavaMemberBuilder<OUTPUT, SELF> {
// Present only if the code unit declares a generic (signature-derived) return type.
private Optional<JavaTypeCreationProcess<JavaCodeUnit>> genericReturnType;
private JavaClassDescriptor rawReturnType;
private List<JavaTypeCreationProcess<JavaCodeUnit>> genericParameterTypes;
private List<JavaClassDescriptor> rawParameterTypes;
// Parameter annotations keyed by the parameter's index.
private SetMultimap<Integer, JavaAnnotationBuilder> parameterAnnotationsByIndex;
private JavaCodeUnitTypeParametersBuilder typeParametersBuilder;
private List<JavaClassDescriptor> throwsDeclarations;
// Class objects (SomeType.class) referenced from the body.
private final Set<RawReferencedClassObject> rawReferencedClassObjects = new HashSet<>();
// instanceof checks found in the body.
private final List<RawInstanceofCheck> instanceOfChecks = new ArrayList<>();
private JavaCodeUnitBuilder() {
}
SELF withReturnType(Optional<JavaTypeCreationProcess<JavaCodeUnit>> genericReturnType, JavaClassDescriptor rawReturnType) {
this.genericReturnType = genericReturnType;
this.rawReturnType = rawReturnType;
return self();
}
SELF withParameterTypes(List<JavaTypeCreationProcess<JavaCodeUnit>> genericParameterTypes, List<JavaClassDescriptor> rawParameterTypes) {
this.genericParameterTypes = genericParameterTypes;
this.rawParameterTypes = rawParameterTypes;
return self();
}
SELF withParameterAnnotations(SetMultimap<Integer, JavaAnnotationBuilder> parameterAnnotationsByIndex) {
this.parameterAnnotationsByIndex = parameterAnnotationsByIndex;
return self();
}
SELF withTypeParameters(List<JavaTypeParameterBuilder<JavaCodeUnit>> typeParameterBuilders) {
this.typeParametersBuilder = new JavaCodeUnitTypeParametersBuilder(typeParameterBuilders);
return self();
}
SELF withThrowsClause(List<JavaClassDescriptor> throwsDeclarations) {
this.throwsDeclarations = throwsDeclarations;
return self();
}
SELF addReferencedClassObject(RawReferencedClassObject rawReferencedClassObject) {
rawReferencedClassObjects.add(rawReferencedClassObject);
return self();
}
SELF addInstanceOfCheck(RawInstanceofCheck rawInstanceOfChecks) {
this.instanceOfChecks.add(rawInstanceOfChecks);
return self();
}
// Fully qualified names of the raw parameter types, in declaration order.
List<String> getParameterTypeNames() {
ImmutableList.Builder<String> result = ImmutableList.builder();
for (JavaClassDescriptor parameter : rawParameterTypes) {
result.add(parameter.getFullyQualifiedClassName());
}
return result.build();
}
String getReturnTypeName() {
return rawReturnType.getFullyQualifiedClassName();
}
boolean hasNoParameters() {
return rawParameterTypes.isEmpty();
}
public JavaClass getRawReturnType() {
return get(rawReturnType.getFullyQualifiedClassName());
}
// Finishes the generic return type against the visible type parameters, or falls back to the raw type.
public JavaType getGenericReturnType(JavaCodeUnit codeUnit) {
return genericReturnType.isPresent()
? genericReturnType.get().finish(codeUnit, allTypeParametersInContextOf(codeUnit), importedClasses)
: getRawReturnType();
}
// Type parameters of the code unit itself plus those of its enclosing context
// (helpers defined elsewhere in this file).
private Iterable<JavaTypeVariable<?>> allTypeParametersInContextOf(JavaCodeUnit codeUnit) {
return FluentIterable.from(getTypeParametersOf(codeUnit)).append(allTypeParametersInEnclosingContextOf(codeUnit));
}
public List<JavaClass> getRawParameterTypes() {
return asJavaClasses(rawParameterTypes);
}
public List<JavaType> getGenericParameterTypes(JavaCodeUnit codeUnit) {
return build(genericParameterTypes, codeUnit);
}
// Finishes each generic parameter type creation process against the code unit's type context.
private List<JavaType> build(List<JavaTypeCreationProcess<JavaCodeUnit>> genericParameterTypeBuilders, JavaCodeUnit codeUnit) {
ImmutableList.Builder<JavaType> result = ImmutableList.builder();
for (JavaTypeCreationProcess<JavaCodeUnit> parameterTypeBuilder : genericParameterTypeBuilders) {
result.add(parameterTypeBuilder.finish(codeUnit, allTypeParametersInContextOf(codeUnit), importedClasses));
}
return result.build();
}
public Set<JavaAnnotationBuilder> getParameterAnnotations(int index) {
return parameterAnnotationsByIndex.get(index);
}
public List<JavaTypeVariable<JavaCodeUnit>> getTypeParameters(JavaCodeUnit owner) {
return typeParametersBuilder.build(owner, importedClasses);
}
public <CODE_UNIT extends JavaCodeUnit> ThrowsClause<CODE_UNIT> getThrowsClause(CODE_UNIT codeUnit) {
return createThrowsClause(codeUnit, asJavaClasses(this.throwsDeclarations));
}
// Resolves the recorded raw class object references into domain objects for the given code unit.
public Set<ReferencedClassObject> getReferencedClassObjects(JavaCodeUnit codeUnit) {
ImmutableSet.Builder<ReferencedClassObject> result = ImmutableSet.builder();
for (RawReferencedClassObject rawReferencedClassObject : this.rawReferencedClassObjects) {
result.add(createReferencedClassObject(codeUnit, get(rawReferencedClassObject.getClassName()), rawReferencedClassObject.getLineNumber()));
}
return result.build();
}
// Resolves the recorded raw instanceof checks into domain objects for the given code unit.
public Set<InstanceofCheck> getInstanceofChecks(JavaCodeUnit codeUnit) {
ImmutableSet.Builder<InstanceofCheck> result = ImmutableSet.builder();
for (RawInstanceofCheck instanceOfCheck : this.instanceOfChecks) {
result.add(createInstanceofCheck(codeUnit, get(instanceOfCheck.getTarget().getFullyQualifiedClassName()), instanceOfCheck.getLineNumber()));
}
return result.build();
}
private List<JavaClass> asJavaClasses(List<JavaClassDescriptor> descriptors) {
ImmutableList.Builder<JavaClass> result = ImmutableList.builder();
for (JavaClassDescriptor javaClassDescriptor : descriptors) {
result.add(get(javaClassDescriptor.getFullyQualifiedClassName()));
}
return result.build();
}
public ParameterAnnotationsBuilder getParameterAnnotationsBuilder(int index) {
return new ParameterAnnotationsBuilder(parameterAnnotationsByIndex.get(index), importedClasses);
}
public Iterable<JavaAnnotationBuilder> getParameterAnnotationBuilders() {
return parameterAnnotationsByIndex.values();
}
// Builds the annotations of a single parameter once its owning JavaParameter exists.
@Internal
public static class ParameterAnnotationsBuilder {
private final Iterable<JavaAnnotationBuilder> annotationBuilders;
private final ImportedClasses importedClasses;
private ParameterAnnotationsBuilder(Iterable<JavaAnnotationBuilder> annotationBuilders, ImportedClasses importedClasses) {
this.annotationBuilders = annotationBuilders;
this.importedClasses = importedClasses;
}
public Set<JavaAnnotation<JavaParameter>> build(JavaParameter owner) {
ImmutableSet.Builder<JavaAnnotation<JavaParameter>> result = ImmutableSet.builder();
for (DomainBuilders.JavaAnnotationBuilder annotationBuilder : annotationBuilders) {
result.add(annotationBuilder.build(owner, importedClasses));
}
return result.build();
}
}
}
// Builder for methods; additionally tracks the default value of annotation type members.
@Internal
public static final class JavaMethodBuilder extends JavaCodeUnitBuilder<JavaMethod, JavaMethodBuilder> {
// Default for ordinary methods: no annotation default value present.
private static final Function<JavaMethod, Optional<Object>> NO_ANNOTATION_DEFAULT_VALUE = new Function<JavaMethod, Optional<Object>>() {
@Override
public Optional<Object> apply(JavaMethod input) {
return Optional.empty();
}
};
private Function<JavaMethod, Optional<Object>> createAnnotationDefaultValue = NO_ANNOTATION_DEFAULT_VALUE;
JavaMethodBuilder() {
}
// Called for members of annotation types that declare a default value.
void withAnnotationDefaultValue(Function<JavaMethod, Optional<Object>> createAnnotationDefaultValue) {
this.createAnnotationDefaultValue = createAnnotationDefaultValue;
}
@Override
JavaMethod construct(JavaMethodBuilder builder, final ImportedClasses importedClasses) {
return DomainObjectCreationContext.createJavaMethod(builder, createAnnotationDefaultValue);
}
}
// Builder for constructors; all state is inherited from JavaCodeUnitBuilder.
@Internal
public static final class JavaConstructorBuilder extends JavaCodeUnitBuilder<JavaConstructor, JavaConstructorBuilder> {
JavaConstructorBuilder() {
}
@Override
JavaConstructor construct(JavaConstructorBuilder builder, ImportedClasses importedClasses) {
return DomainObjectCreationContext.createJavaConstructor(builder);
}
}
@Internal
// Builder collecting the structural facts of a class (kind flags, modifiers, source location)
// before the JavaClass domain object is created.
public static final class JavaClassBuilder {
    // Marks classes that are only stubbed because they were referenced but not imported.
    private final boolean stub;
    private Optional<SourceDescriptor> sourceDescriptor = Optional.empty();
    private Optional<String> sourceFileName = Optional.empty();
    private JavaClassDescriptor descriptor;
    private boolean isInterface;
    private boolean isEnum;
    private boolean isAnnotation;
    private boolean isRecord;
    private boolean isAnonymousClass;
    private boolean isMemberClass;
    private Set<JavaModifier> modifiers = new HashSet<>();

    JavaClassBuilder() {
        this(false);
    }

    private JavaClassBuilder(boolean stub) {
        this.stub = stub;
    }

    // Creates a builder for a stub, i.e. a referenced class that was not part of the import.
    static JavaClassBuilder forStub() {
        return new JavaClassBuilder(true);
    }

    JavaClassBuilder withSourceDescriptor(SourceDescriptor sourceDescriptor) {
        this.sourceDescriptor = Optional.of(sourceDescriptor);
        return this;
    }

    JavaClassBuilder withSourceFileName(String sourceFileName) {
        this.sourceFileName = Optional.of(sourceFileName);
        return this;
    }

    JavaClassBuilder withDescriptor(JavaClassDescriptor descriptor) {
        this.descriptor = descriptor;
        return this;
    }

    JavaClassBuilder withSimpleName(String simpleName) {
        this.descriptor = descriptor.withSimpleClassName(simpleName);
        return this;
    }

    JavaClassBuilder withInterface(boolean isInterface) {
        this.isInterface = isInterface;
        return this;
    }

    JavaClassBuilder withEnum(boolean isEnum) {
        this.isEnum = isEnum;
        return this;
    }

    public JavaClassBuilder withAnnotation(boolean isAnnotation) {
        this.isAnnotation = isAnnotation;
        return this;
    }

    JavaClassBuilder withRecord(boolean isRecord) {
        this.isRecord = isRecord;
        return this;
    }

    JavaClassBuilder withAnonymousClass(boolean isAnonymousClass) {
        this.isAnonymousClass = isAnonymousClass;
        return this;
    }

    JavaClassBuilder withMemberClass(boolean isMemberClass) {
        this.isMemberClass = isMemberClass;
        return this;
    }

    JavaClassBuilder withModifiers(Set<JavaModifier> modifiers) {
        this.modifiers = modifiers;
        return this;
    }

    JavaClass build() {
        return DomainObjectCreationContext.createJavaClass(this);
    }

    // Only classes imported from an actual location carry a Source; stubs have none.
    public Optional<Source> getSource() {
        if (!sourceDescriptor.isPresent()) {
            return Optional.<Source>empty();
        }
        SourceDescriptor presentDescriptor = sourceDescriptor.get();
        return Optional.of(createSource(presentDescriptor.getUri(), sourceFileName, presentDescriptor.isMd5InClassSourcesEnabled()));
    }

    public JavaClassDescriptor getDescriptor() {
        return descriptor;
    }

    public boolean isInterface() {
        return isInterface;
    }

    public boolean isEnum() {
        return isEnum;
    }

    public boolean isAnnotation() {
        return isAnnotation;
    }

    public boolean isRecord() {
        return isRecord;
    }

    public boolean isAnonymousClass() {
        return isAnonymousClass;
    }

    public boolean isMemberClass() {
        return isMemberClass;
    }

    public Set<JavaModifier> getModifiers() {
        return modifiers;
    }

    public boolean isStub() {
        return stub;
    }
}
/**
 * Builder for {@link JavaAnnotation}. Collects the annotation type and its property values;
 * the concrete value kinds are produced by the {@link ValueBuilder} factory methods.
 */
@Internal
public static final class JavaAnnotationBuilder {
    private JavaClassDescriptor type;
    // LinkedHashMap: property insertion order is preserved
    private final Map<String, ValueBuilder> values = new LinkedHashMap<>();
    // Captured in build(..) so getType()/getValues(..) can resolve classes lazily afterwards
    private ImportedClasses importedClasses;

    JavaAnnotationBuilder() {
    }

    JavaAnnotationBuilder withType(JavaClassDescriptor type) {
        this.type = type;
        return this;
    }

    String getFullyQualifiedClassName() {
        return type.getFullyQualifiedClassName();
    }

    JavaAnnotationBuilder addProperty(String key, ValueBuilder valueBuilder) {
        values.put(key, valueBuilder);
        return this;
    }

    public JavaClass getType() {
        return importedClasses.getOrResolve(type.getFullyQualifiedClassName());
    }

    public <T extends HasDescription> Map<String, Object> getValues(T owner) {
        ImmutableMap.Builder<String, Object> result = ImmutableMap.builder();
        for (Map.Entry<String, ValueBuilder> entry : values.entrySet()) {
            Optional<Object> value = entry.getValue().build(owner, importedClasses);
            // Properties whose value cannot be built are silently omitted from the result
            if (value.isPresent()) {
                result.put(entry.getKey(), value.get());
            }
        }
        return result.build();
    }

    public <T extends HasDescription> JavaAnnotation<T> build(T owner, ImportedClasses importedClasses) {
        this.importedClasses = importedClasses;
        return DomainObjectCreationContext.createJavaAnnotation(owner, this);
    }

    /**
     * Builds a single annotation property value. The static factory methods below cover the
     * supported value kinds: primitive/plain values, enum constants, class values and
     * nested annotations.
     */
    abstract static class ValueBuilder {
        abstract <T extends HasDescription> Optional<Object> build(T owner, ImportedClasses importedClasses);

        static ValueBuilder fromPrimitiveProperty(final Object value) {
            return new ValueBuilder() {
                @Override
                <T extends HasDescription> Optional<Object> build(T owner, ImportedClasses unused) {
                    return Optional.of(value);
                }
            };
        }

        public static ValueBuilder fromEnumProperty(final JavaClassDescriptor enumType, final String value) {
            return new ValueBuilder() {
                @Override
                <T extends HasDescription> Optional<Object> build(T owner, ImportedClasses importedClasses) {
                    return Optional.<Object>of(
                            new DomainBuilders.JavaEnumConstantBuilder()
                                    .withDeclaringClass(importedClasses.getOrResolve(enumType.getFullyQualifiedClassName()))
                                    .withName(value)
                                    .build());
                }
            };
        }

        static ValueBuilder fromClassProperty(final JavaClassDescriptor value) {
            return new ValueBuilder() {
                @Override
                <T extends HasDescription> Optional<Object> build(T owner, ImportedClasses importedClasses) {
                    return Optional.<Object>of(importedClasses.getOrResolve(value.getFullyQualifiedClassName()));
                }
            };
        }

        static ValueBuilder fromAnnotationProperty(final JavaAnnotationBuilder builder) {
            return new ValueBuilder() {
                @Override
                <T extends HasDescription> Optional<Object> build(T owner, ImportedClasses importedClasses) {
                    return Optional.<Object>of(builder.build(owner, importedClasses));
                }
            };
        }
    }
}
/**
 * Builder for {@link JavaStaticInitializer}. A static initializer has a fixed shape
 * (name {@link JavaStaticInitializer#STATIC_INITIALIZER_NAME}, descriptor {@code ()V},
 * no parameters, no modifiers, no throws clause), so everything is preset in the constructor.
 */
@Internal
public static final class JavaStaticInitializerBuilder extends JavaCodeUnitBuilder<JavaStaticInitializer, JavaStaticInitializerBuilder> {
    JavaStaticInitializerBuilder() {
        withReturnType(Optional.<JavaTypeCreationProcess<JavaCodeUnit>>empty(), JavaClassDescriptor.From.name(void.class.getName()));
        withParameterTypes(Collections.<JavaTypeCreationProcess<JavaCodeUnit>>emptyList(), Collections.<JavaClassDescriptor>emptyList());
        withName(JavaStaticInitializer.STATIC_INITIALIZER_NAME);
        withDescriptor("()V");
        withModifiers(Collections.<JavaModifier>emptySet());
        withThrowsClause(Collections.<JavaClassDescriptor>emptyList());
    }

    @Override
    JavaStaticInitializer construct(JavaStaticInitializerBuilder builder, ImportedClasses importedClasses) {
        return DomainObjectCreationContext.createJavaStaticInitializer(builder);
    }
}
/**
 * Deferred creation of a {@link JavaType}: a type can only be finished once its owner and
 * all type parameters in context are known (e.g. so bounds can refer to type variables of
 * enclosing declarations).
 */
interface JavaTypeCreationProcess<OWNER> {
    JavaType finish(OWNER owner, Iterable<JavaTypeVariable<?>> allTypeParametersInContext, ImportedClasses classes);

    /**
     * Post-processes a finished {@link JavaType}, e.g. wrapping it into an array type.
     * Finishers compose via {@link #after(JavaTypeFinisher)}.
     */
    abstract class JavaTypeFinisher {
        private JavaTypeFinisher() {
        }

        abstract JavaType finish(JavaType input, ImportedClasses classes);

        abstract String getFinishedName(String name);

        /**
         * @return a finisher that applies {@code other} first and then this finisher
         */
        JavaTypeFinisher after(final JavaTypeFinisher other) {
            return new JavaTypeFinisher() {
                @Override
                JavaType finish(JavaType input, ImportedClasses classes) {
                    return JavaTypeFinisher.this.finish(other.finish(input, classes), classes);
                }

                @Override
                String getFinishedName(String name) {
                    return JavaTypeFinisher.this.getFinishedName(other.getFinishedName(name));
                }
            };
        }

        // `final` added: this is a shared constant and must never be reassigned
        // (also makes it consistent with ARRAY_CREATOR below)
        static final JavaTypeFinisher IDENTITY = new JavaTypeFinisher() {
            @Override
            JavaType finish(JavaType input, ImportedClasses classes) {
                return input;
            }

            @Override
            String getFinishedName(String name) {
                return name;
            }
        };

        static final JavaTypeFinisher ARRAY_CREATOR = new JavaTypeFinisher() {
            @Override
            public JavaType finish(JavaType componentType, ImportedClasses classes) {
                JavaClassDescriptor erasureType = JavaClassDescriptor.From.javaClass(componentType.toErasure()).toArrayDescriptor();
                // For a plain class the array type is simply the resolved array class...
                if (componentType instanceof JavaClass) {
                    return classes.getOrResolve(erasureType.getFullyQualifiedClassName());
                }
                // ...for a generic component type we create a generic array type carrying the resolved erasure
                JavaClass erasure = classes.getOrResolve(erasureType.getFullyQualifiedClassName());
                return createGenericArrayType(componentType, erasure);
            }

            @Override
            String getFinishedName(String name) {
                return name + "[]";
            }
        };
    }
}
/**
 * Builder for a single type parameter ({@link JavaTypeVariable}) of a class or code unit.
 * Creation is two-phased: {@code build(..)} first creates the variable with erasure
 * {@code Object}; the upper bounds are resolved later via {@code getUpperBounds(..)}, once
 * all type parameters in context are known.
 */
@Internal
public static final class JavaTypeParameterBuilder<OWNER extends HasDescription> {
    private final String name;
    private final List<JavaTypeCreationProcess<OWNER>> upperBounds = new ArrayList<>();
    // Captured in build(..) so getUpperBounds(..) can resolve bounds afterwards
    private OWNER owner;
    private ImportedClasses importedClasses;

    JavaTypeParameterBuilder(String name) {
        this.name = checkNotNull(name);
    }

    void addBound(JavaTypeCreationProcess<OWNER> bound) {
        upperBounds.add(bound);
    }

    public JavaTypeVariable<OWNER> build(OWNER owner, ImportedClasses importedClasses) {
        this.owner = owner;
        this.importedClasses = importedClasses;
        return createTypeVariable(name, owner, this.importedClasses.getOrResolve(Object.class.getName()));
    }

    String getName() {
        return name;
    }

    @SuppressWarnings("unchecked") // Iterable is covariant
    public List<JavaType> getUpperBounds(Iterable<? extends JavaTypeVariable<?>> allGenericParametersInContext) {
        return buildJavaTypes(upperBounds, owner, (Iterable<JavaTypeVariable<?>>) allGenericParametersInContext, importedClasses);
    }
}
/**
 * Builds the type parameters of an owner (class or code unit) in two passes: first all
 * {@link JavaTypeVariable}s are created, then each variable's upper bounds are completed
 * against all type parameters visible in context (including ones of enclosing declarations),
 * since bounds may refer to other type variables.
 */
private static abstract class AbstractTypeParametersBuilder<OWNER extends HasDescription> {
    private final List<JavaTypeParameterBuilder<OWNER>> typeParameterBuilders;

    AbstractTypeParametersBuilder(List<JavaTypeParameterBuilder<OWNER>> typeParameterBuilders) {
        this.typeParameterBuilders = typeParameterBuilders;
    }

    // Parameter renamed from `ImportedClasses` (which shadowed the type name) to `importedClasses`
    final List<JavaTypeVariable<OWNER>> build(OWNER owner, ImportedClasses importedClasses) {
        if (typeParameterBuilders.isEmpty()) {
            return Collections.emptyList();
        }
        // Pass 1: create all type variables (bounds not yet resolved)
        Map<JavaTypeVariable<OWNER>, JavaTypeParameterBuilder<OWNER>> typeArgumentsToBuilders = new LinkedHashMap<>();
        for (JavaTypeParameterBuilder<OWNER> builder : typeParameterBuilders) {
            typeArgumentsToBuilders.put(builder.build(owner, importedClasses), builder);
        }
        // Pass 2: resolve each variable's upper bounds against everything visible in context
        Set<JavaTypeVariable<?>> allGenericParametersInContext = union(typeParametersFromEnclosingContextOf(owner), typeArgumentsToBuilders.keySet());
        for (Map.Entry<JavaTypeVariable<OWNER>, JavaTypeParameterBuilder<OWNER>> typeParameterToBuilder : typeArgumentsToBuilders.entrySet()) {
            List<JavaType> upperBounds = typeParameterToBuilder.getValue().getUpperBounds(allGenericParametersInContext);
            completeTypeVariable(typeParameterToBuilder.getKey(), upperBounds);
        }
        return ImmutableList.copyOf(typeArgumentsToBuilders.keySet());
    }

    abstract Set<JavaTypeVariable<?>> typeParametersFromEnclosingContextOf(OWNER owner);
}
/**
 * Type parameter builder for classes: the enclosing context is the chain of enclosing
 * classes/code units of the class itself.
 */
static class JavaClassTypeParametersBuilder extends AbstractTypeParametersBuilder<JavaClass> {
    JavaClassTypeParametersBuilder(List<JavaTypeParameterBuilder<JavaClass>> typeParameterBuilders) {
        super(typeParameterBuilders);
    }

    @Override
    Set<JavaTypeVariable<?>> typeParametersFromEnclosingContextOf(JavaClass javaClass) {
        return allTypeParametersInEnclosingContextOf(javaClass);
    }
}
/**
 * Type parameter builder for code units (methods/constructors): the enclosing context is
 * the declaring class plus that class's own enclosing context.
 */
static class JavaCodeUnitTypeParametersBuilder extends AbstractTypeParametersBuilder<JavaCodeUnit> {
    JavaCodeUnitTypeParametersBuilder(List<JavaTypeParameterBuilder<JavaCodeUnit>> typeParameterBuilders) {
        super(typeParameterBuilders);
    }

    @Override
    Set<JavaTypeVariable<?>> typeParametersFromEnclosingContextOf(JavaCodeUnit codeUnit) {
        return allTypeParametersInEnclosingContextOf(codeUnit);
    }
}
/**
 * @return the type parameters of the code unit's declaring class plus all type parameters
 *         of the declaring class's enclosing context
 */
private static Set<JavaTypeVariable<?>> allTypeParametersInEnclosingContextOf(JavaCodeUnit codeUnit) {
    JavaClass declaringClass = codeUnit.getOwner();
    return FluentIterable.from(getTypeParametersOf(declaringClass))
            .append(allTypeParametersInEnclosingContextOf(declaringClass))
            .toSet();
}
// Widens List<? extends JavaTypeVariable<?>> to List<JavaTypeVariable<?>> via a raw cast;
// safe as long as the returned list is only read, never written to through this reference
@SuppressWarnings({"unchecked", "rawtypes"})
private static List<JavaTypeVariable<?>> getTypeParametersOf(HasTypeParameters<?> hasTypeParameters) {
    List<? extends JavaTypeVariable<?>> result = hasTypeParameters.getTypeParameters();
    return (List) result;
}
/**
 * Collects the type parameters of every enclosing class and every enclosing code unit of the
 * given class, walking outwards until a top-level class is reached.
 */
private static Set<JavaTypeVariable<?>> allTypeParametersInEnclosingContextOf(JavaClass javaClass) {
    Set<JavaTypeVariable<?>> result = new HashSet<>();
    JavaClass current = javaClass;
    while (current.getEnclosingClass().isPresent()) {
        if (current.getEnclosingCodeUnit().isPresent()) {
            // Note that there can't be a case where we could have an enclosing code unit without an enclosing class,
            // since by definition the class where the enclosing code unit is declared in is an enclosing class
            result.addAll(current.getEnclosingCodeUnit().get().getTypeParameters());
        }
        JavaClass enclosingClass = current.getEnclosingClass().get();
        result.addAll(enclosingClass.getTypeParameters());
        current = enclosingClass;
    }
    return result;
}
/**
 * Builds a {@link JavaType} for an owner, given all type parameters in context and the
 * imported classes for resolving class names.
 */
interface JavaTypeBuilder<OWNER extends HasDescription> {
    JavaType build(OWNER owner, Iterable<JavaTypeVariable<?>> allTypeParametersInContext, ImportedClasses importedClasses);
}
/**
 * Builder for {@link JavaWildcardType}. Collects lower ({@code ? super ..}) and upper
 * ({@code ? extends ..}) bound creation processes; the bounds themselves are resolved lazily
 * via {@code getLowerBounds()}/{@code getUpperBounds()} after {@code build(..)} captured
 * owner, context and imported classes.
 */
@Internal
public static final class JavaWildcardTypeBuilder<OWNER extends HasDescription> implements JavaTypeBuilder<OWNER> {
    private final List<JavaTypeCreationProcess<OWNER>> lowerBoundCreationProcesses = new ArrayList<>();
    private final List<JavaTypeCreationProcess<OWNER>> upperBoundCreationProcesses = new ArrayList<>();
    private OWNER owner;
    private Iterable<JavaTypeVariable<?>> allTypeParametersInContext;
    private ImportedClasses importedClasses;

    JavaWildcardTypeBuilder() {
    }

    public JavaWildcardTypeBuilder<OWNER> addLowerBound(JavaTypeCreationProcess<OWNER> boundCreationProcess) {
        lowerBoundCreationProcesses.add(boundCreationProcess);
        return this;
    }

    public JavaWildcardTypeBuilder<OWNER> addUpperBound(JavaTypeCreationProcess<OWNER> boundCreationProcess) {
        upperBoundCreationProcesses.add(boundCreationProcess);
        return this;
    }

    @Override
    public JavaWildcardType build(OWNER owner, Iterable<JavaTypeVariable<?>> allTypeParametersInContext, ImportedClasses importedClasses) {
        this.owner = owner;
        this.allTypeParametersInContext = allTypeParametersInContext;
        this.importedClasses = importedClasses;
        return createWildcardType(this);
    }

    public List<JavaType> getUpperBounds() {
        return buildJavaTypes(upperBoundCreationProcesses, owner, allTypeParametersInContext, importedClasses);
    }

    public List<JavaType> getLowerBounds() {
        return buildJavaTypes(lowerBoundCreationProcesses, owner, allTypeParametersInContext, importedClasses);
    }

    public JavaClass getUnboundErasureType(List<JavaType> upperBounds) {
        return DomainBuilders.getUnboundErasureType(upperBounds, importedClasses);
    }
}
/**
 * Builder for a (possibly) parameterized type. If no type arguments are added, the result
 * is simply the resolved raw class; otherwise an {@link ImportedParameterizedType} wrapping
 * the raw class and its type arguments.
 */
static class JavaParameterizedTypeBuilder<OWNER extends HasDescription> implements JavaTypeBuilder<OWNER> {
    private final JavaClassDescriptor type;
    private final List<JavaTypeCreationProcess<OWNER>> typeArgumentCreationProcesses = new ArrayList<>();

    JavaParameterizedTypeBuilder(JavaClassDescriptor type) {
        this.type = type;
    }

    void addTypeArgument(JavaTypeCreationProcess<OWNER> typeCreationProcess) {
        typeArgumentCreationProcesses.add(typeCreationProcess);
    }

    @Override
    public JavaType build(OWNER owner, Iterable<JavaTypeVariable<?>> allTypeParametersInContext, ImportedClasses classes) {
        List<JavaType> typeArguments = buildJavaTypes(typeArgumentCreationProcesses, owner, allTypeParametersInContext, classes);
        return typeArguments.isEmpty()
                ? classes.getOrResolve(type.getFullyQualifiedClassName())
                : new ImportedParameterizedType(classes.getOrResolve(type.getFullyQualifiedClassName()), typeArguments);
    }

    String getTypeName() {
        return type.getFullyQualifiedClassName();
    }

    /**
     * @return a builder for the inner class of this type with the given simple name
     *         (joined with the usual {@code Outer$Inner} bytecode naming)
     */
    JavaParameterizedTypeBuilder<OWNER> forInnerClass(String simpleInnerClassName) {
        return new JavaParameterizedTypeBuilder<>(JavaClassDescriptorImporter.createFromAsmObjectTypeName(
                type.getFullyQualifiedClassName() + '$' + simpleInnerClassName));
    }
}
/**
 * Finishes all given type creation processes against the same owner/context/classes and
 * returns the resulting types in order.
 */
private static <OWNER> List<JavaType> buildJavaTypes(List<? extends JavaTypeCreationProcess<OWNER>> typeCreationProcesses, OWNER owner, Iterable<JavaTypeVariable<?>> allGenericParametersInContext, ImportedClasses classes) {
    ImmutableList.Builder<JavaType> result = ImmutableList.builder();
    for (JavaTypeCreationProcess<OWNER> typeCreationProcess : typeCreationProcesses) {
        result.add(typeCreationProcess.finish(owner, allGenericParametersInContext, classes));
    }
    return result.build();
}
/**
 * @return the erasure of the first upper bound, or {@code Object} if there are no bounds
 *         (an unbounded type erases to {@code Object})
 */
private static JavaClass getUnboundErasureType(List<JavaType> upperBounds, ImportedClasses importedClasses) {
    // isEmpty() instead of size() > 0 — same behavior, idiomatic emptiness check
    return upperBounds.isEmpty()
            ? importedClasses.getOrResolve(Object.class.getName())
            : upperBounds.get(0).toErasure();
}
/**
 * A builder whose {@code build} step needs an extra parameter besides the imported classes.
 */
@Internal
interface BuilderWithBuildParameter<PARAMETER, VALUE> {
    VALUE build(PARAMETER parameter, ImportedClasses importedClasses);

    /**
     * Convenience for building a whole set of such builders with the same parameter.
     */
    @Internal
    class BuildFinisher {
        static <PARAMETER, VALUE> Set<VALUE> build(
                Set<? extends BuilderWithBuildParameter<PARAMETER, ? extends VALUE>> builders,
                PARAMETER parameter,
                ImportedClasses importedClasses) {
            checkNotNull(builders);
            checkNotNull(parameter);
            ImmutableSet.Builder<VALUE> result = ImmutableSet.builder();
            for (BuilderWithBuildParameter<PARAMETER, ? extends VALUE> builder : builders) {
                result.add(builder.build(parameter, importedClasses));
            }
            return result.build();
        }
    }
}
/**
 * Common base builder for all {@link JavaAccess} kinds: holds the origin (the accessing
 * code unit), the access target and the source line number of the access.
 * Uses the self-type pattern so the fluent {@code with..} methods return the concrete subtype.
 */
@Internal
public abstract static class JavaAccessBuilder<TARGET extends AccessTarget, SELF extends JavaAccessBuilder<TARGET, SELF>> {
    private JavaCodeUnit origin;
    private TARGET target;
    private int lineNumber;

    private JavaAccessBuilder() {
    }

    SELF withOrigin(final JavaCodeUnit origin) {
        this.origin = origin;
        return self();
    }

    SELF withTarget(final TARGET target) {
        this.target = target;
        return self();
    }

    SELF withLineNumber(final int lineNumber) {
        this.lineNumber = lineNumber;
        return self();
    }

    public JavaCodeUnit getOrigin() {
        return origin;
    }

    public TARGET getTarget() {
        return target;
    }

    public int getLineNumber() {
        return lineNumber;
    }

    // Safe by the self-type convention: SELF is always the concrete subtype of this instance
    @SuppressWarnings("unchecked")
    private SELF self() {
        return (SELF) this;
    }
}
/**
 * Builder for {@link JavaFieldAccess}; additionally records whether the field was read or written.
 */
@Internal
public static class JavaFieldAccessBuilder extends JavaAccessBuilder<FieldAccessTarget, JavaFieldAccessBuilder> {
    private AccessType accessType;

    JavaFieldAccessBuilder() {
    }

    JavaFieldAccessBuilder withAccessType(final AccessType accessType) {
        this.accessType = accessType;
        return this;
    }

    public AccessType getAccessType() {
        return accessType;
    }

    JavaFieldAccess build() {
        return DomainObjectCreationContext.createJavaFieldAccess(this);
    }
}
/**
 * Builder for {@link JavaMethodCall}.
 */
@Internal
public static final class JavaMethodCallBuilder extends JavaAccessBuilder<MethodCallTarget, JavaMethodCallBuilder> {
    JavaMethodCallBuilder() {
    }

    JavaMethodCall build() {
        return DomainObjectCreationContext.createJavaMethodCall(this);
    }
}
/**
 * Builder for {@link JavaMethodReference}.
 */
@Internal
public static final class JavaMethodReferenceBuilder extends JavaAccessBuilder<MethodReferenceTarget, JavaMethodReferenceBuilder> {
    JavaMethodReferenceBuilder() {
    }

    JavaMethodReference build() {
        return DomainObjectCreationContext.createJavaMethodReference(this);
    }
}
/**
 * Builder for {@link JavaConstructorCall}.
 */
@Internal
public static class JavaConstructorCallBuilder extends JavaAccessBuilder<ConstructorCallTarget, JavaConstructorCallBuilder> {
    JavaConstructorCallBuilder() {
    }

    JavaConstructorCall build() {
        return DomainObjectCreationContext.createJavaConstructorCall(this);
    }
}
/**
 * Builder for {@link JavaConstructorReference}.
 */
@Internal
public static class JavaConstructorReferenceBuilder extends JavaAccessBuilder<ConstructorReferenceTarget, JavaConstructorReferenceBuilder> {
    JavaConstructorReferenceBuilder() {
    }

    JavaConstructorReference build() {
        return DomainObjectCreationContext.createJavaConstructorReference(this);
    }
}
/**
 * Common base builder for {@link AccessTarget}s: holds the owner (declaring class), the
 * member name and a lazy supplier resolving the actual member (empty if it cannot be
 * resolved). The concrete target creation is injected as {@code createTarget}.
 * Uses the self-type pattern so fluent methods return the concrete subtype.
 */
@Internal
public static abstract class AccessTargetBuilder<MEMBER extends JavaMember, TARGET extends AccessTarget, SELF extends AccessTargetBuilder<MEMBER, TARGET, SELF>> {
    private final Function<SELF, TARGET> createTarget;
    private JavaClass owner;
    private String name;
    Supplier<Optional<MEMBER>> member;

    AccessTargetBuilder(Function<SELF, TARGET> createTarget) {
        this.createTarget = createTarget;
    }

    SELF withOwner(final JavaClass owner) {
        this.owner = owner;
        return self();
    }

    SELF withName(final String name) {
        this.name = name;
        return self();
    }

    SELF withMember(Supplier<Optional<MEMBER>> member) {
        this.member = member;
        return self();
    }

    TARGET build() {
        return createTarget.apply(self());
    }

    public JavaClass getOwner() {
        return owner;
    }

    public String getName() {
        return name;
    }

    public Supplier<Optional<MEMBER>> getMember() {
        return member;
    }

    // Safe by the self-type convention: SELF is always the concrete subtype of this instance
    @SuppressWarnings("unchecked")
    SELF self() {
        return (SELF) this;
    }

    public abstract String getFullName();
}
/**
 * Builder for {@link FieldAccessTarget}; additionally records the field's type.
 */
@Internal
public static final class FieldAccessTargetBuilder extends AccessTargetBuilder<JavaField, FieldAccessTarget, FieldAccessTargetBuilder> {
    private static final Function<FieldAccessTargetBuilder, FieldAccessTarget> CREATE_TARGET = new Function<FieldAccessTargetBuilder, FieldAccessTarget>() {
        @Override
        public FieldAccessTarget apply(FieldAccessTargetBuilder targetBuilder) {
            return DomainObjectCreationContext.createFieldAccessTarget(targetBuilder);
        }
    };

    private JavaClass type;

    FieldAccessTargetBuilder() {
        super(CREATE_TARGET);
    }

    FieldAccessTargetBuilder withType(final JavaClass type) {
        this.type = type;
        return this;
    }

    public JavaClass getType() {
        return type;
    }

    @Override
    public String getFullName() {
        // Field full name convention: declaring class name + "." + field name
        return getOwner().getName() + "." + getName();
    }
}
@Internal
public static class CodeUnitAccessTargetBuilder<CODE_UNIT extends JavaCodeUnit, ACCESS_TARGET extends CodeUnitAccessTarget>
extends AccessTargetBuilder<CODE_UNIT, ACCESS_TARGET, CodeUnitAccessTargetBuilder<CODE_UNIT, ACCESS_TARGET>> {
private List<JavaClass> parameters;
private JavaClass returnType;
private CodeUnitAccessTargetBuilder(Function<CodeUnitAccessTargetBuilder<CODE_UNIT, ACCESS_TARGET>, ACCESS_TARGET> createTarget) {
super(createTarget);
}
CodeUnitAccessTargetBuilder<CODE_UNIT, ACCESS_TARGET> withParameters(final List<JavaClass> parameters) {
this.parameters = parameters;
return self();
}
CodeUnitAccessTargetBuilder<CODE_UNIT, ACCESS_TARGET> withReturnType(final JavaClass returnType) {
this.returnType = returnType;
return self();
}
public List<JavaClass> getParameters() {
return parameters;
}
public JavaClass getReturnType() {
return returnType;
}
public String getFullName() {
return Formatters.formatMethod(getOwner().getName(), getName(), namesOf(parameters));
}
}
/**
 * @return a builder for a constructor call target (name preset to the constructor name)
 */
public static CodeUnitAccessTargetBuilder<JavaConstructor, ConstructorCallTarget> newConstructorCallTargetBuilder() {
    return new CodeUnitAccessTargetBuilder<>(CREATE_CONSTRUCTOR_CALL_TARGET).withName(CONSTRUCTOR_NAME);
}

/**
 * @return a builder for a constructor reference target (name preset to the constructor name)
 */
public static CodeUnitAccessTargetBuilder<JavaConstructor, ConstructorReferenceTarget> newConstructorReferenceTargetBuilder() {
    return new CodeUnitAccessTargetBuilder<>(CREATE_CONSTRUCTOR_REFERENCE_TARGET).withName(CONSTRUCTOR_NAME);
}

/**
 * @return a builder for a method call target
 */
public static CodeUnitAccessTargetBuilder<JavaMethod, MethodCallTarget> newMethodCallTargetBuilder() {
    return new CodeUnitAccessTargetBuilder<>(CREATE_METHOD_CALL_TARGET);
}

/**
 * @return a builder for a method reference target
 */
public static CodeUnitAccessTargetBuilder<JavaMethod, MethodReferenceTarget> newMethodReferenceTargetBuilder() {
    return new CodeUnitAccessTargetBuilder<>(CREATE_METHOD_REFERENCE_TARGET);
}

// The concrete target creation functions injected into the builders above; each simply
// delegates to the corresponding DomainObjectCreationContext factory method.

private static final Function<CodeUnitAccessTargetBuilder<JavaConstructor, ConstructorCallTarget>, ConstructorCallTarget> CREATE_CONSTRUCTOR_CALL_TARGET =
        new Function<CodeUnitAccessTargetBuilder<JavaConstructor, ConstructorCallTarget>, ConstructorCallTarget>() {
            @Override
            public ConstructorCallTarget apply(CodeUnitAccessTargetBuilder<JavaConstructor, ConstructorCallTarget> targetBuilder) {
                return DomainObjectCreationContext.createConstructorCallTarget(targetBuilder);
            }
        };

private static final Function<CodeUnitAccessTargetBuilder<JavaConstructor, ConstructorReferenceTarget>, ConstructorReferenceTarget> CREATE_CONSTRUCTOR_REFERENCE_TARGET =
        new Function<CodeUnitAccessTargetBuilder<JavaConstructor, ConstructorReferenceTarget>, ConstructorReferenceTarget>() {
            @Override
            public ConstructorReferenceTarget apply(CodeUnitAccessTargetBuilder<JavaConstructor, ConstructorReferenceTarget> targetBuilder) {
                return DomainObjectCreationContext.createConstructorReferenceTarget(targetBuilder);
            }
        };

private static final Function<CodeUnitAccessTargetBuilder<JavaMethod, MethodCallTarget>, MethodCallTarget> CREATE_METHOD_CALL_TARGET =
        new Function<CodeUnitAccessTargetBuilder<JavaMethod, MethodCallTarget>, MethodCallTarget>() {
            @Override
            public MethodCallTarget apply(CodeUnitAccessTargetBuilder<JavaMethod, MethodCallTarget> targetBuilder) {
                return DomainObjectCreationContext.createMethodCallTarget(targetBuilder);
            }
        };

private static final Function<CodeUnitAccessTargetBuilder<JavaMethod, MethodReferenceTarget>, MethodReferenceTarget> CREATE_METHOD_REFERENCE_TARGET =
        new Function<CodeUnitAccessTargetBuilder<JavaMethod, MethodReferenceTarget>, MethodReferenceTarget>() {
            @Override
            public MethodReferenceTarget apply(CodeUnitAccessTargetBuilder<JavaMethod, MethodReferenceTarget> targetBuilder) {
                return DomainObjectCreationContext.createMethodReferenceTarget(targetBuilder);
            }
        };
/**
 * A parameterized type as imported from signatures: the raw type plus its (non-empty) list
 * of actual type arguments.
 */
private static class ImportedParameterizedType implements JavaParameterizedType {
    private final JavaType type;
    private final List<JavaType> typeArguments;

    ImportedParameterizedType(JavaType type, List<JavaType> typeArguments) {
        // !isEmpty() instead of size() > 0 — same behavior, idiomatic emptiness check
        checkArgument(!typeArguments.isEmpty(),
                "Parameterized type cannot be created without type arguments. This is likely a bug.");
        this.type = type;
        this.typeArguments = typeArguments;
    }

    @Override
    public String getName() {
        return type.getName() + formatTypeArguments();
    }

    @Override
    public JavaClass toErasure() {
        // Type arguments are erased, so the erasure is simply the erasure of the raw type
        return type.toErasure();
    }

    @Override
    public List<JavaType> getActualTypeArguments() {
        return typeArguments;
    }

    @Override
    public String toString() {
        return getClass().getSimpleName() + "{" + getName() + '}';
    }

    // Formats the type arguments as "<A, B, ...>", canonicalizing array type names
    private String formatTypeArguments() {
        List<String> formatted = new ArrayList<>();
        for (JavaType typeArgument : typeArguments) {
            formatted.add(ensureCanonicalArrayTypeName(typeArgument.getName()));
        }
        return "<" + Joiner.on(", ").join(formatted) + ">";
    }
}
}
| |
/*
* Copyright 2014 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.manager.api.jpa;
import io.apiman.manager.api.beans.orgs.OrganizationBasedCompositeId;
import io.apiman.manager.api.beans.orgs.OrganizationBean;
import io.apiman.manager.api.beans.search.OrderByBean;
import io.apiman.manager.api.beans.search.PagingBean;
import io.apiman.manager.api.beans.search.SearchCriteriaBean;
import io.apiman.manager.api.beans.search.SearchCriteriaFilterBean;
import io.apiman.manager.api.beans.search.SearchCriteriaFilterOperator;
import io.apiman.manager.api.beans.search.SearchResultsBean;
import io.apiman.manager.api.core.exceptions.StorageException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.inject.Inject;
import javax.persistence.EntityExistsException;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import javax.persistence.RollbackException;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Path;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A base class that JPA storage impls can extend.
*
* @author eric.wittmann@redhat.com
*/
public abstract class AbstractJpaStorage {
// `final` added: the logger is never reassigned and shared statics should be constants
private static final Logger logger = LoggerFactory.getLogger(AbstractJpaStorage.class);

// Provides the EntityManagerFactory; field-injected (see @Inject)
@Inject
private IEntityManagerFactoryAccessor emfAccessor;

/**
 * @return the configured Hibernate dialect, read from the EntityManagerFactory properties
 */
public String getDialect() {
    return (String) emfAccessor.getEntityManagerFactory().getProperties().get("hibernate.dialect"); //$NON-NLS-1$
}

// Holds the EntityManager of the transaction active on the current thread (null when none).
// `final` added: the ThreadLocal holder itself is never replaced.
private static final ThreadLocal<EntityManager> activeEM = new ThreadLocal<>();

/**
 * @return true if a transaction is active on the current thread
 */
public static boolean isTxActive() {
    return activeEM.get() != null;
}
/**
 * Default constructor; nothing to initialize here, dependencies are field-injected.
 */
public AbstractJpaStorage() {
}
/**
 * Begins a new transaction on a fresh {@link EntityManager} and binds it to the
 * current thread.
 *
 * @throws StorageException if a transaction is already active on this thread
 * @see io.apiman.manager.api.core.IStorage#beginTx()
 */
protected void beginTx() throws StorageException {
    if (activeEM.get() != null) {
        throw new StorageException("Transaction already active."); //$NON-NLS-1$
    }
    EntityManager entityManager = emfAccessor.getEntityManagerFactory().createEntityManager();
    try {
        entityManager.getTransaction().begin();
    } catch (RuntimeException e) {
        // Don't leak the EntityManager if begin() fails; rethrow the original exception
        entityManager.close();
        throw e;
    }
    // Only publish the EntityManager to the thread once the transaction is actually active
    activeEM.set(entityManager);
}
/**
 * Commits the transaction active on the current thread, then closes the
 * {@link EntityManager} and unbinds it from the thread — even when the commit fails
 * (previously a failed commit left the closed-over EntityManager bound to the thread,
 * leaking it and making every later beginTx() on this thread fail with
 * "Transaction already active").
 *
 * @throws StorageException if no transaction is active or the commit fails
 * @see io.apiman.manager.api.core.IStorage#commitTx()
 */
protected void commitTx() throws StorageException {
    EntityManager entityManager = activeEM.get();
    if (entityManager == null) {
        throw new StorageException("Transaction not active."); //$NON-NLS-1$
    }
    try {
        entityManager.getTransaction().commit();
    } catch (EntityExistsException e) {
        throw new StorageException(e);
    } catch (RollbackException e) {
        logger.error(e.getMessage(), e);
        throw new StorageException(e);
    } catch (Throwable t) {
        logger.error(t.getMessage(), t);
        throw new StorageException(t);
    } finally {
        // Always release the EntityManager and clear the thread binding
        entityManager.close();
        activeEM.set(null);
    }
}
/**
 * Rolls back the transaction active on the current thread; the EntityManager is always
 * closed and unbound from the thread, even if the rollback itself fails.
 *
 * @throws RuntimeException if no transaction is active on this thread
 * @see io.apiman.manager.api.core.IStorage#rollbackTx()
 */
protected void rollbackTx() {
    if (activeEM.get() == null) {
        throw new RuntimeException("Transaction not active."); //$NON-NLS-1$
    }
    try {
        // "Quietly": rollback failures are handled/logged inside JpaUtil, not propagated
        JpaUtil.rollbackQuietly(activeEM.get());
    } finally {
        activeEM.get().close();
        activeEM.set(null);
    }
}
/**
 * @return the entity manager bound to the current thread's active transaction
 * @throws StorageException if no transaction is active on this thread
 */
protected EntityManager getActiveEntityManager() throws StorageException {
    EntityManager entityManager = activeEM.get();
    if (entityManager == null) {
        throw new StorageException("Transaction not active."); //$NON-NLS-1$
    }
    return entityManager;
}
/**
 * Persists a new bean within the current transaction; a {@code null} bean is silently ignored.
 *
 * @param bean the bean to create
 * @throws StorageException if no transaction is active or the persist fails
 */
public <T> void create(T bean) throws StorageException {
    if (bean == null) {
        return;
    }
    EntityManager entityManager = getActiveEntityManager();
    try {
        entityManager.persist(bean);
    } catch (Throwable t) {
        logger.error(t.getMessage(), t);
        throw new StorageException(t);
    }
}
/**
 * Updates a bean within the current transaction. If the bean is already managed by the
 * entity manager, nothing needs to be done (changes are tracked); otherwise it is merged
 * into the persistence context.
 *
 * @param bean the bean to update
 * @throws StorageException if no transaction is active or the merge fails
 */
public <T> void update(T bean) throws StorageException {
    EntityManager entityManager = getActiveEntityManager();
    try {
        if (!entityManager.contains(bean)) {
            entityManager.merge(bean);
        }
    } catch (Throwable t) {
        logger.error(t.getMessage(), t);
        throw new StorageException(t);
    }
}
/**
 * Delete using bean
 *
 * @param bean the (managed) bean to delete
 * @throws StorageException if no transaction is active or the remove fails
 */
public <T> void delete(T bean) throws StorageException {
    EntityManager entityManager = getActiveEntityManager();
    try {
        entityManager.remove(bean);
    } catch (Throwable t) {
        logger.error(t.getMessage(), t);
        throw new StorageException(t);
    }
}
/**
 * Looks up an entity of the given type by its numeric primary key.
 *
 * @param id identity key
 * @param type class of type T
 * @return the matching instance of type T, or null if none exists
 * @throws StorageException if no transaction is active or the lookup fails
 */
public <T> T get(Long id, Class<T> type) throws StorageException {
    EntityManager entityManager = getActiveEntityManager();
    try {
        return entityManager.find(type, id);
    } catch (Throwable t) {
        logger.error(t.getMessage(), t);
        throw new StorageException(t);
    }
}
/**
 * Looks up an entity of the given type by its String primary key.
 *
 * @param id identity key
 * @param type class of type T
 * @return the matching instance of type T, or null if none exists
 * @throws StorageException if no transaction is active or the lookup fails
 */
public <T> T get(String id, Class<T> type) throws StorageException {
    EntityManager entityManager = getActiveEntityManager();
    try {
        return entityManager.find(type, id);
    } catch (Throwable t) {
        logger.error(t.getMessage(), t);
        throw new StorageException(t);
    }
}
/**
 * @return an iterator over all results of the given query, mapped to the given entity type
 * @throws StorageException if a storage problem occurs while iterating
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
protected <T> Iterator<T> getAll(Class<T> type, Query query) throws StorageException {
    return new EntityIterator(type, query);
}
/**
 * Looks up an entity with an organization-scoped composite key.
 *
 * @param organizationId org id
 * @param id identity
 * @param type class of type T
 * @return the matching instance of type T, or null if none exists
 * @throws StorageException if no transaction is active or the lookup fails
 */
public <T> T get(String organizationId, String id, Class<T> type) throws StorageException {
    T rval;
    EntityManager entityManager = getActiveEntityManager();
    try {
        // NOTE(review): if the organization does not exist, orgBean is null and the composite
        // key is built with a null org — presumably find(..) then fails or returns null; confirm
        OrganizationBean orgBean = entityManager.find(OrganizationBean.class, organizationId);
        Object key = new OrganizationBasedCompositeId(orgBean, id);
        rval = entityManager.find(type, key);
    } catch (Throwable t) {
        logger.error(t.getMessage(), t);
        throw new StorageException(t);
    }
    return rval;
}
/**
 * Get a page of entities based on the provided criteria and entity type.
 * Defaults to page 1 with a page size of 20 when the criteria carry no paging information.
 *
 * @param criteria the search criteria (filters, paging)
 * @param type the entity type to query
 * @return the matching page of beans plus the total number of matches
 * @throws StorageException if no transaction is active or the query fails
 */
protected <T> SearchResultsBean<T> find(SearchCriteriaBean criteria, Class<T> type) throws StorageException {
    SearchResultsBean<T> results = new SearchResultsBean<>();
    EntityManager entityManager = getActiveEntityManager();
    try {
        // Set some default in the case that paging information was not included in the request.
        PagingBean paging = criteria.getPaging();
        if (paging == null) {
            paging = new PagingBean();
            paging.setPage(1);
            paging.setPageSize(20);
        }
        int page = paging.getPage();
        int pageSize = paging.getPageSize();
        int start = (page - 1) * pageSize;
        CriteriaBuilder builder = entityManager.getCriteriaBuilder();
        CriteriaQuery<T> criteriaQuery = builder.createQuery(type);
        Root<T> from = criteriaQuery.from(type);
        applySearchCriteriaToQuery(criteria, builder, criteriaQuery, from, false);
        TypedQuery<T> typedQuery = entityManager.createQuery(criteriaQuery);
        typedQuery.setFirstResult(start);
        // Fetch one extra row so we can tell whether more pages exist without a second query
        typedQuery.setMaxResults(pageSize+1);
        boolean hasMore = false;
        // Now query for the actual results
        List<T> resultList = typedQuery.getResultList();
        // Check if we got back more than we actually needed.
        if (resultList.size() > pageSize) {
            // Drop the extra sentinel row before returning the page
            resultList.remove(resultList.size() - 1);
            hasMore = true;
        }
        // If there are more results than we needed, then we will need to do another
        // query to determine how many rows there are in total
        int totalSize = start + resultList.size();
        if (hasMore) {
            totalSize = executeCountQuery(criteria, entityManager, type);
        }
        results.setTotalSize(totalSize);
        results.setBeans(resultList);
        return results;
    } catch (Throwable t) {
        logger.error(t.getMessage(), t);
        throw new StorageException(t);
    }
}
/**
 * Gets a count of the number of rows that would be returned by the search.
 * @param criteria the search criteria whose filters are applied to the count
 * @param entityManager the active entity manager to query with
 * @param type the entity class being counted
 * @return the total number of rows matching the criteria
 */
protected <T> int executeCountQuery(SearchCriteriaBean criteria, EntityManager entityManager, Class<T> type) {
    CriteriaBuilder builder = entityManager.getCriteriaBuilder();
    CriteriaQuery<Long> countQuery = builder.createQuery(Long.class);
    Root<T> root = countQuery.from(type);
    countQuery.select(builder.count(root));
    // countOnly=true suppresses the order-by clause, which is meaningless for COUNT.
    applySearchCriteriaToQuery(criteria, builder, countQuery, root, true);
    return entityManager.createQuery(countQuery).getSingleResult().intValue();
}
/**
 * Applies the criteria found in the {@link SearchCriteriaBean} to the JPA query.
 * Filters with an operator/type combination that cannot be translated are
 * silently skipped (matching the original behavior).
 * @param criteria the search criteria to translate
 * @param builder the criteria builder used to create predicates
 * @param query the query being built (either a result or a count query)
 * @param from the query root against which filter property paths are resolved
 * @param countOnly true when building a COUNT query; suppresses the order-by clause
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
protected <T> void applySearchCriteriaToQuery(SearchCriteriaBean criteria, CriteriaBuilder builder,
        CriteriaQuery<?> query, Root<T> from, boolean countOnly) {
    List<SearchCriteriaFilterBean> filters = criteria.getFilters();
    if (filters != null && !filters.isEmpty()) {
        List<Predicate> predicates = new ArrayList<>();
        for (SearchCriteriaFilterBean filter : filters) {
            if (filter.getOperator() == SearchCriteriaFilterOperator.eq) {
                Path<Object> path = from.get(filter.getName());
                Class<?> pathc = path.getJavaType();
                if (pathc.isAssignableFrom(String.class)) {
                    predicates.add(builder.equal(path, filter.getValue()));
                } else if (pathc.isEnum()) {
                    // String filter value is resolved to the enum constant of the property's type.
                    predicates.add(builder.equal(path, Enum.valueOf((Class) pathc, filter.getValue())));
                }
            } else if (filter.getOperator() == SearchCriteriaFilterOperator.bool_eq) {
                predicates.add(builder.equal(from.<Boolean>get(filter.getName()), Boolean.valueOf(filter.getValue())));
            } else if (filter.getOperator() == SearchCriteriaFilterOperator.gt) {
                // Long.valueOf instead of the deprecated boxing constructor `new Long(...)`.
                predicates.add(builder.greaterThan(from.<Long>get(filter.getName()), Long.valueOf(filter.getValue())));
            } else if (filter.getOperator() == SearchCriteriaFilterOperator.gte) {
                predicates.add(builder.greaterThanOrEqualTo(from.<Long>get(filter.getName()), Long.valueOf(filter.getValue())));
            } else if (filter.getOperator() == SearchCriteriaFilterOperator.lt) {
                predicates.add(builder.lessThan(from.<Long>get(filter.getName()), Long.valueOf(filter.getValue())));
            } else if (filter.getOperator() == SearchCriteriaFilterOperator.lte) {
                predicates.add(builder.lessThanOrEqualTo(from.<Long>get(filter.getName()), Long.valueOf(filter.getValue())));
            } else if (filter.getOperator() == SearchCriteriaFilterOperator.neq) {
                predicates.add(builder.notEqual(from.get(filter.getName()), filter.getValue()));
            } else if (filter.getOperator() == SearchCriteriaFilterOperator.like) {
                // Case-insensitive match; '*' in the filter value acts as the SQL '%' wildcard.
                predicates.add(builder.like(builder.upper(from.<String>get(filter.getName())), filter.getValue().toUpperCase().replace('*', '%')));
            }
        }
        query.where(predicates.toArray(new Predicate[0]));
    }
    OrderByBean orderBy = criteria.getOrderBy();
    if (orderBy != null && !countOnly) {
        if (orderBy.isAscending()) {
            query.orderBy(builder.asc(from.get(orderBy.getName())));
        } else {
            query.orderBy(builder.desc(from.get(orderBy.getName())));
        }
    }
}
/**
 * Returns the accessor used to obtain the JPA entity manager factory.
 * @return the emfAccessor
 */
public IEntityManagerFactoryAccessor getEmfAccessor() {
    return this.emfAccessor;
}
/**
 * Sets the accessor used to obtain the JPA entity manager factory.
 * @param emfAccessor the emfAccessor to set
 */
public void setEmfAccessor(IEntityManagerFactoryAccessor emfAccessor) {
    this.emfAccessor = emfAccessor;
}
/**
 * Allows iterating over all entities of a given type, pulling them from the
 * database in pages of {@code pageSize} rows rather than loading everything
 * into memory at once.
 * @author eric.wittmann@redhat.com
 */
private class EntityIterator<T> implements Iterator<T> {

    private Query query;
    // Absolute offset of the next page to fetch (also used as setFirstResult).
    private int pageIndex = 0;
    private int pageSize = 100;
    // Index of the next element to return within the current page.
    private int resultIndex;
    private List<T> results;

    /**
     * Constructor.  Eagerly fetches the first page.
     * @param type the entity type (kept for API symmetry; not used directly)
     * @param query the query producing the entities to iterate
     * @throws StorageException if a storage problem occurs while storing a bean.
     */
    public EntityIterator(Class<T> type, Query query) throws StorageException {
        this.query = query;
        fetch();
    }

    /**
     * Fetches the next page of results.  If the previous page came back short
     * (fewer rows than pageSize) there is nothing more to read, so the query
     * is skipped and an empty page is installed instead.
     */
    @SuppressWarnings("unchecked")
    private void fetch() {
        if (results != null && results.size() < pageSize) {
            results = new ArrayList<>();
        } else {
            query.setFirstResult(pageIndex);
            query.setMaxResults(pageSize);
            results = query.getResultList();
        }
        resultIndex = 0;
        pageIndex += pageSize;
    }

    /**
     * @see java.util.Iterator#hasNext()
     */
    @Override
    public boolean hasNext() {
        return resultIndex < results.size();
    }

    /**
     * @see java.util.Iterator#next()
     */
    @Override
    public T next() {
        if (!hasNext()) {
            // Iterator contract: signal exhaustion explicitly rather than
            // letting results.get() throw IndexOutOfBoundsException.
            throw new java.util.NoSuchElementException();
        }
        T rval = results.get(resultIndex++);
        if (resultIndex >= results.size()) {
            fetch();
        }
        return rval;
    }

    /**
     * @see java.util.Iterator#remove()
     */
    @Override
    public void remove() {
        throw new UnsupportedOperationException();
    }
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.chat.v1.model;
/**
 * A UI element contains a key (label) and a value (content). And this element may also contain some
 * actions such as onclick button.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Google Chat API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class KeyValue extends com.google.api.client.json.GenericJson {

  /** The text of the bottom label. Formatted text supported. May be {@code null}. */
  @com.google.api.client.util.Key
  private String bottomLabel;

  /** A button that can be clicked to trigger an action. May be {@code null}. */
  @com.google.api.client.util.Key
  private Button button;

  /** The text of the content. Formatted text supported and always required. May be {@code null}. */
  @com.google.api.client.util.Key
  private String content;

  /** If the content should be multiline. May be {@code null}. */
  @com.google.api.client.util.Key
  private Boolean contentMultiline;

  /** An enum value replaced by the Chat API with the corresponding icon image. May be {@code null}. */
  @com.google.api.client.util.Key
  private String icon;

  /** The icon specified by a URL. May be {@code null}. */
  @com.google.api.client.util.Key
  private String iconUrl;

  /** The onclick action; only top label, bottom label and content region are clickable. May be {@code null}. */
  @com.google.api.client.util.Key
  private OnClick onClick;

  /** The text of the top label. Formatted text supported. May be {@code null}. */
  @com.google.api.client.util.Key
  private String topLabel;

  /**
   * The text of the bottom label. Formatted text supported.
   * @return value or {@code null} for none
   */
  public String getBottomLabel() {
    return this.bottomLabel;
  }

  /**
   * The text of the bottom label. Formatted text supported.
   * @param bottomLabel bottomLabel or {@code null} for none
   * @return this object, for call chaining
   */
  public KeyValue setBottomLabel(String bottomLabel) {
    this.bottomLabel = bottomLabel;
    return this;
  }

  /**
   * A button that can be clicked to trigger an action.
   * @return value or {@code null} for none
   */
  public Button getButton() {
    return this.button;
  }

  /**
   * A button that can be clicked to trigger an action.
   * @param button button or {@code null} for none
   * @return this object, for call chaining
   */
  public KeyValue setButton(Button button) {
    this.button = button;
    return this;
  }

  /**
   * The text of the content. Formatted text supported and always required.
   * @return value or {@code null} for none
   */
  public String getContent() {
    return this.content;
  }

  /**
   * The text of the content. Formatted text supported and always required.
   * @param content content or {@code null} for none
   * @return this object, for call chaining
   */
  public KeyValue setContent(String content) {
    this.content = content;
    return this;
  }

  /**
   * If the content should be multiline.
   * @return value or {@code null} for none
   */
  public Boolean getContentMultiline() {
    return this.contentMultiline;
  }

  /**
   * If the content should be multiline.
   * @param contentMultiline contentMultiline or {@code null} for none
   * @return this object, for call chaining
   */
  public KeyValue setContentMultiline(Boolean contentMultiline) {
    this.contentMultiline = contentMultiline;
    return this;
  }

  /**
   * An enum value that will be replaced by the Chat API with the corresponding icon image.
   * @return value or {@code null} for none
   */
  public String getIcon() {
    return this.icon;
  }

  /**
   * An enum value that will be replaced by the Chat API with the corresponding icon image.
   * @param icon icon or {@code null} for none
   * @return this object, for call chaining
   */
  public KeyValue setIcon(String icon) {
    this.icon = icon;
    return this;
  }

  /**
   * The icon specified by a URL.
   * @return value or {@code null} for none
   */
  public String getIconUrl() {
    return this.iconUrl;
  }

  /**
   * The icon specified by a URL.
   * @param iconUrl iconUrl or {@code null} for none
   * @return this object, for call chaining
   */
  public KeyValue setIconUrl(String iconUrl) {
    this.iconUrl = iconUrl;
    return this;
  }

  /**
   * The onclick action. Only the top label, bottom label and content region are clickable.
   * @return value or {@code null} for none
   */
  public OnClick getOnClick() {
    return this.onClick;
  }

  /**
   * The onclick action. Only the top label, bottom label and content region are clickable.
   * @param onClick onClick or {@code null} for none
   * @return this object, for call chaining
   */
  public KeyValue setOnClick(OnClick onClick) {
    this.onClick = onClick;
    return this;
  }

  /**
   * The text of the top label. Formatted text supported.
   * @return value or {@code null} for none
   */
  public String getTopLabel() {
    return this.topLabel;
  }

  /**
   * The text of the top label. Formatted text supported.
   * @param topLabel topLabel or {@code null} for none
   * @return this object, for call chaining
   */
  public KeyValue setTopLabel(String topLabel) {
    this.topLabel = topLabel;
    return this;
  }

  @Override
  public KeyValue set(String fieldName, Object value) {
    return (KeyValue) super.set(fieldName, value);
  }

  @Override
  public KeyValue clone() {
    return (KeyValue) super.clone();
  }
}
| |
/*
* Copyright (c) 2014 Ngewi Fet <ngewif@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gnucash.android.ui.transaction.dialog;
import android.database.Cursor;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.widget.SimpleCursorAdapter;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import org.gnucash.android.R;
import org.gnucash.android.db.AccountsDbAdapter;
import org.gnucash.android.db.DatabaseSchema;
import org.gnucash.android.db.SplitsDbAdapter;
import org.gnucash.android.model.AccountType;
import org.gnucash.android.model.Money;
import org.gnucash.android.model.Split;
import org.gnucash.android.model.Transaction;
import org.gnucash.android.model.TransactionType;
import org.gnucash.android.ui.UxArgument;
import org.gnucash.android.ui.transaction.TransactionFormFragment;
import org.gnucash.android.ui.transaction.TransactionsActivity;
import org.gnucash.android.ui.util.AmountInputFormatter;
import org.gnucash.android.ui.util.TransactionTypeToggleButton;
import org.gnucash.android.util.QualifiedAccountNameCursorAdapter;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Currency;
import java.util.List;
import java.util.UUID;
/**
 * Dialog for editing the splits in a transaction.
 *
 * <p>The dialog is opened from {@link TransactionFormFragment} (its target fragment),
 * renders one row per split, keeps a running imbalance total, and reports the edited
 * split list (plus UIDs of removed splits) back to the caller on save.</p>
 *
 * @author Ngewi Fet <ngewif@gmail.com>
 */
public class SplitEditorDialogFragment extends DialogFragment {

    private LinearLayout mSplitsLinearLayout;
    private TextView mImbalanceTextView;
    private Button mAddSplit;
    private Button mSaveButton;
    private Button mCancelButton;

    private AccountsDbAdapter mAccountsDbAdapter;
    private SplitsDbAdapter mSplitsDbAdapter;

    // Cursor over candidate transfer accounts; owned by this dialog and closed in onDestroy()
    private Cursor mCursor;
    private SimpleCursorAdapter mCursorAdapter;
    private List<View> mSplitItemViewList;
    private String mAccountUID;

    private BalanceTextWatcher mBalanceUpdater = new BalanceTextWatcher();
    private BigDecimal mBaseAmount = BigDecimal.ZERO;
    // UIDs of splits deleted in this dialog, reported back so the caller can remove them too
    private List<String> mRemovedSplitUIDs = new ArrayList<>();
    // True when the existing splits span more than one currency; editing is then disabled
    private boolean mMultiCurrency = false;

    /**
     * Create and return a new instance of the fragment with the appropriate paramenters
     * @param baseAmountString String with base amount which is being split
     * @return New instance of SplitEditorDialogFragment
     */
    public static SplitEditorDialogFragment newInstance(String baseAmountString){
        SplitEditorDialogFragment fragment = new SplitEditorDialogFragment();
        Bundle args = new Bundle();
        args.putString(UxArgument.AMOUNT_STRING, baseAmountString);
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.dialog_split_editor, container, false);
        mSplitsLinearLayout = (LinearLayout) view.findViewById(R.id.split_list_layout);
        mImbalanceTextView = (TextView) view.findViewById(R.id.imbalance_textview);
        mAddSplit = (Button) view.findViewById(R.id.btn_add_split);
        mSaveButton = (Button) view.findViewById(R.id.btn_save);
        mCancelButton = (Button) view.findViewById(R.id.btn_cancel);
        return view;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);

        getDialog().getWindow().setLayout(WindowManager.LayoutParams.MATCH_PARENT,
                WindowManager.LayoutParams.MATCH_PARENT);
        getDialog().setTitle(R.string.title_transaction_splits);

        mSplitItemViewList = new ArrayList<>();
        mSplitsDbAdapter = SplitsDbAdapter.getInstance();

        //we are editing splits for a new transaction.
        // But the user may have already created some splits before. Let's check
        List<Split> splitList = ((TransactionFormFragment) getTargetFragment()).getSplitList();
        {
            // Detect whether the pre-existing splits mix currencies; if they do, the
            // dialog becomes read-only (mMultiCurrency also changes the account query).
            Currency currency = null;
            for (Split split : splitList) {
                if (currency == null) {
                    currency = split.getAmount().getCurrency();
                } else if (!currency.equals(split.getAmount().getCurrency())) {
                    // Compare with equals() rather than reference identity.
                    mMultiCurrency = true;
                }
            }
        }

        initArgs();
        if (!splitList.isEmpty()) {
            //aha! there are some splits. Let's load those instead
            loadSplitViews(splitList);
        } else {
            // No splits yet: seed the editor with one non-removable split for the
            // base amount, bound to the current account.
            final Currency currency = Currency.getInstance(mAccountsDbAdapter.getAccountCurrencyCode(mAccountUID));
            Split split = new Split(new Money(mBaseAmount, currency), mAccountUID);
            AccountType accountType = mAccountsDbAdapter.getAccountType(mAccountUID);
            TransactionType transactionType = Transaction.getTypeForBalance(accountType, mBaseAmount.signum() < 0);
            split.setType(transactionType);
            View view = addSplitView(split);
            view.findViewById(R.id.input_accounts_spinner).setEnabled(false);
            view.findViewById(R.id.btn_remove_split).setVisibility(View.GONE);
        }

        setListeners();
        updateTotal();
    }

    /**
     * Adds a row for every existing split; disables editing when the splits
     * span multiple currencies.
     * @param splitList splits to display
     */
    private void loadSplitViews(List<Split> splitList) {
        for (Split split : splitList) {
            addSplitView(split);
        }
        if (mMultiCurrency) {
            enableAllControls(false);
        }
    }

    /**
     * Enables or disables every input control in every split row.
     * @param b true to enable, false to disable
     */
    private void enableAllControls(boolean b) {
        for (View splitView : mSplitItemViewList) {
            EditText splitMemoEditText = (EditText) splitView.findViewById(R.id.input_split_memo);
            final EditText splitAmountEditText = (EditText) splitView.findViewById(R.id.input_split_amount);
            ImageButton removeSplitButton = (ImageButton) splitView.findViewById(R.id.btn_remove_split);
            Spinner accountsSpinner = (Spinner) splitView.findViewById(R.id.input_accounts_spinner);
            final TextView splitCurrencyTextView = (TextView) splitView.findViewById(R.id.split_currency_symbol);
            final TextView splitUidTextView = (TextView) splitView.findViewById(R.id.split_uid);
            final TransactionTypeToggleButton splitTypeButton = (TransactionTypeToggleButton) splitView.findViewById(R.id.btn_split_type);

            splitMemoEditText.setEnabled(b);
            splitAmountEditText.setEnabled(b);
            removeSplitButton.setEnabled(b);
            accountsSpinner.setEnabled(b);
            splitCurrencyTextView.setEnabled(b);
            splitUidTextView.setEnabled(b);
            splitTypeButton.setEnabled(b);
        }
    }

    /**
     * Add a split view and initialize it with <code>split</code>
     * @param split Split to initialize the contents to
     * @return Returns the split view which was added
     */
    private View addSplitView(Split split){
        LayoutInflater layoutInflater = getActivity().getLayoutInflater();
        View splitView = layoutInflater.inflate(R.layout.item_split_entry, mSplitsLinearLayout, false);
        // Newest rows go on top of the list.
        mSplitsLinearLayout.addView(splitView,0);
        bindSplitView(splitView, split);
        mSplitItemViewList.add(splitView);
        return splitView;
    }

    /**
     * Extracts arguments passed to the view and initializes necessary adapters and cursors
     */
    private void initArgs() {
        mAccountsDbAdapter = AccountsDbAdapter.getInstance();

        Bundle args = getArguments();
        mAccountUID = ((TransactionsActivity) getActivity()).getCurrentAccountUID();
        mBaseAmount = new BigDecimal(args.getString(UxArgument.AMOUNT_STRING));

        // Candidate transfer accounts: same currency (unless multi-currency mode),
        // not the root account, and not a placeholder.  The UID filter is built from
        // internally-generated account UIDs, not user input.
        String conditions = "(" //+ AccountEntry._ID + " != " + mAccountId + " AND "
                + (mMultiCurrency ? "" : (DatabaseSchema.AccountEntry.COLUMN_CURRENCY + " = ? AND "))
                + DatabaseSchema.AccountEntry.COLUMN_UID + " != '" + mAccountsDbAdapter.getOrCreateGnuCashRootAccountUID() + "' AND "
                + DatabaseSchema.AccountEntry.COLUMN_PLACEHOLDER + " = 0"
                + ")";
        mCursor = mAccountsDbAdapter.fetchAccountsOrderedByFullName(conditions,
                mMultiCurrency ? new String[]{"" + mAccountsDbAdapter.getOrCreateGnuCashRootAccountUID()} :
                        new String[]{mAccountsDbAdapter.getCurrencyCode(mAccountUID)}
        );
    }

    /**
     * Binds the different UI elements of an inflated list view to corresponding actions
     * @param splitView Split item view
     * @param split {@link org.gnucash.android.model.Split} to use to populate the view; may be null for a blank row
     */
    private void bindSplitView(final View splitView, Split split){
        EditText splitMemoEditText = (EditText) splitView.findViewById(R.id.input_split_memo);
        final EditText splitAmountEditText = (EditText) splitView.findViewById(R.id.input_split_amount);
        ImageButton removeSplitButton = (ImageButton) splitView.findViewById(R.id.btn_remove_split);
        Spinner accountsSpinner = (Spinner) splitView.findViewById(R.id.input_accounts_spinner);
        final TextView splitCurrencyTextView = (TextView) splitView.findViewById(R.id.split_currency_symbol);
        final TextView splitUidTextView = (TextView) splitView.findViewById(R.id.split_uid);
        final TransactionTypeToggleButton splitTypeButton = (TransactionTypeToggleButton) splitView.findViewById(R.id.btn_split_type);

        splitAmountEditText.addTextChangedListener(new AmountInputFormatter(splitAmountEditText));

        removeSplitButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Remember the UID so the caller can delete the split from the db.
                mRemovedSplitUIDs.add(splitUidTextView.getText().toString());
                mSplitsLinearLayout.removeView(splitView);
                mSplitItemViewList.remove(splitView);
                updateTotal();
            }
        });

        updateTransferAccountsList(accountsSpinner);
        accountsSpinner.setOnItemSelectedListener(new TypeButtonLabelUpdater(splitTypeButton));

        Currency accountCurrency = Currency.getInstance(mAccountsDbAdapter.getCurrencyCode(
                split == null ? mAccountUID : split.getAccountUID()));
        splitCurrencyTextView.setText(accountCurrency.getSymbol());
        splitTypeButton.setAmountFormattingListener(splitAmountEditText, splitCurrencyTextView);
        splitTypeButton.setChecked(mBaseAmount.signum() > 0);
        // Default UID for a brand-new split; overwritten below if an existing split is bound.
        splitUidTextView.setText(UUID.randomUUID().toString());

        if (split != null) {
            splitAmountEditText.setText(split.getAmount().toPlainString());
            splitMemoEditText.setText(split.getMemo());
            splitUidTextView.setText(split.getUID());
            String splitAccountUID = split.getAccountUID();
            setSelectedTransferAccount(mAccountsDbAdapter.getID(splitAccountUID), accountsSpinner);
            splitTypeButton.setAccountType(mAccountsDbAdapter.getAccountType(splitAccountUID));
            splitTypeButton.setChecked(split.getType());
        }

        //put these balance update triggers last last so as to avoid computing while still loading
        splitAmountEditText.addTextChangedListener(mBalanceUpdater);
        splitTypeButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                updateTotal();
            }
        });
    }

    /**
     * Updates the spinner to the selected transfer account
     * @param accountId Database ID of the transfer account
     */
    private void setSelectedTransferAccount(long accountId, final Spinner accountsSpinner){
        for (int pos = 0; pos < mCursorAdapter.getCount(); pos++) {
            if (mCursorAdapter.getItemId(pos) == accountId){
                accountsSpinner.setSelection(pos);
                break;
            }
        }
    }

    /**
     * Updates the list of possible transfer accounts.
     * Only accounts with the same currency can be transferred to
     */
    private void updateTransferAccountsList(Spinner transferAccountSpinner){
        mCursorAdapter = new QualifiedAccountNameCursorAdapter(getActivity(),
                android.R.layout.simple_spinner_item, mCursor);
        mCursorAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        transferAccountSpinner.setAdapter(mCursorAdapter);
    }

    /**
     * Attaches listeners for the buttons of the dialog
     */
    protected void setListeners(){
        mCancelButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                dismiss();
            }
        });

        mSaveButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (mMultiCurrency) {
                    // Multi-currency transactions cannot be edited here; inform and bail.
                    Toast.makeText(getActivity(), R.string.toast_error_edit_multi_currency_transaction, Toast.LENGTH_LONG).show();
                }
                else {
                    List<Split> splitList = extractSplitsFromView();
                    ((TransactionFormFragment) getTargetFragment()).setSplitList(splitList, mRemovedSplitUIDs);
                }
                dismiss();
            }
        });

        mAddSplit.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (mMultiCurrency) {
                    Toast.makeText(getActivity(), R.string.toast_error_edit_multi_currency_transaction, Toast.LENGTH_LONG).show();
                }
                else {
                    addSplitView(null);
                }
            }
        });
    }

    /**
     * Extracts the input from the views and builds {@link org.gnucash.android.model.Split}s to correspond to the input.
     * @return List of {@link org.gnucash.android.model.Split}s represented in the view
     */
    private List<Split> extractSplitsFromView(){
        List<Split> splitList = new ArrayList<>();
        for (View splitView : mSplitItemViewList) {
            EditText splitMemoEditText = (EditText) splitView.findViewById(R.id.input_split_memo);
            EditText splitAmountEditText = (EditText) splitView.findViewById(R.id.input_split_amount);
            Spinner accountsSpinner = (Spinner) splitView.findViewById(R.id.input_accounts_spinner);
            TextView splitUidTextView = (TextView) splitView.findViewById(R.id.split_uid);
            TransactionTypeToggleButton splitTypeButton = (TransactionTypeToggleButton) splitView.findViewById(R.id.btn_split_type);

            BigDecimal amountBigDecimal = TransactionFormFragment.parseInputToDecimal(splitAmountEditText.getText().toString());
            String accountUID = mAccountsDbAdapter.getUID(accountsSpinner.getSelectedItemId());
            String currencyCode = mAccountsDbAdapter.getCurrencyCode(accountUID);
            Money amount = new Money(amountBigDecimal, Currency.getInstance(currencyCode));

            Split split = new Split(amount, accountUID);
            split.setMemo(splitMemoEditText.getText().toString());
            split.setType(splitTypeButton.getTransactionType());
            split.setUID(splitUidTextView.getText().toString().trim());
            splitList.add(split);
        }
        return splitList;
    }

    /**
     * Updates the displayed total for the transaction.
     * Computes the total of the splits, the unassigned balance and the split sum
     */
    private void updateTotal(){
        List<Split> splitList = extractSplitsFromView();
        String currencyCode = mAccountsDbAdapter.getCurrencyCode(mAccountUID);
        Money splitSum = Money.createZeroInstance(currencyCode);
        if (!mMultiCurrency) {
            // DEBITs subtract, CREDITs add; the sum shown is the imbalance.
            for (Split split : splitList) {
                Money amount = split.getAmount().absolute();
                if (split.getType() == TransactionType.DEBIT)
                    splitSum = splitSum.subtract(amount);
                else
                    splitSum = splitSum.add(amount);
            }
        }
        TransactionsActivity.displayBalance(mImbalanceTextView, splitSum);
    }

    @Override
    public void onDestroy() {
        // Close the accounts cursor opened in initArgs() to avoid a cursor leak.
        if (mCursor != null) {
            mCursor.close();
        }
        super.onDestroy();
    }

    /**
     * Updates the displayed balance of the accounts when the amount of a split is changed
     */
    private class BalanceTextWatcher implements TextWatcher {

        @Override
        public void beforeTextChanged(CharSequence charSequence, int i, int i2, int i3) {
            //nothing to see here, move along
        }

        @Override
        public void onTextChanged(CharSequence charSequence, int i, int i2, int i3) {
            //nothing to see here, move along
        }

        @Override
        public void afterTextChanged(Editable editable) {
            updateTotal();
        }
    }

    /**
     * Updates the account type for the TransactionTypeButton when the selected account is changed in the spinner
     */
    private class TypeButtonLabelUpdater implements AdapterView.OnItemSelectedListener {
        TransactionTypeToggleButton mTypeToggleButton;

        public TypeButtonLabelUpdater(TransactionTypeToggleButton typeToggleButton){
            this.mTypeToggleButton = typeToggleButton;
        }

        @Override
        public void onItemSelected(AdapterView<?> parentView, View selectedItemView, int position, long id) {
            AccountType accountType = mAccountsDbAdapter.getAccountType(id);
            mTypeToggleButton.setAccountType(accountType);
        }

        @Override
        public void onNothingSelected(AdapterView<?> adapterView) {
            //nothing to see here, move along
        }
    }
}
| |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.keyguard;
import android.animation.Animator;
import android.animation.Animator.AnimatorListener;
import android.animation.AnimatorListenerAdapter;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import android.view.View;
public class KeyguardViewStateManager implements
SlidingChallengeLayout.OnChallengeScrolledListener,
ChallengeLayout.OnBouncerStateChangedListener {
    private static final String TAG = "KeyguardViewStateManager";

    private KeyguardWidgetPager mKeyguardWidgetPager;
    private ChallengeLayout mChallengeLayout;
    private KeyguardHostView mKeyguardHostView;
    // Scratch arrays reused across calls to avoid per-call allocations.
    private int[] mTmpPoint = new int[2];
    private int[] mTmpLoc = new int[2];

    private KeyguardSecurityView mKeyguardSecurityContainer;
    private static final int SCREEN_ON_HINT_DURATION = 1000;
    private static final int SCREEN_ON_RING_HINT_DELAY = 300;
    private static final boolean SHOW_INITIAL_PAGE_HINTS = false;

    // Handler bound to the looper of the thread that constructed this manager.
    Handler mMainQueue = new Handler(Looper.myLooper());

    int mLastScrollState = SlidingChallengeLayout.SCROLL_STATE_IDLE;

    // Paged view state
    private int mPageListeningToSlider = -1;
    private int mCurrentPage = -1;
    private int mPageIndexOnPageBeginMoving = -1;

    int mChallengeTop = 0;

    // Pauses the security container once its fade-out animation finishes.
    private final AnimatorListener mPauseListener = new AnimatorListenerAdapter() {
        public void onAnimationEnd(Animator animation) {
            mKeyguardSecurityContainer.onPause();
        }
    };

    // Resumes the security container after fade-in, but only if it is still visible.
    private final AnimatorListener mResumeListener = new AnimatorListenerAdapter() {
        public void onAnimationEnd(Animator animation) {
            if (((View)mKeyguardSecurityContainer).isShown()) {
                mKeyguardSecurityContainer.onResume(0);
            }
        }
    };
public KeyguardViewStateManager(KeyguardHostView hostView) {
mKeyguardHostView = hostView;
}
public void setPagedView(KeyguardWidgetPager pagedView) {
mKeyguardWidgetPager = pagedView;
updateEdgeSwiping();
}
public void setChallengeLayout(ChallengeLayout layout) {
mChallengeLayout = layout;
updateEdgeSwiping();
}
private void updateEdgeSwiping() {
if (mChallengeLayout != null && mKeyguardWidgetPager != null) {
if (mChallengeLayout.isChallengeOverlapping()) {
mKeyguardWidgetPager.setOnlyAllowEdgeSwipes(true);
} else {
mKeyguardWidgetPager.setOnlyAllowEdgeSwipes(false);
}
}
}
public boolean isChallengeShowing() {
if (mChallengeLayout != null) {
return mChallengeLayout.isChallengeShowing();
}
return false;
}
public boolean isChallengeOverlapping() {
if (mChallengeLayout != null) {
return mChallengeLayout.isChallengeOverlapping();
}
return false;
}
public void setSecurityViewContainer(KeyguardSecurityView container) {
mKeyguardSecurityContainer = container;
}
    /**
     * Announces the bouncer transition for accessibility and asks the challenge
     * layout to show the bouncer.
     * NOTE(review): mChallengeLayout.showBouncer() is invoked regardless of
     * {@code show} — confirm the layout toggles internally when already bouncing.
     * @param show true when the bouncer is being shown, false when being hidden
     */
    public void showBouncer(boolean show) {
        CharSequence what = mKeyguardHostView.getContext().getResources().getText(
                show ? R.string.keyguard_accessibility_show_bouncer
                        : R.string.keyguard_accessibility_hide_bouncer);
        mKeyguardHostView.announceForAccessibility(what);
        mKeyguardHostView.announceCurrentSecurityMethod();
        mChallengeLayout.showBouncer();
    }

    /** @return true when the challenge layout is currently bouncing. */
    public boolean isBouncing() {
        return mChallengeLayout.isBouncing();
    }

    /**
     * Fades the security view out and pauses it when the animation ends
     * (via {@code mPauseListener}).
     * @param duration animation duration in milliseconds
     */
    public void fadeOutSecurity(int duration) {
        ((View) mKeyguardSecurityContainer).animate().alpha(0f).setDuration(duration)
                .setListener(mPauseListener);
    }

    /**
     * Fades the security view back in and resumes it when the animation ends
     * (via {@code mResumeListener}).
     * @param duration animation duration in milliseconds
     */
    public void fadeInSecurity(int duration) {
        ((View) mKeyguardSecurityContainer).animate().alpha(1f).setDuration(duration)
                .setListener(mResumeListener);
    }
    /**
     * Called when the widget pager starts moving.  Fades out an overlapping
     * challenge, remembers which page the move started on, and cancels any
     * pending hint-hiding work.
     */
    public void onPageBeginMoving() {
        if (mChallengeLayout.isChallengeOverlapping() &&
                mChallengeLayout instanceof SlidingChallengeLayout) {
            SlidingChallengeLayout scl = (SlidingChallengeLayout) mChallengeLayout;
            scl.fadeOutChallenge();
            // Remembered so onPageSwitching() can restore the challenge if the
            // user settles back on the same page.
            mPageIndexOnPageBeginMoving = mKeyguardWidgetPager.getCurrentPage();
        }
        // We use mAppWidgetToShow to show a particular widget after you add it--
        // once the user swipes a page we clear that behavior
        if (mKeyguardHostView != null) {
            mKeyguardHostView.clearAppWidgetToShow();
            mKeyguardHostView.setOnDismissAction(null);
        }
        if (mHideHintsRunnable != null) {
            mMainQueue.removeCallbacks(mHideHintsRunnable);
            mHideHintsRunnable = null;
        }
    }

    /** Called when the widget pager stops moving; clears the remembered start page. */
    public void onPageEndMoving() {
        mPageIndexOnPageBeginMoving = -1;
    }
    /**
     * Called while the pager is settling onto {@code newPage}.  Adjusts
     * challenge interactivity and system-UI flags for the camera page, and
     * restores the challenge if we are settling back onto the page the swipe
     * started from (see onPageBeginMoving()).
     *
     * @param newPage The page being switched to.
     * @param newPageIndex Index of that page in the pager.
     */
    public void onPageSwitching(View newPage, int newPageIndex) {
        if (mKeyguardWidgetPager != null && mChallengeLayout instanceof SlidingChallengeLayout) {
            boolean isCameraPage = newPage instanceof CameraWidgetFrame;
            if (isCameraPage) {
                CameraWidgetFrame camera = (CameraWidgetFrame) newPage;
                camera.setUseFastTransition(mKeyguardWidgetPager.isWarping());
            }
            // The challenge must not intercept input while the camera page is up,
            // and search is disabled in the status bar on that page.
            SlidingChallengeLayout scl = (SlidingChallengeLayout) mChallengeLayout;
            scl.setChallengeInteractive(!isCameraPage);
            final int currentFlags = mKeyguardWidgetPager.getSystemUiVisibility();
            final int newFlags = isCameraPage ? (currentFlags | View.STATUS_BAR_DISABLE_SEARCH)
                    : (currentFlags & ~View.STATUS_BAR_DISABLE_SEARCH);
            mKeyguardWidgetPager.setSystemUiVisibility(newFlags);
        }

        // If the page we're settling to is the same as we started on, and the action of
        // moving the page hid the security, we restore it immediately.
        if (mPageIndexOnPageBeginMoving == mKeyguardWidgetPager.getNextPage() &&
                mChallengeLayout instanceof SlidingChallengeLayout) {
            SlidingChallengeLayout scl = (SlidingChallengeLayout) mChallengeLayout;
            scl.fadeInChallenge();
            mKeyguardWidgetPager.setWidgetToResetOnPageFadeOut(-1);
        }
        mPageIndexOnPageBeginMoving = -1;
    }
    /**
     * Called once the pager has settled on a new page.  Resets the size of the
     * previous page and shrinks the new one if the challenge overlaps it.
     *
     * @param newPage The page that is now current.
     * @param newPageIndex Index of that page in the pager.
     */
    public void onPageSwitched(View newPage, int newPageIndex) {
        // Reset the previous page size and ensure the current page is sized appropriately.
        // We only modify the page state if it is not currently under control by the slider.
        // This prevents conflicts.

        // If the page hasn't switched, don't bother with any of this
        if (mCurrentPage == newPageIndex) return;

        if (mKeyguardWidgetPager != null && mChallengeLayout != null) {
            KeyguardWidgetFrame prevPage = mKeyguardWidgetPager.getWidgetPageAt(mCurrentPage);
            // Skip the reset if the old page is listening to the slider or is
            // queued for a reset on page fade-out -- those paths own its size.
            if (prevPage != null && mCurrentPage != mPageListeningToSlider && mCurrentPage
                    != mKeyguardWidgetPager.getWidgetToResetOnPageFadeOut()) {
                prevPage.resetSize();
            }

            KeyguardWidgetFrame newCurPage = mKeyguardWidgetPager.getWidgetPageAt(newPageIndex);
            boolean challengeOverlapping = mChallengeLayout.isChallengeOverlapping();
            if (challengeOverlapping && !newCurPage.isSmall()
                    && mPageListeningToSlider != newPageIndex) {
                newCurPage.shrinkWidget(true);
            }
        }

        mCurrentPage = newPageIndex;
    }
    /**
     * Called when a page "warp" (direct jump to the warp-target page) begins:
     * fades the security out and shows the target page's frame.
     */
    public void onPageBeginWarp() {
        fadeOutSecurity(SlidingChallengeLayout.CHALLENGE_FADE_OUT_DURATION);
        View frame = mKeyguardWidgetPager.getPageAt(mKeyguardWidgetPager.getPageWarpIndex());
        ((KeyguardWidgetFrame)frame).showFrame(this);
    }
    /**
     * Called when a page warp ends: fades the security back in and hides the
     * warp-target page's frame again.
     */
    public void onPageEndWarp() {
        fadeInSecurity(SlidingChallengeLayout.CHALLENGE_FADE_IN_DURATION);
        View frame = mKeyguardWidgetPager.getPageAt(mKeyguardWidgetPager.getPageWarpIndex());
        ((KeyguardWidgetFrame)frame).hideFrame(this);
    }
    /**
     * Converts a challenge-top y coordinate from the challenge layout's
     * coordinate space into {@code frame}'s coordinate space.
     *
     * @param frame The frame whose coordinate space to map into.
     * @param top The challenge top, relative to the challenge layout.
     * @return The challenge top relative to {@code frame}.
     */
    private int getChallengeTopRelativeToFrame(KeyguardWidgetFrame frame, int top) {
        // mTmpPoint is reused scratch space; mapPoint() mutates it in place.
        mTmpPoint[0] = 0;
        mTmpPoint[1] = top;
        mapPoint((View) mChallengeLayout, frame, mTmpPoint);
        return mTmpPoint[1];
    }
/**
* Simple method to map a point from one view's coordinates to another's. Note: this method
* doesn't account for transforms, so if the views will be transformed, this should not be used.
*
* @param fromView The view to which the point is relative
* @param toView The view into which the point should be mapped
* @param pt The point
*/
private void mapPoint(View fromView, View toView, int pt[]) {
fromView.getLocationInWindow(mTmpLoc);
int x = mTmpLoc[0];
int y = mTmpLoc[1];
toView.getLocationInWindow(mTmpLoc);
int vX = mTmpLoc[0];
int vY = mTmpLoc[1];
pt[0] += x - vX;
pt[1] += y - vY;
}
    /** Forwards a user-activity signal to the host view, if one is attached. */
    private void userActivity() {
        if (mKeyguardHostView != null) {
            mKeyguardHostView.onUserActivityTimeoutChanged();
            mKeyguardHostView.userActivity();
        }
    }
    /**
     * Tracks the sliding challenge's scroll state.  On IDLE, the page that was
     * following the slider is released and resized, and the security view is
     * resumed or paused depending on whether the challenge ended up showing.
     * On leaving IDLE, the next page is captured as the slider's listener and
     * shrunk while the challenge slides.
     *
     * @param scrollState One of SlidingChallengeLayout's SCROLL_STATE_* values.
     */
    @Override
    public void onScrollStateChanged(int scrollState) {
        if (mKeyguardWidgetPager == null || mChallengeLayout == null) return;

        boolean challengeOverlapping = mChallengeLayout.isChallengeOverlapping();

        if (scrollState == SlidingChallengeLayout.SCROLL_STATE_IDLE) {
            KeyguardWidgetFrame frame = mKeyguardWidgetPager.getWidgetPageAt(mPageListeningToSlider);
            if (frame == null) return;

            if (!challengeOverlapping) {
                if (!mKeyguardWidgetPager.isPageMoving()) {
                    frame.resetSize();
                    userActivity();
                } else {
                    // Page is still animating; defer the reset to page fade-out.
                    mKeyguardWidgetPager.setWidgetToResetOnPageFadeOut(mPageListeningToSlider);
                }
            }
            if (frame.isSmall()) {
                // This is to make sure that if the scroller animation gets cut off midway
                // that the frame doesn't stay in a partial down position.
                frame.setFrameHeight(frame.getSmallFrameHeight());
            }
            // NOTE(review): scrollState is SCROLL_STATE_IDLE inside this branch, so
            // this condition is always true unless the two constants are equal --
            // kept as-is; confirm intent against SlidingChallengeLayout.
            if (scrollState != SlidingChallengeLayout.SCROLL_STATE_FADING) {
                frame.hideFrame(this);
            }
            updateEdgeSwiping();

            if (mChallengeLayout.isChallengeShowing()) {
                mKeyguardSecurityContainer.onResume(KeyguardSecurityView.VIEW_REVEALED);
            } else {
                mKeyguardSecurityContainer.onPause();
            }
            mPageListeningToSlider = -1;
        } else if (mLastScrollState == SlidingChallengeLayout.SCROLL_STATE_IDLE) {
            // Whether dragging or settling, if the last state was idle, we use this signal
            // to update the current page who will receive events from the sliding challenge.
            // We resize the frame as appropriate.
            mPageListeningToSlider = mKeyguardWidgetPager.getNextPage();
            KeyguardWidgetFrame frame = mKeyguardWidgetPager.getWidgetPageAt(mPageListeningToSlider);
            if (frame == null) return;

            // Skip showing the frame and shrinking the widget if we are
            if (!mChallengeLayout.isBouncing()) {
                if (scrollState != SlidingChallengeLayout.SCROLL_STATE_FADING) {
                    frame.showFrame(this);
                }

                // As soon as the security begins sliding, the widget becomes small (if it wasn't
                // small to begin with).
                if (!frame.isSmall()) {
                    // We need to fetch the final page, in case the pages are in motion.
                    mPageListeningToSlider = mKeyguardWidgetPager.getNextPage();
                    frame.shrinkWidget(false);
                }
            } else {
                if (!frame.isSmall()) {
                    // We need to fetch the final page, in case the pages are in motion.
                    mPageListeningToSlider = mKeyguardWidgetPager.getNextPage();
                }
            }

            // View is on the move.  Pause the security view until it completes.
            mKeyguardSecurityContainer.onPause();
        }
        mLastScrollState = scrollState;
    }
    /**
     * Called as the sliding challenge moves; keeps the frame of the page
     * listening to the slider adjusted to the challenge's current top edge.
     *
     * @param scrollPosition Normalized scroll position (unused here).
     * @param challengeTop Challenge top edge in the challenge layout's coordinates.
     */
    @Override
    public void onScrollPositionChanged(float scrollPosition, int challengeTop) {
        mChallengeTop = challengeTop;
        KeyguardWidgetFrame frame = mKeyguardWidgetPager.getWidgetPageAt(mPageListeningToSlider);
        if (frame != null && mLastScrollState != SlidingChallengeLayout.SCROLL_STATE_FADING) {
            frame.adjustFrame(getChallengeTopRelativeToFrame(frame, mChallengeTop));
        }
    }
    // Posted by showUsabilityHints() to hide the widget outlines and side
    // pages after a delay; cancelled and nulled out if the user starts moving
    // pages first (see onPageBeginMoving()).
    private Runnable mHideHintsRunnable = new Runnable() {
        @Override
        public void run() {
            if (mKeyguardWidgetPager != null) {
                mKeyguardWidgetPager.hideOutlinesAndSidePages();
            }
        }
    };
    /**
     * Schedules the usability hints: the security "ring" hint after
     * SCREEN_ON_RING_HINT_DELAY, optional initial page hints, and the delayed
     * hiding of outlines/side pages after SCREEN_ON_HINT_DURATION.
     */
    public void showUsabilityHints() {
        mMainQueue.postDelayed( new Runnable() {
            @Override
            public void run() {
                mKeyguardSecurityContainer.showUsabilityHint();
            }
        } , SCREEN_ON_RING_HINT_DELAY);
        if (SHOW_INITIAL_PAGE_HINTS) {
            mKeyguardWidgetPager.showInitialPageHints();
        }
        if (mHideHintsRunnable != null) {
            mMainQueue.postDelayed(mHideHintsRunnable, SCREEN_ON_HINT_DURATION);
        }
    }
    // ChallengeLayout.OnBouncerStateChangedListener
    /**
     * Zooms the widget pager out while the bouncer is active and back in when
     * it is dismissed; dismissing also clears any pending dismiss action.
     *
     * @param bouncerActive {@code true} when the bouncer becomes active.
     */
    @Override
    public void onBouncerStateChanged(boolean bouncerActive) {
        if (bouncerActive) {
            mKeyguardWidgetPager.zoomOutToBouncer();
        } else {
            mKeyguardWidgetPager.zoomInFromBouncer();
            if (mKeyguardHostView != null) {
                mKeyguardHostView.setOnDismissAction(null);
            }
        }
    }
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.actionSystem.impl;
import com.intellij.ide.DataManager;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.impl.actionholder.ActionRef;
import com.intellij.openapi.ui.JBPopupMenu;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.StatusBar;
import com.intellij.ui.plaf.beg.IdeaMenuUI;
import com.intellij.ui.plaf.gtk.GtkMenuUI;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.SingleAlarm;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.event.MenuEvent;
import javax.swing.event.MenuListener;
import javax.swing.plaf.MenuItemUI;
import java.awt.*;
import java.awt.event.AWTEventListener;
import java.awt.event.ComponentEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
public final class ActionMenu extends JMenu {
private final String myPlace;
private DataContext myContext;
private final ActionRef<ActionGroup> myGroup;
private final PresentationFactory myPresentationFactory;
private final Presentation myPresentation;
private boolean myMnemonicEnabled;
private MenuItemSynchronizer myMenuItemSynchronizer;
private StubItem myStubItem; // A PATCH!!! Do not remove this code, otherwise you will lose all keyboard navigation in JMenuBar.
private final boolean myTopLevel;
private final Disposable myDisposable;
public ActionMenu(final DataContext context,
@NotNull final String place,
final ActionGroup group,
final PresentationFactory presentationFactory,
final boolean enableMnemonics,
final boolean topLevel) {
myContext = context;
myPlace = place;
myGroup = ActionRef.fromAction(group);
myPresentationFactory = presentationFactory;
myPresentation = myPresentationFactory.getPresentation(group);
myMnemonicEnabled = enableMnemonics;
myTopLevel = topLevel;
updateUI();
init();
// addNotify won't be called for menus in MacOS system menu
if (SystemInfo.isMacSystemMenu) {
installSynchronizer();
}
if (UIUtil.isUnderIntelliJLaF()) {
setOpaque(true);
}
myDisposable = new Disposable() {
@Override
public void dispose() {
}
};
}
public void updateContext(DataContext context) {
myContext = context;
}
public void addNotify() {
super.addNotify();
installSynchronizer();
}
private void installSynchronizer() {
if (myMenuItemSynchronizer == null) {
myMenuItemSynchronizer = new MenuItemSynchronizer();
myGroup.getAction().addPropertyChangeListener(myMenuItemSynchronizer);
myPresentation.addPropertyChangeListener(myMenuItemSynchronizer);
}
}
@Override
public void removeNotify() {
uninstallSynchronizer();
super.removeNotify();
Disposer.dispose(myDisposable);
}
private void uninstallSynchronizer() {
if (myMenuItemSynchronizer != null) {
myGroup.getAction().removePropertyChangeListener(myMenuItemSynchronizer);
myPresentation.removePropertyChangeListener(myMenuItemSynchronizer);
myMenuItemSynchronizer = null;
}
}
private JPopupMenu mySpecialMenu = null;
@Override
public JPopupMenu getPopupMenu() {
if (mySpecialMenu == null) {
mySpecialMenu = new JBPopupMenu();
mySpecialMenu.setInvoker(this);
popupListener = createWinListener(mySpecialMenu);
ReflectionUtil.setField(JMenu.class, this, JPopupMenu.class, "popupMenu", mySpecialMenu);
}
return super.getPopupMenu();
}
@Override
public void updateUI() {
boolean isAmbiance = UIUtil.isUnderGTKLookAndFeel() && "Ambiance".equalsIgnoreCase(UIUtil.getGtkThemeName());
if (myTopLevel && !isAmbiance && UIUtil.GTK_AMBIANCE_TEXT_COLOR.equals(getForeground())) {
setForeground(null);
}
if (UIUtil.isStandardMenuLAF()) {
super.updateUI();
}
else {
setUI(IdeaMenuUI.createUI(this));
setFont(UIUtil.getMenuFont());
JPopupMenu popupMenu = getPopupMenu();
if (popupMenu != null) {
popupMenu.updateUI();
}
}
if (myTopLevel && isAmbiance) {
setForeground(UIUtil.GTK_AMBIANCE_TEXT_COLOR);
}
if (myTopLevel && UIUtil.isUnderGTKLookAndFeel()) {
Insets insets = getInsets();
Insets newInsets = new Insets(insets.top, insets.left, insets.bottom, insets.right);
if (insets.top + insets.bottom < 6) {
newInsets.top = newInsets.bottom = 3;
}
if (insets.left + insets.right < 12) {
newInsets.left = newInsets.right = 6;
}
if (!newInsets.equals(insets)) {
setBorder(BorderFactory.createEmptyBorder(newInsets.top, newInsets.left, newInsets.bottom, newInsets.right));
}
}
}
@Override
public void setUI(final MenuItemUI ui) {
final MenuItemUI newUi = !myTopLevel && UIUtil.isUnderGTKLookAndFeel() && GtkMenuUI.isUiAcceptable(ui) ? new GtkMenuUI(ui) : ui;
super.setUI(newUi);
}
private void init() {
boolean macSystemMenu = SystemInfo.isMacSystemMenu && myPlace == ActionPlaces.MAIN_MENU;
myStubItem = macSystemMenu ? null : new StubItem();
addStubItem();
addMenuListener(new MenuListenerImpl());
setBorderPainted(false);
setVisible(myPresentation.isVisible());
setEnabled(myPresentation.isEnabled());
setText(myPresentation.getText());
updateIcon();
setMnemonicEnabled(myMnemonicEnabled);
}
private void addStubItem() {
if (myStubItem != null) {
add(myStubItem);
}
}
public void setMnemonicEnabled(boolean enable) {
myMnemonicEnabled = enable;
setMnemonic(myPresentation.getMnemonic());
setDisplayedMnemonicIndex(myPresentation.getDisplayedMnemonicIndex());
}
@Override
public void setDisplayedMnemonicIndex(final int index) throws IllegalArgumentException {
super.setDisplayedMnemonicIndex(myMnemonicEnabled ? index : -1);
}
@Override
public void setMnemonic(int mnemonic) {
super.setMnemonic(myMnemonicEnabled ? mnemonic : 0);
}
private void updateIcon() {
if (UISettings.getInstance().SHOW_ICONS_IN_MENUS) {
final Presentation presentation = myPresentation;
final Icon icon = presentation.getIcon();
setIcon(icon);
if (presentation.getDisabledIcon() != null) {
setDisabledIcon(presentation.getDisabledIcon());
}
else {
setDisabledIcon(IconLoader.getDisabledIcon(icon));
}
}
}
@Override
public void menuSelectionChanged(boolean isIncluded) {
super.menuSelectionChanged(isIncluded);
showDescriptionInStatusBar(isIncluded, this, myPresentation.getDescription());
}
public static void showDescriptionInStatusBar(boolean isIncluded, Component component, String description) {
IdeFrame frame = component instanceof IdeFrame
? (IdeFrame)component
: (IdeFrame)SwingUtilities.getAncestorOfClass(IdeFrame.class, component);
StatusBar statusBar;
if (frame != null && (statusBar = frame.getStatusBar()) != null) {
statusBar.setInfo(isIncluded ? description : null);
}
}
private class MenuListenerImpl implements MenuListener {
public void menuCanceled(MenuEvent e) {
clearItems();
addStubItem();
}
public void menuDeselected(MenuEvent e) {
Disposer.dispose(myDisposable);
clearItems();
addStubItem();
}
public void menuSelected(MenuEvent e) {
new UsabilityHelper(ActionMenu.this, myDisposable);
fillMenu();
}
}
private void clearItems() {
if (SystemInfo.isMacSystemMenu && myPlace == ActionPlaces.MAIN_MENU) {
for (Component menuComponent : getMenuComponents()) {
if (menuComponent instanceof ActionMenu) {
((ActionMenu)menuComponent).clearItems();
if (SystemInfo.isMacSystemMenu) {
// hideNotify is not called on Macs
((ActionMenu)menuComponent).uninstallSynchronizer();
}
}
else if (menuComponent instanceof ActionMenuItem) {
// Looks like an old-fashioned ugly workaround
// JDK 1.7 on Mac works wrong with such functional keys
if (!(SystemInfo.isJavaVersionAtLeast("1.7") && SystemInfo.isMac)) {
((ActionMenuItem)menuComponent).setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_F24, 0));
}
}
}
}
removeAll();
validate();
}
private void fillMenu() {
DataContext context;
boolean mayContextBeInvalid;
if (myContext != null) {
context = myContext;
mayContextBeInvalid = false;
}
else {
@SuppressWarnings("deprecation") DataContext contextFromFocus = DataManager.getInstance().getDataContext();
context = contextFromFocus;
if (PlatformDataKeys.CONTEXT_COMPONENT.getData(context) == null) {
IdeFrame frame = UIUtil.getParentOfType(IdeFrame.class, this);
context = DataManager.getInstance().getDataContext(IdeFocusManager.getGlobalInstance().getLastFocusedFor(frame));
}
mayContextBeInvalid = true;
}
Utils.fillMenu(myGroup.getAction(), this, myMnemonicEnabled, myPresentationFactory, context, myPlace, true, mayContextBeInvalid);
}
private class MenuItemSynchronizer implements PropertyChangeListener {
public void propertyChange(PropertyChangeEvent e) {
String name = e.getPropertyName();
if (Presentation.PROP_VISIBLE.equals(name)) {
setVisible(myPresentation.isVisible());
if (SystemInfo.isMacSystemMenu && myPlace.equals(ActionPlaces.MAIN_MENU)) {
validateTree();
}
}
else if (Presentation.PROP_ENABLED.equals(name)) {
setEnabled(myPresentation.isEnabled());
}
else if (Presentation.PROP_MNEMONIC_KEY.equals(name)) {
setMnemonic(myPresentation.getMnemonic());
}
else if (Presentation.PROP_MNEMONIC_INDEX.equals(name)) {
setDisplayedMnemonicIndex(myPresentation.getDisplayedMnemonicIndex());
}
else if (Presentation.PROP_TEXT.equals(name)) {
setText(myPresentation.getText());
}
else if (Presentation.PROP_ICON.equals(name) || Presentation.PROP_DISABLED_ICON.equals(name)) {
updateIcon();
}
}
}
private static class UsabilityHelper implements IdeEventQueue.EventDispatcher, AWTEventListener, Disposable {
private Component myComponent;
private Point myLastMousePoint = null;
private Point myUpperTargetPoint = null;
private Point myLowerTargetPoint = null;
private SingleAlarm myCallbackAlarm;
private MouseEvent myEventToRedispatch = null;
private UsabilityHelper(Component component, @NotNull Disposable disposable) {
Disposer.register(disposable, this);
myCallbackAlarm = new SingleAlarm(new Runnable() {
@Override
public void run() {
Disposer.dispose(myCallbackAlarm);
myCallbackAlarm = null;
if (myEventToRedispatch != null) {
IdeEventQueue.getInstance().dispatchEvent(myEventToRedispatch);
}
}
}, 50, this);
myComponent = component;
PointerInfo info = MouseInfo.getPointerInfo();
myLastMousePoint = info != null ? info.getLocation() : null;
if (myLastMousePoint != null) {
Toolkit.getDefaultToolkit().addAWTEventListener(this, AWTEvent.COMPONENT_EVENT_MASK);
IdeEventQueue.getInstance().addDispatcher(this, this);
}
}
@Override
public void eventDispatched(AWTEvent event) {
if (event instanceof ComponentEvent) {
ComponentEvent componentEvent = (ComponentEvent)event;
Component component = componentEvent.getComponent();
JPopupMenu popup = UIUtil.findParentByClass(component, JPopupMenu.class);
if (popup != null && popup.getInvoker() == myComponent) {
Rectangle bounds = popup.getBounds();
if (bounds.isEmpty()) return;
bounds.setLocation(popup.getLocationOnScreen());
if (myLastMousePoint.x < bounds.x) {
myUpperTargetPoint = new Point(bounds.x, bounds.y);
myLowerTargetPoint = new Point(bounds.x, bounds.y + bounds.height);
}
if (myLastMousePoint.x > bounds.x + bounds.width) {
myUpperTargetPoint = new Point(bounds.x + bounds.width, bounds.y);
myLowerTargetPoint = new Point(bounds.x + bounds.width, bounds.y + bounds.height);
}
}
}
}
@Override
public boolean dispatch(AWTEvent e) {
if (e instanceof MouseEvent && myUpperTargetPoint != null && myLowerTargetPoint != null && myCallbackAlarm != null) {
if (e.getID() == MouseEvent.MOUSE_PRESSED || e.getID() == MouseEvent.MOUSE_RELEASED || e.getID() == MouseEvent.MOUSE_CLICKED) {
return false;
}
Point point = ((MouseEvent)e).getLocationOnScreen();
myCallbackAlarm.cancel();
boolean isMouseMovingTowardsSubmenu = new Polygon(
new int[]{myLastMousePoint.x, myUpperTargetPoint.x, myLowerTargetPoint.x},
new int[]{myLastMousePoint.y, myUpperTargetPoint.y, myLowerTargetPoint.y},
3).contains(point);
myEventToRedispatch = (MouseEvent)e;
if (!isMouseMovingTowardsSubmenu) {
myCallbackAlarm.request();
}
myLastMousePoint = point;
return true;
}
return false;
}
@Override
public void dispose() {
myComponent = null;
myEventToRedispatch = null;
myLastMousePoint = myUpperTargetPoint = myLowerTargetPoint = null;
Toolkit.getDefaultToolkit().removeAWTEventListener(this);
}
}
}
| |
/* Copyright (C) 2005-2011 Fabio Riccardi */
package com.lightcrafts.utils.bytebuffer;
import java.io.IOException;
import java.nio.BufferUnderflowException;
import java.nio.ByteOrder;
/**
* An <code>LCByteBuffer</code> is a Light Crafts version of Java's
* <code>ByteBuffer</code> that allows different implementations.
* <p>
* Ideally, you'd like to derive from <code>ByteBuffer</code>, but you can't
* since it contains abstract package-protected methods that you can't
* implement. This is totally brain-damaged.
* <p>
* Despite not being derived from <code>ByteBuffer</code>, the API is designed
 * to mimic <code>ByteBuffer</code>'s API.
* <p>
* Most of the methods are declared to throw {@link IOException} because some
* implementations may choose to use file I/O as the backing-store for the
* buffer. This is annoying, but that's life with checked exceptions.
*
* @author Paul J. Lucas [paul@lightcrafts.com]
*/
public abstract class LCByteBuffer {
////////// public /////////////////////////////////////////////////////////
    /**
     * Gets this buffer's capacity, i.e., the total number of bytes it can
     * hold.
     *
     * @return Returns said capacity.
     * @see #limit()
     * @see #limit(int)
     */
    public final int capacity() {
        return m_capacity;
    }
    /**
     * Gets a <code>byte</code> at the buffer's current position and advances
     * the position by 1.
     *
     * @return Returns said <code>byte</code>.
     * @throws BufferUnderflowException if the buffer's current position is not
     * less than its limit.
     * @throws IOException if an I/O error occurs.
     * @see #get(int)
     * @see #getUnsignedByte()
     * @see #getUnsignedByte(int)
     */
    public final byte get() throws IOException {
        final int origPos = m_position;
        final byte result = get( m_position );
        //
        // It's possible that m_position might have been altered by the call to
        // an implementation of get(int) above.  Therefore, to guarantee that
        // m_position is updated to the correct value, 1 is added to the
        // original position rather than doing m_position++.
        //
        m_position = origPos + 1;
        return result;
    }
    /**
     * Gets a <code>byte</code> at the given position.  The buffer's current
     * position is not changed.
     *
     * @param pos The position to get the <code>byte</code> from.
     * @return Returns said <code>byte</code>.
     * @throws BufferUnderflowException if the given position is not less than
     * the buffer's limit.
     * @throws IOException if an I/O error occurs.
     * @see #get()
     * @see #getUnsignedByte()
     * @see #getUnsignedByte(int)
     */
    public abstract byte get( int pos ) throws IOException;
    /**
     * Gets a range of bytes starting at the buffer's current position and
     * advances the position by the number of bytes obtained.
     *
     * @param dest The array to deposit the bytes into.
     * @param offset The element in the array to start depositing.
     * @param length The number of bytes to get.
     * @return Always returns <code>this</code>.
     * @throws BufferUnderflowException if there are fewer than
     * <code>length</code> bytes remaining in the buffer starting at its
     * current position.
     * @throws IOException if an I/O error occurs.
     * @see #getBytes(int)
     * @see #getBytes(int,int)
     * @see #getBytes(byte[],int,int)
     */
    public abstract LCByteBuffer get( byte[] dest, int offset, int length )
        throws IOException;
/**
* Gets a range of bytes starting at the buffer's current position and
* advances the position by the number of bytes obtained.
*
* @param length The number of bytes to get.
* @return Returns a new <code>byte</code> array containing the obtained
* bytes.
* @throws BufferUnderflowException if there are fewer than
* <code>length</code> bytes remaining in the buffer starting at its
* current position.
* @see #get(byte[],int,int)
* @see #getBytes(int,int)
* @see #getBytes(byte[],int,int)
*/
public final byte[] getBytes( int length ) throws IOException {
final byte[] dest = new byte[ length ];
get( dest, 0, length );
return dest;
}
/**
* Gets a range of bytes starting at the given position. The buffer's
* current position is not changed.
*
* @param pos The position to get the <code>byte</code> from.
* @param length The number of bytes to get.
* @return Returns a new <code>byte</code> array containing the obtained
* bytes.
* @throws BufferUnderflowException if there are fewer than
* <code>length</code> bytes remaining in the buffer starting at the given
* position.
* @see #get(byte[],int,int)
* @see #getBytes(int)
* @see #getBytes(byte[],int,int)
*/
public final byte[] getBytes( int pos, int length ) throws IOException {
final byte[] dest = new byte[ length ];
getBytes( dest, pos, length );
return dest;
}
/**
* Gets a range of bytes starting at the given position. The buffer's
* current position is not changed.
*
* @param dest The array to deposit the bytes into.
* @param pos The position to get the <code>byte</code> from.
* @param length The number of bytes to get.
* @throws BufferUnderflowException if there are fewer than
* <code>length</code> bytes remaining in the buffer starting at the given
* position.
* @see #get(byte[],int,int)
* @see #getBytes(int)
* @see #getBytes(int,int)
*/
public final void getBytes( byte[] dest, int pos, int length )
throws IOException
{
final int origPos = m_position;
position( pos );
get( dest, 0, length );
m_position = origPos;
}
    /**
     * Gets a Unicode <code>char</code> at the buffer's current position and
     * advances the position by 2.
     *
     * @return Returns said <code>char</code>.
     * @throws BufferUnderflowException if there are fewer than 2 bytes
     * remaining in the buffer starting at its current position.
     * @see #getChar(int)
     */
    public final char getChar() throws IOException {
        return (char)getShort();
    }
    /**
     * Gets a Unicode <code>char</code> at the given position.  The buffer's
     * current position is not changed.
     *
     * @param pos The position to get the <code>char</code> from.
     * @return Returns said <code>char</code>.
     * @throws BufferUnderflowException if there are fewer than 2 bytes
     * remaining in the buffer starting at the given position.
     * @see #getChar()
     */
    public final char getChar( int pos ) throws IOException {
        return (char)getShort( pos );
    }
    /**
     * Gets a <code>double</code> at the buffer's current position and advances
     * the position by 8.
     *
     * @return Returns said <code>double</code>.
     * @throws BufferUnderflowException if there are fewer than 8 bytes
     * remaining in the buffer starting at its current position.
     * @throws IOException if an I/O error occurs.
     * @see #getDouble(int)
     */
    public final double getDouble() throws IOException {
        final int origPos = m_position;
        final double result = getDouble( m_position );
        //
        // It's possible that m_position might have been altered by the call to
        // an implementation of getDouble(int) above.  Therefore, to guarantee
        // that m_position is updated to the correct value, 8 is added to the
        // original position rather than doing m_position += 8.
        //
        m_position = origPos + 8;
        return result;
    }
    /**
     * Gets a <code>double</code> at the given position.  The buffer's current
     * position is not changed.
     *
     * @param pos The position to get the <code>double</code> from.
     * @return Returns said <code>double</code>.
     * @throws BufferUnderflowException if there are fewer than 8 bytes
     * remaining in the buffer starting at the given position.
     * @throws IOException if an I/O error occurs.
     * @see #getDouble()
     */
    public abstract double getDouble( int pos ) throws IOException;
    /**
     * Gets a {@link String} that is the same length as the given string
     * starting at the buffer's current position, advances the position by the
     * same length, and compares the two strings for equality.
     * <p>
     * NOTE(review): this reads {@code s.length()} <em>bytes</em>, which is
     * only correct for single-byte-per-character charsets -- confirm before
     * using with multi-byte encodings.
     *
     * @param s The {@link String} to compare to.
     * @param charsetName The name of a supported character set.
     * @return Returns <code>true</code> only if the two strings are equal.
     * @throws BufferUnderflowException if there are fewer than the number of
     * bytes in the given string remaining in the buffer starting at its
     * current position.
     * @see #getString(int,String)
     */
    public final boolean getEquals( String s, String charsetName )
        throws IOException
    {
        return s.equals( getString( s.length(), charsetName ) );
    }
    /**
     * Gets a {@link String} that is the same length as the given string
     * starting at the given position and compares the two strings for equality.
     * The buffer's current position is not changed.
     * <p>
     * NOTE(review): this reads {@code s.length()} <em>bytes</em>, which is
     * only correct for single-byte-per-character charsets -- confirm before
     * using with multi-byte encodings.
     *
     * @param pos The position to get the {@link String} from.
     * @param s The {@link String} to compare to.
     * @param charsetName The name of a supported character set.
     * @return Returns {@code true} only if the two strings are equal.
     * @throws BufferUnderflowException if there are fewer than the number of
     * bytes in the given string remaining in the buffer starting at the given
     * position.
     * @see #getEquals(String,String)
     * @see #getString(int,int,String)
     */
    public final boolean getEquals( int pos, String s, String charsetName )
        throws IOException
    {
        return s.equals( getString( pos, s.length(), charsetName ) );
    }
    /**
     * Gets a <code>float</code> at the buffer's current position and advances
     * the position by 4.
     *
     * @return Returns said <code>float</code>.
     * @throws BufferUnderflowException if there are fewer than 4 bytes
     * remaining in the buffer starting at its current position.
     * @throws IOException if an I/O error occurs.
     * @see #getFloat(int)
     */
    public final float getFloat() throws IOException {
        final int origPos = m_position;
        final float result = getFloat( m_position );
        //
        // It's possible that m_position might have been altered by the call to
        // an implementation of getFloat(int) above.  Therefore, to guarantee
        // that m_position is updated to the correct value, 4 is added to the
        // original position rather than doing m_position += 4.
        //
        m_position = origPos + 4;
        return result;
    }
    /**
     * Gets a <code>float</code> at the given position.  The buffer's current
     * position is not changed.
     *
     * @param pos The position to get the <code>float</code> from.
     * @return Returns said <code>float</code>.
     * @throws BufferUnderflowException if there are fewer than 4 bytes
     * remaining in the buffer starting at the given position.
     * @throws IOException if an I/O error occurs.
     * @see #getFloat()
     */
    public abstract float getFloat( int pos ) throws IOException;
    /**
     * Gets an <code>int</code> at the buffer's current position and advances
     * the position by 4.
     *
     * @return Returns said <code>int</code>.
     * @throws BufferUnderflowException if there are fewer than 4 bytes
     * remaining in the buffer starting at its current position.
     * @throws IOException if an I/O error occurs.
     * @see #getInt(int)
     */
    public final int getInt() throws IOException {
        final int origPos = m_position;
        final int result = getInt( m_position );
        //
        // It's possible that m_position might have been altered by the call to
        // an implementation of getInt(int) above.  Therefore, to guarantee
        // that m_position is updated to the correct value, 4 is added to the
        // original position rather than doing m_position += 4.
        //
        m_position = origPos + 4;
        return result;
    }
    /**
     * Gets an <code>int</code> at the given position.  The buffer's current
     * position is not changed.
     *
     * @param pos The position to get the <code>int</code> from.
     * @return Returns said <code>int</code>.
     * @throws BufferUnderflowException if there are fewer than 4 bytes
     * remaining in the buffer starting at the given position.
     * @throws IOException if an I/O error occurs.
     * @see #getInt()
     */
    public abstract int getInt( int pos ) throws IOException;
    /**
     * Gets a <code>long</code> at the buffer's current position and advances
     * the position by 8.
     *
     * @return Returns said <code>long</code>.
     * @throws BufferUnderflowException if there are fewer than 8 bytes
     * remaining in the buffer starting at its current position.
     * @throws IOException if an I/O error occurs.
     * @see #getLong(int)
     */
    public final long getLong() throws IOException {
        final int origPos = m_position;
        final long result = getLong( m_position );
        //
        // It's possible that m_position might have been altered by the call to
        // an implementation of getLong(int) above.  Therefore, to guarantee
        // that m_position is updated to the correct value, 8 is added to the
        // original position rather than doing m_position += 8.
        //
        m_position = origPos + 8;
        return result;
    }
    /**
     * Gets a <code>long</code> at the given position.  The buffer's current
     * position is not changed.
     *
     * @param pos The position to get the <code>long</code> from.
     * @return Returns said <code>long</code>.
     * @throws BufferUnderflowException if there are fewer than 8 bytes
     * remaining in the buffer starting at the given position.
     * @throws IOException if an I/O error occurs.
     * @see #getLong()
     */
    public abstract long getLong( int pos ) throws IOException;
    /**
     * Gets a <code>short</code> at the buffer's current position and advances
     * the position by 2.
     *
     * @return Returns said <code>short</code>.
     * @throws BufferUnderflowException if there are fewer than 2 bytes
     * remaining in the buffer starting at its current position.
     * @throws IOException if an I/O error occurs.
     * @see #getShort(int)
     * @see #getUnsignedShort()
     * @see #getUnsignedShort(int)
     */
    public final short getShort() throws IOException {
        final int origPos = m_position;
        final short result = getShort( m_position );
        //
        // It's possible that m_position might have been altered by the call to
        // an implementation of getShort(int) above.  Therefore, to guarantee
        // that m_position is updated to the correct value, 2 is added to the
        // original position rather than doing m_position += 2.
        //
        m_position = origPos + 2;
        return result;
    }
    /**
     * Gets a <code>short</code> at the given position. The buffer's current
     * position is not changed.
     * <p>Note: the relative {@link #getShort()} defensively recomputes the
     * position in case an implementation moves it as a side effect.
     *
     * @param pos The position to get the <code>short</code> from.
     * @return Returns said <code>short</code>.
     * @throws BufferUnderflowException if there are fewer than 2 bytes
     * remaining in the buffer starting at the given position.
     * @see #getShort()
     * @see #getUnsignedShort()
     * @see #getUnsignedShort(int)
     */
    public abstract short getShort( int pos ) throws IOException;
/**
* Gets a {@link String} at the buffer's current position and advances the
* position by the number of bytes obtained.
*
* @param length The number of bytes (not characters) to get.
* @param charsetName The name of a supported character set.
* @return Returns said {@link String}.
* @throws BufferUnderflowException if there are fewer than
* <code>length</code> bytes remaining in the buffer starting at its
* current position.
* @see #getString(int,int,String)
*/
public final String getString( int length, String charsetName )
throws IOException
{
return new String( getBytes( length ), charsetName );
}
/**
* Gets a {@link String} at the given position. The buffer's current
* position is not changed.
*
* @param pos The position to get the {@link String} from.
* @param length The number of bytes (not characters) to get.
* @param charsetName The name of a supported character set.
* @return Returns said {@link String}.
* @throws BufferUnderflowException if there are fewer than
* <code>length</code> bytes remaining in the buffer starting at the given
* position.
* @see #getString(int,String)
*/
public final String getString( int pos, int length, String charsetName )
throws IOException
{
return new String( getBytes( pos, length ), charsetName );
}
/**
* Gets an unsigned <code>byte</code> at the buffer's current position and
* advances the position by 1.
*
* @return Return said unsigned <code>byte</code>.
* @throws BufferUnderflowException if the buffer's current position is not
* less than its limit.
* @see #get()
* @see #get(int)
* @see #getUnsignedByte(int)
*/
public final int getUnsignedByte() throws IOException {
return get() & 0x000000FF;
}
/**
* Gets an unsigned <code>byte</code> at given position. The buffer's
* current position is not changed.
*
* @param pos The position to get the unsigned <code>byte</code> from.
* @return Return said unsigned <code>byte</code>.
* @throws BufferUnderflowException if the given position is not less than
* its limit.
* @see #get()
* @see #get(int)
* @see #getUnsignedByte()
*/
public final int getUnsignedByte( int pos ) throws IOException {
return get( pos ) & 0x000000FF;
}
/**
* Gets an unsigned <code>short</code> at the buffer's current position and
* advances the position by 2.
*
* @return Return said unsigned <code>short</code>.
* @throws BufferUnderflowException if there are fewer than 2 bytes
* remaining in the buffer starting at its current position.
* @see #getShort()
* @see #getShort(int)
* @see #getUnsignedShort(int)
*/
public final int getUnsignedShort() throws IOException {
return getShort() & 0x0000FFFF;
}
/**
* Gets an unsigned <code>short</code> at given position. The buffer's
* current position is not changed.
*
* @param pos The position to get the unsigned <code>short</code> from.
* @return Return said unsigned <code>short</code>.
* @throws BufferUnderflowException if there are fewer than 2 bytes
* remaining in the buffer starting at the given position.
* @see #getShort()
* @see #getShort(int)
* @see #getUnsignedShort()
*/
public final int getUnsignedShort( int pos ) throws IOException {
return getShort( pos ) & 0x0000FFFF;
}
    /**
     * Gets the buffer's initial offset.
     *
     * @return Returns said offset.
     * @see #initialOffset(int)
     */
    public final int initialOffset() {
        return m_initialOffset;
    }
    /**
     * Sets the initial offset. This offset is added to all absolute positions
     * for reading. Note that no range checking is performed on the offset.
     *
     * @param offset The new offset.
     * @return Returns this <code>LCByteBuffer</code>.
     * @see #initialOffset()
     */
    public LCByteBuffer initialOffset( int offset ) {
        m_initialOffset = offset;
        return this;
    }
    /**
     * Gets the buffer's limit, i.e., the index of the first byte that must
     * not be read.
     *
     * @return Returns said limit.
     * @see #capacity()
     * @see #limit(int)
     */
    public final int limit() {
        return m_limit;
    }
/**
* Sets this buffer's limit.
*
* @param newLimit The new limit.
* @return Returns this <code>LCByteBuffer</code>.
* @throws IllegalArgumentException if <code>newLimit</code> is negative or
* greater than the buffer's capacity.
* @see #capacity()
* @see #limit()
*/
public LCByteBuffer limit( int newLimit ) {
if ( newLimit < 0 || newLimit > m_capacity )
throw new IllegalArgumentException();
m_limit = newLimit;
if ( m_position > m_limit )
m_position = m_limit;
return this;
}
    /**
     * Gets the current {@link ByteOrder} used to decode multibyte values.
     *
     * @return Returns said {@link ByteOrder}.
     * @see #order(ByteOrder)
     */
    public abstract ByteOrder order();
    /**
     * Sets the current {@link ByteOrder} used to decode multibyte values.
     *
     * @param order The new {@link ByteOrder}.
     * @return Returns this <code>LCByteBuffer</code>.
     * @see #order()
     */
    public abstract LCByteBuffer order( ByteOrder order );
    /**
     * Gets the current position, i.e., the index of the next byte to be read
     * or written.
     *
     * @return Returns said position.
     * @see #position(int)
     */
    public final int position() {
        return m_position;
    }
/**
* Sets the current position.
*
* @param newPosition The new position.
* @return Returns this <code>LCByteBuffer</code>.
* @throws IllegalArgumentException if <code>newPosition</code> is negative
* or greater than the buffer's limit.
*/
public final LCByteBuffer position( int newPosition ) {
if ( newPosition < 0 || newPosition > m_limit )
throw new IllegalArgumentException();
m_position = newPosition;
return this;
}
    /**
     * Probes the buffer to determine what byte order the data is in and sets
     * it accordingly. The buffer is left in whichever order the heuristic
     * selects; the previous order is returned so the caller can restore it.
     *
     * @param shortOffset An offset of a supposed unsigned <code>short</code>.
     * @return Returns the original {@link ByteOrder}.
     * @throws BufferUnderflowException if there are fewer than 2 bytes
     * remaining in the buffer starting at its current position.
     */
    public ByteOrder probeOrder( int shortOffset ) throws IOException {
        final ByteOrder origOrder = order();
        //
        // We use a heuristic of extracting a short using both big- and
        // little-endian byte orders.  The assumption is that reading the
        // bytes in the right order will yield a smaller number than the
        // wrong order, e.g., 0x0100 is 1 when read as little endian but is
        // 256 when read as big endian.
        //
        order( ByteOrder.BIG_ENDIAN );
        final int nBig = getUnsignedShort( shortOffset );
        order( ByteOrder.LITTLE_ENDIAN );
        final int nLittle = getUnsignedShort( shortOffset );
        if ( nLittle > nBig )
            order( ByteOrder.BIG_ENDIAN );
        return origOrder;
    }
    /**
     * Puts a <code>byte</code> at the given position. The buffer's current
     * position is not changed.
     * <p>Note: the relative {@link #put(byte)} defensively recomputes the
     * position in case an implementation moves it as a side effect.
     *
     * @param pos The position to put the byte at.
     * @param value The byte to put.
     * @return Returns this <code>LCByteBuffer</code>.
     * @throws IndexOutOfBoundsException if <tt>pos</tt> is negative or not
     * smaller than the buffer's limit.
     * @see #put(byte)
     */
    public abstract LCByteBuffer put( int pos, byte value ) throws IOException;
    /**
     * Puts a <code>byte</code> at the buffer's current position and advances
     * the position by 1.
     *
     * @param value The byte to put.
     * @return Returns this <code>LCByteBuffer</code>.
     * @throws IndexOutOfBoundsException if the current position is not smaller
     * than the buffer's limit.
     * @see #put(int,byte)
     */
    public final LCByteBuffer put( byte value ) throws IOException {
        final int origPos = m_position;
        put( m_position, value );
        //
        // It's possible that m_position might have been altered by the call to
        // an implementation of put(int,byte) above. Therefore, to guarantee
        // that m_position is updated to the correct value, 1 is added to the
        // original position rather than doing m_position += 1.
        //
        m_position = origPos + 1;
        return this;
    }
    /**
     * Puts a <code>double</code> at the buffer's current position and advances
     * the position by 8.
     *
     * @param value The double to put.
     * @return Returns this <code>LCByteBuffer</code>.
     * @throws IndexOutOfBoundsException if the current position is not smaller
     * than the buffer's limit - 7.
     * @see #putDouble(int,double)
     */
    public final LCByteBuffer putDouble( double value ) throws IOException {
        final int origPos = m_position;
        putDouble( m_position, value );
        //
        // It's possible that m_position might have been altered by the call to
        // an implementation of putDouble(int,double) above. Therefore, to
        // guarantee that m_position is updated to the correct value, 8 is
        // added to the original position rather than doing m_position += 8.
        //
        m_position = origPos + 8;
        return this;
    }
    /**
     * Put a <code>double</code> at the given position. The buffer's current
     * position is not changed.
     * <p>Note: the relative {@link #putDouble(double)} defensively recomputes
     * the position in case an implementation moves it as a side effect.
     *
     * @param pos The position to put the double at.
     * @param value The double to put.
     * @return Returns this <code>LCByteBuffer</code>.
     * @throws IndexOutOfBoundsException if <tt>pos</tt> is negative or not
     * smaller than the buffer's limit - 7.
     * @see #putDouble(double)
     */
    public abstract LCByteBuffer putDouble( int pos, double value )
        throws IOException;
    /**
     * Puts a <code>float</code> at the buffer's current position and advances
     * the position by 4.
     *
     * @param value The float to put.
     * @return Returns this <code>LCByteBuffer</code>.
     * @throws IndexOutOfBoundsException if the current position is not smaller
     * than the buffer's limit - 3.
     * @see #putFloat(int,float)
     */
    public final LCByteBuffer putFloat( float value ) throws IOException {
        final int origPos = m_position;
        putFloat( m_position, value );
        //
        // It's possible that m_position might have been altered by the call to
        // an implementation of putFloat(int,float) above. Therefore, to
        // guarantee that m_position is updated to the correct value, 4 is
        // added to the original position rather than doing m_position += 4.
        //
        m_position = origPos + 4;
        return this;
    }
    /**
     * Puts a <code>float</code> at the given position. The buffer's current
     * position is not changed.
     *
     * @param pos The position to put the float at.
     * @param value The float to put.
     * @return Returns this <code>LCByteBuffer</code>.
     * @throws IndexOutOfBoundsException if <tt>pos</tt> is negative or not
     * smaller than the buffer's limit - 3.
     * @see #putFloat(float)
     */
    public abstract LCByteBuffer putFloat( int pos, float value )
        throws IOException;
    /**
     * Puts an <code>int</code> at the buffer's current position and advances
     * the position by 4.
     *
     * @param value The integer to put.
     * @return Returns this <code>LCByteBuffer</code>.
     * @throws IndexOutOfBoundsException if the current position is not smaller
     * than the buffer's limit - 3.
     * @see #putInt(int,int)
     */
    public final LCByteBuffer putInt( int value ) throws IOException {
        final int origPos = m_position;
        putInt( m_position, value );
        //
        // It's possible that m_position might have been altered by the call to
        // an implementation of putInt(int,int) above. Therefore, to guarantee
        // that m_position is updated to the correct value, 4 is added to the
        // original position rather than doing m_position += 4.
        //
        m_position = origPos + 4;
        return this;
    }
    /**
     * Puts an <code>int</code> at the given position. The buffer's current
     * position is not changed.
     * <p>Note: the relative {@link #putInt(int)} defensively recomputes the
     * position in case an implementation moves it as a side effect.
     *
     * @param pos The position to put the integer at.
     * @param value The integer to put.
     * @return Returns this <code>LCByteBuffer</code>.
     * @throws IndexOutOfBoundsException if <tt>pos</tt> is negative or not
     * smaller than the buffer's limit - 3.
     * @see #putInt(int)
     */
    public abstract LCByteBuffer putInt( int pos, int value )
        throws IOException;
    /**
     * Puts a <code>long</code> at the buffer's current position and advances
     * the position by 8.
     *
     * @param value The long to put.
     * @return Returns this <code>LCByteBuffer</code>.
     * @throws IndexOutOfBoundsException if the current position is not smaller
     * than the buffer's limit - 7.
     * @see #putLong(int,long)
     */
    public final LCByteBuffer putLong( long value ) throws IOException {
        final int origPos = m_position;
        putLong( m_position, value );
        //
        // It's possible that m_position might have been altered by the call to
        // an implementation of putLong(int,long) above. Therefore, to
        // guarantee that m_position is updated to the correct value, 8 is
        // added to the original position rather than doing m_position += 8.
        //
        m_position = origPos + 8;
        return this;
    }
    /**
     * Puts a <code>long</code> at the given position. The buffer's current
     * position is not changed.
     * <p>Note: the relative {@link #putLong(long)} defensively recomputes the
     * position in case an implementation moves it as a side effect.
     *
     * @param pos The position to put the long at.
     * @param value The long to put.
     * @return Returns this <code>LCByteBuffer</code>.
     * @throws IndexOutOfBoundsException if <tt>pos</tt> is negative or not
     * smaller than the buffer's limit - 7.
     * @see #putLong(long)
     */
    public abstract LCByteBuffer putLong( int pos, long value )
        throws IOException;
    /**
     * Puts a <code>short</code> at the buffer's current position and advances
     * the position by 2.
     *
     * @param value The short to put.
     * @return Returns this <code>LCByteBuffer</code>.
     * @throws IndexOutOfBoundsException if the current position is not smaller
     * than the buffer's limit - 1.
     * @see #putShort(int,short)
     */
    public final LCByteBuffer putShort( short value ) throws IOException {
        final int origPos = m_position;
        putShort( m_position, value );
        //
        // It's possible that m_position might have been altered by the call to
        // an implementation of putShort(int,short) above. Therefore, to
        // guarantee that m_position is updated to the correct value, 2 is
        // added to the original position rather than doing m_position += 2.
        //
        m_position = origPos + 2;
        return this;
    }
    /**
     * Puts a <code>short</code> at the given position. The buffer's current
     * position is not changed.
     * <p>Note: the relative {@link #putShort(short)} defensively recomputes
     * the position in case an implementation moves it as a side effect.
     *
     * @param pos The position to put the short at.
     * @param value The short to put.
     * @return Returns this <code>LCByteBuffer</code>.
     * @throws IndexOutOfBoundsException if <tt>pos</tt> is negative or not
     * smaller than the buffer's limit - 1.
     * @see #putShort(short)
     */
    public abstract LCByteBuffer putShort( int pos, short value )
        throws IOException;
    /**
     * Gets the number of bytes remaining between the current position and the
     * buffer's limit, i.e., <code>limit() - position()</code>.
     *
     * @return Returns said number of bytes.
     */
    public final int remaining() {
        return m_limit - m_position;
    }
    /**
     * Skip a specified number of bytes, i.e., advance the buffer's position.
     * A negative count moves the position backwards.
     *
     * @param count The number of bytes to skip.
     * @throws IllegalArgumentException if the buffer's current position plus
     * the count is either negative or greater than the buffer's limit.
     * @return Returns this <code>LCByteBuffer</code>.
     */
    public LCByteBuffer skipBytes( int count ) {
        return position( m_position + count );
    }
    ////////// protected //////////////////////////////////////////////////////
    /**
     * Construct an <code>LCByteBuffer</code>. The limit is initialized to the
     * capacity and the position to 0.
     *
     * @param capacity The size of the buffer.
     */
    protected LCByteBuffer( int capacity ) {
        m_capacity = m_limit = capacity;
        m_position = 0;
    }
    ////////// private ////////////////////////////////////////////////////////
    /**
     * The buffer's capacity, i.e., its total size in bytes; fixed at
     * construction time.
     */
    private final int m_capacity;
    /**
     * This offset is added to all absolute positions for reading.
     */
    private int m_initialOffset;
    /**
     * The buffer's limit: the index of the first byte that must not be read.
     */
    private int m_limit;
    /**
     * The current position, i.e., the index of the next byte to be read or
     * written by the relative accessors.
     */
    private int m_position;
}
/* vim:set et sw=4 ts=4: */
| |
package com.cmendenhall.tests;
import com.cmendenhall.controllers.GameController;
import com.cmendenhall.mocks.MockExitManager;
import com.cmendenhall.mocks.MockTerminalView;
import com.cmendenhall.players.HumanPlayer;
import com.cmendenhall.players.MinimaxPlayer;
import com.cmendenhall.players.Player;
import com.cmendenhall.utils.StringLoader;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.NoSuchElementException;
import static com.cmendenhall.TicTacToeSymbols.O;
import static com.cmendenhall.TicTacToeSymbols.X;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Behavioral tests for GameController. Output is captured via the recorder
 * inherited from TicTacToeTest; user input is simulated by queueing strings
 * on MockTerminalView. Several tests deliberately run the controller until
 * the queued input is exhausted and make their assertions inside the
 * resulting NoSuchElementException handler.
 */
@RunWith(JUnit4.class)
public class GameControllerTest extends TicTacToeTest {
    // UI strings shared with the production view, loaded from the classpath.
    private HashMap<String, String> viewStrings = new StringLoader().getViewStrings("/viewstrings.properties");
    private MockExitManager mockExitManager;
    private MockTerminalView view;
    private GameController controller;
    // Wires a fresh controller (human X vs. minimax O) to mock collaborators
    // and discards the first line of recorded output.
    @Before
    public void setUp() throws Exception {
        startRecorder();
        mockExitManager = new MockExitManager();
        view = new MockTerminalView();
        controller = new GameController(view, mockExitManager);
        Player playerOne = new HumanPlayer(X);
        Player playerTwo = new MinimaxPlayer(O);
        controller.setPlayerOne(playerOne);
        controller.setPlayerTwo(playerTwo);
        recorder.popFirstOutput();
    }
    // newGame() should print the welcome banner followed by the divider.
    @Test
    public void controllerShouldStartNewGame() {
        startRecorder();
        controller.newGame();
        assertEquals(viewStrings.get("welcome"),
                     recorder.popFirstOutput());
        assertEquals(viewStrings.get("divider"),
                     recorder.popFirstOutput());
    }
    // Smoke test: checking an in-progress game must not blow up.
    @Test
    public void controllerShouldCheckForGameOverStates() {
        controller.checkForGameOver();
    }
    // A loaded, finished board plus a "n" (don't play again) reply should
    // terminate the game through the exit manager.
    @Test()
    public void controllerShouldLoadGames() {
        view.enqueueInput("n");
        controller.loadGame(TicTacToeTestHelper.noWins);
        controller.checkForGameOver();
        assertTrue(mockExitManager.exitWasCalled());
    }
    // Declining a restart: the last recorded output is the play-again prompt.
    @Test()
    public void controllerShouldEndGameOnRestartIfInputIsNo() {
        startRecorder();
        view.enqueueInput("n");
        controller.restartGame();
        String output = recorder.popLastOutput();
        assertEquals(viewStrings.get("playagain"), output);
    }
    // Accepting a restart starts a whole new game; the queued moves run out,
    // and the first recorded output is the play-again prompt.
    @Test
    public void controllerShouldStartNewGameOnRestartIfInputIsYes() {
        startRecorder();
        view.enqueueInput("y",
                          "3",
                          "h",
                          "h",
                          "top left",
                          "bottom left",
                          "top middle",
                          "bottom middle",
                          "top right",
                          "n");
        try {
            controller.restartGame();
        } catch (NoSuchElementException e) {
            assertEquals(viewStrings.get("playagain"),
                         recorder.popFirstOutput());
        }
    }
    // After a round: empty board, move prompt for X, then board with X placed.
    @Test
    public void controllerShouldHandleNextRound() {
        startRecorder();
        view.enqueueInput("middle center",
                          "top right",
                          "middle right",
                          "n");
        controller.newGame();
        try {
            controller.startGame();
        } catch (Exception e) {
            String expectedFirst = TicTacToeTestHelper.emptyBoard.toString();
            String expectedSecond = MessageFormat.format(viewStrings.get("yourmovethreesquares"), 2) + " X.";
            String expectedThird = TicTacToeTestHelper.xInCenter.toString();
            recorder.discardFirstNStrings(2);
            String outputFirst = recorder.popFirstOutput();
            String outputSecond = recorder.popFirstOutput();
            String outputThird = recorder.popFirstOutput();
            assertEquals(expectedFirst, outputFirst);
            assertEquals(expectedSecond, outputSecond);
            assertEquals(expectedThird, outputThird);
        }
    }
    // Unparseable board locations should produce the not-a-valid-location error.
    @Test
    public void controllerShouldPassErrorMessageToViewOnInvalidInput() {
        view.enqueueInput("invalid phrase");
        startRecorder();
        controller.newGame();
        try {
            controller.playRound();
        } catch (NoSuchElementException e) {
            recorder.discardFirstNStrings(3);
            String output = recorder.popFirstOutput();
            assertEquals("That's not a valid board location.", output);
        }
    }
    // Moving to an occupied square should produce the square-is-full error.
    @Test
    public void controllerShouldPassErrorMessageToViewOnInvalidMove() {
        view.enqueueInput("middle center", "middle center");
        controller.newGame();
        startRecorder();
        try {
            controller.playRound();
        } catch (NoSuchElementException e) {
            recorder.discardFirstNStrings(6);
            String output = recorder.popFirstOutput();
            assertEquals("Square is already full.", output);
        }
    }
    // A won board should produce the game-over + X-wins message.
    @Test
    public void controllerShouldPrintWinnerMessageAfterWin() {
        view.enqueueInput("n");
        controller.loadGame(TicTacToeTestHelper.playerXWins);
        startRecorder();
        controller.checkForGameOver();
        String expected = viewStrings.get("gameoverwin") + viewStrings.get("xwins");
        recorder.discardFirstNStrings(1);
        String output = recorder.popFirstOutput();
        assertEquals(expected, output);
    }
    // Setup input "c"/"h" should create a computer player one and human two.
    @Test
    public void controllerShouldLoadPlayers() {
        view.enqueueInput("3", "c", "h");
        try {
            controller.setUp();
        } catch (NoSuchElementException e) {
            Player playerOne = controller.getPlayerOne();
            Player playerTwo = controller.getPlayerTwo();
            assertEquals("MinimaxPlayer", playerOne.getClass().getSimpleName());
            assertEquals("HumanPlayer", playerTwo.getClass().getSimpleName());
        }
    }
    // Playing a full winning sequence should end the game via the exit manager.
    @Test()
    public void gameShouldEndOnWin() {
        controller.newGame();
        view.enqueueInput("3", "h", "h");
        controller.setUp();
        view.enqueueInput("middle center",
                          "top left",
                          "top right",
                          "middle left",
                          "lower right",
                          "lower left",
                          "n");
        try {
            controller.startGame();
        } catch (NoSuchElementException e) {
            assertTrue(mockExitManager.exitWasCalled());
        }
    }
    // Players injected via setters should be reported back by the getters.
    @Test
    public void playersShouldBeSettable() {
        controller = new GameController(view);
        controller.setPlayerOne(new HumanPlayer(X));
        controller.setPlayerTwo(new HumanPlayer(O));
        Player playerOne = controller.getPlayerOne();
        Player playerTwo = controller.getPlayerTwo();
        assertEquals("HumanPlayer", playerOne.getClass().getSimpleName());
        assertEquals("HumanPlayer", playerTwo.getClass().getSimpleName());
    }
    // setUp() should create players from the queued "h"/"c" answers.
    @Test
    public void setUpShouldSetPlayers() {
        controller = new GameController(view);
        view.enqueueInput("3", "h", "c");
        controller.setUp();
        Player playerOne = controller.getPlayerOne();
        Player playerTwo = controller.getPlayerTwo();
        assertEquals("HumanPlayer", playerOne.getClass().getSimpleName());
        assertEquals("MinimaxPlayer", playerTwo.getClass().getSimpleName());
    }
    // Invalid player-type answers should leave the choose-player prompt as the
    // last recorded output (it keeps re-prompting until input runs out).
    @Test
    public void setUpRepromptsForInputIfPlayerTypeIsInvalid() {
        controller = new GameController(view);
        view.enqueueInput("3", "z", "x", "f");
        startRecorder();
        try {
            controller.setUp();
        } catch (NoSuchElementException e) {
            String output = recorder.popLastOutput();
            assertEquals(viewStrings.get("chooseplayerone"), output);
        }
    }
    // The first setup prompt should ask for the board size.
    @Test
    public void controllerShouldPromptForBoardSizeDuringSetup() {
        controller = new GameController(view);
        view.enqueueInput("4");
        startRecorder();
        try {
            controller.setUp();
        } catch (NoSuchElementException e) {
            String output = recorder.popFirstOutput();
            assertEquals(viewStrings.get("boardsize"), output);
        }
    }
    // Non-numeric board-size input should cause a re-prompt for board size.
    @Test
    public void controllerShouldRepromptForBoardSizeAfterInvalidInput() {
        controller = new GameController(view);
        view.enqueueInput("Kindly create a board with five squares and five columns.");
        startRecorder();
        try {
            controller.setUp();
        } catch (NoSuchElementException e) {
            String output = recorder.popLastOutput();
            assertEquals(viewStrings.get("boardsize"), output);
        }
    }
    // 3x3 boards use the named-squares prompt; larger boards use the
    // coordinate prompt.
    @Test
    public void movePromptShouldHaveDifferentMessageBasedOnBoardSize() {
        controller = new GameController(view);
        view.enqueueInput("3", "h", "h");
        startRecorder();
        try {
            controller.setUp();
            controller.startGame();
        } catch (NoSuchElementException e) {
            String output = recorder.popLastOutput();
            String expected = MessageFormat.format(viewStrings.get("yourmovethreesquares"), 2) + " X.";
            assertEquals(expected, output);
        }
        view.clearInput();
        controller = new GameController(view);
        view.enqueueInput("4", "h", "h");
        startRecorder();
        try {
            controller.setUp();
            controller.startGame();
        } catch (NoSuchElementException e) {
            String output = recorder.popLastOutput();
            String expected = MessageFormat.format(viewStrings.get("yourmove"), 3) + " X.";
            assertEquals(expected, output);
        }
    }
}
| |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.event.Event;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.sql.SQLException;
import java.util.*;
/**
* Base Class supporting all DSpace Authority Model
* Objects. Provides Common Metadata Support to any
* Object that inherits this class. Ideally, this class
* is a model for providing metadata across all DSpace
 * Objects. Eventually this code may be pushed up into DSpaceObject
* and new tables created for Community, Collection, Bundle, Bitstream
* and so on.
*
* @author Lantian Gai, Mark Diggory
*/
public abstract class AuthorityObject extends DSpaceObject {
    // Sort-by constants accepted by findAll-style queries.
    public static final int ID = 0; // sort by ID
    public static final int NAME = 1; // sort by NAME (default)
    /** log4j logger */
    private static Logger log = Logger.getLogger(AuthorityObject.class);
    /** Our context */
    protected Context myContext;
    /** The row in the table representing this object */
    protected TableRow myRow;
    /** lists that need to be written out again */
    protected boolean modified = false;
    // True when the metadata value list has changed and needs persisting.
    private boolean metadataModified=false;
    // Lazily-populated cache of this object's metadata; see getMetadata().
    ArrayList<AuthorityMetadataValue> metadataValues;
    /**
     * Constructs an AuthorityObject wrapping an existing database row,
     * caches it in the context, and eagerly loads its metadata values.
     *
     * @param context the DSpace context this object belongs to
     * @param row the database row representing this object
     * @throws SQLException if loading the metadata fails
     */
    AuthorityObject(Context context, TableRow row) throws SQLException
    {
        myContext = context;
        myRow = row;
        // Cache ourselves
        context.cache(this, row.getIntColumn("id"));
        metadataValues = getMetadata();
        modified = false;
        clearDetails();
    }
    /**
     * Gets this object's identifier as stored in the "identifier" column.
     *
     * @return the identifier string, or <code>null</code> if the column is unset
     */
    public String getIdentifier()
    {
        return myRow.getStringColumn("identifier");
    }
    /**
     * Sets this object's identifier column and marks the object as modified
     * so the change is written out on the next update.
     *
     * @param identifier the new identifier value
     */
    protected void setIdentifier(String identifier)
    {
        myRow.setColumn("identifier", identifier);
        modified = true;
    }
public void updateLastModified() {
try {
Date lastModified = new java.sql.Timestamp(new Date().getTime());
myRow.setColumn("modified", lastModified);
DatabaseManager.updateQuery(myContext, "UPDATE "+getType()+" SET modified = ? WHERE id= ? ", lastModified, getID());
//Also fire a modified event since the item HAS been modified
//ourContext.addEvent(new Event(Event.MODIFY, Constants.ITEM, getID(), null));
} catch (SQLException e) {
log.error(LogManager.getHeader(myContext, "Error while updating modified timestamp", getType()+": " + getID()));
}
}
    /**
     * Used to identify the table that metadata is stored in for this
     * Authority Object.
     *
     * @return the name of the database table holding this object's metadata
     */
    public abstract String getMetadataTable();
public ArrayList<AuthorityMetadataValue> getMetadata()
{
try
{
if(metadataValues==null){
metadataValues = new ArrayList<AuthorityMetadataValue>();
// Get Dublin Core metadata
TableRowIterator tri = retrieveMetadata();
if (tri != null)
{
try
{
while (tri.hasNext())
{
TableRow resultRow = tri.next();
// Get the associated metadata field and schema information
int fieldID = resultRow.getIntColumn("field_id");
MetadataField field = MetadataField.find(myContext, fieldID);
if (field == null)
{
log.error("Loading - cannot find metadata field " + fieldID);
}
else
{
MetadataSchema schema = MetadataSchema.find(myContext, field.getSchemaID());
if (schema == null)
{
log.error("Loading - cannot find metadata schema " + field.getSchemaID() + ", field " + fieldID);
}
else
{
// Make a DCValue object
AuthorityMetadataValue dcv = new AuthorityMetadataValue(resultRow);
dcv.element = field.getElement();
//dcv.namespace = schema.getNamespace();
dcv.schema = schema.getName();
dcv.qualifier = field.getQualifier();
// Add it to the list
metadataValues.add(dcv);
}
}
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
{
tri.close();
}
}
}
}
return metadataValues;
}
catch (SQLException e)
{
log.error("Loading item - cannot load metadata");
}
return new ArrayList<AuthorityMetadataValue>();
}
/**
* Get metadata for the item in a chosen schema.
* See <code>MetadataSchema</code> for more information about schemas.
* Passing in a <code>null</code> value for <code>qualifier</code>
* or <code>lang</code> only matches metadata fields where that
* qualifier or languages is actually <code>null</code>.
* Passing in <code>Item.ANY</code>
* retrieves all metadata fields with any value for the qualifier or
* language, including <code>null</code>
* <P>
* Examples:
* <P>
* Return values of the unqualified "title" field, in any language.
* Qualified title fields (e.g. "title.uniform") are NOT returned:
* <P>
* <code>item.getMetadata("dc", "title", null, Item.ANY );</code>
* <P>
* Return all US English values of the "title" element, with any qualifier
* (including unqualified):
* <P>
* <code>item.getMetadata("dc, "title", Item.ANY, "en_US" );</code>
* <P>
* The ordering of values of a particular element/qualifier/language
* combination is significant. When retrieving with wildcards, values of a
* particular element/qualifier/language combinations will be adjacent, but
* the overall ordering of the combinations is indeterminate.
*
* @param schema
* the schema for the metadata field. <em>Must</em> match
* the <code>name</code> of an existing metadata schema.
* @param element
* the element name. <code>Item.ANY</code> matches any
* element. <code>null</code> doesn't really make sense as all
* metadata must have an element.
* @param qualifier
* the qualifier. <code>null</code> means unqualified, and
* <code>Item.ANY</code> means any qualifier (including
* unqualified.)
* @param lang
* the ISO639 language code, optionally followed by an underscore
* and the ISO3166 country code. <code>null</code> means only
* values with no language are returned, and
* <code>Item.ANY</code> means values with any country code or
* no country code are returned.
* @return metadata fields that match the parameters
*/
public AuthorityMetadataValue[] getMetadata(String schema, String element, String qualifier,
String lang)
{
// Build up list of matching values
List<AuthorityMetadataValue> values = new ArrayList<AuthorityMetadataValue>();
ArrayList<AuthorityMetadataValue> amv = getMetadata();
for (AuthorityMetadataValue dcv : amv)
{
if (match(schema, element, qualifier, lang, dcv))
{
// We will return a copy of the object in case it is altered
AuthorityMetadataValue copy = new AuthorityMetadataValue(this.getMetadataTable());
copy.element = dcv.element;
copy.qualifier = dcv.qualifier;
copy.value = dcv.value;
copy.language = dcv.language;
copy.schema = dcv.schema;
copy.authority = dcv.authority;
copy.confidence = dcv.confidence;
values.add(copy);
}
}
// Create an array of matching values
AuthorityMetadataValue[] valueArray = new AuthorityMetadataValue[values.size()];
valueArray = (AuthorityMetadataValue[]) values.toArray(valueArray);
return valueArray;
}
    /**
     * Queries this object's metadata table for all values attached to it,
     * ordered by field ID.
     *
     * @return a TableRowIterator over the metadata rows, or <code>null</code>
     * if this object has not been assigned a positive ID yet
     * @throws SQLException if a database error occurs
     */
    TableRowIterator retrieveMetadata() throws SQLException
    {
        if (getID() > 0)
        {
            return DatabaseManager.queryTable(myContext, this.getMetadataTable() ,
                "SELECT * FROM " + this.getMetadataTable() + " WHERE parent_id= ? ORDER BY field_id",
                getID());
        }
        return null;
    }
public void clearMetadata(String schema, String element, String qualifier,
String lang)
{
// We will build a list of values NOT matching the values to clear
ArrayList<AuthorityMetadataValue> values = new ArrayList<AuthorityMetadataValue>();
for (AuthorityMetadataValue dcv : getMetadata())
{
boolean match = match(schema, element, qualifier, lang, dcv);
if (!match)
{
values.add(dcv);
}
}
// Now swap the old list of values for the new, unremoved values
setMetadata(values);
metadataModified = true;
myContext.addEvent(new Event(Event.MODIFY_METADATA, getType(), getID(), null));
}
    /**
     * Replaces the in-memory metadata value list and marks the metadata as
     * modified so it will be written out on the next update.
     *
     * @param metadata the new list of metadata values
     */
    private void setMetadata(ArrayList<AuthorityMetadataValue> metadata)
    {
        metadataValues =metadata;
        metadataModified = true;
    }
    /**
     * Tests whether a metadata value matches the given schema, element,
     * qualifier and language. <code>Item.ANY</code> is a wildcard for each
     * component; a <code>null</code> qualifier or language matches only
     * values whose qualifier/language is actually <code>null</code>.
     *
     * @param schema the schema name, or <code>Item.ANY</code>
     * @param element the element name, or <code>Item.ANY</code>
     * @param qualifier the qualifier, <code>null</code>, or <code>Item.ANY</code>
     * @param language the language, <code>null</code>, or <code>Item.ANY</code>
     * @param dcv the metadata value to test
     * @return <code>true</code> if the value matches all four components
     */
    private boolean match(String schema, String element, String qualifier,
            String language, AuthorityMetadataValue dcv)
    {
        // We will attempt to disprove a match - if we can't we have a match
        if (!element.equals(Item.ANY) && !element.equals(dcv.element))
        {
            // Elements do not match, no wildcard
            return false;
        }
        if (qualifier == null)
        {
            // Value must be unqualified
            if (dcv.qualifier != null)
            {
                // Value is qualified, so no match
                return false;
            }
        }
        else if (!qualifier.equals(Item.ANY))
        {
            // Not a wildcard, so qualifier must match exactly
            if (!qualifier.equals(dcv.qualifier))
            {
                return false;
            }
        }
        if (language == null)
        {
            // Value must be null language to match
            if (dcv.language != null)
            {
                // Value has a language, so no match
                return false;
            }
        }
        else if (!language.equals(Item.ANY))
        {
            // Not a wildcard, so language must match exactly
            if (!language.equals(dcv.language))
            {
                return false;
            }
        }
        if (!schema.equals(Item.ANY))
        {
            if (dcv.schema != null && !dcv.schema.equals(schema))
            {
                // The namespace doesn't match
                return false;
            }
        }
        // If we get this far, we have a match
        return true;
    }
/**
 * Add one in-memory metadata value per entry of {@code values}.
 * Values are trimmed and have ISO control characters (other than tab, LF, CR)
 * replaced by spaces. Registry validity is NOT checked here; that happens in
 * update().
 *
 * @param schema      metadata schema name.
 * @param element     metadata element name.
 * @param qualifier   qualifier, or null for unqualified.
 * @param lang        language code (trimmed), or null.
 * @param values      the values to add; null entries are stored as null.
 * @param authorities per-value authority keys, or null.
 * @param confidences per-value confidence codes, or null.
 * @throws IllegalArgumentException when the field is authority-required but
 *         no authority key was supplied for a value.
 */
public void addMetadata(String schema, String element, String qualifier, String lang,
        String[] values, String authorities[], int confidences[])
{
    MetadataAuthorityManager mam = MetadataAuthorityManager.getManager();
    boolean authorityControlled = mam.isAuthorityControlled(schema, element, qualifier);
    boolean authorityRequired = mam.isAuthorityRequired(schema, element, qualifier);
    String fieldName = schema + "." + element + ((qualifier == null) ? "" : "." + qualifier);

    // We will not verify that they are valid entries in the registry
    // until update() is called.
    for (int i = 0; i < values.length; i++)
    {
        AuthorityMetadataValue dcv = new AuthorityMetadataValue(this.getMetadataTable());
        dcv.schema = schema;
        dcv.element = element;
        dcv.qualifier = qualifier;
        dcv.language = (lang == null ? null : lang.trim());

        // Normalize the value BEFORE the authority check below, so the error
        // message can actually report it (previously dcv.value was still null
        // at that point): trim, then replace ISO control characters other than
        // tab, LF and CR with spaces.
        if (values[i] != null)
        {
            char[] dcvalue = values[i].trim().toCharArray();
            for (int charPos = 0; charPos < dcvalue.length; charPos++)
            {
                char c = dcvalue[charPos];
                if (Character.isISOControl(c) && c != '\u0009' && c != '\n' && c != '\r')
                {
                    dcvalue[charPos] = ' ';
                }
            }
            dcv.value = String.valueOf(dcvalue);
        }
        else
        {
            dcv.value = null;
        }

        // Logic to set Authority and Confidence:
        //  - normalize an empty string for authority to NULL.
        //  - if authority key is present, use given confidence or NOVALUE if not given
        //  - otherwise, preserve confidence if meaningful value was given since it may document a failed authority lookup
        //  - CF_UNSET signifies no authority nor meaningful confidence.
        //  - it's possible to have empty authority & CF_ACCEPTED if e.g. user deletes authority key
        if (authorityControlled)
        {
            if (authorities != null && authorities[i] != null && authorities[i].length() > 0)
            {
                dcv.authority = authorities[i];
                dcv.confidence = confidences == null ? Choices.CF_NOVALUE : confidences[i];
            }
            else
            {
                dcv.authority = null;
                dcv.confidence = confidences == null ? Choices.CF_UNSET : confidences[i];
            }
            // authority sanity check: if authority is required, was it supplied?
            // XXX FIXME? can't throw a "real" exception here without changing all the callers to expect it, so use a runtime exception
            if (authorityRequired && (dcv.authority == null || dcv.authority.length() == 0))
            {
                // Fixed typo ("Vaue") and now reports the actual value.
                throw new IllegalArgumentException("The metadata field \"" + fieldName
                        + "\" requires an authority key but none was provided. Value=\""
                        + dcv.value + "\"");
            }
        }

        metadataValues.add(dcv);
        addDetails(fieldName);
    }

    if (values.length > 0)
    {
        metadataModified = true;
    }
}
/**
 * Convenience overload: add a single metadata value.
 * Delegates to the array-based variant with one-element arrays.
 *
 * @param schema     metadata schema name.
 * @param element    metadata element name.
 * @param qualifier  qualifier, or null.
 * @param lang       language code, or null.
 * @param value      the value to add.
 * @param authority  authority key, or null.
 * @param confidence confidence code.
 */
public void addMetadata(String schema, String element, String qualifier,
        String lang, String value, String authority, int confidence)
{
    addMetadata(schema, element, qualifier, lang,
            new String[] { value },
            new String[] { authority },
            new int[] { confidence });
}
// Lazily-populated cache of every metadata field in the registry.
private transient MetadataField[] allMetadataFields = null;

/**
 * Resolve the registry MetadataField matching a value's schema, element and
 * qualifier.
 *
 * @param dcv the metadata value to resolve.
 * @return the matching field, or null when none exists (or the registry
 *         could not be loaded).
 * @throws SQLException on database error.
 * @throws AuthorizeException on authorization error.
 */
protected MetadataField getMetadataField(AuthorityMetadataValue dcv) throws SQLException, AuthorizeException
{
    // Load the registry on first use.
    if (allMetadataFields == null)
    {
        allMetadataFields = MetadataField.findAll(myContext);
    }
    if (allMetadataFields == null)
    {
        return null;
    }
    int schemaID = getMetadataSchemaID(dcv);
    for (MetadataField candidate : allMetadataFields)
    {
        if (candidate.getSchemaID() == schemaID
                && StringUtils.equals(candidate.getElement(), dcv.element)
                && StringUtils.equals(candidate.getQualifier(), dcv.qualifier))
        {
            return candidate;
        }
    }
    return null;
}
/**
 * Look up the schema ID for a metadata value's schema name.
 * Falls back to the Dublin Core schema when the name is unknown.
 *
 * @param dcv the metadata value whose schema to resolve.
 * @return the schema ID, or MetadataSchema.DC_SCHEMA_ID when not found.
 * @throws SQLException on database error.
 */
private int getMetadataSchemaID(AuthorityMetadataValue dcv) throws SQLException
{
    MetadataSchema schema = MetadataSchema.find(myContext, dcv.schema);
    return (schema == null) ? MetadataSchema.DC_SCHEMA_ID : schema.getSchemaID();
}
// Per-field occurrence counter used by update() to assign "place" ordinals to
// repeated values of the same element[.qualifier].
// NOTE(review): this is an instance field and is never cleared, so place
// numbers keep growing across successive update() calls — confirm intended.
Map<String,Integer> elementCount = new HashMap<String,Integer>();
/**
 * Update the scheme - writing out scheme object and Concept list if necessary
 */
public void update() throws SQLException, AuthorizeException
{
    if(metadataModified)
    {
        metadataModified = false;
        // Arrays to store the working information required
        int[] placeNum = new int[getMetadata().size()];
        boolean[] storedDC = new boolean[getMetadata().size()];
        MetadataField[] dcFields = new MetadataField[getMetadata().size()];
        // Work out the place numbers for the in memory DC
        for (int dcIdx = 0; dcIdx < getMetadata().size(); dcIdx++)
        {
            AuthorityMetadataValue dcv = getMetadata().get(dcIdx);
            // Work out the place number for ordering
            int current = 0;
            // Key into map is "element" or "element.qualifier"
            String key = dcv.element + ((dcv.qualifier == null) ? "" : ("." + dcv.qualifier));
            Integer currentInteger = elementCount.get(key);
            if (currentInteger != null)
            {
                current = currentInteger.intValue();
            }
            current++;
            elementCount.put(key, Integer.valueOf(current));
            // Store the calculated place number, reset the stored flag, and cache the metadatafield
            placeNum[dcIdx] = current;
            storedDC[dcIdx] = false;
            dcFields[dcIdx] = getMetadataField(dcv);
            if (dcFields[dcIdx] == null)
            {
                // Bad DC field, log and throw exception
                log.warn(LogManager
                        .getHeader(myContext, "bad_dc",
                                "Bad DC field. schema=" + dcv.schema
                                        + ", element: \""
                                        + ((dcv.element == null) ? "null"
                                        : dcv.element)
                                        + "\" qualifier: \""
                                        + ((dcv.qualifier == null) ? "null"
                                        : dcv.qualifier)
                                        + "\" value: \""
                                        + ((dcv.value == null) ? "null"
                                        : dcv.value) + "\""));
                throw new SQLException("bad_dublin_core "
                        + "schema="+dcv.schema+", "
                        + dcv.element
                        + " " + dcv.qualifier);
            }
        }
        // Now the precalculations are done, iterate through the existing metadata
        // looking for matches
        TableRowIterator tri = retrieveMetadata();
        if (tri != null)
        {
            try
            {
                while (tri.hasNext())
                {
                    TableRow tr = tri.next();
                    // Assume that we will remove this row, unless we get a match
                    boolean removeRow = true;
                    // Go through the in-memory metadata, unless we've already decided to keep this row
                    for (int dcIdx = 0; dcIdx < getMetadata().size() && removeRow; dcIdx++)
                    {
                        // Only process if this metadata has not already been matched to something in the DB
                        if (!storedDC[dcIdx])
                        {
                            boolean matched = true;
                            AuthorityMetadataValue dcv = getMetadata().get(dcIdx);
                            // Check the metadata field is the same
                            if (matched && dcFields[dcIdx].getFieldID() != tr.getIntColumn("field_id"))
                            {
                                matched = false;
                            }
                            // Check the place is the same
                            if (matched && placeNum[dcIdx] != tr.getIntColumn("place"))
                            {
                                matched = false;
                            }
                            // Check the text is the same
                            if (matched)
                            {
                                String text = tr.getStringColumn("text_value");
                                if (dcv.value == null && text == null)
                                {
                                    matched = true;
                                }
                                else if (dcv.value != null && dcv.value.equals(text))
                                {
                                    matched = true;
                                }
                                else
                                {
                                    matched = false;
                                }
                            }
                            // Check the language is the same
                            if (matched)
                            {
                                String lang = tr.getStringColumn("text_lang");
                                if (dcv.language == null && lang == null)
                                {
                                    matched = true;
                                }
                                else if (dcv.language != null && dcv.language.equals(lang))
                                {
                                    matched = true;
                                }
                                else
                                {
                                    matched = false;
                                }
                            }
                            // check that authority and confidence match
                            // NOTE(review): '&&' binds tighter than '||' here, so the
                            // confidence comparison only applies to the non-null-authority
                            // branch; two rows with null authorities "match" regardless of
                            // confidence. Looks like a precedence bug — confirm before fixing.
                            if (matched)
                            {
                                String auth = tr.getStringColumn("authority");
                                int conf = tr.getIntColumn("confidence");
                                if (!((dcv.authority == null && auth == null) ||
                                        (dcv.authority != null && auth != null && dcv.authority.equals(auth))
                                                && dcv.confidence == conf))
                                {
                                    matched = false;
                                }
                            }
                            // If the db record is identical to the in memory values
                            if (matched)
                            {
                                // Flag that the metadata is already in the DB
                                storedDC[dcIdx] = true;
                                // Flag that we are not going to remove the row
                                removeRow = false;
                            }
                        }
                    }
                    // If after processing all the metadata values, we didn't find a match
                    // delete this row from the DB
                    if (removeRow)
                    {
                        DatabaseManager.delete(myContext, tr);
                        metadataModified = true;
                        modified = true;
                    }
                }
            }
            finally
            {
                tri.close();
            }
        }
        // Add missing in-memory DC
        for (int dcIdx = 0; dcIdx < getMetadata().size(); dcIdx++)
        {
            // Only write values that are not already in the db
            if (!storedDC[dcIdx])
            {
                AuthorityMetadataValue dcv = getMetadata().get(dcIdx);
                // Write DCValue
                AuthorityMetadataValue metadata = new AuthorityMetadataValue(this.getMetadataTable());
                metadata.setParentId(getID());
                metadata.setFieldId(dcFields[dcIdx].getFieldID());
                metadata.setValue(dcv.value);
                metadata.setLanguage(dcv.language);
                metadata.setPlace(placeNum[dcIdx]);
                metadata.setAuthority(dcv.authority);
                metadata.setConfidence(dcv.confidence);
                metadata.create(myContext);
                metadataModified = true;
                modified = true;
            }
        }
        // NOTE(review): metadataModified is forced false here, so the
        // "if (metadataModified || modified)" branch below is effectively
        // driven by 'modified' alone and its inner metadataModified block is
        // unreachable from this path — confirm whether that is intended.
        metadataModified=false;
        myContext.addEvent(new Event(Event.MODIFY_METADATA, this.getType(), getID(), null));
    }
    if (metadataModified || modified)
    {
        // Set the last modified date
        myRow.setColumn("modified", new Date());
        DatabaseManager.update(myContext, myRow);
        if (metadataModified)
        {
            myContext.addEvent(new Event(Event.MODIFY_METADATA, this.getType(), getID(), getDetails()));
            clearDetails();
            metadataModified = false;
        }
        myContext.addEvent(new Event(Event.MODIFY, this.getType(), getID(), null));
        modified = false;
    }
}
/**
 * Find an authority object by type constant and database ID.
 *
 * @param context the current context.
 * @param type    one of Constants.SCHEME, Constants.CONCEPT, Constants.TERM.
 * @param id      the database ID.
 * @return the matching object, or null for an unrecognized type.
 * @throws SQLException on database error.
 */
public static AuthorityObject find(Context context, int type, int id)
        throws SQLException
{
    switch (type)
    {
        case Constants.SCHEME:
            return Scheme.find(context, id);
        case Constants.CONCEPT:
            return Concept.find(context, id);
        case Constants.TERM:
            return Term.find(context, id);
        default:
            return null;
    }
}
/**
 * Generate a new opaque identifier: a random UUID rendered as 32 hex
 * characters with the dashes stripped.
 *
 * @return a 32-character hexadecimal identifier.
 */
public static String createIdentifier()
{
    String uuid = UUID.randomUUID().toString();
    return uuid.replace("-", "");
}
/**
 * Compute the MD5 digest of the input as a zero-padded, 32-character
 * upper-case hexadecimal string.
 *
 * @param input the string to hash.
 * @return 32 upper-case hex digits.
 * @throws RuntimeException if the MD5 algorithm is unavailable (should never
 *         happen on a conforming JRE).
 */
public static String hash(String input) {
    try {
        MessageDigest m = MessageDigest.getInstance("MD5");
        // Fixed: the no-arg getBytes() uses the platform default charset, so
        // non-ASCII input hashed differently on differently-configured hosts.
        // NOTE(review): if hashes were already persisted on a non-UTF-8
        // platform, this changes them for non-ASCII input — verify migration.
        byte[] data = input.getBytes(java.nio.charset.StandardCharsets.UTF_8);
        m.update(data, 0, data.length);
        BigInteger i = new BigInteger(1, m.digest());
        // %032X left-pads with zeros so leading zero bytes are not lost.
        return String.format("%1$032X", i);
    } catch (NoSuchAlgorithmException e) {
        log.error(e.getMessage(), e);
        throw new RuntimeException(e.getMessage(), e);
    }
}
}
| |
/*
* Copyright 2014 Uwe Trottmann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.battlelancer.seriesguide.ui;
import android.app.ActionBar;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.database.Cursor;
import android.database.sqlite.SQLiteException;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.preference.PreferenceManager;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import com.battlelancer.seriesguide.R;
import com.battlelancer.seriesguide.provider.SeriesGuideContract.Shows;
import com.battlelancer.seriesguide.provider.SeriesGuideDatabase;
import com.battlelancer.seriesguide.sync.SgSyncAdapter;
import com.battlelancer.seriesguide.util.TaskManager;
import com.battlelancer.seriesguide.util.Utils;
import com.uwetrottmann.androidutils.AndroidUtils;
import java.io.File;
import java.io.IOException;
import timber.log.Timber;
/**
 * Allows to back up or restore the show database to external storage.
 */
public class BackupDeleteActivity extends BaseActivity {

    private static final String TAG = "Backup";

    // Dialog ids used with showDialog()/onCreateDialog().
    private static final int EXPORT_DIALOG = 0;
    private static final int IMPORT_DIALOG = 1;
    private static final int EXPORT_PROGRESS = 3;
    private static final int IMPORT_PROGRESS = 4;

    // The currently running export/import task, if any.
    private AsyncTask<Void, Void, String> mTask;

    // Shared progress dialog for both export and import.
    private ProgressDialog mProgressDialog;

    @Override
    public void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_backup);
        setupActionBar();
        setupViews();
    }

    /** Configure the action bar: up navigation plus the "backup" title. */
    private void setupActionBar() {
        final ActionBar actionBar = getActionBar();
        actionBar.setDisplayHomeAsUpEnabled(true);
        actionBar.setTitle(getString(R.string.backup));
        actionBar.setDisplayShowTitleEnabled(true);
    }

    /** Wire the export/import buttons and show the backup path and DB version. */
    private void setupViews() {
        Button exportDbToSdButton = (Button) findViewById(R.id.ButtonExportDBtoSD);
        exportDbToSdButton.setOnClickListener(new OnClickListener() {
            public void onClick(final View v) {
                showDialog(EXPORT_DIALOG);
            }
        });
        Button importDbFromSdButton = (Button) findViewById(R.id.ButtonImportDBfromSD);
        importDbFromSdButton.setOnClickListener(new OnClickListener() {
            public void onClick(final View v) {
                showDialog(IMPORT_DIALOG);
            }
        });
        // display backup path
        TextView backuppath = (TextView) findViewById(R.id.textViewBackupPath);
        String path = getBackupFolder().toString();
        backuppath.setText(getString(R.string.backup_path) + ": " + path);
        // display current db version
        TextView dbVersion = (TextView) findViewById(R.id.textViewBackupDatabaseVersion);
        dbVersion.setText(getString(R.string.backup_version) + ": "
                + SeriesGuideDatabase.DATABASE_VERSION);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Abort any in-flight backup/restore so it cannot leak the activity.
        onCancelTasks();
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (item.getItemId() == android.R.id.home) {
            super.onBackPressed();
            return true;
        }
        // Fixed: delegate unhandled items to the framework instead of
        // unconditionally returning false.
        return super.onOptionsItemSelected(item);
    }

    /** Cancel the running task, if any, and drop the reference. */
    private void onCancelTasks() {
        if (mTask != null && mTask.getStatus() != AsyncTask.Status.FINISHED) {
            mTask.cancel(true);
        }
        mTask = null;
    }

    /** Backup directory "seriesguidebackup" on external storage. */
    private File getBackupFolder() {
        return new File(Environment.getExternalStorageDirectory(), "seriesguidebackup");
    }

    /** Copies the database file to the backup folder on external storage. */
    private class ExportDatabaseTask extends AsyncTask<Void, Void, String> {
        // can use UI thread here
        @Override
        protected void onPreExecute() {
            showDialog(EXPORT_PROGRESS);
        }

        // automatically done on worker thread (separate from UI thread)
        @Override
        protected String doInBackground(final Void... args) {
            TaskManager tm = TaskManager.getInstance(BackupDeleteActivity.this);
            // Refuse to copy while a sync or add task may be writing the DB.
            if (SgSyncAdapter.isSyncActive(BackupDeleteActivity.this, false)
                    || tm.isAddTaskRunning()) {
                return getString(R.string.update_inprogress);
            }

            File dbFile = getApplication().getDatabasePath(SeriesGuideDatabase.DATABASE_NAME);
            File exportDir = getBackupFolder();
            if (!exportDir.exists()) {
                exportDir.mkdirs();
            }
            File file = new File(exportDir, dbFile.getName());

            if (isCancelled()) {
                return null;
            }

            String errorMsg = null;
            try {
                file.createNewFile();
                AndroidUtils.copyFile(dbFile, file);
            } catch (IOException e) {
                Timber.e(e, "Creating backup failed");
                errorMsg = e.getMessage();
            }
            // null signals success; otherwise a message shown to the user.
            return errorMsg;
        }

        // can use UI thread here
        @Override
        protected void onPostExecute(final String errorMsg) {
            if (mProgressDialog != null && mProgressDialog.isShowing()) {
                mProgressDialog.dismiss();
            }
            if (errorMsg == null) {
                Toast.makeText(BackupDeleteActivity.this, getString(R.string.backup_success),
                        Toast.LENGTH_SHORT).show();
                Utils.trackCustomEvent(BackupDeleteActivity.this, TAG, "Backup", "Success");
            } else {
                Toast.makeText(BackupDeleteActivity.this,
                        getString(R.string.backup_failed) + " - " + errorMsg, Toast.LENGTH_LONG)
                        .show();
                Utils.trackCustomEvent(BackupDeleteActivity.this, TAG, "Backup", "Failure");
            }
            setResult(RESULT_OK);
            finish();
        }
    }

    /** Replaces the database file with the backed-up copy, then sanity-checks it. */
    private class ImportDatabaseTask extends AsyncTask<Void, Void, String> {
        @Override
        protected void onPreExecute() {
            showDialog(IMPORT_PROGRESS);
        }

        // could pass the params used here in AsyncTask<String, Void, String> -
        // but not being re-used
        @Override
        protected String doInBackground(final Void... args) {
            TaskManager tm = TaskManager.getInstance(BackupDeleteActivity.this);
            // Refuse to restore while a sync or add task may be using the DB.
            if (SgSyncAdapter.isSyncActive(BackupDeleteActivity.this, false)
                    || tm.isAddTaskRunning()) {
                return getString(R.string.update_inprogress);
            }

            File dbBackupFile = new File(getBackupFolder(), "seriesdatabase");
            if (!dbBackupFile.exists()) {
                return getString(R.string.import_failed_nofile);
            } else if (!dbBackupFile.canRead()) {
                return getString(R.string.import_failed_noread);
            }

            if (isCancelled()) {
                return null;
            }

            File dbFile = getApplication().getDatabasePath(SeriesGuideDatabase.DATABASE_NAME);
            // Drop the live DB before copying the backup into its place.
            getApplication().deleteDatabase(SeriesGuideDatabase.DATABASE_NAME);
            try {
                dbFile.createNewFile();
                AndroidUtils.copyFile(dbBackupFile, dbFile);

                PreferenceManager.getDefaultSharedPreferences(getApplicationContext()).edit()
                        .putBoolean(SeriesGuidePreferences.KEY_DATABASEIMPORTED, true).commit();
                getContentResolver().notifyChange(Shows.CONTENT_URI, null);

                // wait a little for the new db to settle in
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    Timber.e(e, "Failed to sleep");
                }

                // tell user something might have gone wrong if there are no
                // shows in the database right now
                try {
                    final Cursor shows = getContentResolver().query(Shows.CONTENT_URI,
                            new String[] {
                                    Shows._ID
                            }, null, null, null
                    );
                    if (shows != null) {
                        try {
                            if (shows.getCount() == 0) {
                                return getString(R.string.dbupgradefailed);
                            }
                        } finally {
                            // Fixed: the cursor was leaked when the early
                            // return above fired (count == 0).
                            shows.close();
                        }
                    }
                } catch (SQLiteException e) {
                    Timber.e(e, "Failed to import backup");
                    return e.getMessage();
                }

                return null;
            } catch (IOException e) {
                Timber.e(e, "Failed to import backup");
                return e.getMessage();
            }
        }

        @Override
        protected void onPostExecute(final String errMsg) {
            if (mProgressDialog != null && mProgressDialog.isShowing()) {
                mProgressDialog.dismiss();
            }
            if (errMsg == null) {
                Toast.makeText(BackupDeleteActivity.this, getString(R.string.import_success),
                        Toast.LENGTH_SHORT).show();
                Utils.trackCustomEvent(BackupDeleteActivity.this, TAG, "Import", "Success");
            } else {
                Toast.makeText(BackupDeleteActivity.this,
                        getString(R.string.import_failed) + " - " + errMsg, Toast.LENGTH_LONG)
                        .show();
                Utils.trackCustomEvent(BackupDeleteActivity.this, TAG, "Import", "Failure");
            }
            setResult(RESULT_OK);
            finish();
        }
    }

    @Override
    protected Dialog onCreateDialog(int id) {
        switch (id) {
            case EXPORT_DIALOG:
                return new AlertDialog.Builder(BackupDeleteActivity.this)
                        .setMessage(getString(R.string.backup_question))
                        .setPositiveButton(getString(R.string.backup_button),
                                new DialogInterface.OnClickListener() {
                                    public void onClick(DialogInterface arg0, int arg1) {
                                        if (isExternalStorageAvailable(
                                                R.string.backup_failed_nosd)) {
                                            mTask = new ExportDatabaseTask();
                                            Utils.executeInOrder(mTask);
                                        }
                                    }
                                }
                        ).setNegativeButton(getString(R.string.backup_no), null).create();
            case IMPORT_DIALOG:
                return new AlertDialog.Builder(BackupDeleteActivity.this)
                        .setMessage(getString(R.string.import_question))
                        .setPositiveButton(getString(R.string.import_button),
                                new DialogInterface.OnClickListener() {
                                    public void onClick(DialogInterface arg0, int arg1) {
                                        if (isExternalStorageAvailable(
                                                R.string.import_failed_nosd)) {
                                            mTask = new ImportDatabaseTask();
                                            Utils.executeInOrder(mTask);
                                        }
                                    }
                                }
                        ).setNegativeButton(getString(R.string.import_no), null).create();
            case EXPORT_PROGRESS:
                return getProgressDialog(R.string.backup_inprogress);
            case IMPORT_PROGRESS:
                return getProgressDialog(R.string.import_inprogress);
        }
        return null;
    }

    /** Lazily create the shared progress dialog and set its message. */
    private ProgressDialog getProgressDialog(int messageId) {
        if (mProgressDialog == null) {
            mProgressDialog = new ProgressDialog(BackupDeleteActivity.this);
        }
        mProgressDialog.setMessage(getString(messageId));
        return mProgressDialog;
    }

    /**
     * Check whether external storage is mounted; if not, show the given error
     * message as a toast.
     *
     * @param errorMessageID string resource shown when storage is unavailable.
     * @return true when external storage is available.
     */
    private boolean isExternalStorageAvailable(int errorMessageID) {
        boolean extStorageAvailable = AndroidUtils.isExtStorageAvailable();
        if (!extStorageAvailable) {
            Toast.makeText(BackupDeleteActivity.this, getString(errorMessageID), Toast.LENGTH_LONG)
                    .show();
        }
        return extStorageAvailable;
    }
}
| |
// HTMLParser Library $Name: v1_5 $ - A java-based parser for HTML
// http://sourceforge.org/projects/htmlparser
// Copyright (C) 2003 Derrick Oswald
//
// Revision Control Information
//
// $Source: /cvsroot/htmlparser/htmlparser/src/org/htmlparser/lexerapplications/thumbelina/ThumbelinaFrame.java,v $
// $Author: derrickoswald $
// $Date: 2004/09/02 02:28:14 $
// $Revision: 1.4 $
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
package org.htmlparser.lexerapplications.thumbelina;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Rectangle;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.prefs.BackingStoreException;
import java.util.prefs.Preferences;
import javax.swing.ImageIcon;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JSeparator;
import org.htmlparser.lexer.Lexer;
/**
* Encapsulate a Thumbelina bean and add menu and preferences support.
* Provides a JFrame base in which to place a Thumbelina bean, and
* adds a menu system with MRU (Most Recently Used) list.
* Also provides a Google search capability.
* Will eventually provide Javahelp too.
*/
public class ThumbelinaFrame
extends
JFrame
implements
WindowListener,
ActionListener,
ItemListener,
PropertyChangeListener
{
/**
 * Window title.
 */
private static final String TITLE = "Thumbelina";
/**
 * Preference name for frame location and size.
 */
private static final String FRAMESIZE = "FrameSize";
/**
 * Percent of screen to leave as border when no preferences available.
 */
private static final int BORDERPERCENT = 5;
/**
 * Preference name for most recently used count.
 */
private static final String MRULENGTH = "MRULength";
/**
 * Preference name for most recently used maximum count.
 */
private static final String MRUMAX = "MRUMax";
/**
 * Preference prefix for most recently used list items.
 */
private static final String MRUPREFIX = "MRUListItem";
/**
 * Preference name for google query.
 */
private static final String GOOGLEQUERY = "GoogleQuery";
/**
 * Default google query when no preferences are available.
 */
private static final String DEFAULTGOOGLEQUERY = "thumbs";
/**
 * List of URLs to prime the MRU list with.
 */
private static final String[] DEFAULTMRULIST =
{
    "www.a2zcds.com",
    "www.stoneschool.com/Japan/",
    "www.tommys-bookmarks.com",
    "www.unitedmedia.com/comics/dilbert/archive",
    "www.pastelartists.com",
};
/**
 * Send Mozilla headers in request if <code>true</code>.
 */
private static final boolean USE_MOZILLA_HEADERS = false;
/**
 * Preference name for status bar visibility state.
 */
private static final String STATUSBARSTATE = "StatusBarVisible";
/**
 * Preference name for history list visibility state.
 */
private static final String HISTORYLISTSTATE = "HistoryListVisible";
/**
 * Preference name for sequencer active state.
 */
private static final String SEQUENCERACTIVE = "SequencerActive";
/**
 * Preference name for background thread active state.
 */
private static final String BACKGROUNDTHREADACTIVE =
    "BackgroundThreadActive";
/**
 * Preference name for sequencer display speed.
 */
private static final String DISPLAYSPEED = "DisplaySpeed";
/**
 * Main menu.
 */
protected JMenuBar mMenu;
/**
 * URL submenu.
 */
protected JMenu mURL;
/**
 * Open menu item.
 */
protected JMenuItem mOpen;
/**
 * Google menu item.
 */
protected JMenuItem mGoogle;
/**
 * MRU list separator #1.
 */
protected JSeparator mSeparator1;
/**
 * MRU list separator #2.
 */
protected JSeparator mSeparator2;
/**
 * Exit menu item.
 */
protected JMenuItem mExit;
/**
 * View submenu.
 */
protected JMenu mView;
/**
 * Status bar visible menu item.
 */
protected JCheckBoxMenuItem mStatusVisible;
/**
 * History list visible menu item.
 */
protected JCheckBoxMenuItem mHistoryVisible;
/**
 * Command menu.
 */
protected JMenu mCommand;
/**
 * Reset menu item.
 */
protected JMenuItem mReset;
/**
 * Clear menu item.
 */
protected JMenuItem mClear;
/**
 * Help submenu.
 */
protected JMenu mHelp;
/**
 * About menu item.
 */
protected JMenuItem mAbout;
/**
 * Construct a new Thumbelina frame with an idle Thumbelina.
 */
public ThumbelinaFrame ()
{
    // Delegate to the main constructor with a freshly created bean.
    this (new Thumbelina ());
}
/**
 * Construct a new Thumbelina frame with a Thumbelina primed with one URL.
 * @param url The URL to prime the Thumbelina with.
 * @exception MalformedURLException If the given string doesn't represent
 * a valid url.
 */
public ThumbelinaFrame (final String url)
    throws
        MalformedURLException
{
    this (new Thumbelina (url));
}
/**
 * Construct a new Thumbelina frame with a Thumbelina primed with one URL.
 * @param url The URL to prime the Thumbelina with.
 */
public ThumbelinaFrame (final URL url)
{
    this (new Thumbelina (url));
}
/**
 * Construct a new Thumbelina frame with a given Thumbelina.
 * All other constructors delegate here. Sets up the window, menu,
 * persisted size and preference-backed state, in that order.
 * @param thumbelina The Thumbelina to encapsulate.
 */
public ThumbelinaFrame (final Thumbelina thumbelina)
{
    setTitle (TITLE);
    // Listen for bean property changes (used to keep the menu in sync).
    thumbelina.addPropertyChangeListener (this);
    // The bean is added as component 0; getThumbelina() relies on this.
    getContentPane ().add (thumbelina, BorderLayout.CENTER);
    addWindowListener (this);
    makeMenu ();
    setJMenuBar (mMenu);
    // Restore size before state so preference-driven layout applies cleanly.
    restoreSize ();
    initState ();
    updateMenu ();
}
/**
 * Access the Thumbelina object contained in the frame.
 * @return The Thumbelina bean.
 */
public Thumbelina getThumbelina ()
{
    // The bean is the only component ever added to the content pane (index 0).
    Component component = getContentPane ().getComponent (0);
    return ((Thumbelina)component);
}
/**
 * Initialize the user preferences.
 * Reads from the existing user preferences,
 * or initializes values from the bean directly if they don't exist.
 * Sets the state of the view checkboxes to match.
 */
public void initState ()
{
    Preferences prefs;

    prefs = Preferences.userNodeForPackage (getClass ());
    // First run (no MRU length stored yet): seed the MRU list with defaults.
    if (-1 == prefs.getInt (MRULENGTH, -1))
        for (int i = 0; i < DEFAULTMRULIST.length; i++)
            updateMRU (DEFAULTMRULIST[i]);
    // For each setting: prefer the stored value, falling back to the bean's
    // current value, then mirror the result into the corresponding checkbox.
    getThumbelina ().setStatusBarVisible (
        prefs.getBoolean (STATUSBARSTATE,
            getThumbelina ().getStatusBarVisible ()));
    mStatusVisible.setSelected (getThumbelina ().getStatusBarVisible ());
    getThumbelina ().setHistoryListVisible (
        prefs.getBoolean (HISTORYLISTSTATE,
            getThumbelina ().getHistoryListVisible ()));
    mHistoryVisible.setSelected (getThumbelina ().getHistoryListVisible ());
    getThumbelina ().setSequencerActive (
        prefs.getBoolean (SEQUENCERACTIVE,
            getThumbelina ().getSequencerActive ()));
    getThumbelina ().setBackgroundThreadActive (
        prefs.getBoolean (BACKGROUNDTHREADACTIVE,
            getThumbelina ().getBackgroundThreadActive ()));
    getThumbelina ().setSpeed (
        prefs.getInt (DISPLAYSPEED, getThumbelina ().getSpeed ()));
}
/**
 * Saves the current settings in the user preferences.
 * By default this writes to the thumbelina subdirectory under
 * .java in the users home directory.
 */
public void saveState ()
{
    Preferences prefs = Preferences.userNodeForPackage (getClass ());
    // don't save size unless we're in normal state
    if (NORMAL == getExtendedState ())
        prefs.put (FRAMESIZE, toString (getBounds ()));
    // Persist the bean's view/behaviour settings.
    Thumbelina bean = getThumbelina ();
    prefs.putBoolean (STATUSBARSTATE, bean.getStatusBarVisible ());
    prefs.putBoolean (HISTORYLISTSTATE, bean.getHistoryListVisible ());
    prefs.putBoolean (SEQUENCERACTIVE, bean.getSequencerActive ());
    prefs.putBoolean (BACKGROUNDTHREADACTIVE, bean.getBackgroundThreadActive ());
    prefs.putInt (DISPLAYSPEED, bean.getSpeed ());
    try
    {
        // Force the values out to the backing store now.
        prefs.flush ();
    }
    catch (BackingStoreException bse)
    {
        bse.printStackTrace ();
    }
}
/**
 * Sets the frame size if no previous preference has been stored.
 * It creates a window covering all but <code>BORDERPERCENT</code>
 * margins.
 */
public void initSize ()
{
    // Leave a BORDERPERCENT margin of the screen size on every side.
    Dimension screen = getToolkit ().getScreenSize ();
    int marginx = screen.width * BORDERPERCENT / 100;
    int marginy = screen.height * BORDERPERCENT / 100;
    setBounds (
        marginx,
        marginy,
        screen.width - (2 * marginx),
        screen.height - (2 * marginy));
}
/**
 * Restores the window size based on stored preferences.
 * If no preferences exist, it calls <code>initSize()</code>.
 */
public void restoreSize ()
{
    Preferences prefs = Preferences.userNodeForPackage (getClass ());
    String size = prefs.get (FRAMESIZE, "");
    // No stored bounds: fall back to the computed default size.
    if ("".equals (size))
    {
        initSize ();
        return;
    }
    try
    {
        Rectangle bounds = fromString (size);
        // Only honour stored bounds that still make sense for this screen.
        if (rational (bounds))
            setBounds (bounds.x, bounds.y, bounds.width, bounds.height);
        else
            initSize ();
    }
    catch (IllegalArgumentException iae)
    {
        // Malformed preference value; use the computed default instead.
        initSize ();
    }
}
/**
 * Converts the rectangle to a string.
 * The rectangle is converted into a string that is of the form
 * <pre>
 * [x,y,width,height].
 * </pre>
 * @return The string equivalent of the rectangle.
 * @param r The rectangle containing the window position and size,
 * as returned by <code>getBounds()</code>.
 */
protected String toString (final Rectangle r)
{
    // Build "[x,y,width,height]" — the inverse of fromString().
    StringBuilder buffer = new StringBuilder ();
    buffer.append ("[");
    buffer.append (r.x).append (",");
    buffer.append (r.y).append (",");
    buffer.append (r.width).append (",");
    buffer.append (r.height);
    buffer.append ("]");
    return (buffer.toString ());
}
/**
 * Convert the given string to a valid rectangle.
 * The string is converted to a Rectangle.
 * @param value The value to parse.
 * @exception IllegalArgumentException if the format does not match the
 * form "[x,y,width,height]" with all values integers.
 * @return Returns the rectangle extracted from the string.
 */
protected Rectangle fromString (final String value)
    throws
        IllegalArgumentException
{
    String guts;
    int current;
    int[] values;
    int index;
    Rectangle ret;

    try
    {
        // parse "[x,y,width,height]"
        if (value.startsWith ("[") && value.endsWith ("]"))
        {
            // Strip the brackets and append a trailing comma so each of the
            // four fields is terminated uniformly.
            guts = value.substring (1, value.length () - 1) + ",";
            current = 0;
            values = new int[4];
            for (int i = 0; i < 4; i++)
            {
                index = guts.indexOf (",", current);
                if (-1 == index)
                    throw new IllegalArgumentException (
                        "invalid format \"" + value + "\"");
                else
                {
                    values[i] = Integer.parseInt (
                        guts.substring (current, index));
                    current = index + 1;
                }
            }
            ret = new Rectangle (
                values[0], values[1], values[2], values[3]);
        }
        else
            throw new IllegalArgumentException (
                "invalid format \"" + value + "\"");
    }
    catch (NumberFormatException nfe)
    {
        // Fixed: preserve the NumberFormatException as the cause instead of
        // discarding its stack trace.
        throw new IllegalArgumentException (nfe.getMessage (), nfe);
    }

    return (ret);
}
/**
 * Check if the rectangle represents a valid screen position and size.
 * @param r The rectangle to check.
 * @return <code>true</code> if this could be a valid frame bounds.
 */
private boolean rational (final Rectangle r)
{
    Dimension screen = getToolkit ().getScreenSize ();
    // All elements must be not stupid w.r.t. the screen size; we assume that
    // means no more than 10% off screen on the left, right and bottom sides.
    boolean fitsLeft = r.x >= r.width / -10;
    boolean fitsTop = r.y >= 0;
    boolean positiveSize = (r.width > 0) && (r.height > 0);
    boolean fitsRight = r.x + r.width <= screen.width + r.width / 10;
    boolean fitsBottom = r.y + r.height <= screen.height + r.height / 10;
    return (fitsLeft && fitsTop && positiveSize && fitsRight && fitsBottom);
}
/**
* Create the menu.
* Initializes the menu and adds it to the frame.
*/
public void makeMenu ()
{
mMenu = new JMenuBar ();
mURL = new JMenu ();
mOpen = new JMenuItem ();
mGoogle = new JMenuItem ();
mSeparator1 = new JSeparator ();
mSeparator2 = new JSeparator ();
mExit = new JMenuItem ();
mView = new JMenu ();
mStatusVisible = new JCheckBoxMenuItem ();
mHistoryVisible = new JCheckBoxMenuItem ();
mHelp = new JMenu ();
mAbout = new JMenuItem ();
mCommand = new JMenu ();
mReset = new JMenuItem ();
mClear = new JMenuItem ();
mURL.setMnemonic ('U');
mURL.setText ("URL");
mOpen.setMnemonic ('O');
mOpen.setText ("Open");
mOpen.setToolTipText ("Open a URL.");
mURL.add (mOpen);
mGoogle.setMnemonic ('G');
mGoogle.setText ("Google");
mGoogle.setToolTipText ("Search Google.");
mURL.add (mGoogle);
mURL.add (mSeparator1);
mURL.add (mSeparator2);
mExit.setMnemonic ('E');
mExit.setText ("Exit");
mExit.setToolTipText ("Quit Thumbelina.");
mURL.add (mExit);
mMenu.add (mURL);
mView.setMnemonic ('V');
mView.setText ("View");
mStatusVisible.setMnemonic ('S');
mStatusVisible.setSelected (getThumbelina ().getStatusBarVisible ());
mStatusVisible.setText ("Status Bar");
mStatusVisible.setToolTipText ("Show/Hide the status bar.");
mView.add (mStatusVisible);
mHistoryVisible.setMnemonic ('H');
mHistoryVisible.setSelected (getThumbelina ().getHistoryListVisible ());
mHistoryVisible.setText ("History List");
mHistoryVisible.setToolTipText ("Show/Hide the history list.");
mView.add (mHistoryVisible);
mMenu.add (mView);
mCommand.setMnemonic ('C');
mCommand.setText ("Command");
mReset.setMnemonic ('R');
mReset.setText ("Reset");
mReset.setToolTipText ("Reset Thumbelina.");
mClear.setMnemonic ('L');
mClear.setText ("Clear");
mClear.setToolTipText ("Clear display.");
mCommand.add (mReset);
mCommand.add (mClear);
mCommand.add (mHelp);
mMenu.add (mCommand);
mHelp.setMnemonic ('H');
mHelp.setText ("Help");
mAbout.setMnemonic ('A');
mAbout.setText ("About");
mAbout.setToolTipText ("Information about Thumbelina.");
mHelp.add (mAbout);
mMenu.add (mHelp);
mOpen.addActionListener (this);
mGoogle.addActionListener (this);
mExit.addActionListener (this);
mStatusVisible.addItemListener (this);
mHistoryVisible.addItemListener (this);
mReset.addActionListener (this);
mClear.addActionListener (this);
mAbout.addActionListener (this);
}
/**
* Adjusts the menu, by inserting the current MRU list.
* Removes the old MRU (Most Recently Used) items and inserts new
* ones betweeen the two separators.
*/
public void updateMenu ()
{
Preferences prefs;
int start;
int end;
Component component;
JMenuItem item;
int count;
String string;
prefs = Preferences.userNodeForPackage (getClass ());
start = -1;
end = -1;
for (int i = 0; i < mURL.getItemCount (); i++)
{
component = mURL.getMenuComponent (i);
if (component == mSeparator1)
start = i + 1;
else if (component == mSeparator2)
end = i;
}
if ((-1 != start) && (-1 != end))
{
for (int i = start; i < end; i++)
mURL.remove (start);
count = prefs.getInt (MRULENGTH, 0);
for (int i = 0; i < count; i++)
{
string = prefs.get (MRUPREFIX + i, "");
if (!"".equals (string))
{
item = new JMenuItem ();
item.setActionCommand (string);
if (string.length () > 40)
string = string.substring (0, 38) + "...";
item.setText (string);
item.addActionListener (this);
mURL.add (item, start++);
}
}
}
}
    //
    // WindowListener interface
    //
    /**
     * Invoked the first time a window is made visible.
     * <i>Not used.</i>
     * @param event The window event.
     */
    public void windowOpened (final WindowEvent event)
    {
    }
    /**
     * Handles window closing event.
     * Delegates to <code>exit()</code>, which saves state and terminates
     * the application.
     * @param event The window event.
     */
    public void windowClosing (final WindowEvent event)
    {
        exit ();
    }
    /**
     * Invoked when a window has been closed as the result
     * of calling dispose on the window.
     * <i>Not used.</i>
     * @param event The window event.
     */
    public void windowClosed (final WindowEvent event)
    {
    }
    /**
     * Invoked when a window is changed from a normal to a
     * minimized state. For many platforms, a minimized window
     * is displayed as the icon specified in the window's
     * iconImage property.
     * <i>Not used.</i>
     * @param event The window event.
     */
    public void windowIconified (final WindowEvent event)
    {
    }
    /**
     * Invoked when a window is changed from a minimized
     * to a normal state.
     * <i>Not used.</i>
     * @param event The window event.
     */
    public void windowDeiconified (final WindowEvent event)
    {
    }
    /**
     * Invoked when the window is set to be the user's
     * active window, which means the window (or one of its
     * subcomponents) will receive keyboard events.
     * <i>Not used.</i>
     * @param event The window event.
     */
    public void windowActivated (final WindowEvent event)
    {
    }
    /**
     * Invoked when a window is no longer the user's active
     * window, which means that keyboard events will no longer
     * be delivered to the window or its subcomponents.
     * <i>Not used.</i>
     * @param event The window event.
     */
    public void windowDeactivated (final WindowEvent event)
    {
    }
//
// ActionListener interface
//
/**
* Handles events from the menu.
* Based on the action of the event, executes the necessary subroutine.
* @param actionEvent The event describing the user action.
*/
public void actionPerformed (final ActionEvent actionEvent)
{
String action;
action = actionEvent.getActionCommand ();
if (action.equals ("Open"))
open ();
else if (action.equals ("Google"))
googlesearch ();
else if (action.equals ("Reset"))
getThumbelina ().reset ();
else if (action.equals ("Clear"))
getThumbelina ().getPicturePanel ().reset ();
else if (action.equals ("About"))
about ();
else if (action.equals ("Exit"))
exit ();
else
{
// must be a URL from the most recently used list
getThumbelina ().open (action);
updateMRU (action);
updateMenu ();
}
}
//
// ItemListener interface
//
/**
* Handles selections on the view state checkboxes.
* @param event The event describing the checkbox affected.
*/
public void itemStateChanged (final ItemEvent event)
{
Object source;
boolean visible;
source = event.getItemSelectable ();
visible = ItemEvent.SELECTED == event.getStateChange ();
if (source == mStatusVisible)
getThumbelina ().setStatusBarVisible (visible);
else if (source == mHistoryVisible)
getThumbelina ().setHistoryListVisible (visible);
}
//
// PropertyChangeListener
//
/**
* Handle a property change.
* @param event The property old and new values.
*/
public void propertyChange (final PropertyChangeEvent event)
{
String url;
if (event.getPropertyName ().equals (
Thumbelina.PROP_CURRENT_URL_PROPERTY))
{
url = (String)event.getNewValue ();
if (null == url)
setTitle ("Thumbelina");
else
setTitle ("Thumbelina - " + url);
}
}
/**
* Updates the user preferences based on the most recently used list.
* @param url The URL that is to be placed at the top of the MRU list.
*/
public void updateMRU (String url)
{
Preferences prefs;
int count;
ArrayList list;
String string;
int max;
if (url.startsWith ("http://"))
url = url.substring (7);
prefs = Preferences.userNodeForPackage (getClass ());
count = prefs.getInt (MRULENGTH, -1);
list = new ArrayList ();
for (int i = 0; i < count; i++)
{
string = prefs.get (MRUPREFIX + i, "");
if (!"".equals (string) && !url.equalsIgnoreCase (string))
list.add (string);
}
list.add (0, url);
max = prefs.getInt (MRUMAX, -1);
if (-1 == max)
max = 8;
while (list.size () > max)
list.remove (max);
prefs.putInt (MRULENGTH, list.size ());
prefs.putInt (MRUMAX, max);
for (int i = 0; i < list.size (); i++)
prefs.put (MRUPREFIX + i, (String)list.get (i));
try
{
prefs.flush ();
}
catch (BackingStoreException bse)
{
bse.printStackTrace ();
}
}
/**
* Opens a user specified URL.
*/
public void open ()
{
String result;
result = JOptionPane.showInputDialog (
this,
"Enter the URL:",
"Open URL",
JOptionPane.PLAIN_MESSAGE);
if (null != result)
{
getThumbelina ().open (result);
updateMRU (result);
updateMenu ();
}
}
/**
* Query google via user specified keywords and queue results.
* Asks the user for keywords, and then submits them as input to the
* usual google form:
* <pre>
* <form action="/search" name=f>
* <span id=hf></span>
* <table cellspacing=0 cellpadding=0>
* <tr valign=middle>
* <td width=75> </td>
* <td align=center>
* <input maxLength=256 size=55 name=q value="">
* <input type=hidden name=ie value="UTF-8">
* <input type=hidden name=oe value="UTF-8">
* <input name=hl type=hidden value=en><br>
* <input type=submit value="Google Search" name=btnG>
* <input type=submit value="I'm Feeling Lucky" name=btnI>
* </td>
* <td valign=top nowrap><font size=-2>
* • <a href=/advanced_search?hl=en>Advanced Search</a>
* <br> • <a href=/preferences?hl=en>Preferences</a>
* <br> • <a href=/language_tools?hl=en>Language Tools</a>
* </font>
* </td>
* </tr>
* <tr>
* <td colspan=3 align=center><font size=-1>
* Search: <input id=all type=radio name=meta value="" checked>
* <label for=all> the web</label>
* <input id=cty type=radio name=meta value="cr=countryCA" >
* <label for=cty>pages from Canada</label>
* </font>
* </td>
* </tr>
* </table>
* </form>
* </pre>
* Creates a query of the form:
* <pre>
* http://www.google.ca/search?hl=en&ie=UTF-8&oe=UTF-8&q=thumbs&btnG=Google+Search&meta=
* </pre>
*/
public void googlesearch ()
{
Preferences prefs;
String query;
String terms;
StringBuffer buffer;
HttpURLConnection connection;
URL url;
Lexer lexer;
URL[][] results;
prefs = Preferences.userNodeForPackage (getClass ());
query = prefs.get (GOOGLEQUERY, DEFAULTGOOGLEQUERY);
try
{
query = (String)JOptionPane.showInputDialog (
this,
"Enter the search term:",
"Search Google",
JOptionPane.PLAIN_MESSAGE,
null,
null,
query);
if (null != query)
{
// replace spaces with +
terms = query.replace (' ', '+');
buffer = new StringBuffer (1024);
buffer.append ("http://www.google.ca/search?");
buffer.append ("q=");
buffer.append (terms);
buffer.append ("&ie=UTF-8");
buffer.append ("&oe=UTF-8");
buffer.append ("&hl=en");
buffer.append ("&btnG=Google+Search");
buffer.append ("&meta=");
url = new URL (buffer.toString ());
connection = (HttpURLConnection)url.openConnection ();
if (USE_MOZILLA_HEADERS)
{
// These are the Mozilla header fields:
//Accept: text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,video/x-mng,image/png,image/jpeg,image/gif;q=0.2,text/css,*/*;q=0.1
//Accept-Language: en-us, en;q=0.50
//Connection: keep-alive
//Host: grc.com
//Referer: https://grc.com/x/ne.dll?bh0bkyd2
//User-Agent: Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2.1) Gecko/20030225
//Content-Length: 27
//Content-Type: application/x-www-form-urlencoded
//Accept-Encoding: gzip, deflate, compress;q=0.9
//Accept-Charset: ISO-8859-1, utf-8;q=0.66, *;q=0.66
//Keep-Alive: 300
connection.setRequestProperty ("Referer", "http://www.google.ca");
connection.setRequestProperty ("Accept", "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,video/x-mng,image/png,image/jpeg,image/gif;q=0.2,text/css,*/*;q=0.1");
connection.setRequestProperty ("Accept-Language", "en-us, en;q=0.50");
connection.setRequestProperty ("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.2.1) Gecko/20030225");
connection.setRequestProperty ("Accept-Charset", "ISO-8859-1, utf-8;q=0.66, *;q=0.66");
}
else
{
// These are the IE header fields:
//Accept: image/gif, image/x-xbitmap, image/jpeg, image/pjpeg, application/vnd.ms-excel, application/vnd.ms-powerpoint, application/msword, */*
//Accept-Language: en-ca
//Connection: Keep-Alive
//Host: grc.com
//User-Agent: Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; trieste; .NET CLR 1.1.4322; .NET CLR 1.0.3705)
//Content-Length: 32
//Content-Type: application/x-www-form-urlencoded
//Accept-Encoding: gzip, deflate
//Cache-Control: no-cache
connection.setRequestProperty ("Accept", "image/gif, image/x-xbitmap, image/jpeg, image/pjpeg, application/vnd.ms-excel, application/vnd.ms-powerpoint, application/msword, */*");
connection.setRequestProperty ("Accept-Language", "en-ca");
connection.setRequestProperty ("User-Agent", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; trieste; .NET CLR 1.1.4322; .NET CLR 1.0.3705)");
}
connection.setDoOutput (true);
connection.setDoInput (true);
connection.setUseCaches (false);
lexer = new Lexer (connection);
results = getThumbelina ().extractImageLinks (lexer, url);
// add 'em
getThumbelina ().reset ();
// remove google links, not just append (results[1]);
for (int i = 0; i < results[1].length; i++)
{
String found = results[1][i].toExternalForm ();
if (-1 == found.indexOf ("google"))
getThumbelina ().append (results[1][i]);
}
prefs.put (GOOGLEQUERY, query);
try
{
prefs.flush ();
}
catch (BackingStoreException bse)
{
bse.printStackTrace ();
}
}
}
catch (Exception e)
{
System.out.println (e.getMessage ());
}
}
/**
* Display information about Thumbelina.
*/
public void about ()
{
URL url;
try
{
url = new URL ("http://sourceforge.net/sflogo.php?group_id=24399");
}
catch (MalformedURLException murle)
{
url = null;
}
JOptionPane.showMessageDialog (
this,
"Scan and display the images behind thumbnails.\n"
+ "\n"
+ "An example application using the HTML Parser project.\n"
+ "Visit http://htmlparser.sourceforge.org for the latest\n"
+ "version and source code.\n",
"Thumbelina - About",
JOptionPane.PLAIN_MESSAGE,
new ImageIcon (url));
}
    /**
     * Exits the application.
     * Saves user preferences (via <code>saveState()</code>) before
     * terminating the JVM with exit code 0.
     */
    public void exit ()
    {
        saveState ();
        System.exit (0);
    }
/**
* Alternate mainline for Thumbelina.
* Similar code exists in the Thumbelina class, but this version doesn't
* worry about java version.
* @param args The command line arguments.
* Optionally, arg[0] can be the URL to preload the Thumeblina bean with.
*/
public static void main (final String[] args)
{
String url;
ThumbelinaFrame thumbelina;
System.setProperty ("sun.net.client.defaultReadTimeout", "7000");
System.setProperty ("sun.net.client.defaultConnectTimeout", "7000");
url = null;
if (0 != args.length)
if (args[0].equalsIgnoreCase ("help")
|| args[0].equalsIgnoreCase ("-help")
|| args[0].equalsIgnoreCase ("-h")
|| args[0].equalsIgnoreCase ("?")
|| args[0].equalsIgnoreCase ("-?"))
Thumbelina.help ();
else
url = args[0];
try
{
thumbelina = new ThumbelinaFrame (url);
thumbelina.setVisible (true);
}
catch (MalformedURLException murle)
{
System.err.println (murle.getMessage ());
Thumbelina.help ();
}
}
}
| |
package br.com.starcode.jerichoselector;
import static br.com.starcode.jerichoselector.jerQuery.$;
import static org.junit.Assert.*;
import java.util.List;
import net.htmlparser.jericho.Element;
import net.htmlparser.jericho.Source;
import org.junit.Before;
import org.junit.Test;
/**
 * Exercises the CSS pseudo-class selectors (:root, :nth-child,
 * :nth-of-type, :first/last/only-child, :empty, ...) of the jerQuery
 * facade against the test-02.html fixture.
 * NOTE(review): the expected counts, tag names and attribute values in
 * these assertions encode the structure of test-02.html (e.g. a body
 * with 7 child elements ending in a button); keep them in sync with
 * the fixture.
 */
public class TestPseudoSelectors {
    // parsed fixture, rebuilt before each test
    Source source;
    @Before
    public void setup() throws Exception {
        source = new Source(getClass().getResourceAsStream("test-02.html"));
    }
    // :root matches the single document root element
    @Test
    public void rootSelector() throws Exception {
        List<Element> elements = $(source, ":root").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("html", elements.get(0).getName());
    }
    // :nth-child(n) is 1-based and counts among all element siblings
    @Test
    public void nthChildSelector() throws Exception {
        List<Element> elements = $(source, "body h1:nth-child(1)").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("h1", elements.get(0).getName());
        elements = $(source, "body > *").getSelectedElements();
        assertEquals(7, elements.size());
        elements = $(source, "body > *:nth-child(7)").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("button", elements.get(0).getName());
        elements = $(source, "html:nth-child(1)").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("html", elements.get(0).getName());
    }
    // zero, empty, negative and out-of-range arguments match nothing
    @Test
    public void nthChildSelectorNoResults() throws Exception {
        List<Element> elements = $(source, "body:nth-child(0)").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "body:nth-child()").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "body:nth-child(-1)").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "body:nth-child(6)").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "html:nth-child()").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "html:nth-child(2)").getSelectedElements();
        assertEquals(0, elements.size());
    }
    // :nth-last-child counts from the last sibling backwards
    @Test
    public void nthLastChildSelector() throws Exception {
        List<Element> elements = $(source, "body > *:nth-last-child(1)").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("button", elements.get(0).getName());
        elements = $(source, "body > *:nth-last-child(7)").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("h1", elements.get(0).getName());
        elements = $(source, "html:nth-last-child(1)").getSelectedElements();
        assertEquals("html", elements.get(0).getName());
    }
    @Test
    public void nthLastChildSelectorNoResults() throws Exception {
        List<Element> elements = $(source, "body:nth-last-child(0)").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "body:nth-last-child()").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "body:nth-last-child(-1)").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "body:nth-last-child(6)").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "html:nth-last-child()").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "html:nth-last-child(2)").getSelectedElements();
        assertEquals(0, elements.size());
    }
    // :nth-of-type counts only siblings with the same tag name
    @Test
    public void nthTypeSelector() throws Exception {
        List<Element> elements = $(source, "body > p:nth-of-type(1)").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("my-text", elements.get(0).getAttributeValue("class"));
        elements = $(source, "body p:nth-of-type(2)").getSelectedElements();
        assertEquals("my-text-2", elements.get(0).getAttributeValue("class"));
        elements = $(source, "html:nth-of-type(1)").getSelectedElements();
        assertEquals("html", elements.get(0).getName());
        elements = $(source, "body:nth-of-type()").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "body:nth-of-type(2)").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "html:nth-of-type()").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "html:nth-of-type(2)").getSelectedElements();
        assertEquals(0, elements.size());
    }
    // :nth-last-of-type counts same-tag siblings from the end
    @Test
    public void nthLastTypeSelector() throws Exception {
        List<Element> elements = $(source, "body > p:nth-last-of-type(1)").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("my-text-2", elements.get(0).getAttributeValue("class"));
        elements = $(source, "body p:nth-last-of-type(2)").getSelectedElements();
        assertEquals("my-text", elements.get(0).getAttributeValue("class"));
        elements = $(source, "html:nth-last-of-type(1)").getSelectedElements();
        assertEquals("html", elements.get(0).getName());
        elements = $(source, "body:nth-last-of-type()").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "body:nth-last-of-type(2)").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "html:nth-last-of-type()").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "html:nth-last-of-type(2)").getSelectedElements();
        assertEquals(0, elements.size());
    }
    // :first-child is equivalent to :nth-child(1)
    @Test
    public void firstChild() throws Exception {
        List<Element> elements = $(source, "body h1:first-child").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("h1", elements.get(0).getName());
        elements = $(source, "button p:first-child").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("p", elements.get(0).getName());
        assertNull(elements.get(0).getAttributeValue("id"));
        elements = $(source, "html:first-child").getSelectedElements();
        assertEquals("html", elements.get(0).getName());
    }
    // :last-child is equivalent to :nth-last-child(1)
    @Test
    public void lastChild() throws Exception {
        List<Element> elements = $(source, "body button:last-child").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("button", elements.get(0).getName());
        elements = $(source, "button :last-child").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("p", elements.get(0).getName());
        assertEquals("p2", elements.get(0).getAttributeValue("id"));
        elements = $(source, "html:last-child").getSelectedElements();
        assertEquals("html", elements.get(0).getName());
    }
    @Test
    public void firstType() throws Exception {
        List<Element> elements = $(source, "body h1:first-of-type").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("h1", elements.get(0).getName());
        elements = $(source, "body > p:first-of-type").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("p", elements.get(0).getName());
        assertEquals("my-text", elements.get(0).getAttributeValue("class"));
        elements = $(source, "html:first-of-type").getSelectedElements();
        assertEquals("html", elements.get(0).getName());
    }
    // :last-of-type can match several elements, one per parent
    @Test
    public void lastType() throws Exception {
        List<Element> elements = $(source, "body button:last-of-type").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("button", elements.get(0).getName());
        elements = $(source, "body > p:last-of-type").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("p", elements.get(0).getName());
        assertEquals("my-text-2", elements.get(0).getAttributeValue("class"));
        elements = $(source, "body p:last-of-type").getSelectedElements();
        assertEquals(2, elements.size());
        assertEquals("p", elements.get(0).getName());
        assertEquals("p2", elements.get(1).getAttributeValue("id"));
        elements = $(source, "html:last-of-type").getSelectedElements();
        assertEquals("html", elements.get(0).getName());
    }
    // :only-child requires the element to have no element siblings
    @Test
    public void onlyChild() throws Exception {
        List<Element> elements = $(source, "title:only-child").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("title", elements.get(0).getName());
        elements = $(source, "button p:only-child").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "html:only-child").getSelectedElements();
        assertEquals(1, elements.size());
    }
    // :only-of-type requires no siblings with the same tag name
    @Test
    public void onlyType() throws Exception {
        List<Element> elements = $(source, "title:only-of-type").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("title", elements.get(0).getName());
        elements = $(source, "body > button:only-of-type").getSelectedElements();
        assertEquals(1, elements.size());
        assertEquals("button", elements.get(0).getName());
        elements = $(source, "button p:only-of-type").getSelectedElements();
        assertEquals(0, elements.size());
        elements = $(source, "html:only-of-type").getSelectedElements();
        assertEquals(1, elements.size());
    }
    // :empty matches elements with no children and no text content
    @Test
    public void empty() throws Exception {
        List<Element> elements = $(source, ":empty").getSelectedElements();
        assertEquals(3, elements.size());
        assertEquals("input", elements.get(0).getName());
        assertEquals("div", elements.get(1).getName());
        assertEquals("img", elements.get(2).getName());
    }
}
| |
/*
* Copyright 2014 BitPOS Pty Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bushstar.kobocoinj.store;
import com.bushstar.kobocoinj.core.*;
import com.bushstar.kobocoinj.script.Script;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.sql.*;
import java.util.*;
/**
 * <p>A full pruned block store using the Postgres database engine. As an added bonus an address index is calculated,
 * so you can use {@link #calculateBalanceForAddress(com.bushstar.kobocoinj.core.Address)} to quickly look up
 * the quantity of kobocoins controlled by that address.</p>
 */
public class PostgresFullPrunedBlockStore implements FullPrunedBlockStore {
    private static final Logger log = LoggerFactory.getLogger(PostgresFullPrunedBlockStore.class);
    // SQLSTATE reported by Postgres on a unique-constraint violation
    private static final String POSTGRES_DUPLICATE_KEY_ERROR_CODE = "23505";
    // cached chain head and (separately tracked) fully-verified chain head,
    // kept in memory so reads avoid a database round trip
    private Sha256Hash chainHeadHash;
    private StoredBlock chainHeadBlock;
    private Sha256Hash verifiedChainHeadHash;
    private StoredBlock verifiedChainHeadBlock;
    private NetworkParameters params;
    // per-thread JDBC connection; every connection handed out is also
    // remembered in allConnections so close() can dispose of them
    private ThreadLocal<Connection> conn;
    private List<Connection> allConnections;
    private String connectionURL;
    // number of blocks of history stored in full (undo data) before pruning
    private int fullStoreDepth;
    private String username;
    private String password;
    private static final String driver = "org.postgresql.Driver";
    // DDL for the key/value settings table
    private static final String CREATE_SETTINGS_TABLE = "CREATE TABLE settings (\n" +
            "    name character varying(32) NOT NULL,\n" +
            "    value bytea\n" +
            ");";
    // well-known keys within the settings table
    private static final String CHAIN_HEAD_SETTING = "chainhead";
    private static final String VERIFIED_CHAIN_HEAD_SETTING = "verifiedchainhead";
    private static final String VERSION_SETTING = "version";
    // DDL for block headers, undoable (reorg) data and unspent outputs
    private static final String CREATE_HEADERS_TABLE = "CREATE TABLE headers (" +
            "    hash bytea NOT NULL," +
            "    chainwork bytea NOT NULL," +
            "    height integer NOT NULL," +
            "    header bytea NOT NULL," +
            "    wasundoable boolean NOT NULL" +
            ");";
    private static final String CREATE_UNDOABLE_TABLE = "CREATE TABLE undoableblocks (" +
            "    hash bytea NOT NULL," +
            "    height integer NOT NULL," +
            "    txoutchanges bytea," +
            "    transactions bytea" +
            ");";
    private static final String CREATE_OPEN_OUTPUT_TABLE = "CREATE TABLE openoutputs (" +
            "    hash bytea NOT NULL," +
            "    index integer NOT NULL," +
            "    height integer NOT NULL," +
            "    value bytea NOT NULL," +
            "    scriptbytes bytea NOT NULL," +
            "    toaddress character varying(35)," +
            "    addresstargetable integer" +
            ");";
    private static final String CREATE_UNDOABLE_TABLE_INDEX = "CREATE INDEX heightIndex ON undoableBlocks (height)";
    // Some indexes to speed up inserts
    private static final String CREATE_HEADERS_HASH_INDEX = "CREATE INDEX headershashindex ON headers USING btree (hash);";
    private static final String CREATE_OUTPUTS_ADDRESS_INDEX = "CREATE INDEX idx_address ON openoutputs USING btree (hash, index, height, toaddress);";
    private static final String CREATE_OUTPUT_ADDRESS_TYPE_INDEX = "CREATE INDEX idx_addresstargetable ON openoutputs USING btree (addresstargetable);";
    private static final String CREATE_OUTPUTS_HASH_INDEX = "CREATE INDEX openoutputshash ON openoutputs USING btree (hash);";
    private static final String CREATE_OUTPUTS_HASH_INDEX_INDEX = "CREATE INDEX openoutputshashindex ON openoutputs USING btree (hash, index);";
    private static final String CREATE_UNDOABLE_HASH_INDEX = "CREATE INDEX undoableblockshashindex ON undoableblocks USING btree (hash);";
/**
* Creates a new PostgresFullPrunedBlockStore.
*
* @param params A copy of the NetworkParameters used
* @param fullStoreDepth The number of blocks of history stored in full (something like 1000 is pretty safe)
* @param hostname The hostname of the database to connect to
* @param dbName The database to connect to
* @param username The database username
* @param password The password to the database
* @throws BlockStoreException if the database fails to open for any reason
*/
public PostgresFullPrunedBlockStore(NetworkParameters params, int fullStoreDepth, String hostname, String dbName,
String username, String password) throws BlockStoreException {
this.params = params;
this.fullStoreDepth = fullStoreDepth;
connectionURL = "jdbc:postgresql://" + hostname + "/" + dbName;
this.username = username;
this.password = password;
conn = new ThreadLocal<Connection>();
allConnections = new LinkedList<Connection>();
try {
Class.forName(driver);
log.info(driver + " loaded. ");
} catch (java.lang.ClassNotFoundException e) {
log.error("check CLASSPATH for Postgres jar ", e);
}
maybeConnect();
try {
// Create tables if needed
if (!tableExists("settings"))
createTables();
initFromDatabase();
} catch (SQLException e) {
throw new BlockStoreException(e);
}
}
private synchronized void maybeConnect() throws BlockStoreException {
try {
if (conn.get() != null)
return;
Properties props = new Properties();
props.setProperty("user", this.username);
props.setProperty("password", this.password);
conn.set(DriverManager.getConnection(connectionURL, props));
Connection connection = conn.get();
allConnections.add(conn.get());
log.info("Made a new connection to database " + connectionURL);
} catch (SQLException ex) {
throw new BlockStoreException(ex);
}
}
public synchronized void close() {
for (Connection conn : allConnections) {
try {
conn.rollback();
} catch (SQLException ex) {
throw new RuntimeException(ex);
}
}
allConnections.clear();
}
public void resetStore() throws BlockStoreException {
maybeConnect();
try {
Statement s = conn.get().createStatement();
s.execute("DROP TABLE settings");
s.execute("DROP TABLE headers");
s.execute("DROP TABLE undoableBlocks");
s.execute("DROP TABLE openOutputs");
s.close();
createTables();
initFromDatabase();
} catch (SQLException ex) {
throw new RuntimeException(ex);
}
}
    /**
     * Creates the schema: tables, indexes and the initial settings rows,
     * then stores the genesis block via {@code createNewStore}.
     * NOTE(review): the Statement is not closed if a DDL statement
     * throws; callers treat any SQLException as fatal store corruption.
     * @throws SQLException if any DDL statement fails
     * @throws BlockStoreException if the genesis block cannot be stored
     */
    private void createTables() throws SQLException, BlockStoreException {
        Statement s = conn.get().createStatement();
        if (log.isDebugEnabled())
            log.debug("PostgresFullPrunedBlockStore : CREATE headers table");
        s.executeUpdate(CREATE_HEADERS_TABLE);
        if (log.isDebugEnabled())
            log.debug("PostgresFullPrunedBlockStore : CREATE settings table");
        s.executeUpdate(CREATE_SETTINGS_TABLE);
        if (log.isDebugEnabled())
            log.debug("PostgresFullPrunedBlockStore : CREATE undoable block table");
        s.executeUpdate(CREATE_UNDOABLE_TABLE);
        if (log.isDebugEnabled())
            log.debug("PostgresFullPrunedBlockStore : CREATE undoable block index");
        s.executeUpdate(CREATE_UNDOABLE_TABLE_INDEX);
        if (log.isDebugEnabled())
            log.debug("PostgresFullPrunedBlockStore : CREATE open output table");
        s.executeUpdate(CREATE_OPEN_OUTPUT_TABLE);
        // Create indexes..
        s.executeUpdate(CREATE_HEADERS_HASH_INDEX);
        s.executeUpdate(CREATE_OUTPUT_ADDRESS_TYPE_INDEX);
        s.executeUpdate(CREATE_OUTPUTS_ADDRESS_INDEX);
        s.executeUpdate(CREATE_OUTPUTS_HASH_INDEX);
        s.executeUpdate(CREATE_OUTPUTS_HASH_INDEX_INDEX);
        s.executeUpdate(CREATE_UNDOABLE_HASH_INDEX);
        // seed the settings rows; the chain head pointers are filled in
        // by createNewStore() below
        s.executeUpdate("INSERT INTO settings(name, value) VALUES('" + CHAIN_HEAD_SETTING + "', NULL)");
        s.executeUpdate("INSERT INTO settings(name, value) VALUES('" + VERIFIED_CHAIN_HEAD_SETTING + "', NULL)");
        s.executeUpdate("INSERT INTO settings(name, value) VALUES('" + VERSION_SETTING + "', '03')");
        s.close();
        createNewStore(params);
    }
    /**
     * Loads the chain head and verified chain head pointers from the
     * settings table into the in-memory caches.
     * NOTE(review): the Statement/ResultSet are not closed if an
     * exception is thrown mid-way; callers treat any failure here as
     * fatal store corruption.
     * @throws SQLException if the settings cannot be queried
     * @throws BlockStoreException if either pointer is missing or does
     *         not resolve to a stored block
     */
    private void initFromDatabase() throws SQLException, BlockStoreException {
        Statement s = conn.get().createStatement();
        ResultSet rs;
        rs = s.executeQuery("SELECT value FROM settings WHERE name = '" + CHAIN_HEAD_SETTING + "'");
        if (!rs.next()) {
            throw new BlockStoreException("corrupt Postgres block store - no chain head pointer");
        }
        Sha256Hash hash = new Sha256Hash(rs.getBytes(1));
        rs.close();
        this.chainHeadBlock = get(hash);
        this.chainHeadHash = hash;
        if (this.chainHeadBlock == null) {
            throw new BlockStoreException("corrupt Postgres block store - head block not found");
        }
        rs = s.executeQuery("SELECT value FROM settings WHERE name = '" + VERIFIED_CHAIN_HEAD_SETTING + "'");
        if (!rs.next()) {
            throw new BlockStoreException("corrupt Postgres block store - no verified chain head pointer");
        }
        hash = new Sha256Hash(rs.getBytes(1));
        rs.close();
        s.close();
        this.verifiedChainHeadBlock = get(hash);
        this.verifiedChainHeadHash = hash;
        if (this.verifiedChainHeadBlock == null) {
            throw new BlockStoreException("corrupt Postgres block store - verified head block not found");
        }
    }
private void createNewStore(NetworkParameters params) throws BlockStoreException {
try {
// Set up the genesis block. When we start out fresh, it is by
// definition the top of the chain.
StoredBlock storedGenesisHeader = new StoredBlock(params.getGenesisBlock().cloneAsHeader(), params.getGenesisBlock().getWork(), 0);
// The coinbase in the genesis block is not spendable. This is because of how the reference client inits
// its database - the genesis transaction isn't actually in the db so its spent flags can never be updated.
List<Transaction> genesisTransactions = Lists.newLinkedList();
StoredUndoableBlock storedGenesis = new StoredUndoableBlock(params.getGenesisBlock().getHash(), genesisTransactions);
put(storedGenesisHeader, storedGenesis);
setChainHead(storedGenesisHeader);
setVerifiedChainHead(storedGenesisHeader);
} catch (VerificationException e) {
throw new RuntimeException(e); // Cannot happen.
}
}
private boolean tableExists(String table) throws SQLException {
Statement s = conn.get().createStatement();
try {
ResultSet results = s.executeQuery("SELECT * FROM " + table + " WHERE 1 = 2");
results.close();
return true;
} catch (SQLException ex) {
return false;
} finally {
s.close();
}
}
/**
* Dumps information about the size of actual data in the database to standard output
* The only truly useless data counted is printed in the form "N in id indexes"
* This does not take database indexes into account
*/
public void dumpSizes() throws SQLException, BlockStoreException {
maybeConnect();
Statement s = conn.get().createStatement();
long size = 0;
long totalSize = 0;
int count = 0;
ResultSet rs = s.executeQuery("SELECT name, value FROM settings");
while (rs.next()) {
size += rs.getString(1).length();
size += rs.getBytes(2).length;
count++;
}
rs.close();
System.out.printf("Settings size: %d, count: %d, average size: %f%n", size, count, (double)size/count);
totalSize += size; size = 0; count = 0;
rs = s.executeQuery("SELECT chainWork, header FROM headers");
while (rs.next()) {
size += 28; // hash
size += rs.getBytes(1).length;
size += 4; // height
size += rs.getBytes(2).length;
count++;
}
rs.close();
System.out.printf("Headers size: %d, count: %d, average size: %f%n", size, count, (double)size/count);
totalSize += size; size = 0; count = 0;
rs = s.executeQuery("SELECT txOutChanges, transactions FROM undoableBlocks");
while (rs.next()) {
size += 28; // hash
size += 4; // height
byte[] txOutChanges = rs.getBytes(1);
byte[] transactions = rs.getBytes(2);
if (txOutChanges == null)
size += transactions.length;
else
size += txOutChanges.length;
// size += the space to represent NULL
count++;
}
rs.close();
System.out.printf("Undoable Blocks size: %d, count: %d, average size: %f%n", size, count, (double)size/count);
totalSize += size; size = 0; count = 0;
long scriptSize = 0;
rs = s.executeQuery("SELECT value, scriptBytes FROM openOutputs");
while (rs.next()) {
size += 32; // hash
size += 4; // index
size += 4; // height
size += rs.getBytes(1).length;
size += rs.getBytes(2).length;
scriptSize += rs.getBytes(2).length;
count++;
}
rs.close();
System.out.printf("Open Outputs size: %d, count: %d, average size: %f, average script size: %f (%d in id indexes)%n",
size, count, (double)size/count, (double)scriptSize/count, count * 8);
totalSize += size;
System.out.println("Total Size: " + totalSize);
s.close();
}
    /**
     * Inserts the given block into the headers table, or — if it already exists and this
     * call marks it undoable — updates the existing row's wasUndoable flag instead.
     * The duplicate-key SQLSTATE is used to detect the "already present" case.
     */
    private void putUpdateStoredBlock(StoredBlock storedBlock, boolean wasUndoable) throws SQLException {
        try {
            PreparedStatement s =
                conn.get().prepareStatement("INSERT INTO headers(hash, chainWork, height, header, wasUndoable)"
                    + " VALUES(?, ?, ?, ?, ?)");
            // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
            // NOTE(review): the copy actually starts at offset 3 and takes 28 of the 32 hash
            // bytes (dropping the last byte too), which disagrees with the comment above.
            // Changing it would break existing databases — verify against the schema/history.
            byte[] hashBytes = new byte[28];
            System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
            s.setBytes(1, hashBytes);
            s.setBytes(2, storedBlock.getChainWork().toByteArray());
            s.setInt(3, storedBlock.getHeight());
            s.setBytes(4, storedBlock.getHeader().unsafeKobocoinSerialize());
            s.setBoolean(5, wasUndoable);
            s.executeUpdate();
            s.close();
        } catch (SQLException e) {
            // It is possible we try to add a duplicate StoredBlock if we upgraded
            // In that case, we just update the entry to mark it wasUndoable
            if (!(e.getSQLState().equals(POSTGRES_DUPLICATE_KEY_ERROR_CODE)) || !wasUndoable)
                throw e;
            // NOTE(review): if executeUpdate below throws, this statement is leaked
            // (no finally) — consider closing in a finally block.
            PreparedStatement s = conn.get().prepareStatement("UPDATE headers SET wasUndoable=? WHERE hash=?");
            s.setBoolean(1, true);
            // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
            byte[] hashBytes = new byte[28];
            System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
            s.setBytes(2, hashBytes);
            s.executeUpdate();
            s.close();
        }
    }
    /**
     * Stores a block header without undo data.
     *
     * @throws BlockStoreException wrapping any underlying SQL failure
     */
    public void put(StoredBlock storedBlock) throws BlockStoreException {
        maybeConnect();
        try {
            putUpdateStoredBlock(storedBlock, false);
        } catch (SQLException e) {
            throw new BlockStoreException(e);
        }
    }
public void put(StoredBlock storedBlock, StoredUndoableBlock undoableBlock) throws BlockStoreException {
maybeConnect();
// We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
byte[] hashBytes = new byte[28];
System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
int height = storedBlock.getHeight();
byte[] transactions = null;
byte[] txOutChanges = null;
try {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
if (undoableBlock.getTxOutChanges() != null) {
undoableBlock.getTxOutChanges().serializeToStream(bos);
txOutChanges = bos.toByteArray();
} else {
int numTxn = undoableBlock.getTransactions().size();
bos.write((int) (0xFF & (numTxn >> 0)));
bos.write((int) (0xFF & (numTxn >> 8)));
bos.write((int) (0xFF & (numTxn >> 16)));
bos.write((int) (0xFF & (numTxn >> 24)));
for (Transaction tx : undoableBlock.getTransactions())
tx.kobocoinSerialize(bos);
transactions = bos.toByteArray();
}
bos.close();
} catch (IOException e) {
throw new BlockStoreException(e);
}
try {
if (log.isDebugEnabled())
log.debug("Looking for undoable block with hash: " + Utils.bytesToHexString(hashBytes));
PreparedStatement findS = conn.get().prepareStatement("select 1 from undoableBlocks where hash = ?");
findS.setBytes(1, hashBytes);
ResultSet rs = findS.executeQuery();
if (rs.next())
{
// We already have this output, update it.
findS.close();
// Postgres insert-or-updates are very complex (and finnicky). This level of transaction isolation
// seems to work for kobocoinj
PreparedStatement s =
conn.get().prepareStatement("UPDATE undoableBlocks SET txOutChanges=?, transactions=?"
+ " WHERE hash = ?");
s.setBytes(3, hashBytes);
if (log.isDebugEnabled())
log.debug("Updating undoable block with hash: " + Utils.bytesToHexString(hashBytes));
if (transactions == null) {
s.setBytes(1, txOutChanges);
s.setNull(2, Types.BINARY);
} else {
s.setNull(1, Types.BINARY);
s.setBytes(2, transactions);
}
s.executeUpdate();
s.close();
return;
}
PreparedStatement s =
conn.get().prepareStatement("INSERT INTO undoableBlocks(hash, height, txOutChanges, transactions)"
+ " VALUES(?, ?, ?, ?)");
s.setBytes(1, hashBytes);
s.setInt(2, height);
if (log.isDebugEnabled())
log.debug("Inserting undoable block with hash: " + Utils.bytesToHexString(hashBytes) + " at height " + height);
if (transactions == null) {
s.setBytes(3, txOutChanges);
s.setNull(4, Types.BINARY);
} else {
s.setNull(3, Types.BINARY);
s.setBytes(4, transactions);
}
s.executeUpdate();
s.close();
try {
putUpdateStoredBlock(storedBlock, true);
} catch (SQLException e) {
throw new BlockStoreException(e);
}
} catch (SQLException e) {
if (!e.getSQLState().equals(POSTGRES_DUPLICATE_KEY_ERROR_CODE))
throw new BlockStoreException(e);
}
}
    /**
     * Looks up a stored block header by hash. The two cached chain heads are checked
     * first to avoid a database round trip. Returns null if the block is unknown, or if
     * {@code wasUndoableOnly} is set and the block was never stored with undo data.
     */
    public StoredBlock get(Sha256Hash hash, boolean wasUndoableOnly) throws BlockStoreException {
        // Optimize for chain head
        if (chainHeadHash != null && chainHeadHash.equals(hash))
            return chainHeadBlock;
        if (verifiedChainHeadHash != null && verifiedChainHeadHash.equals(hash))
            return verifiedChainHeadBlock;
        maybeConnect();
        PreparedStatement s = null;
        try {
            s = conn.get()
                .prepareStatement("SELECT chainWork, height, header, wasUndoable FROM headers WHERE hash = ?");
            // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
            // NOTE(review): the copy starts at offset 3, not 4 — must match the truncation
            // used when rows were written (see putUpdateStoredBlock).
            byte[] hashBytes = new byte[28];
            System.arraycopy(hash.getBytes(), 3, hashBytes, 0, 28);
            s.setBytes(1, hashBytes);
            ResultSet results = s.executeQuery();
            if (!results.next()) {
                return null;
            }
            // Parse it.
            if (wasUndoableOnly && !results.getBoolean(4))
                return null;
            BigInteger chainWork = new BigInteger(results.getBytes(1));
            int height = results.getInt(2);
            Block b = new Block(params, results.getBytes(3));
            b.verifyHeader();
            StoredBlock stored = new StoredBlock(b, chainWork, height);
            return stored;
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        } catch (ProtocolException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } catch (VerificationException e) {
            // Should not be able to happen unless the database contains bad
            // blocks.
            throw new BlockStoreException(e);
        } finally {
            // Closing the statement also closes its ResultSet.
            if (s != null)
                try {
                    s.close();
                } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
        }
    }
    /** Looks up any stored block header by hash, or null if unknown. */
    public StoredBlock get(Sha256Hash hash) throws BlockStoreException {
        return get(hash, false);
    }
    /** Looks up a header only if it was (at some point) stored with undo data. */
    public StoredBlock getOnceUndoableStoredBlock(Sha256Hash hash) throws BlockStoreException {
        return get(hash, true);
    }
    /**
     * Loads the undo data for the given block hash, or null if none is stored.
     * A row holds either serialized TransactionOutputChanges or the raw transaction list
     * (little-endian 4-byte count followed by each transaction's wire encoding) — the
     * inverse of the format written by {@link #put(StoredBlock, StoredUndoableBlock)}.
     */
    public StoredUndoableBlock getUndoBlock(Sha256Hash hash) throws BlockStoreException {
        maybeConnect();
        PreparedStatement s = null;
        try {
            s = conn.get()
                .prepareStatement("SELECT txOutChanges, transactions FROM undoableBlocks WHERE hash = ?");
            // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
            // NOTE(review): the copy starts at offset 3, not 4 — must match the write side.
            byte[] hashBytes = new byte[28];
            System.arraycopy(hash.getBytes(), 3, hashBytes, 0, 28);
            s.setBytes(1, hashBytes);
            ResultSet results = s.executeQuery();
            if (!results.next()) {
                return null;
            }
            // Parse it.
            byte[] txOutChanges = results.getBytes(1);
            byte[] transactions = results.getBytes(2);
            StoredUndoableBlock block;
            if (txOutChanges == null) {
                // Decode the little-endian transaction count, then each transaction in turn.
                int offset = 0;
                int numTxn = ((transactions[offset++] & 0xFF) << 0) |
                            ((transactions[offset++] & 0xFF) << 8) |
                            ((transactions[offset++] & 0xFF) << 16) |
                            ((transactions[offset++] & 0xFF) << 24);
                List<Transaction> transactionList = new LinkedList<Transaction>();
                for (int i = 0; i < numTxn; i++) {
                    Transaction tx = new Transaction(params, transactions, offset);
                    transactionList.add(tx);
                    offset += tx.getMessageSize();
                }
                block = new StoredUndoableBlock(hash, transactionList);
            } else {
                TransactionOutputChanges outChangesObject =
                    new TransactionOutputChanges(new ByteArrayInputStream(txOutChanges));
                block = new StoredUndoableBlock(hash, outChangesObject);
            }
            return block;
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        } catch (NullPointerException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } catch (ClassCastException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } catch (ProtocolException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } catch (IOException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } finally {
            if (s != null)
                try {
                    s.close();
                } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
        }
    }
    /** Returns the cached chain head (populated at startup and by setChainHead). */
    public StoredBlock getChainHead() throws BlockStoreException {
        return chainHeadBlock;
    }
public void setChainHead(StoredBlock chainHead) throws BlockStoreException {
Sha256Hash hash = chainHead.getHeader().getHash();
this.chainHeadHash = hash;
this.chainHeadBlock = chainHead;
maybeConnect();
try {
PreparedStatement s = conn.get()
.prepareStatement("UPDATE settings SET value = ? WHERE name = ?");
s.setString(2, CHAIN_HEAD_SETTING);
s.setBytes(1, hash.getBytes());
s.executeUpdate();
s.close();
} catch (SQLException ex) {
throw new BlockStoreException(ex);
}
}
    /** Returns the cached verified (fully-connected) chain head. */
    public StoredBlock getVerifiedChainHead() throws BlockStoreException {
        return verifiedChainHeadBlock;
    }
public void setVerifiedChainHead(StoredBlock chainHead) throws BlockStoreException {
Sha256Hash hash = chainHead.getHeader().getHash();
this.verifiedChainHeadHash = hash;
this.verifiedChainHeadBlock = chainHead;
maybeConnect();
try {
PreparedStatement s = conn.get()
.prepareStatement("UPDATE settings SET value = ? WHERE name = ?");
s.setString(2, VERIFIED_CHAIN_HEAD_SETTING);
s.setBytes(1, hash.getBytes());
s.executeUpdate();
s.close();
} catch (SQLException ex) {
throw new BlockStoreException(ex);
}
if (this.chainHeadBlock.getHeight() < chainHead.getHeight())
setChainHead(chainHead);
removeUndoableBlocksWhereHeightIsLessThan(chainHead.getHeight() - fullStoreDepth);
}
private void removeUndoableBlocksWhereHeightIsLessThan(int height) throws BlockStoreException {
try {
PreparedStatement s = conn.get()
.prepareStatement("DELETE FROM undoableBlocks WHERE height <= ?");
s.setInt(1, height);
if (log.isDebugEnabled())
log.debug("Deleting undoable undoable block with height <= " + height);
s.executeUpdate();
s.close();
} catch (SQLException ex) {
throw new BlockStoreException(ex);
}
}
    /**
     * Fetches a single unspent output by transaction hash and output index, or null if it
     * is not in the open-outputs set.
     */
    public StoredTransactionOutput getTransactionOutput(Sha256Hash hash, long index) throws BlockStoreException {
        maybeConnect();
        PreparedStatement s = null;
        try {
            s = conn.get()
                .prepareStatement("SELECT height, value, scriptBytes FROM openOutputs " +
                        "WHERE hash = ? AND index = ?");
            s.setBytes(1, hash.getBytes());
            // index is actually an unsigned int
            s.setInt(2, (int)index);
            ResultSet results = s.executeQuery();
            if (!results.next()) {
                return null;
            }
            // Parse it.
            int height = results.getInt(1);
            BigInteger value = new BigInteger(results.getBytes(2));
            // Tell the StoredTransactionOutput that we are a coinbase, as that is encoded in height
            // NOTE(review): the coinbase flag is hardcoded to true for every output here —
            // confirm the height-encoding convention makes this safe.
            StoredTransactionOutput txout = new StoredTransactionOutput(hash, index, value, height, true, results.getBytes(3));
            return txout;
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        } finally {
            if (s != null)
                try {
                    s.close();
                } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
        }
    }
public void addUnspentTransactionOutput(StoredTransactionOutput out) throws BlockStoreException {
maybeConnect();
PreparedStatement s = null;
// Calculate the toAddress (if any)
String dbAddress = "";
int type = 0;
Script outputScript = null;
try
{
outputScript = new Script(out.getScriptBytes());
}
catch (ScriptException e)
{
// Unparseable, but this isn't an error - it's an output not containing an address
log.info("Could not parse script for output: " + out.getHash().toString());
}
if (outputScript != null && (outputScript.isSentToAddress()
|| outputScript.isSentToRawPubKey()
|| outputScript.isPayToScriptHash()))
{
if (outputScript.isSentToAddress())
{
Address targetAddr = new Address(params, outputScript.getPubKeyHash());
dbAddress = targetAddr.toString();
type = 1;
}
else if (outputScript.isSentToRawPubKey())
{
/*
* Note we use the deprecated getFromAddress here. Coinbase outputs seem to have the target address
* in the pubkey of the script - perhaps we can rename this function?
*/
dbAddress = outputScript.getFromAddress(params).toString();
type = 2;
} else if (outputScript.isPayToScriptHash())
{
dbAddress = Address.fromP2SHHash(params, outputScript.getPubKeyHash()).toString();
type = 3;
}
}
try {
s = conn.get().prepareStatement("INSERT INTO openOutputs (hash, index, height, value, scriptBytes, toAddress, addressTargetable) " +
"VALUES (?, ?, ?, ?, ?, ?, ?)");
s.setBytes(1, out.getHash().getBytes());
// index is actually an unsigned int
s.setInt(2, (int)out.getIndex());
s.setInt(3, out.getHeight());
s.setBytes(4, out.getValue().toByteArray());
s.setBytes(5, out.getScriptBytes());
s.setString(6, dbAddress);
s.setInt(7, type);
s.executeUpdate();
s.close();
} catch (SQLException e) {
if (!(e.getSQLState().equals(POSTGRES_DUPLICATE_KEY_ERROR_CODE)))
throw new BlockStoreException(e);
} finally {
if (s != null)
try {
s.close();
} catch (SQLException e) { throw new BlockStoreException(e); }
}
}
public void removeUnspentTransactionOutput(StoredTransactionOutput out) throws BlockStoreException {
maybeConnect();
// TODO: This should only need one query (maybe a stored procedure)
if (getTransactionOutput(out.getHash(), out.getIndex()) == null)
throw new BlockStoreException("Tried to remove a StoredTransactionOutput from PostgresFullPrunedBlockStore that it didn't have!");
try {
PreparedStatement s = conn.get()
.prepareStatement("DELETE FROM openOutputs WHERE hash = ? AND index = ?");
s.setBytes(1, out.getHash().getBytes());
// index is actually an unsigned int
s.setInt(2, (int)out.getIndex());
s.executeUpdate();
s.close();
} catch (SQLException e) {
throw new BlockStoreException(e);
}
}
    /** Begins a batched write by disabling auto-commit on this thread's connection. */
    public void beginDatabaseBatchWrite() throws BlockStoreException {
        maybeConnect();
        if (log.isDebugEnabled())
            log.debug("Starting database batch write with connection: " + conn.get().toString());
        try {
            conn.get().setAutoCommit(false);
        } catch (SQLException e) {
            throw new BlockStoreException(e);
        }
    }
    /**
     * Commits the current batch and re-enables auto-commit.
     * NOTE(review): if commit() throws, auto-commit is left disabled on this connection —
     * callers are expected to invoke abortDatabaseBatchWrite; verify that they do.
     */
    public void commitDatabaseBatchWrite() throws BlockStoreException {
        maybeConnect();
        if (log.isDebugEnabled())
            log.debug("Committing database batch write with connection: " + conn.get().toString());
        try {
            conn.get().commit();
            conn.get().setAutoCommit(true);
        } catch (SQLException e) {
            throw new BlockStoreException(e);
        }
    }
    /**
     * Rolls back the current batch (if one is open) and re-enables auto-commit; logs a
     * warning instead of failing when no batch is in progress.
     */
    public void abortDatabaseBatchWrite() throws BlockStoreException {
        maybeConnect();
        if (log.isDebugEnabled())
            log.debug("Rollback database batch write with connection: " + conn.get().toString());
        try {
            if (!conn.get().getAutoCommit()) {
                conn.get().rollback();
                conn.get().setAutoCommit(true);
            } else {
                log.warn("Warning: Rollback attempt without transaction");
            }
        } catch (SQLException e) {
            throw new BlockStoreException(e);
        }
    }
public boolean hasUnspentOutputs(Sha256Hash hash, int numOutputs) throws BlockStoreException {
maybeConnect();
PreparedStatement s = null;
try {
s = conn.get()
.prepareStatement("SELECT COUNT(*) FROM openOutputs WHERE hash = ?");
s.setBytes(1, hash.getBytes());
ResultSet results = s.executeQuery();
if (!results.next()) {
throw new BlockStoreException("Got no results from a COUNT(*) query");
}
int count = results.getInt(1);
return count != 0;
} catch (SQLException ex) {
throw new BlockStoreException(ex);
} finally {
if (s != null)
try {
s.close();
} catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
}
}
    /**
     * Calculate the balance for a coinbase, to-address, or p2sh address.
     * @param address The address to calculate the balance of
     * @return The balance of the address supplied. If the address has not been seen, or there are no outputs open for this
     * address, the return value is 0
     * @throws BlockStoreException
     */
    public BigInteger calculateBalanceForAddress(Address address) throws BlockStoreException {
        maybeConnect();
        PreparedStatement s = null;
        try {
            // The value column is a bytea; value::text renders it as "\x<hex>", so
            // substr(...,3,50) strips the "\x" prefix, lpad pads the hex to 16 digits and
            // the bit(64)::bigint cast turns it into a 64-bit integer to be summed.
            // NOTE(review): values wider than 8 bytes would be truncated by this trick —
            // presumably output values always fit in 64 bits; verify.
            s = conn.get().prepareStatement("select sum(('x'||lpad(substr(value::text, 3, 50),16,'0'))::bit(64)::bigint) "
                    + "from openoutputs where toaddress = ?");
            s.setString(1, address.toString());
            ResultSet rs = s.executeQuery();
            if (rs.next()) {
                return BigInteger.valueOf(rs.getLong(1));
            } else {
                throw new BlockStoreException("Failed to execute balance lookup");
            }
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        } finally {
            if (s != null)
                try {
                    s.close();
                } catch (SQLException e) {
                    throw new BlockStoreException("Could not close statement");
                }
        }
    }
}
| |
/*
* Copyright 2014-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.cluster.messaging.impl;
import com.google.common.base.Throwables;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.Service;
import org.onlab.util.Tools;
import org.onosproject.cluster.ClusterService;
import org.onosproject.cluster.ControllerNode;
import org.onosproject.cluster.NodeId;
import org.onosproject.store.cluster.messaging.ClusterCommunicationService;
import org.onosproject.store.cluster.messaging.ClusterMessage;
import org.onosproject.store.cluster.messaging.ClusterMessageHandler;
import org.onosproject.store.cluster.messaging.Endpoint;
import org.onosproject.store.cluster.messaging.MessageSubject;
import org.onosproject.store.cluster.messaging.MessagingService;
import org.onosproject.utils.MeteringAgent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Objects;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.onosproject.security.AppGuard.checkPermission;
import static org.onosproject.security.AppPermission.Type.CLUSTER_WRITE;
/**
 * Default {@link ClusterCommunicationService}: layers cluster-node addressing,
 * {@link ClusterMessage} envelopes and latency metering (per message subject and per
 * remote endpoint) on top of the low-level {@link MessagingService}.
 */
@Component(immediate = true)
@Service
public class ClusterCommunicationManager
        implements ClusterCommunicationService {
    private final Logger log = LoggerFactory.getLogger(getClass());
    // Meters (de)serialization and message latencies, keyed per subject and per endpoint.
    private final MeteringAgent subjectMeteringAgent = new MeteringAgent(PRIMITIVE_NAME, SUBJECT_PREFIX, true);
    private final MeteringAgent endpointMeteringAgent = new MeteringAgent(PRIMITIVE_NAME, ENDPOINT_PREFIX, true);
    // Components used to build metric names.
    private static final String PRIMITIVE_NAME = "clusterCommunication";
    private static final String SUBJECT_PREFIX = "subject";
    private static final String ENDPOINT_PREFIX = "endpoint";
    private static final String SERIALIZING = "serialization";
    private static final String DESERIALIZING = "deserialization";
    private static final String NODE_PREFIX = "node:";
    private static final String ROUND_TRIP_SUFFIX = ".rtt";
    private static final String ONE_WAY_SUFFIX = ".oneway";
    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    private ClusterService clusterService;
    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected MessagingService messagingService;
    // Cached at activation; stamped as the sender on all outgoing envelopes.
    private NodeId localNodeId;
    @Activate
    public void activate() {
        localNodeId = clusterService.getLocalNode().id();
        log.info("Started");
    }
    @Deactivate
    public void deactivate() {
        log.info("Stopped");
    }
    // Sends to every cluster node except the local one.
    @Override
    public <M> void broadcast(M message,
                              MessageSubject subject,
                              Function<M, byte[]> encoder) {
        checkPermission(CLUSTER_WRITE);
        multicast(message,
                subject,
                encoder,
                clusterService.getNodes()
                        .stream()
                        .filter(node -> !Objects.equal(node, clusterService.getLocalNode()))
                        .map(ControllerNode::id)
                        .collect(Collectors.toSet()));
    }
    // Sends to every cluster node including the local one.
    @Override
    public <M> void broadcastIncludeSelf(M message,
                                         MessageSubject subject,
                                         Function<M, byte[]> encoder) {
        checkPermission(CLUSTER_WRITE);
        multicast(message,
                subject,
                encoder,
                clusterService.getNodes()
                        .stream()
                        .map(ControllerNode::id)
                        .collect(Collectors.toSet()));
    }
    // Fire-and-forget send to a single node; serialization is metered per subject.
    @Override
    public <M> CompletableFuture<Void> unicast(M message,
                                               MessageSubject subject,
                                               Function<M, byte[]> encoder,
                                               NodeId toNodeId) {
        checkPermission(CLUSTER_WRITE);
        try {
            byte[] payload = new ClusterMessage(
                    localNodeId,
                    subject,
                    timeFunction(encoder, subjectMeteringAgent, SERIALIZING).apply(message)
                    ).getBytes();
            return doUnicast(subject, payload, toNodeId);
        } catch (Exception e) {
            // Serialization failures surface as a failed future rather than a throw.
            return Tools.exceptionalFuture(e);
        }
    }
    // Serializes once and unicasts the same payload to each target node.
    @Override
    public <M> void multicast(M message,
                              MessageSubject subject,
                              Function<M, byte[]> encoder,
                              Set<NodeId> nodes) {
        checkPermission(CLUSTER_WRITE);
        byte[] payload = new ClusterMessage(
                localNodeId,
                subject,
                timeFunction(encoder, subjectMeteringAgent, SERIALIZING).apply(message))
                .getBytes();
        nodes.forEach(nodeId -> doUnicast(subject, payload, nodeId));
    }
    // Request/response exchange; both directions are metered per subject.
    @Override
    public <M, R> CompletableFuture<R> sendAndReceive(M message,
                                                      MessageSubject subject,
                                                      Function<M, byte[]> encoder,
                                                      Function<byte[], R> decoder,
                                                      NodeId toNodeId) {
        checkPermission(CLUSTER_WRITE);
        try {
            ClusterMessage envelope = new ClusterMessage(
                    clusterService.getLocalNode().id(),
                    subject,
                    timeFunction(encoder, subjectMeteringAgent, SERIALIZING).
                            apply(message));
            return sendAndReceive(subject, envelope.getBytes(), toNodeId).
                    thenApply(bytes -> timeFunction(decoder, subjectMeteringAgent, DESERIALIZING).apply(bytes));
        } catch (Exception e) {
            return Tools.exceptionalFuture(e);
        }
    }
    // Resolves the node to a TCP endpoint and sends async, timing the one-way latency.
    private CompletableFuture<Void> doUnicast(MessageSubject subject, byte[] payload, NodeId toNodeId) {
        ControllerNode node = clusterService.getNode(toNodeId);
        checkArgument(node != null, "Unknown nodeId: %s", toNodeId);
        Endpoint nodeEp = new Endpoint(node.ip(), node.tcpPort());
        MeteringAgent.Context context = subjectMeteringAgent.startTimer(subject.toString() + ONE_WAY_SUFFIX);
        return messagingService.sendAsync(nodeEp, subject.value(), payload).whenComplete((r, e) -> context.stop(e));
    }
    // Round-trip send; latency is recorded both per subject and per remote node.
    private CompletableFuture<byte[]> sendAndReceive(MessageSubject subject, byte[] payload, NodeId toNodeId) {
        ControllerNode node = clusterService.getNode(toNodeId);
        checkArgument(node != null, "Unknown nodeId: %s", toNodeId);
        Endpoint nodeEp = new Endpoint(node.ip(), node.tcpPort());
        MeteringAgent.Context epContext = endpointMeteringAgent.
                startTimer(NODE_PREFIX + toNodeId.toString() + ROUND_TRIP_SUFFIX);
        MeteringAgent.Context subjectContext = subjectMeteringAgent.
                startTimer(subject.toString() + ROUND_TRIP_SUFFIX);
        return messagingService.sendAndReceive(nodeEp, subject.value(), payload).
                whenComplete((bytes, throwable) -> {
                    subjectContext.stop(throwable);
                    epContext.stop(throwable);
                });
    }
    @Override
    public void addSubscriber(MessageSubject subject,
                              ClusterMessageHandler subscriber,
                              ExecutorService executor) {
        checkPermission(CLUSTER_WRITE);
        messagingService.registerHandler(subject.value(),
                new InternalClusterMessageHandler(subscriber),
                executor);
    }
    @Override
    public void removeSubscriber(MessageSubject subject) {
        checkPermission(CLUSTER_WRITE);
        messagingService.unregisterHandler(subject.value());
    }
    // Synchronous handler variant: the user function runs on the supplied executor and
    // its result is completed into a future consumed by the responder.
    @Override
    public <M, R> void addSubscriber(MessageSubject subject,
                                     Function<byte[], M> decoder,
                                     Function<M, R> handler,
                                     Function<R, byte[]> encoder,
                                     Executor executor) {
        checkPermission(CLUSTER_WRITE);
        messagingService.registerHandler(subject.value(),
                new InternalMessageResponder<M, R>(decoder, encoder, m -> {
                    CompletableFuture<R> responseFuture = new CompletableFuture<>();
                    executor.execute(() -> {
                        try {
                            responseFuture.complete(handler.apply(m));
                        } catch (Exception e) {
                            responseFuture.completeExceptionally(e);
                        }
                    });
                    return responseFuture;
                }));
    }
    // Asynchronous handler variant: the handler itself returns the response future.
    @Override
    public <M, R> void addSubscriber(MessageSubject subject,
                                     Function<byte[], M> decoder,
                                     Function<M, CompletableFuture<R>> handler,
                                     Function<R, byte[]> encoder) {
        checkPermission(CLUSTER_WRITE);
        messagingService.registerHandler(subject.value(),
                new InternalMessageResponder<>(decoder, encoder, handler));
    }
    // One-way consumer variant: no response is produced.
    @Override
    public <M> void addSubscriber(MessageSubject subject,
                                  Function<byte[], M> decoder,
                                  Consumer<M> handler,
                                  Executor executor) {
        checkPermission(CLUSTER_WRITE);
        messagingService.registerHandler(subject.value(),
                new InternalMessageConsumer<>(decoder, handler),
                executor);
    }
    /**
     * Wraps the given function so that every invocation is timed by the supplied
     * metering agent under the given operation name; exceptions stop the timer with
     * the error before being propagated to the caller.
     *
     * @param timedFunction the function to be timed
     * @param meter the metering agent to be used to time the function
     * @param opName the opname to be used when starting the meter
     * @param <A> The param type of the function
     * @param <B> The return type of the function
     * @return a function with the same behavior whose invocations are metered
     */
    private <A, B> Function<A, B> timeFunction(Function<A, B> timedFunction,
                                               MeteringAgent meter, String opName) {
        checkNotNull(timedFunction);
        checkNotNull(meter);
        checkNotNull(opName);
        return new Function<A, B>() {
            @Override
            public B apply(A a) {
                final MeteringAgent.Context context = meter.startTimer(opName);
                B result = null;
                try {
                    result = timedFunction.apply(a);
                    context.stop(null);
                    return result;
                } catch (Exception e) {
                    context.stop(e);
                    // NOTE(review): Throwables.propagate is deprecated in newer Guava; it
                    // always rethrows, so the return below is unreachable in practice.
                    Throwables.propagate(e);
                    return null;
                }
            }
        };
    }
    // Adapts a ClusterMessageHandler to the raw byte-level handler interface.
    private class InternalClusterMessageHandler implements BiFunction<Endpoint, byte[], byte[]> {
        private ClusterMessageHandler handler;
        public InternalClusterMessageHandler(ClusterMessageHandler handler) {
            this.handler = handler;
        }
        @Override
        public byte[] apply(Endpoint sender, byte[] bytes) {
            ClusterMessage message = ClusterMessage.fromBytes(bytes);
            handler.handle(message);
            return message.response();
        }
    }
    // Decodes the envelope payload, invokes the handler and encodes its async response;
    // both directions are metered per subject.
    private class InternalMessageResponder<M, R> implements BiFunction<Endpoint, byte[], CompletableFuture<byte[]>> {
        private final Function<byte[], M> decoder;
        private final Function<R, byte[]> encoder;
        private final Function<M, CompletableFuture<R>> handler;
        public InternalMessageResponder(Function<byte[], M> decoder,
                                        Function<R, byte[]> encoder,
                                        Function<M, CompletableFuture<R>> handler) {
            this.decoder = decoder;
            this.encoder = encoder;
            this.handler = handler;
        }
        @Override
        public CompletableFuture<byte[]> apply(Endpoint sender, byte[] bytes) {
            return handler.apply(timeFunction(decoder, subjectMeteringAgent, DESERIALIZING).
                    apply(ClusterMessage.fromBytes(bytes).payload())).
                    thenApply(m -> timeFunction(encoder, subjectMeteringAgent, SERIALIZING).apply(m));
        }
    }
    // Decodes the envelope payload and hands it to a one-way consumer (no response).
    private class InternalMessageConsumer<M> implements BiConsumer<Endpoint, byte[]> {
        private final Function<byte[], M> decoder;
        private final Consumer<M> consumer;
        public InternalMessageConsumer(Function<byte[], M> decoder, Consumer<M> consumer) {
            this.decoder = decoder;
            this.consumer = consumer;
        }
        @Override
        public void accept(Endpoint sender, byte[] bytes) {
            consumer.accept(timeFunction(decoder, subjectMeteringAgent, DESERIALIZING).
                    apply(ClusterMessage.fromBytes(bytes).payload()));
        }
    }
}
| |
/*
* Copyright 2016 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.examples.bigquery.snippets;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import com.google.api.gax.paging.Page;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQuery.TableDataListOption;
import com.google.cloud.bigquery.BigQuery.TableField;
import com.google.cloud.bigquery.DatasetInfo;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.FieldValueList;
import com.google.cloud.bigquery.InsertAllResponse;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.LegacySQLTypeName;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardTableDefinition;
import com.google.cloud.bigquery.Table;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableInfo;
import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
import com.google.cloud.storage.BucketInfo;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.testing.RemoteStorageHelper;
import com.google.common.base.Function;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import java.util.List;
import java.util.Set;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;
/**
 * Integration tests for {@link TableSnippets}.
 *
 * <p>These tests run against live BigQuery and Cloud Storage services: {@link #beforeClass()}
 * creates two datasets and a bucket with randomly generated names, and every test operates on a
 * freshly created table. Several tests poll for data because streaming inserts are eventually
 * consistent.
 */
public class ITTableSnippets {

  // Prefix for per-test table names; a counter suffix keeps tests isolated from each other.
  private static final String BASE_TABLE_NAME = "my_table";
  // Randomly generated resource names so concurrent test runs do not collide.
  private static final String DATASET_NAME = RemoteBigQueryHelper.generateDatasetName();
  private static final String COPY_DATASET_NAME = RemoteBigQueryHelper.generateDatasetName();
  private static final String BUCKET_NAME = RemoteStorageHelper.generateBucketName();
  // Two-column schema shared by every table created in these tests.
  private static final Schema SCHEMA =
      Schema.of(
          Field.of("stringField", LegacySQLTypeName.STRING),
          Field.of("booleanField", LegacySQLTypeName.BOOLEAN));
  // Expected row contents, in schema column order (string, boolean).
  private static final List<?> ROW1 = ImmutableList.of("value1", true);
  private static final List<?> ROW2 = ImmutableList.of("value2", false);
  // Table created only to be deleted by testDelete().
  private static final String DOOMED_TABLE_NAME = "doomed_table";
  private static final TableId DOOMED_TABLE_ID = TableId.of(DATASET_NAME, DOOMED_TABLE_NAME);

  private static BigQuery bigquery;
  private static Storage storage;
  // Incremented in before() so each test gets unique table names.
  private static int nextTableNumber;

  // Per-test fixtures: the table under test and the snippet wrapper exercising it.
  private Table table;
  private TableSnippets tableSnippets;

  // Hard cap on each test's runtime; the polling loops below would otherwise hang forever
  // if the inserted data never appears.
  @Rule public Timeout globalTimeout = Timeout.seconds(300);

  /** Creates the shared datasets and GCS bucket used by all tests. */
  @BeforeClass
  public static void beforeClass() {
    bigquery = RemoteBigQueryHelper.create().getOptions().getService();
    bigquery.create(DatasetInfo.newBuilder(DATASET_NAME).build());
    bigquery.create(DatasetInfo.newBuilder(COPY_DATASET_NAME).build());
    storage = RemoteStorageHelper.create().getOptions().getService();
    storage.create(BucketInfo.of(BUCKET_NAME));
  }

  /** Creates a fresh source table and copy-target table for the next test. */
  @Before
  public void before() {
    ++nextTableNumber;
    StandardTableDefinition.Builder builder = StandardTableDefinition.newBuilder();
    builder.setSchema(SCHEMA);
    table = bigquery.create(TableInfo.of(getTableId(), builder.build()));
    // Pre-create the copy target so copy tests have a destination table.
    bigquery.create(TableInfo.of(getCopyTableId(), builder.build()));
    tableSnippets = new TableSnippets(table);
  }

  /** Removes the per-test tables (best effort; delete returns false if already gone). */
  @After
  public void after() {
    bigquery.delete(getTableId());
    bigquery.delete(getCopyTableId());
  }

  /** Force-deletes the shared datasets and bucket, including any leftover contents. */
  @AfterClass
  public static void afterClass() {
    RemoteBigQueryHelper.forceDelete(bigquery, DATASET_NAME);
    RemoteBigQueryHelper.forceDelete(bigquery, COPY_DATASET_NAME);
    RemoteStorageHelper.forceDelete(storage, BUCKET_NAME);
  }

  // Name of the current test's source table, e.g. "my_table7".
  private String getTableName() {
    return BASE_TABLE_NAME + nextTableNumber;
  }

  private TableId getTableId() {
    return TableId.of(DATASET_NAME, getTableName());
  }

  // Name of the current test's copy-destination table, e.g. "my_table_copy_7".
  private String getCopyTableName() {
    return BASE_TABLE_NAME + "_copy_" + nextTableNumber;
  }

  private TableId getCopyTableId() {
    return TableId.of(COPY_DATASET_NAME, getCopyTableName());
  }

  @Test
  public void testExists() {
    assertTrue(tableSnippets.exists());
  }

  @Test
  public void testReloadTableWithFields() {
    // Reload with a restricted field mask and check one of the requested fields is populated.
    Table latestTable =
        tableSnippets.reloadTableWithFields(TableField.LAST_MODIFIED_TIME, TableField.NUM_ROWS);
    assertNotNull(latestTable);
    assertNotNull(latestTable.getLastModifiedTime());
  }

  @Test
  public void testUpdate() {
    // The snippet is expected to set this exact description on the table.
    Table updatedTable = tableSnippets.update();
    assertEquals("new description", updatedTable.getDescription());
  }

  @Test
  public void testDelete() {
    Table doomedTable =
        bigquery.create(TableInfo.of(DOOMED_TABLE_ID, StandardTableDefinition.of(SCHEMA)));
    TableSnippets doomedTableSnippets = new TableSnippets(doomedTable);
    assertTrue(doomedTableSnippets.delete());
  }

  @Test
  public void testInsert() throws InterruptedException {
    InsertAllResponse response = tableSnippets.insert("row1", "row2");
    assertFalse(response.hasErrors());
    // Polls until both rows are visible, then checks their contents.
    verifyTestRows(table);
  }

  @Test
  public void testInsertParams() throws InterruptedException {
    InsertAllResponse response = tableSnippets.insertWithParams("row1", "row2");
    assertFalse(response.hasErrors());
    // Streaming inserts are eventually consistent; poll until at least one row appears.
    List<FieldValueList> rows = ImmutableList.copyOf(tableSnippets.list().getValues());
    while (rows.isEmpty()) {
      Thread.sleep(500);
      rows = ImmutableList.copyOf(tableSnippets.list().getValues());
    }
    Set<List<?>> values =
        FluentIterable.from(rows)
            .transform(
                new Function<FieldValueList, List<?>>() {
                  @Override
                  public List<?> apply(FieldValueList row) {
                    return ImmutableList.of(
                        row.get(0).getStringValue(), row.get(1).getBooleanValue());
                  }
                })
            .toSet();
    // Only ROW2 is expected here: insertWithParams presumably filters out the first row —
    // TODO(review): confirm against TableSnippets.insertWithParams.
    assertEquals(ImmutableSet.of(ROW2), values);
  }

  @Test
  public void testList() throws InterruptedException {
    // A freshly created table starts empty.
    List<FieldValueList> rows = ImmutableList.copyOf(tableSnippets.list().getValues());
    assertEquals(0, rows.size());
    InsertAllResponse response = tableSnippets.insert("row1", "row2");
    assertFalse(response.hasErrors());
    rows = ImmutableList.copyOf(tableSnippets.list().getValues());
    // Poll until the streamed rows become visible.
    while (rows.isEmpty()) {
      Thread.sleep(500);
      rows = ImmutableList.copyOf(tableSnippets.list().getValues());
    }
    assertEquals(2, rows.size());
  }

  @Test
  public void testCopy() {
    // Smoke test only: the snippet itself is expected to throw on failure.
    tableSnippets.copy(COPY_DATASET_NAME, BASE_TABLE_NAME);
  }

  @Test
  public void testCopyTableId() {
    Job copyJob = tableSnippets.copyTableId(COPY_DATASET_NAME, getCopyTableName());
    assertSuccessful(copyJob);
  }

  @Test
  public void testExtractAndLoadList() {
    // "*" makes BigQuery shard the extract; a single shard is written as 000000000000.
    String gcsFile1 = "gs://" + BUCKET_NAME + "/extractTestA_*.csv";
    String gcsFile2 = "gs://" + BUCKET_NAME + "/extractTestB_*.csv";
    Job extractJob = tableSnippets.extractList("CSV", gcsFile1, gcsFile2);
    gcsFile1 = gcsFile1.replace("*", "000000000000");
    gcsFile2 = gcsFile2.replace("*", "000000000000");
    assertSuccessful(extractJob);
    // Round-trip: load the just-extracted files back into the table.
    Job loadJob = tableSnippets.loadList(gcsFile1, gcsFile2);
    assertSuccessful(loadJob);
  }

  @Test
  public void testExtractAndLoadSingle() {
    String gcsFile = "gs://" + BUCKET_NAME + "/extractTest.csv";
    Job extractJob = tableSnippets.extractSingle("CSV", gcsFile);
    assertSuccessful(extractJob);
    Job loadJob = tableSnippets.loadSingle(gcsFile);
    assertSuccessful(loadJob);
  }

  /**
   * Verifies that the given table has the rows inserted by InsertTestRows().
   *
   * @param checkTable the table to query
   */
  private void verifyTestRows(Table checkTable) throws InterruptedException {
    List<FieldValueList> rows = waitForTableRows(checkTable, 2);
    // Verify that the table data matches what it's supposed to.
    Set<List<?>> values =
        FluentIterable.from(rows)
            .transform(
                new Function<FieldValueList, List<?>>() {
                  @Override
                  public List<?> apply(FieldValueList row) {
                    return ImmutableList.of(
                        row.get(0).getStringValue(), row.get(1).getBooleanValue());
                  }
                })
            .toSet();
    assertEquals(ImmutableSet.of(ROW2, ROW1), values);
  }

  /**
   * Waits for a specified number of rows to appear in the given table. This is used by
   * verifyTestRows to wait for data to appear before verifying.
   *
   * <p>Relies on the class-level {@code Timeout} rule to abort if the rows never arrive.
   *
   * @param checkTable the table to query
   * @param numRows the expected number of rows
   * @return the rows from the table
   */
  private List<FieldValueList> waitForTableRows(Table checkTable, int numRows)
      throws InterruptedException {
    // Wait for the data to appear.
    Page<FieldValueList> page = checkTable.list(TableDataListOption.pageSize(100));
    List<FieldValueList> rows = ImmutableList.copyOf(page.getValues());
    while (rows.size() != numRows) {
      Thread.sleep(1000);
      page = checkTable.list(TableDataListOption.pageSize(100));
      rows = ImmutableList.copyOf(page.getValues());
    }
    return rows;
  }

  // Asserts that a BigQuery job finished and reported no error status.
  private void assertSuccessful(Job job) {
    assertTrue(job.isDone());
    assertNull(job.getStatus().getError());
  }
}
| |
package io.subutai.common.util;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import io.subutai.common.peer.Peer;
import io.subutai.common.settings.Common;
/**
 * Utility for running a batch of {@link Callable} tasks against a set of peers in parallel and
 * collecting per-peer results.
 *
 * <p>Not thread-safe: a single {@code PeerUtil} instance is meant to be populated via
 * {@link #addPeerTask(PeerTask)} and executed from one thread.
 */
public class PeerUtil<T>
{
    private static final Logger LOG = LoggerFactory.getLogger( PeerUtil.class );

    private final Set<PeerTask<T>> peerTasks = Sets.newHashSet();


    /**
     * Adds a task for a peer
     *
     * @param peerTask {@code PeerTask}
     */
    public void addPeerTask( PeerTask<T> peerTask )
    {
        Preconditions.checkNotNull( peerTask, "Invalid peer task" );

        peerTasks.add( peerTask );
    }


    /**
     * Executes added tasks in parallel
     *
     * @return {@code PeerTaskResults}
     */
    public PeerTaskResults<T> executeParallel()
    {
        Preconditions
                .checkArgument( !CollectionUtil.isCollectionEmpty( peerTasks ), "No peer task found for execution" );

        Set<PeerTaskResult<T>> results = executeParallel( peerTasks, false );

        // Added tasks are consumed by execution; the instance can be reused for a new batch.
        peerTasks.clear();

        return new PeerTaskResults<>( results );
    }


    /**
     * Executes added tasks in parallel. Fails fast if any execution failed.
     *
     * Returns results of tasks completed so far
     *
     * @return {@code PeerTaskResults}
     */
    public PeerTaskResults<T> executeParallelFailFast()
    {
        Preconditions
                .checkArgument( !CollectionUtil.isCollectionEmpty( peerTasks ), "No peer task found for execution" );

        Set<PeerTaskResult<T>> results = executeParallel( peerTasks, true );

        peerTasks.clear();

        return new PeerTaskResults<>( results );
    }


    /**
     * Runs the given tasks on a bounded thread pool and gathers one {@link PeerTaskResult} per
     * task.
     *
     * @param peerTasks tasks to run; must not be empty
     * @param failFast when {@code true}, stops collecting after the first failed task (results
     * gathered so far are still returned; remaining tasks keep running until the executor drains)
     * @return results for the tasks that completed before return
     */
    protected Set<PeerTaskResult<T>> executeParallel( Set<PeerTask<T>> peerTasks, boolean failFast )
    {
        Preconditions.checkArgument( !CollectionUtil.isCollectionEmpty( peerTasks ) );

        Set<PeerTaskResult<T>> peerTaskResults = Sets.newHashSet();

        ExecutorService taskExecutor =
                Executors.newFixedThreadPool( Math.min( Common.MAX_EXECUTOR_SIZE, peerTasks.size() ) );

        CompletionService<T> taskCompletionService = new ExecutorCompletionService<>( taskExecutor );

        // Key by Future (unique per submission), not by Peer: several tasks may target the same
        // peer, and a peer-keyed map would silently drop all but the last submitted future.
        Map<Future<T>, Peer> futurePeers = Maps.newHashMap();

        for ( PeerTask<T> peerTask : peerTasks )
        {
            futurePeers.put( taskCompletionService.submit( peerTask.getTask() ), peerTask.getPeer() );
        }

        // No new tasks will be submitted; lets the pool terminate once the queue drains.
        taskExecutor.shutdown();

        int remainingTasks = futurePeers.size();

        while ( remainingTasks > 0 )
        {
            Future<T> completedFuture;

            try
            {
                // Blocks until the next task finishes; replaces the former isDone()/sleep polling.
                completedFuture = taskCompletionService.take();
            }
            catch ( InterruptedException e )
            {
                // Preserve interrupt status and return what we have so far.
                Thread.currentThread().interrupt();
                break;
            }

            remainingTasks--;

            Peer targetPeer = futurePeers.get( completedFuture );

            try
            {
                peerTaskResults.add( new PeerTaskResult<>( targetPeer, completedFuture.get() ) );
            }
            catch ( Exception e )
            {
                LOG.error( "Error executing task on peer {}", targetPeer.getName(), e );

                peerTaskResults.add( new PeerTaskResult<T>( targetPeer, e ) );

                if ( failFast )
                {
                    break;
                }
            }
        }

        return peerTaskResults;
    }


    /**
     * Pairs a peer with the task to run against it.
     */
    public static class PeerTask<T>
    {
        private final Peer peer;
        private final Callable<T> task;


        public PeerTask( final Peer peer, final Callable<T> task )
        {
            this.peer = peer;
            this.task = task;
        }


        public Peer getPeer()
        {
            return peer;
        }


        public Callable<T> getTask()
        {
            return task;
        }
    }


    /**
     * Outcome of one task: either a result value or the exception that aborted it.
     */
    public static class PeerTaskResult<T>
    {
        private final Peer peer;
        private T result;
        private Exception exception;
        private boolean hasSucceeded = true;


        protected PeerTaskResult( final Peer peer, final T result )
        {
            this.peer = peer;
            this.result = result;
        }


        protected PeerTaskResult( final Peer peer, final Exception exception )
        {
            this.peer = peer;
            this.exception = exception;
            this.hasSucceeded = false;
        }


        public Peer getPeer()
        {
            return peer;
        }


        public T getResult()
        {
            return result;
        }


        public Exception getException()
        {
            return exception;
        }


        public String getFailureReason()
        {
            return exception == null ? "Unknown" : exception.getMessage();
        }


        public boolean hasSucceeded()
        {
            return hasSucceeded;
        }
    }


    /**
     * Aggregate of all task results with a precomputed any-failure flag.
     */
    public static class PeerTaskResults<T>
    {
        private final Set<PeerTaskResult<T>> results;
        private boolean hasFailures = false;


        protected PeerTaskResults( final Set<PeerTaskResult<T>> results )
        {
            Preconditions.checkArgument( !CollectionUtil.isCollectionEmpty( results ) );

            this.results = results;

            for ( PeerTaskResult<T> peerTaskResult : results )
            {
                if ( !peerTaskResult.hasSucceeded() )
                {
                    hasFailures = true;
                    break;
                }
            }
        }


        public boolean hasFailures()
        {
            return hasFailures;
        }


        public Set<PeerTaskResult<T>> getResults()
        {
            return results;
        }
    }
}
| |
/**
* Generated with Acceleo
*/
package com.dbdesigner.model.relationaldatabase.components;
// Start of user code for imports
import com.dbdesigner.model.relationaldatabase.RelationalDatabasePackage;
import com.dbdesigner.model.relationaldatabase.Tag;
import com.dbdesigner.model.relationaldatabase.parts.RelationaldatabaseViewsRepository;
import com.dbdesigner.model.relationaldatabase.parts.TagPropertiesEditionPart;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.BasicDiagnostic;
import org.eclipse.emf.common.util.Diagnostic;
import org.eclipse.emf.common.util.WrappedException;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.EcorePackage;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.util.Diagnostician;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.emf.eef.runtime.api.notify.EStructuralFeatureNotificationFilter;
import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.api.notify.NotificationFilter;
import org.eclipse.emf.eef.runtime.context.PropertiesEditingContext;
import org.eclipse.emf.eef.runtime.impl.components.SinglePartPropertiesEditingComponent;
import org.eclipse.emf.eef.runtime.impl.utils.EEFConverterUtil;
// End of user code
/**
 * EEF properties-editing component for {@link Tag} model elements.
 *
 * <p>Generated (Acceleo/EEF) glue code that binds the single "Base" properties view — the name
 * and documentation text fields — to the underlying EMF object, in both directions: view → model
 * ({@link #updateSemanticModel}) and model → view ({@link #updatePart}).
 */
public class TagPropertiesEditionComponent extends SinglePartPropertiesEditingComponent {

	/** Key of the single view part managed by this component. */
	public static String BASE_PART = "Base"; //$NON-NLS-1$

	/**
	 * Default constructor
	 *
	 * @param editingContext the EEF editing context
	 * @param tag the {@link Tag} instance being edited
	 * @param editing_mode the EEF editing mode
	 */
	public TagPropertiesEditionComponent(PropertiesEditingContext editingContext, EObject tag, String editing_mode) {
		super(editingContext, tag, editing_mode);
		parts = new String[] { BASE_PART };
		// Keys used by the EEF runtime to locate the generated view definitions.
		repositoryKey = RelationaldatabaseViewsRepository.class;
		partKey = RelationaldatabaseViewsRepository.Tag.class;
	}

	/**
	 * {@inheritDoc}
	 *
	 * Pushes the current model values (name, documentation) into the view widgets.
	 *
	 * @see org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent#initPart(java.lang.Object, int, org.eclipse.emf.ecore.EObject,
	 *      org.eclipse.emf.ecore.resource.ResourceSet)
	 *
	 */
	public void initPart(Object key, int kind, EObject elt, ResourceSet allResource) {
		// Guard: while initializing, view-change notifications must not loop back into the model.
		setInitializing(true);
		if (editingPart != null && key == partKey) {
			editingPart.setContext(elt, allResource);

			final Tag tag = (Tag)elt;
			final TagPropertiesEditionPart basePart = (TagPropertiesEditionPart)editingPart;
			// init values
			// NOTE(review): name uses EEFConverterUtil while documentation uses EcoreUtil for the
			// same EString conversion — presumably equivalent for strings; confirm against the
			// EEF generator templates.
			if (isAccessible(RelationaldatabaseViewsRepository.Tag.Properties.name))
				basePart.setName(EEFConverterUtil.convertToString(EcorePackage.Literals.ESTRING, tag.getName()));

			if (isAccessible(RelationaldatabaseViewsRepository.Tag.Properties.documentation))
				basePart.setDocumentation(EcoreUtil.convertToString(EcorePackage.Literals.ESTRING, tag.getDocumentation()));
			// init filters

			// init values for referenced views

			// init filters for referenced views

		}
		setInitializing(false);
	}

	/**
	 * {@inheritDoc}
	 *
	 * Maps a view editor key to the EMF structural feature it edits.
	 *
	 * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#associatedFeature(java.lang.Object)
	 */
	public EStructuralFeature associatedFeature(Object editorKey) {
		if (editorKey == RelationaldatabaseViewsRepository.Tag.Properties.name) {
			return RelationalDatabasePackage.eINSTANCE.getTag_Name();
		}
		if (editorKey == RelationaldatabaseViewsRepository.Tag.Properties.documentation) {
			return RelationalDatabasePackage.eINSTANCE.getTag_Documentation();
		}
		return super.associatedFeature(editorKey);
	}

	/**
	 * {@inheritDoc}
	 *
	 * Applies a view edit event to the semantic {@link Tag} object.
	 *
	 * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#updateSemanticModel(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
	 *
	 */
	public void updateSemanticModel(final IPropertiesEditionEvent event) {
		Tag tag = (Tag)semanticObject;
		if (RelationaldatabaseViewsRepository.Tag.Properties.name == event.getAffectedEditor()) {
			tag.setName((java.lang.String)EEFConverterUtil.createFromString(EcorePackage.Literals.ESTRING, (String)event.getNewValue()));
		}
		if (RelationaldatabaseViewsRepository.Tag.Properties.documentation == event.getAffectedEditor()) {
			tag.setDocumentation((java.lang.String)EEFConverterUtil.createFromString(EcorePackage.Literals.ESTRING, (String)event.getNewValue()));
		}
	}

	/**
	 * {@inheritDoc}
	 *
	 * Refreshes the view widgets when the model changes (e.g. via undo or another editor).
	 *
	 * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#updatePart(org.eclipse.emf.common.notify.Notification)
	 */
	public void updatePart(Notification msg) {
		super.updatePart(msg);

		if (editingPart.isVisible()) {
			TagPropertiesEditionPart basePart = (TagPropertiesEditionPart)editingPart;
			// Only react to notifications for this component's semantic object and features;
			// a null new value clears the corresponding text field.
			if (RelationalDatabasePackage.eINSTANCE.getTag_Name().equals(msg.getFeature()) && msg.getNotifier().equals(semanticObject) && basePart != null && isAccessible(RelationaldatabaseViewsRepository.Tag.Properties.name)) {
				if (msg.getNewValue() != null) {
					basePart.setName(EcoreUtil.convertToString(EcorePackage.Literals.ESTRING, msg.getNewValue()));
				} else {
					basePart.setName("");
				}
			}
			if (RelationalDatabasePackage.eINSTANCE.getTag_Documentation().equals(msg.getFeature()) && msg.getNotifier().equals(semanticObject) && basePart != null && isAccessible(RelationaldatabaseViewsRepository.Tag.Properties.documentation)){
				if (msg.getNewValue() != null) {
					basePart.setDocumentation(EcoreUtil.convertToString(EcorePackage.Literals.ESTRING, msg.getNewValue()));
				} else {
					basePart.setDocumentation("");
				}
			}

		}
	}

	/**
	 * {@inheritDoc}
	 *
	 * Restricts model notifications delivered to this component to the two edited features.
	 *
	 * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#getNotificationFilters()
	 */
	@Override
	protected NotificationFilter[] getNotificationFilters() {
		NotificationFilter filter = new EStructuralFeatureNotificationFilter(
			RelationalDatabasePackage.eINSTANCE.getTag_Name(),
			RelationalDatabasePackage.eINSTANCE.getTag_Documentation() );
		return new NotificationFilter[] {filter,};
	}

	/**
	 * {@inheritDoc}
	 *
	 * Validates a pending editor value against the EMF attribute type before it is applied.
	 * Conversion/validation failures are wrapped into a {@link Diagnostic} rather than thrown.
	 *
	 * @see org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent#validateValue(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
	 *
	 */
	public Diagnostic validateValue(IPropertiesEditionEvent event) {
		Diagnostic ret = Diagnostic.OK_INSTANCE;
		if (event.getNewValue() != null) {
			try {
				if (RelationaldatabaseViewsRepository.Tag.Properties.name == event.getAffectedEditor()) {
					Object newValue = event.getNewValue();
					// Convert the raw String from the widget into the attribute's typed value.
					if (newValue instanceof String) {
						newValue = EEFConverterUtil.createFromString(RelationalDatabasePackage.eINSTANCE.getTag_Name().getEAttributeType(), (String)newValue);
					}
					ret = Diagnostician.INSTANCE.validate(RelationalDatabasePackage.eINSTANCE.getTag_Name().getEAttributeType(), newValue);
				}
				if (RelationaldatabaseViewsRepository.Tag.Properties.documentation == event.getAffectedEditor()) {
					Object newValue = event.getNewValue();
					if (newValue instanceof String) {
						newValue = EEFConverterUtil.createFromString(RelationalDatabasePackage.eINSTANCE.getTag_Documentation().getEAttributeType(), (String)newValue);
					}
					ret = Diagnostician.INSTANCE.validate(RelationalDatabasePackage.eINSTANCE.getTag_Documentation().getEAttributeType(), newValue);
				}
			} catch (IllegalArgumentException iae) {
				ret = BasicDiagnostic.toDiagnostic(iae);
			} catch (WrappedException we) {
				ret = BasicDiagnostic.toDiagnostic(we);
			}
		}
		return ret;
	}

}
| |
/*
* Copyright (c) 2007, 2015, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.sun.org.apache.xml.internal.security.transforms.params;
import com.sun.org.apache.xml.internal.security.exceptions.XMLSecurityException;
import com.sun.org.apache.xml.internal.security.transforms.TransformParam;
import com.sun.org.apache.xml.internal.security.utils.ElementProxy;
import com.sun.org.apache.xml.internal.security.utils.XMLUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
/**
 * Implements the parameters for a custom Transform which has a better performance
 * than the xfilter2.
 *
 * <p>The container element carries an {@code IncludeSlashPolicy} attribute plus up to three child
 * elements ({@code IncludeButSearch}, {@code ExcludeButSearch}, {@code Exclude}), each holding one
 * XPath expression.
 *
 * @author $Author: coheigea $
 */
public class XPathFilterCHGPContainer extends ElementProxy implements TransformParam {

    /** Namespace URI identifying this transform's parameter element family. */
    public static final String TRANSFORM_XPATHFILTERCHGP =
        "http://www.nue.et-inf.uni-siegen.de/~geuer-pollmann/#xpathFilter";

    /** Field _ATT_FILTER_VALUE_INTERSECT */
    private static final String _TAG_INCLUDE_BUT_SEARCH = "IncludeButSearch";

    /** Field _ATT_FILTER_VALUE_SUBTRACT */
    private static final String _TAG_EXCLUDE_BUT_SEARCH = "ExcludeButSearch";

    /** Field _ATT_FILTER_VALUE_UNION */
    private static final String _TAG_EXCLUDE = "Exclude";

    /** Field _TAG_XPATHCHGP */
    public static final String _TAG_XPATHCHGP = "XPathAlternative";

    /** Field _ATT_INCLUDESLASH */
    public static final String _ATT_INCLUDESLASH = "IncludeSlashPolicy";

    /** Field IncludeSlash */
    public static final boolean IncludeSlash = true;

    /** Field ExcludeSlash */
    public static final boolean ExcludeSlash = false;

    /**
     * Constructor XPathFilterCHGPContainer
     *
     */
    private XPathFilterCHGPContainer() {
        // no instantiation
    }

    /**
     * Constructor XPathFilterCHGPContainer
     *
     * <p>Builds the parameter element: sets the slash-policy attribute, then appends one child
     * per non-blank XPath string.
     *
     * @param doc owner document
     * @param includeSlashPolicy value for the {@code IncludeSlashPolicy} attribute
     * @param includeButSearch XPath for {@code IncludeButSearch} (skipped when null/blank)
     * @param excludeButSearch XPath for {@code ExcludeButSearch} (skipped when null/blank)
     * @param exclude XPath for {@code Exclude} (skipped when null/blank)
     */
    private XPathFilterCHGPContainer(
        Document doc, boolean includeSlashPolicy, String includeButSearch,
        String excludeButSearch, String exclude
    ) {
        super(doc);

        this.constructionElement.setAttributeNS(
            null, _ATT_INCLUDESLASH, includeSlashPolicy ? "true" : "false"
        );

        appendXPathChild(doc, _TAG_INCLUDE_BUT_SEARCH, includeButSearch);
        appendXPathChild(doc, _TAG_EXCLUDE_BUT_SEARCH, excludeButSearch);
        appendXPathChild(doc, _TAG_EXCLUDE, exclude);

        XMLUtils.addReturnToElement(this.constructionElement);
    }

    /**
     * Appends one child element carrying an XPath expression; does nothing when the expression
     * is null or blank.
     *
     * @param doc owner document
     * @param tagName local name of the child element
     * @param xpath the XPath expression text
     */
    private void appendXPathChild(Document doc, String tagName, String xpath) {
        if (xpath == null || xpath.trim().length() == 0) {
            return;
        }
        Element child =
            ElementProxy.createElementForFamily(doc, this.getBaseNamespace(), tagName);
        child.appendChild(this.doc.createTextNode(indentXPathText(xpath)));
        // Keep the original layout: a line break goes in before each appended child.
        XMLUtils.addReturnToElement(this.constructionElement);
        this.constructionElement.appendChild(child);
    }

    /**
     * Method indentXPathText
     *
     * @param xp the XPath text
     * @return the string with enters
     */
    static String indentXPathText(String xp) {
        boolean padWithNewlines = xp.length() > 2 && !Character.isWhitespace(xp.charAt(0));
        return padWithNewlines ? "\n" + xp + "\n" : xp;
    }

    /**
     * Constructor XPathFilterCHGPContainer
     *
     * @param element existing DOM element to wrap
     * @param BaseURI base URI for reference resolution
     * @throws XMLSecurityException
     */
    private XPathFilterCHGPContainer(Element element, String BaseURI)
        throws XMLSecurityException {
        super(element, BaseURI);
    }

    /**
     * Creates a new XPathFilterCHGPContainer; needed for generation.
     *
     * @param doc owner document
     * @param includeSlashPolicy value for the {@code IncludeSlashPolicy} attribute
     * @param includeButSearch XPath for {@code IncludeButSearch}
     * @param excludeButSearch XPath for {@code ExcludeButSearch}
     * @param exclude XPath for {@code Exclude}
     * @return the created object
     */
    public static XPathFilterCHGPContainer getInstance(
        Document doc, boolean includeSlashPolicy, String includeButSearch,
        String excludeButSearch, String exclude
    ) {
        return new XPathFilterCHGPContainer(
            doc, includeSlashPolicy, includeButSearch, excludeButSearch, exclude);
    }

    /**
     * Creates a XPathFilterCHGPContainer from an existing Element; needed for verification.
     *
     * @param element existing DOM element to wrap
     * @param BaseURI base URI for reference resolution
     *
     * @throws XMLSecurityException
     * @return the created object.
     */
    public static XPathFilterCHGPContainer getInstance(
        Element element, String BaseURI
    ) throws XMLSecurityException {
        return new XPathFilterCHGPContainer(element, BaseURI);
    }

    /**
     * Method getXStr
     *
     * <p>Returns the text content of the single child element with the given local name, or an
     * empty string when exactly one such child does not exist.
     *
     * @param type local name of the child element
     * @return The Xstr
     */
    private String getXStr(String type) {
        if (this.length(this.getBaseNamespace(), type) != 1) {
            return "";
        }
        Element xElem = XMLUtils.selectNode(
            this.constructionElement.getFirstChild(), this.getBaseNamespace(), type, 0);
        return XMLUtils.getFullTextChildrenFromElement(xElem);
    }

    /**
     * Method getIncludeButSearch
     *
     * @return the string
     */
    public String getIncludeButSearch() {
        return getXStr(_TAG_INCLUDE_BUT_SEARCH);
    }

    /**
     * Method getExcludeButSearch
     *
     * @return the string
     */
    public String getExcludeButSearch() {
        return getXStr(_TAG_EXCLUDE_BUT_SEARCH);
    }

    /**
     * Method getExclude
     *
     * @return the string
     */
    public String getExclude() {
        return getXStr(_TAG_EXCLUDE);
    }

    /**
     * Method getIncludeSlashPolicy
     *
     * @return true iff the {@code IncludeSlashPolicy} attribute equals {@code "true"}
     */
    public boolean getIncludeSlashPolicy() {
        String policy = this.constructionElement.getAttributeNS(null, _ATT_INCLUDESLASH);
        return "true".equals(policy);
    }

    /**
     * Returns the first Text node which contains information from the XPath
     * Filter String. We must use this stupid hook to enable the here() function
     * to work.
     *
     * $todo$ I dunno whether this crashes: <XPath> he<!-- comment -->re()/ds:Signature[1]</XPath>
     * @param type local name of the child element
     * @return the first Text node which contains information from the XPath 2 Filter String
     */
    private Node getHereContextNode(String type) {
        if (this.length(this.getBaseNamespace(), type) != 1) {
            return null;
        }
        return XMLUtils.selectNodeText(
            this.constructionElement.getFirstChild(), this.getBaseNamespace(), type, 0);
    }

    /**
     * Method getHereContextNodeIncludeButSearch
     *
     * @return the text node of the {@code IncludeButSearch} child, or null
     */
    public Node getHereContextNodeIncludeButSearch() {
        return getHereContextNode(_TAG_INCLUDE_BUT_SEARCH);
    }

    /**
     * Method getHereContextNodeExcludeButSearch
     *
     * @return the text node of the {@code ExcludeButSearch} child, or null
     */
    public Node getHereContextNodeExcludeButSearch() {
        return getHereContextNode(_TAG_EXCLUDE_BUT_SEARCH);
    }

    /**
     * Method getHereContextNodeExclude
     *
     * @return the text node of the {@code Exclude} child, or null
     */
    public Node getHereContextNodeExclude() {
        return getHereContextNode(_TAG_EXCLUDE);
    }

    /**
     * Method getBaseLocalName
     *
     * @inheritDoc
     */
    public final String getBaseLocalName() {
        return _TAG_XPATHCHGP;
    }

    /**
     * Method getBaseNamespace
     *
     * @inheritDoc
     */
    public final String getBaseNamespace() {
        return TRANSFORM_XPATHFILTERCHGP;
    }
}
| |
/**
* Copyright 2015 IBM Corp. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.ibm.watson.developer_cloud.document_conversion.v1;
import static com.ibm.watson.developer_cloud.document_conversion.v1.util.ConversionTarget.ANSWER_UNITS;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.ibm.watson.developer_cloud.document_conversion.v1.model.Answers;
import com.ibm.watson.developer_cloud.document_conversion.v1.util.ConversionTarget;
import com.ibm.watson.developer_cloud.document_conversion.v1.util.ConversionUtils;
import com.ibm.watson.developer_cloud.http.HttpHeaders;
import com.ibm.watson.developer_cloud.http.HttpMediaType;
import com.ibm.watson.developer_cloud.http.RequestBuilder;
import com.ibm.watson.developer_cloud.service.WatsonService;
import com.ibm.watson.developer_cloud.util.GsonSingleton;
import com.ibm.watson.developer_cloud.util.ResponseUtil;
import com.squareup.okhttp.Headers;
import com.squareup.okhttp.MediaType;
import com.squareup.okhttp.MultipartBuilder;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.RequestBody;
import com.squareup.okhttp.Response;
/**
* The IBM Watson Document Conversion service converts provided source documents (HTML, Word, PDF)
* into JSON Answer Units, Normalized HTML, or Normalized Text.
*
* @version v1
* @see <a
* href="http://www.ibm.com/smarterplanet/us/en/ibmwatson/developercloud/document-conversion.html">
* Document Conversion</a>
*/
public class DocumentConversion extends WatsonService {
  private static final Logger LOG = Logger.getLogger(DocumentConversion.class.getName());

  /** The Constant CONVERSION_TARGET. JSON config key that selects the output format. */
  private static final String CONVERSION_TARGET = "conversion_target";

  /**
   * The CONVERT_DOCUMENT_PATH. (value is "/v1/convert_document")
   **/
  private static final String CONVERT_DOCUMENT_PATH = "/v1/convert_document";

  /** The default URL for the service. */
  private static final String URL = "https://gateway.watsonplatform.net/document-conversion/api";

  // Shared empty "{}" config used when the caller supplies no custom conversion config.
  // NOTE(review): shared instance — convertDocument copies entries instead of mutating it.
  private static final JsonObject EMPTY_CONFIG = new JsonParser().parse("{}").getAsJsonObject();

  /** Supported REST API version date (yyyy-MM-dd). */
  public static final String VERSION_DATE_2015_12_01 = "2015-12-01";

  // Version date sent as a query parameter with every request; fixed at construction time.
  private final String versionDate;
  /** @deprecated See {@link DocumentConversion#DocumentConversion(String)} */
  @Deprecated
  public DocumentConversion() {
    // Pins the API version to the oldest supported date; prefer the explicit-version constructor.
    this(VERSION_DATE_2015_12_01);
  }
  /**
   * Creates a Document Conversion client pinned to a specific API version.
   *
   * @param versionDate The version date (yyyy-MM-dd) of the REST API to use. Specifying this value
   *        will keep your API calls from failing when the service introduces breaking changes.
   *        Not validated here — TODO confirm the expected format with the service documentation.
   */
  public DocumentConversion(String versionDate) {
    // "document_conversion" is the service name used to resolve credentials/endpoint config.
    super("document_conversion");
    setEndPoint(URL);
    this.versionDate = versionDate;
  }
/**
 * Converts a document and returns an {@link InputStream} with the converted content.
 *
 * @param document The file to convert; must be non-null and exist
 * @param mediaType Internet media type of the file, or null to infer it from the file extension
 * @param conversionTarget The conversion target to use
 * @param customConfig The additional config params to use; copied, never mutated
 * @return Converted document in the specified format
 * @throws IllegalArgumentException if the document is missing, or its media type cannot be
 *         determined or is unsupported
 * @throws NullPointerException if {@code conversionTarget} or {@code customConfig} is null
 * @see HttpMediaType
 */
private InputStream convertDocument(final File document, final String mediaType,
    final ConversionTarget conversionTarget, final JsonObject customConfig) {
  if (document == null || !document.exists()) {
    throw new IllegalArgumentException("document cannot be null and must exist");
  }
  // Fail fast: conversionTarget.toString() is dereferenced below.
  if (conversionTarget == null) {
    throw new NullPointerException("conversion target must not be null");
  }
  if (customConfig == null) {
    throw new NullPointerException("custom config must not be null");
  }
  final String type =
      mediaType != null ? mediaType : ConversionUtils.getMediaTypeFromFile(document);
  if (type == null) {
    // Was a bare RuntimeException; IllegalArgumentException is the accurate, narrower type
    // and is backward compatible for callers (it extends RuntimeException).
    throw new IllegalArgumentException("mediaType cannot be null or empty");
  } else if (!ConversionUtils.isValidMediaType(type)) {
    throw new IllegalArgumentException("file with the given media type is not supported");
  }
  // Copy entries into a fresh object so the caller's customConfig is never mutated
  // (EMPTY_CONFIG in particular is shared across calls).
  final JsonObject configJson = new JsonObject();
  for (Map.Entry<String, JsonElement> entry : customConfig.entrySet()) {
    configJson.add(entry.getKey(), entry.getValue());
  }
  // Add or override the conversion target.
  configJson.addProperty(CONVERSION_TARGET, conversionTarget.toString());
  final MediaType mType = MediaType.parse(type);
  // Multipart form with two parts: the JSON config and the raw file bytes.
  final RequestBody body =
      new MultipartBuilder()
          .type(MultipartBuilder.FORM)
          .addPart(Headers.of(HttpHeaders.CONTENT_DISPOSITION, "form-data; name=\"config\""),
              RequestBody.create(HttpMediaType.JSON, configJson.toString()))
          .addPart(Headers.of(HttpHeaders.CONTENT_DISPOSITION, "form-data; name=\"file\""),
              RequestBody.create(mType, document)).build();
  final Request request =
      RequestBuilder.post(CONVERT_DOCUMENT_PATH).withQuery(VERSION, versionDate).withBody(body)
          .build();
  final Response response = execute(request);
  return ResponseUtil.getInputStream(response);
}
/**
 * Converts a document to Answer Units, inferring the media type from the file extension. <br>
 * Use {@link DocumentConversion#convertDocumentToAnswer(File, String)} if you want to specify the
 * media type explicitly.
 *
 * @param document the document
 * @return Converted document as {@link Answers}
 */
public Answers convertDocumentToAnswer(File document) {
  // null media type means "derive it from the file" downstream.
  return convertDocumentToAnswer(document, null);
}
/**
 * Converts a document to Answer Units using the default (empty) configuration.
 *
 * @param document the document
 * @param mediaType the document media type. It will use the file extension if not provided
 * @return Converted document as {@link Answers}
 * @see HttpMediaType for available media types
 */
public Answers convertDocumentToAnswer(File document, String mediaType) {
  return convertDocumentToAnswer(document, mediaType, EMPTY_CONFIG);
}
/**
 * Converts a document to Answer Units using a custom configuration.
 *
 * @param document the document
 * @param mediaType the document media type; the file extension is used if not provided
 * @param customConfig a config used to customize the conversion
 * @return converted document as {@link Answers}
 */
public Answers convertDocumentToAnswer(File document, String mediaType, JsonObject customConfig) {
  // Fetch the converted payload and deserialize it into the Answers model.
  final String answersJson =
      responseToString(convertDocument(document, mediaType, ANSWER_UNITS, customConfig));
  return GsonSingleton.getGsonWithoutPrettyPrinting().fromJson(answersJson, Answers.class);
}
/**
 * Converts a document to HTML, inferring the media type from the file extension. <br>
 * Use {@link DocumentConversion#convertDocumentToHTML(File, String)} if you want to specify the
 * media type explicitly.
 *
 * @param document the document
 * @return Converted document as {@link String}
 */
public String convertDocumentToHTML(File document) {
  // null media type means "derive it from the file" downstream.
  return convertDocumentToHTML(document, null);
}
/**
 * Converts a document to HTML using the default (empty) configuration.
 *
 * @param document the document
 * @param mediaType the document media type. It will use the file extension if not provided.
 * @return Converted document as {@link String}
 * @see HttpMediaType for available media types
 */
public String convertDocumentToHTML(File document, String mediaType) {
  return convertDocumentToHTML(document, mediaType, EMPTY_CONFIG);
}
/**
 * Converts a document to HTML using a custom configuration.
 *
 * @param document the document
 * @param mediaType the document media type; the file extension is used if not provided
 * @param customConfig a config used to customize the conversion
 * @return converted document as {@link String}
 */
public String convertDocumentToHTML(File document, String mediaType, JsonObject customConfig) {
  // Convert with the NORMALIZED_HTML target and read the whole response body.
  return responseToString(
      convertDocument(document, mediaType, ConversionTarget.NORMALIZED_HTML, customConfig));
}
/**
 * Converts a document to Text, inferring the media type from the file extension. <br>
 * Use {@link DocumentConversion#convertDocumentToText(File, String)} if you want to specify the
 * media type explicitly.
 *
 * @param document the document
 * @return Converted document as {@link String}
 */
public String convertDocumentToText(File document) {
  // null media type means "derive it from the file" downstream.
  return convertDocumentToText(document, null);
}
/**
 * Converts a document to Text using the default (empty) configuration.
 *
 * @param document the document
 * @param mediaType the document media type. It will use the file extension if not provided.
 * @return Converted document as {@link String}
 * @see HttpMediaType for available media types
 */
public String convertDocumentToText(File document, String mediaType) {
  return convertDocumentToText(document, mediaType, EMPTY_CONFIG);
}
/**
 * Converts a document to Text using a custom configuration.
 *
 * @param document the document
 * @param mediaType the document media type; the file extension is used if not provided
 * @param customConfig a config used to customize the conversion
 * @return converted document as {@link String}
 */
public String convertDocumentToText(File document, String mediaType, JsonObject customConfig) {
  // Convert with the NORMALIZED_TEXT target and read the whole response body.
  return responseToString(
      convertDocument(document, mediaType, ConversionTarget.NORMALIZED_TEXT, customConfig));
}
/**
 * Loads a custom configuration from the input stream specified.
 *
 * <p>The stream is decoded as UTF-8. (Previously the platform default charset was used, which
 * made parsing machine-dependent.) The stream is not closed here; the caller retains ownership.
 *
 * @param customConfig input stream for the custom configuration
 * @return the custom configuration as a JsonObject
 */
public JsonObject loadCustomConfig(InputStream customConfig) {
  final Reader reader =
      new InputStreamReader(customConfig, java.nio.charset.StandardCharsets.UTF_8);
  return new JsonParser().parse(reader).getAsJsonObject();
}
/**
 * Consumes the InputStream, converting it into a String. The stream is always closed;
 * a failure to close is logged as a warning rather than propagated, so the converted
 * content (or the original exception) is never masked by a close() error.
 */
private String responseToString(InputStream is) {
  try {
    return ConversionUtils.writeInputStreamToString(is);
  } finally {
    try {
      is.close();
    } catch (final IOException e) {
      LOG.log(Level.WARNING, "Unable to close document input stream", e);
    }
  }
}
}
| |
/*
* Copyright 2006 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.BASE_CLASS_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.CLOSURE_DEFINES_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.DUPLICATE_NAMESPACE_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.EXPECTED_OBJECTLIT_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.FUNCTION_NAMESPACE_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.GOOG_BASE_CLASS_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.INVALID_ARGUMENT_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.INVALID_CLOSURE_CALL_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.INVALID_CSS_RENAMING_MAP;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.INVALID_DEFINE_NAME_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.INVALID_PROVIDE_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.INVALID_STYLE_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.LATE_PROVIDE_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.MISSING_DEFINE_ANNOTATION;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.MISSING_PROVIDE_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.NON_STRING_PASSED_TO_SET_CSS_NAME_MAPPING_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.NULL_ARGUMENT_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.TOO_MANY_ARGUMENTS_ERROR;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.WEAK_NAMESPACE_TYPE;
import static com.google.javascript.jscomp.ProcessClosurePrimitives.XMODULE_REQUIRE_ERROR;
import com.google.javascript.jscomp.CompilerOptions.LanguageMode;
import com.google.javascript.rhino.Node;
/**
* Tests for {@link ProcessClosurePrimitives}.
*
*/
public final class ProcessClosurePrimitivesTest extends CompilerTestCase {
// Code injected at the beginning of the test input and processed in a second pass, or null.
private String additionalCode;
// Code injected at the end of the test input and processed in a second pass, or null.
private String additionalEndCode;
// When true, the first node of the injected script is marked with Node.IS_NAMESPACE.
private boolean addAdditionalNamespace;
// When true, goog.require calls are kept in the output instead of being stripped.
private boolean preserveGoogRequires;
// When true, getOptions() promotes the USE_OF_GOOG_BASE diagnostic group to an error.
private boolean banGoogBase;
/** Line numbers are validated on every test in this suite. */
public ProcessClosurePrimitivesTest() {
  enableLineNumberCheck(true);
}
/**
 * Resets per-test configuration to defaults: no injected code, requires stripped,
 * goog.base allowed, and JsDoc comparison disabled.
 */
@Override protected void setUp() throws Exception {
  // The original override skipped super.setUp(); JUnit overrides should always chain to the
  // base fixture so inherited compiler state is reset between tests.
  super.setUp();
  additionalCode = null;
  additionalEndCode = null;
  addAdditionalNamespace = false;
  preserveGoogRequires = false;
  banGoogBase = false;
  compareJsDoc = false;
}
/**
 * Returns the default compiler options, promoting the USE_OF_GOOG_BASE diagnostic
 * group to an error when banGoogBase is set (exercised by testBanGoogBase).
 */
@Override
protected CompilerOptions getOptions() {
  CompilerOptions options = super.getOptions();
  if (banGoogBase) {
    options.setWarningLevel(
        DiagnosticGroups.USE_OF_GOOG_BASE, CheckLevel.ERROR);
  }
  return options;
}
/**
 * Builds the pass under test. With no additional code configured, a single
 * ProcessClosurePrimitives pass is returned. Otherwise a wrapper pass runs the
 * primitives pass once (errors off), splices the configured additional code into
 * the front and/or back of the first script, then runs the primitives pass a
 * second time with errors enabled over the combined tree.
 */
@Override public CompilerPass getProcessor(final Compiler compiler) {
  if ((additionalCode == null) && (additionalEndCode == null)) {
    return new ProcessClosurePrimitives(
        compiler, null, CheckLevel.ERROR, preserveGoogRequires);
  } else {
    return new CompilerPass() {
      @Override
      public void process(Node externs, Node root) {
        // Process the original code.
        new ProcessClosurePrimitives(
            compiler, null, CheckLevel.OFF, preserveGoogRequires)
            .process(externs, root);
        // Inject additional code at the beginning.
        if (additionalCode != null) {
          SourceFile file =
              SourceFile.fromCode("additionalcode", additionalCode);
          Node scriptNode = root.getFirstChild();
          Node newScriptNode = new CompilerInput(file).getAstRoot(compiler);
          if (addAdditionalNamespace) {
            newScriptNode.getFirstChild()
                .putBooleanProp(Node.IS_NAMESPACE, true);
          }
          // Move children last-to-first so their relative order is preserved
          // when each one is inserted before the script's current first child.
          while (newScriptNode.getLastChild() != null) {
            Node lastChild = newScriptNode.getLastChild();
            newScriptNode.removeChild(lastChild);
            scriptNode.addChildBefore(lastChild, scriptNode.getFirstChild());
          }
        }
        // Inject additional code at the end.
        if (additionalEndCode != null) {
          SourceFile file =
              SourceFile.fromCode("additionalendcode", additionalEndCode);
          Node scriptNode = root.getFirstChild();
          Node newScriptNode = new CompilerInput(file).getAstRoot(compiler);
          if (addAdditionalNamespace) {
            newScriptNode.getFirstChild()
                .putBooleanProp(Node.IS_NAMESPACE, true);
          }
          // Appending first-to-last keeps the injected statements in order.
          while (newScriptNode.getFirstChild() != null) {
            Node firstChild = newScriptNode.getFirstChild();
            newScriptNode.removeChild(firstChild);
            scriptNode.addChildToBack(firstChild);
          }
        }
        // Process the tree a second time.
        new ProcessClosurePrimitives(
            compiler, null, CheckLevel.ERROR, preserveGoogRequires)
            .process(externs, root);
      }
    };
  }
}
/** The pass rewrites provides in place and is not idempotent, so run it exactly once. */
@Override public int getNumRepetitions() {
  return 1;
}
/** A provide of a dotted name declares each namespace segment; 'goog' itself is special-cased. */
public void testSimpleProvides() {
  test("goog.provide('foo');",
      "var foo={};");
  test("goog.provide('foo.bar');",
      "var foo={}; foo.bar={};");
  test("goog.provide('foo.bar.baz');",
      "var foo={}; foo.bar={}; foo.bar.baz={};");
  test("goog.provide('foo.bar.baz.boo');",
      "var foo={}; foo.bar={}; foo.bar.baz={}; foo.bar.baz.boo={};");
  test("goog.provide('goog.bar');",
      "goog.bar={};"); // goog is special-cased
}
/** Multiple provides share already-declared namespace segments instead of redeclaring them. */
public void testMultipleProvides() {
  test("goog.provide('foo.bar'); goog.provide('foo.baz');",
      "var foo={}; foo.bar={}; foo.baz={};");
  test("goog.provide('foo.bar.baz'); goog.provide('foo.boo.foo');",
      "var foo={}; foo.bar={}; foo.bar.baz={}; foo.boo={}; foo.boo.foo={};");
  test("goog.provide('foo.bar.baz'); goog.provide('foo.bar.boo');",
      "var foo={}; foo.bar={}; foo.bar.baz={}; foo.bar.boo={};");
  test("goog.provide('foo.bar.baz'); goog.provide('goog.bar.boo');",
      "var foo={}; foo.bar={}; foo.bar.baz={}; goog.bar={}; " +
      "goog.bar.boo={};");
}
/**
 * When the provided name is later assigned, the synthesized placeholder declaration is
 * dropped and the assignment becomes (or stays) the declaration.
 */
public void testRemovalOfProvidedObjLit() {
  test("goog.provide('foo'); foo = 0;",
      "var foo = 0;");
  test("goog.provide('foo'); foo = {a: 0};",
      "var foo = {a: 0};");
  test("goog.provide('foo'); foo = function(){};",
      "var foo = function(){};");
  test("goog.provide('foo'); var foo = 0;",
      "var foo = 0;");
  test("goog.provide('foo'); var foo = {a: 0};",
      "var foo = {a: 0};");
  test("goog.provide('foo'); var foo = function(){};",
      "var foo = function(){};");
  test("goog.provide('foo.bar.Baz'); foo.bar.Baz=function(){};",
      "var foo={}; foo.bar={}; foo.bar.Baz=function(){};");
  test("goog.provide('foo.bar.moo'); foo.bar.moo={E:1,S:2};",
      "var foo={}; foo.bar={}; foo.bar.moo={E:1,S:2};");
  test("goog.provide('foo.bar.moo'); foo.bar.moo={E:1}; foo.bar.moo={E:2};",
      "var foo={}; foo.bar={}; foo.bar.moo={E:1}; foo.bar.moo={E:2};");
}
/** Providing a name that is also a function declaration is an error. */
public void testProvidedDeclaredFunctionError() {
  testError("goog.provide('foo'); function foo(){}", FUNCTION_NAMESPACE_ERROR);
}
// --- Placeholder removal when the provided name is assigned multiple times, possibly
// --- inside functions or conditional branches. The first top-level assignment (if any)
// --- absorbs the declaration; assignments inside functions do not.
public void testRemovalMultipleAssignment1() {
  test("goog.provide('foo'); foo = 0; foo = 1",
      "var foo = 0; foo = 1;");
}
public void testRemovalMultipleAssignment2() {
  test("goog.provide('foo'); var foo = 0; foo = 1",
      "var foo = 0; foo = 1;");
}
public void testRemovalMultipleAssignment3() {
  test("goog.provide('foo'); foo = 0; var foo = 1",
      "foo = 0; var foo = 1;");
}
public void testRemovalMultipleAssignment4() {
  test("goog.provide('foo.bar'); foo.bar = 0; foo.bar = 1",
      "var foo = {}; foo.bar = 0; foo.bar = 1");
}
// Assignments inside a function body do not replace the namespace placeholder.
public void testNoRemovalFunction1() {
  test("goog.provide('foo'); function f(){foo = 0}",
      "var foo = {}; function f(){foo = 0}");
}
public void testNoRemovalFunction2() {
  test("goog.provide('foo'); function f(){var foo = 0}",
      "var foo = {}; function f(){var foo = 0}");
}
public void testRemovalMultipleAssignmentInIf1() {
  test("goog.provide('foo'); if (true) { var foo = 0 } else { foo = 1 }",
      "if (true) { var foo = 0 } else { foo = 1 }");
}
public void testRemovalMultipleAssignmentInIf2() {
  test("goog.provide('foo'); if (true) { foo = 0 } else { var foo = 1 }",
      "if (true) { foo = 0 } else { var foo = 1 }");
}
public void testRemovalMultipleAssignmentInIf3() {
  test("goog.provide('foo'); if (true) { foo = 0 } else { foo = 1 }",
      "if (true) { var foo = 0 } else { foo = 1 }");
}
public void testRemovalMultipleAssignmentInIf4() {
  test("goog.provide('foo.bar');" +
      "if (true) { foo.bar = 0 } else { foo.bar = 1 }",
      "var foo = {}; if (true) { foo.bar = 0 } else { foo.bar = 1 }");
}
// --- Provides combined with explicit var declarations of the same name: the synthesized
// --- declaration is kept alongside the user's, and bad provide arguments are errors.
public void testMultipleDeclarationError1() {
  String rest = "if (true) { foo.bar = 0 } else { foo.bar = 1 }";
  test("goog.provide('foo.bar');" + "var foo = {};" + rest,
      "var foo = {};" + "var foo = {};" + rest);
}
public void testMultipleDeclarationError2() {
  test("goog.provide('foo.bar');" +
      "if (true) { var foo = {}; foo.bar = 0 } else { foo.bar = 1 }",
      "var foo = {};" +
      "if (true) {" +
      " var foo = {}; foo.bar = 0" +
      "} else {" +
      " foo.bar = 1" +
      "}");
}
public void testMultipleDeclarationError3() {
  test("goog.provide('foo.bar');" +
      "if (true) { foo.bar = 0 } else { var foo = {}; foo.bar = 1 }",
      "var foo = {};" +
      "if (true) {" +
      " foo.bar = 0" +
      "} else {" +
      " var foo = {}; foo.bar = 1" +
      "}");
}
// A provide after an existing declaration still synthesizes its own declaration.
public void testProvideAfterDeclarationError() {
  test("var x = 42; goog.provide('x');",
      "var x = 42; var x = {}");
}
// Invalid provide arguments and duplicate provides are reported as errors.
public void testProvideErrorCases() {
  testError("goog.provide();", NULL_ARGUMENT_ERROR);
  testError("goog.provide(5);", INVALID_ARGUMENT_ERROR);
  testError("goog.provide([]);", INVALID_ARGUMENT_ERROR);
  testError("goog.provide({});", INVALID_ARGUMENT_ERROR);
  testError("goog.provide('foo', 'bar');", TOO_MANY_ARGUMENTS_ERROR);
  testError("goog.provide('foo'); goog.provide('foo');", DUPLICATE_NAMESPACE_ERROR);
  testError("goog.provide('foo.bar'); goog.provide('foo'); goog.provide('foo');",
      DUPLICATE_NAMESPACE_ERROR);
}
// Typing a provided namespace as a bare (nullable or non-null) Object triggers the
// WEAK_NAMESPACE_TYPE warning; a parameterized Object type is accepted.
public void testProvideErrorCases2() {
  test("goog.provide('foo'); /** @type {Object} */ var foo = {};",
      "var foo={};", null, WEAK_NAMESPACE_TYPE);
  test("goog.provide('foo'); /** @type {!Object} */ var foo = {};",
      "var foo={};", null, WEAK_NAMESPACE_TYPE);
  test("goog.provide('foo.bar'); /** @type {Object} */ foo.bar = {};",
      "var foo={};foo.bar={};", null, WEAK_NAMESPACE_TYPE);
  test("goog.provide('foo.bar'); /** @type {!Object} */ foo.bar = {};",
      "var foo={};foo.bar={};", null, WEAK_NAMESPACE_TYPE);
  test("goog.provide('foo'); /** @type {Object.<string>} */ var foo = {};",
      "var foo={};");
}
public void testProvideValidObjectType() {
  test("goog.provide('foo'); /** @type {Object.<string>} */ var foo = {};",
      "var foo={};");
}
// Satisfied goog.require calls are removed; foo.require is not a Closure primitive.
public void testRemovalOfRequires() {
  test("goog.provide('foo'); goog.require('foo');",
      "var foo={};");
  test("goog.provide('foo.bar'); goog.require('foo.bar');",
      "var foo={}; foo.bar={};");
  test("goog.provide('foo.bar.baz'); goog.require('foo.bar.baz');",
      "var foo={}; foo.bar={}; foo.bar.baz={};");
  test("goog.provide('foo'); var x = 3; goog.require('foo'); something();",
      "var foo={}; var x = 3; something();");
  testSame("foo.require('foo.bar');");
}
// With preserveGoogRequires set, requires stay in the output.
public void testPreserveGoogRequires() {
  preserveGoogRequires = true;
  test("goog.provide('foo'); goog.require('foo');",
      "var foo={}; goog.require('foo');");
  test("goog.provide('foo'); goog.require('foo'); var a = {};",
      "var foo = {}; goog.require('foo'); var a = {};");
}
public void testRequireErrorCases() {
  testError("goog.require();", NULL_ARGUMENT_ERROR);
  testError("goog.require(5);", INVALID_ARGUMENT_ERROR);
  testError("goog.require([]);", INVALID_ARGUMENT_ERROR);
  testError("goog.require({});", INVALID_ARGUMENT_ERROR);
}
// A require that precedes its provide in the same compilation is an error.
public void testLateProvides() {
  testError("goog.require('foo'); goog.provide('foo');", LATE_PROVIDE_ERROR);
  testError("goog.require('foo.bar'); goog.provide('foo.bar');", LATE_PROVIDE_ERROR);
  testError("goog.provide('foo.bar'); goog.require('foo'); goog.provide('foo');",
      LATE_PROVIDE_ERROR);
}
// Requires of names never provided are errors; the lookup is exact (case-sensitive,
// no prefix matching) and applies even inside conditional blocks.
public void testMissingProvides() {
  testError("goog.require('foo');", MISSING_PROVIDE_ERROR);
  testError("goog.provide('foo'); goog.require('Foo');", MISSING_PROVIDE_ERROR);
  testError("goog.provide('foo'); goog.require('foo.bar');", MISSING_PROVIDE_ERROR);
  testError("goog.provide('foo'); var EXPERIMENT_FOO = true; "
      + "if (EXPERIMENT_FOO) {goog.require('foo.bar');}",
      MISSING_PROVIDE_ERROR);
}
// goog.addDependency registers its provided names as forward-declared types.
public void testAddDependency() {
  test("goog.addDependency('x.js', ['A', 'B'], []);", "0");
  Compiler compiler = getLastCompiler();
  assertTrue(compiler.getTypeRegistry().isForwardDeclaredType("A"));
  assertTrue(compiler.getTypeRegistry().isForwardDeclaredType("B"));
  assertFalse(compiler.getTypeRegistry().isForwardDeclaredType("C"));
}
// goog.forwardDeclare takes exactly one string argument.
public void testForwardDeclarations() {
  test("goog.forwardDeclare('A.B')", "");
  Compiler compiler = getLastCompiler();
  assertTrue(compiler.getTypeRegistry().isForwardDeclaredType("A.B"));
  assertFalse(compiler.getTypeRegistry().isForwardDeclaredType("C.D"));
  testError("goog.forwardDeclare();",
      ProcessClosurePrimitives.INVALID_FORWARD_DECLARE);
  testError("goog.forwardDeclare('A.B', 'C.D');",
      ProcessClosurePrimitives.INVALID_FORWARD_DECLARE);
}
// --- goog.setCssNameMapping: the call is removed from the output and its object literal
// --- becomes the compiler's CSS renaming map. Arguments must be object literals with
// --- string-literal values, and BY_PART/BY_WHOLE are the only valid mapping styles.
public void testValidSetCssNameMapping() {
  test("goog.setCssNameMapping({foo:'bar',\"biz\":'baz'});", "");
  CssRenamingMap map = getLastCompiler().getCssRenamingMap();
  assertNotNull(map);
  assertEquals("bar", map.get("foo"));
  assertEquals("baz", map.get("biz"));
}
public void testValidSetCssNameMappingWithType() {
  test("goog.setCssNameMapping({foo:'bar',\"biz\":'baz'}, 'BY_PART');", "");
  CssRenamingMap map = getLastCompiler().getCssRenamingMap();
  assertNotNull(map);
  assertEquals("bar", map.get("foo"));
  assertEquals("baz", map.get("biz"));
  test("goog.setCssNameMapping({foo:'bar',biz:'baz','biz-foo':'baz-bar'}," +
      " 'BY_WHOLE');", "");
  map = getLastCompiler().getCssRenamingMap();
  assertNotNull(map);
  assertEquals("bar", map.get("foo"));
  assertEquals("baz", map.get("biz"));
  assertEquals("baz-bar", map.get("biz-foo"));
}
public void testSetCssNameMappingNonStringValueReturnsError() {
  // Make sure the argument is an object literal.
  testError("var BAR = {foo:'bar'}; goog.setCssNameMapping(BAR);", EXPECTED_OBJECTLIT_ERROR);
  testError("goog.setCssNameMapping([]);", EXPECTED_OBJECTLIT_ERROR);
  testError("goog.setCssNameMapping(false);", EXPECTED_OBJECTLIT_ERROR);
  testError("goog.setCssNameMapping(null);", EXPECTED_OBJECTLIT_ERROR);
  testError("goog.setCssNameMapping(undefined);", EXPECTED_OBJECTLIT_ERROR);
  // Make sure all values of the object literal are string literals.
  testError("var BAR = 'bar'; goog.setCssNameMapping({foo:BAR});",
      NON_STRING_PASSED_TO_SET_CSS_NAME_MAPPING_ERROR);
  testError("goog.setCssNameMapping({foo:6});", NON_STRING_PASSED_TO_SET_CSS_NAME_MAPPING_ERROR);
  testError("goog.setCssNameMapping({foo:false});",
      NON_STRING_PASSED_TO_SET_CSS_NAME_MAPPING_ERROR);
  testError("goog.setCssNameMapping({foo:null});",
      NON_STRING_PASSED_TO_SET_CSS_NAME_MAPPING_ERROR);
  testError("goog.setCssNameMapping({foo:undefined});",
      NON_STRING_PASSED_TO_SET_CSS_NAME_MAPPING_ERROR);
}
public void testSetCssNameMappingValidity() {
  // Make sure that the keys don't have -'s
  test("goog.setCssNameMapping({'a': 'b', 'a-a': 'c'})", "", null,
      INVALID_CSS_RENAMING_MAP);
  // In full mode, we check that map(a-b)=map(a)-map(b)
  test("goog.setCssNameMapping({'a': 'b', 'a-a': 'c'}, 'BY_WHOLE')", "", null,
      INVALID_CSS_RENAMING_MAP);
  // Unknown mapping type
  testError("goog.setCssNameMapping({foo:'bar'}, 'UNKNOWN');",
      INVALID_STYLE_ERROR);
}
// --- Cross-module requires: a require in a sibling module (not a descendant of the
// --- providing module) is an error; requires in the same or a dependent module are fine.
public void testBadCrossModuleRequire() {
  test(
      createModuleStar(
          "",
          "goog.provide('goog.ui');",
          "goog.require('goog.ui');"),
      new String[] {
        "",
        "goog.ui = {};",
        ""
      },
      null,
      XMODULE_REQUIRE_ERROR);
}
public void testGoodCrossModuleRequire1() {
  test(
      createModuleStar(
          "goog.provide('goog.ui');",
          "",
          "goog.require('goog.ui');"),
      new String[] {
        "goog.ui = {};",
        "",
        "",
      });
}
public void testGoodCrossModuleRequire2() {
  test(
      createModuleStar(
          "",
          "",
          "goog.provide('goog.ui'); goog.require('goog.ui');"),
      new String[] {
        "",
        "",
        "goog.ui = {};",
      });
}
// --- The tests below exercise getProcessor()'s injection path: additionalCode /
// --- additionalEndCode is spliced into the AST between two runs of the pass.
// Tests providing additional code with non-overlapping var namespace.
public void testSimpleAdditionalProvide() {
  additionalCode = "goog.provide('b.B'); b.B = {};";
  test("goog.provide('a.A'); a.A = {};",
      "var b={};b.B={};var a={};a.A={};");
}
// Same as above, but with the additional code added after the original.
public void testSimpleAdditionalProvideAtEnd() {
  additionalEndCode = "goog.provide('b.B'); b.B = {};";
  test("goog.provide('a.A'); a.A = {};",
      "var a={};a.A={};var b={};b.B={};");
}
// Tests providing additional code with non-overlapping dotted namespace.
public void testSimpleDottedAdditionalProvide() {
  additionalCode = "goog.provide('a.b.B'); a.b.B = {};";
  test("goog.provide('c.d.D'); c.d.D = {};",
      "var a={};a.b={};a.b.B={};var c={};c.d={};c.d.D={};");
}
// Tests providing additional code with overlapping var namespace.
public void testOverlappingAdditionalProvide() {
  additionalCode = "goog.provide('a.B'); a.B = {};";
  test("goog.provide('a.A'); a.A = {};",
      "var a={};a.B={};a.A={};");
}
// Tests providing additional code with overlapping var namespace.
public void testOverlappingAdditionalProvideAtEnd() {
  additionalEndCode = "goog.provide('a.B'); a.B = {};";
  test("goog.provide('a.A'); a.A = {};",
      "var a={};a.A={};a.B={};");
}
// Tests providing additional code with overlapping dotted namespace.
public void testOverlappingDottedAdditionalProvide() {
  additionalCode = "goog.provide('a.b.B'); a.b.B = {};";
  test("goog.provide('a.b.C'); a.b.C = {};",
      "var a={};a.b={};a.b.B={};a.b.C={};");
}
// Tests that a require of additional code generates no error.
public void testRequireOfAdditionalProvide() {
  additionalCode = "goog.provide('b.B'); b.B = {};";
  test("goog.require('b.B'); goog.provide('a.A'); a.A = {};",
      "var b={};b.B={};var a={};a.A={};");
}
// Tests that a require not in additional code generates (only) one error.
public void testMissingRequireWithAdditionalProvide() {
  additionalCode = "goog.provide('b.B'); b.B = {};";
  testError("goog.require('b.C'); goog.provide('a.A'); a.A = {};",
      MISSING_PROVIDE_ERROR);
}
// Tests that a require in additional code generates no error.
public void testLateRequire() {
  additionalEndCode = "goog.require('a.A');";
  test("goog.provide('a.A'); a.A = {};",
      "var a={};a.A={};");
}
// Tests a case where code is reordered after processing provides and then
// provides are processed again.
public void testReorderedProvides() {
  additionalCode = "a.B = {};"; // as if a.B was after a.A originally
  addAdditionalNamespace = true;
  test("goog.provide('a.A'); a.A = {};",
      "var a={};a.B={};a.A={};");
}
// Another version of above.
public void testReorderedProvides2() {
  additionalEndCode = "a.B = {};";
  addAdditionalNamespace = true;
  test("goog.provide('a.A'); a.A = {};",
      "var a={};a.A={};a.B={};");
}
// Provide a name before the definition of the class providing the
// parent namespace.
public void testProvideOrder1() {
  additionalEndCode = "";
  addAdditionalNamespace = false;
  // TODO(johnlenz): This test confirms that the constructor (a.b) isn't
  // improperly removed, but this result isn't really what we want as the
  // reassign of a.b removes the definition of "a.b.c".
  test("goog.provide('a.b');" +
      "goog.provide('a.b.c');" +
      "a.b.c;" +
      "a.b = function(x,y) {};",
      "var a = {};" +
      "a.b = {};" +
      "a.b.c = {};" +
      "a.b.c;" +
      "a.b = function(x,y) {};");
}
// Provide a name after the definition of the class providing the
// parent namespace.
public void testProvideOrder2() {
  additionalEndCode = "";
  addAdditionalNamespace = false;
  // TODO(johnlenz): This test confirms that the constructor (a.b) isn't
  // improperly removed, but this result isn't really what we want as
  // namespace placeholders for a.b and a.b.c remain.
  test("goog.provide('a.b');" +
      "goog.provide('a.b.c');" +
      "a.b = function(x,y) {};" +
      "a.b.c;",
      "var a = {};" +
      "a.b = {};" +
      "a.b.c = {};" +
      "a.b = function(x,y) {};" +
      "a.b.c;");
}
// Provide a name after the definition of the class providing the
// parent namespace.
public void testProvideOrder3a() {
  test("goog.provide('a.b');" +
      "a.b = function(x,y) {};" +
      "goog.provide('a.b.c');" +
      "a.b.c;",
      "var a = {};" +
      "a.b = function(x,y) {};" +
      "a.b.c = {};" +
      "a.b.c;");
}
public void testProvideOrder3b() {
  additionalEndCode = "";
  addAdditionalNamespace = false;
  // This tests a cleanly provided name, below a function namespace.
  test("goog.provide('a.b');" +
      "a.b = function(x,y) {};" +
      "goog.provide('a.b.c');" +
      "a.b.c;",
      "var a = {};" +
      "a.b = function(x,y) {};" +
      "a.b.c = {};" +
      "a.b.c;");
}
public void testProvideOrder4a() {
  test("goog.provide('goog.a');" +
      "goog.provide('goog.a.b');" +
      "if (x) {" +
      " goog.a.b = 1;" +
      "} else {" +
      " goog.a.b = 2;" +
      "}",
      "goog.a={};" +
      "if(x)" +
      " goog.a.b=1;" +
      "else" +
      " goog.a.b=2;");
}
public void testProvideOrder4b() {
  additionalEndCode = "";
  addAdditionalNamespace = false;
  // This tests a cleanly provided name, below a namespace.
  test("goog.provide('goog.a');" +
      "goog.provide('goog.a.b');" +
      "if (x) {" +
      " goog.a.b = 1;" +
      "} else {" +
      " goog.a.b = 2;" +
      "}",
      "goog.a={};" +
      "if(x)" +
      " goog.a.b=1;" +
      "else" +
      " goog.a.b=2;");
}
// Reserved words are only usable as the trailing property name, and only in ES5+.
public void testInvalidProvide() {
  setAcceptedLanguage(LanguageMode.ECMASCRIPT5);
  test("goog.provide('a.class');", "var a = {}; a.class = {};");
  testError("goog.provide('class.a');", INVALID_PROVIDE_ERROR);
  setAcceptedLanguage(LanguageMode.ECMASCRIPT3);
  testError("goog.provide('a.class');", INVALID_PROVIDE_ERROR);
  testError("goog.provide('class.a');", INVALID_PROVIDE_ERROR);
}
// goog.require is only valid as a top-level expression statement.
public void testInvalidRequire() {
  test("goog.provide('a.b'); goog.require('a.b');", "var a = {}; a.b = {};");
  testError("goog.provide('a.b'); var x = x || goog.require('a.b');", INVALID_CLOSURE_CALL_ERROR);
  testError("goog.provide('a.b'); x = goog.require('a.b');", INVALID_CLOSURE_CALL_ERROR);
  testError(
      "goog.provide('a.b'); function f() { goog.require('a.b'); }", INVALID_CLOSURE_CALL_ERROR);
}
// Other goog.* methods are left alone (setCssNameMapping is removed but not an error).
public void testValidGoogMethod() {
  testSame("function f() { goog.isDef('a.b'); }");
  testSame("function f() { goog.inherits(a, b); }");
  testSame("function f() { goog.exportSymbol(a, b); }");
  test("function f() { goog.setCssNameMapping({}); }", "function f() {}");
  testSame("x || goog.isDef('a.b');");
  testSame("x || goog.inherits(a, b);");
  testSame("x || goog.exportSymbol(a, b);");
  testSame("x || void 0");
}
// Shared fixture: a prototype method body template and a goog.inherits call used by
// the goog.base / Foo.base tests below.
private static final String METHOD_FORMAT =
    "function Foo() {} Foo.prototype.method = function() { %s };";
private static final String FOO_INHERITS =
    "goog.inherits(Foo, BaseFoo);";
// --- goog.base: invalid uses (outside a method, wrong method name, non-this first
// --- argument, missing goog.inherits context) are errors; valid uses are rewritten to
// --- Foo.superClass_.method.call(...) or BaseFoo.call(...).
public void testInvalidGoogBase1() {
  testError("goog.base(this, 'method');", GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase2() {
  testError("function Foo() {}" +
      "Foo.method = function() {" +
      " goog.base(this, 'method');" +
      "};", GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase3() {
  testError(String.format(METHOD_FORMAT, "goog.base();"),
      GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase4() {
  testError(String.format(METHOD_FORMAT, "goog.base(this, 'bar');"),
      GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase5() {
  testError(String.format(METHOD_FORMAT, "goog.base('foo', 'method');"),
      GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase6() {
  testError(String.format(METHOD_FORMAT, "goog.base.call(null, this, 'method');"),
      GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase7() {
  testError("function Foo() { goog.base(this); }", GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase8() {
  testError("var Foo = function() { goog.base(this); }", GOOG_BASE_CLASS_ERROR);
}
public void testInvalidGoogBase9() {
  testError("var goog = {}; goog.Foo = function() { goog.base(this); }", GOOG_BASE_CLASS_ERROR);
}
public void testValidGoogBase1() {
  test(String.format(METHOD_FORMAT, "goog.base(this, 'method');"),
      String.format(METHOD_FORMAT, "Foo.superClass_.method.call(this)"));
}
public void testValidGoogBase2() {
  test(String.format(METHOD_FORMAT, "goog.base(this, 'method', 1, 2);"),
      String.format(METHOD_FORMAT,
          "Foo.superClass_.method.call(this, 1, 2)"));
}
public void testValidGoogBase3() {
  test(String.format(METHOD_FORMAT, "return goog.base(this, 'method');"),
      String.format(METHOD_FORMAT,
          "return Foo.superClass_.method.call(this)"));
}
public void testValidGoogBase4() {
  test("function Foo() { goog.base(this, 1, 2); }" + FOO_INHERITS,
      "function Foo() { BaseFoo.call(this, 1, 2); } " + FOO_INHERITS);
}
public void testValidGoogBase5() {
  test("var Foo = function() { goog.base(this, 1); };" + FOO_INHERITS,
      "var Foo = function() { BaseFoo.call(this, 1); }; " + FOO_INHERITS);
}
public void testValidGoogBase6() {
  test("var goog = {}; goog.Foo = function() { goog.base(this); }; " +
      "goog.inherits(goog.Foo, goog.BaseFoo);",
      "var goog = {}; goog.Foo = function() { goog.BaseFoo.call(this); }; " +
      "goog.inherits(goog.Foo, goog.BaseFoo);");
}
// With USE_OF_GOOG_BASE promoted to an error (see getOptions), any goog.base use fails.
public void testBanGoogBase() {
  banGoogBase = true;
  testError(
      "function Foo() { goog.base(this, 1, 2); }" + FOO_INHERITS,
      ProcessClosurePrimitives.USE_OF_GOOG_BASE);
}
// --- Foo.base (class-specific base calls): invalid placements and arguments are errors.
public void testInvalidBase1() {
  testError(
      "var Foo = function() {};" + FOO_INHERITS +
      "Foo.base(this, 'method');", BASE_CLASS_ERROR);
}
public void testInvalidBase2() {
  testError("function Foo() {}" + FOO_INHERITS +
      "Foo.method = function() {" +
      " Foo.base(this, 'method');" +
      "};", BASE_CLASS_ERROR);
}
public void testInvalidBase3() {
  testError(String.format(FOO_INHERITS + METHOD_FORMAT, "Foo.base();"),
      BASE_CLASS_ERROR);
}
public void testInvalidBase4() {
  testError(String.format(FOO_INHERITS + METHOD_FORMAT, "Foo.base(this, 'bar');"),
      BASE_CLASS_ERROR);
}
public void testInvalidBase5() {
  testError(String.format(FOO_INHERITS + METHOD_FORMAT,
      "Foo.base('foo', 'method');"),
      BASE_CLASS_ERROR);
}
public void testInvalidBase7() {
  testError("function Foo() { Foo.base(this); };" + FOO_INHERITS,
      BASE_CLASS_ERROR);
}
public void testInvalidBase8() {
  testError("var Foo = function() { Foo.base(this); };" + FOO_INHERITS,
      BASE_CLASS_ERROR);
}
public void testInvalidBase9() {
  testError("var goog = {}; goog.Foo = function() { goog.Foo.base(this); };"
      + FOO_INHERITS,
      BASE_CLASS_ERROR);
}
public void testInvalidBase10() {
  testError("function Foo() { Foo.base(this); }" + FOO_INHERITS,
      BASE_CLASS_ERROR);
}
public void testInvalidBase11() {
  testError("function Foo() { Foo.base(this, 'method'); }" + FOO_INHERITS,
      BASE_CLASS_ERROR);
}
public void testInvalidBase12() {
  testError("function Foo() { Foo.base(this, 1, 2); }" + FOO_INHERITS,
      BASE_CLASS_ERROR);
}
// Calling another class's base from inside Foo's constructor is an error.
public void testInvalidBase13() {
  testError(
      "function Bar(){ Bar.base(this, 'constructor'); }" +
      "goog.inherits(Bar, Goo);" +
      "function Foo(){ Bar.base(this, 'constructor'); }" + FOO_INHERITS,
      BASE_CLASS_ERROR);
}
/** Foo.base(this, 'method') rewrites to a direct superClass_ method call. */
public void testValidBase1() {
  test(FOO_INHERITS
      + String.format(METHOD_FORMAT, "Foo.base(this, 'method');"),
      FOO_INHERITS
      + String.format(METHOD_FORMAT, "Foo.superClass_.method.call(this)"));
}
/** Extra arguments after the method name are forwarded to the superclass call. */
public void testValidBase2() {
  test(FOO_INHERITS
      + String.format(METHOD_FORMAT, "Foo.base(this, 'method', 1, 2);"),
      FOO_INHERITS
      + String.format(METHOD_FORMAT,
          "Foo.superClass_.method.call(this, 1, 2)"));
}
/** The rewrite also works when the base call's value is returned. */
public void testValidBase3() {
  test(FOO_INHERITS
      + String.format(METHOD_FORMAT, "return Foo.base(this, 'method');"),
      FOO_INHERITS
      + String.format(METHOD_FORMAT,
          "return Foo.superClass_.method.call(this)"));
}
/** Foo.base(this, 'constructor', ...) rewrites to a direct superclass constructor call. */
public void testValidBase4() {
  test("function Foo() { Foo.base(this, 'constructor', 1, 2); }"
      + FOO_INHERITS,
      "function Foo() { BaseFoo.call(this, 1, 2); } " + FOO_INHERITS);
}
/** Same constructor rewrite for a var-assigned constructor function. */
public void testValidBase5() {
  test("var Foo = function() { Foo.base(this, 'constructor', 1); };"
      + FOO_INHERITS,
      "var Foo = function() { BaseFoo.call(this, 1); }; " + FOO_INHERITS);
}
/** Constructor rewrite works for fully namespaced constructors (goog.Foo). */
public void testValidBase6() {
  test("var goog = {}; goog.Foo = function() {" +
      "goog.Foo.base(this, 'constructor'); }; " +
      "goog.inherits(goog.Foo, goog.BaseFoo);",
      "var goog = {}; goog.Foo = function() { goog.BaseFoo.call(this); }; " +
      "goog.inherits(goog.Foo, goog.BaseFoo);");
}
/** A base() call on a class with no goog.inherits is left untouched. */
public void testValidBase7() {
  // No goog.inherits, so this is probably a different 'base' function.
  testSame(""
      + "var a = function() {"
      + "  a.base(this, 'constructor');"
      + "};");
}
/** Providing 'goog.foo.bar' then 'goog.foo' creates each namespace exactly once. */
public void testImplicitAndExplicitProvide() {
  test("var goog = {}; " +
      "goog.provide('goog.foo.bar'); goog.provide('goog.foo');",
      "var goog = {}; goog.foo = {}; goog.foo.bar = {};");
}
/** The shared implicit namespace 'apps' is hoisted into the common (first) module. */
public void testImplicitProvideInIndependentModules() {
  test(
      createModuleStar(
          "",
          "goog.provide('apps.A');",
          "goog.provide('apps.B');"),
      new String[] {
        "var apps = {};",
        "apps.A = {};",
        "apps.B = {};",
      });
}
/** The implicit 'apps.foo' namespace lands next to the explicit 'apps' provide. */
public void testImplicitProvideInIndependentModules2() {
  test(
      createModuleStar(
          "goog.provide('apps');",
          "goog.provide('apps.foo.A');",
          "goog.provide('apps.foo.B');"),
      new String[] {
        "var apps = {}; apps.foo = {};",
        "apps.foo.A = {};",
        "apps.foo.B = {};",
      });
}
/** A pre-existing 'var goog = {}' in the common module still receives implicit namespaces. */
public void testImplicitProvideInIndependentModules3() {
  test(
      createModuleStar(
          "var goog = {};",
          "goog.provide('goog.foo.A');",
          "goog.provide('goog.foo.B');"),
      new String[] {
        "var goog = {}; goog.foo = {};",
        "goog.foo.A = {};",
        "goog.foo.B = {};",
      });
}
/** An explicit provide of 'apps.foo' in a sibling module is hoisted and its module emptied. */
public void testProvideInIndependentModules1() {
  test(
      createModuleStar(
          "goog.provide('apps');",
          "goog.provide('apps.foo');",
          "goog.provide('apps.foo.B');"),
      new String[] {
        "var apps = {}; apps.foo = {};",
        "",
        "apps.foo.B = {};",
      });
}
/** A provide with an explicit assignment stays in its own module. */
public void testProvideInIndependentModules2() {
  // TODO(nicksantos): Make this an error.
  test(
      createModuleStar(
          "goog.provide('apps');",
          "goog.provide('apps.foo'); apps.foo = {};",
          "goog.provide('apps.foo.B');"),
      new String[] {
        "var apps = {};",
        "apps.foo = {};",
        "apps.foo.B = {};",
      });
}
/** Same as testProvideInIndependentModules2 but the explicit value is a function. */
public void testProvideInIndependentModules2b() {
  // TODO(nicksantos): Make this an error.
  test(
      createModuleStar(
          "goog.provide('apps');",
          "goog.provide('apps.foo'); apps.foo = function() {};",
          "goog.provide('apps.foo.B');"),
      new String[] {
        "var apps = {};",
        "apps.foo = function() {};",
        "apps.foo.B = {};",
      });
}
/** A self-require next to a provide is removed; the namespace is hoisted to the common module. */
public void testProvideInIndependentModules3() {
  test(
      createModuleStar(
          "goog.provide('apps');",
          "goog.provide('apps.foo.B');",
          "goog.provide('apps.foo'); goog.require('apps.foo');"),
      new String[] {
        "var apps = {}; apps.foo = {};",
        "apps.foo.B = {};",
        "",
      });
}
/** With an explicit assignment, the provided namespace is not hoisted even with a self-require. */
public void testProvideInIndependentModules3b() {
  // TODO(nicksantos): Make this an error.
  test(
      createModuleStar(
          "goog.provide('apps');",
          "goog.provide('apps.foo.B');",
          "goog.provide('apps.foo'); apps.foo = function() {}; " +
          "goog.require('apps.foo');"),
      new String[] {
        "var apps = {};",
        "apps.foo.B = {};",
        "apps.foo = function() {};",
      });
}
/** Deeply nested shared namespaces are all hoisted into the common module (issue 261). */
public void testProvideInIndependentModules4() {
  // Regression test for bug 261:
  // http://code.google.com/p/closure-compiler/issues/detail?id=261
  test(
      createModuleStar(
          "goog.provide('apps');",
          "goog.provide('apps.foo.bar.B');",
          "goog.provide('apps.foo.bar.C');"),
      new String[] {
        "var apps = {};apps.foo = {};apps.foo.bar = {}",
        "apps.foo.bar.B = {};",
        "apps.foo.bar.C = {};",
      });
}
/** Requiring the base 'goog' namespace without a matching provide is an error. */
public void testRequireOfBaseGoog() {
  testError("goog.require('goog');", MISSING_PROVIDE_ERROR);
}
/**
 * The synthesized namespace declarations must carry source positions pointing
 * back into the original goog.provide() string argument.
 */
public void testSourcePositionPreservation() {
  test("goog.provide('foo.bar.baz');",
      "var foo = {};" +
      "foo.bar = {};" +
      "foo.bar.baz = {};");
  Node root = getLastCompiler().getRoot();
  Node fooDecl = findQualifiedNameNode("foo", root);
  Node fooBarDecl = findQualifiedNameNode("foo.bar", root);
  Node fooBarBazDecl = findQualifiedNameNode("foo.bar.baz", root);
  // Char offsets index into "goog.provide('foo.bar.baz');".
  assertEquals(1, fooDecl.getLineno());
  assertEquals(14, fooDecl.getCharno());
  assertEquals(1, fooBarDecl.getLineno());
  assertEquals(18, fooBarDecl.getCharno());
  assertEquals(1, fooBarBazDecl.getLineno());
  assertEquals(22, fooBarBazDecl.getCharno());
}
/** A provided name declared as a @typedef gets no '= {}' stub. */
public void testNoStubForProvidedTypedef() {
  test("goog.provide('x'); /** @typedef {number} */ var x;", "var x;");
}
/** Same as above for a one-level namespaced typedef. */
public void testNoStubForProvidedTypedef2() {
  test("goog.provide('x.y'); /** @typedef {number} */ x.y;",
      "var x = {}; x.y;");
}
/** Same as above for a two-level namespaced typedef. */
public void testNoStubForProvidedTypedef4() {
  test("goog.provide('x.y.z'); /** @typedef {number} */ x.y.z;",
      "var x = {}; x.y = {}; x.y.z;");
}
/** Providing and requiring the same namespace in one file leaves just the stub. */
public void testProvideRequireSameFile() {
  test("goog.provide('x');\ngoog.require('x');", "var x = {};");
}
/** goog.define() with a @define annotation expands to a var or property assignment. */
public void testDefineCases() {
  String jsdoc = "/** @define {number} */\n";
  test(jsdoc + "goog.define('name', 1);", jsdoc + "var name = 1");
  test(jsdoc + "goog.define('ns.name', 1);", jsdoc + "ns.name = 1");
}
/** Malformed goog.define() calls: missing annotation, bad name, missing/invalid args. */
public void testDefineErrorCases() {
  String jsdoc = "/** @define {number} */\n";
  testError("goog.define('name', 1);", MISSING_DEFINE_ANNOTATION);
  testError(jsdoc + "goog.define('name.2', 1);", INVALID_DEFINE_NAME_ERROR);
  testError(jsdoc + "goog.define();", NULL_ARGUMENT_ERROR);
  testError(jsdoc + "goog.define('value');", NULL_ARGUMENT_ERROR);
  testError(jsdoc + "goog.define(5);", INVALID_ARGUMENT_ERROR);
}
/**
 * Values accepted in a CLOSURE_DEFINES object literal: strings, booleans,
 * plain numbers, hex literals, and negated number literals.
 */
public void testDefineValues() {
  testSame("var CLOSURE_DEFINES = {'FOO': 'string'};");
  testSame("var CLOSURE_DEFINES = {'FOO': true};");
  testSame("var CLOSURE_DEFINES = {'FOO': false};");
  testSame("var CLOSURE_DEFINES = {'FOO': 1};");
  testSame("var CLOSURE_DEFINES = {'FOO': 0xABCD};");
  // Bug fix: this previously declared 'CLOSURE_DEFINESS' (typo), so the
  // unary-minus case was never actually exercised by the pass.
  testSame("var CLOSURE_DEFINES = {'FOO': -1};");
}
/** Non-literal or computed values in CLOSURE_DEFINES are rejected. */
public void testDefineValuesErrors() {
  testError("var CLOSURE_DEFINES = {'FOO': a};", CLOSURE_DEFINES_ERROR);
  testError("var CLOSURE_DEFINES = {'FOO': 0+1};", CLOSURE_DEFINES_ERROR);
  testError("var CLOSURE_DEFINES = {'FOO': 'value' + 'value'};", CLOSURE_DEFINES_ERROR);
  testError("var CLOSURE_DEFINES = {'FOO': !true};", CLOSURE_DEFINES_ERROR);
  testError("var CLOSURE_DEFINES = {'FOO': -true};", CLOSURE_DEFINES_ERROR);
}
}
| |
package com.DiffBot.Json;
/*
Copyright (c) 2008 JSON.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
import java.util.Iterator;
/**
* This provides static methods to convert an XML text into a JSONArray or
* JSONObject, and to covert a JSONArray or JSONObject into an XML text using
* the JsonML transform.
*
* @author JSON.org
* @version 2012-03-28
*/
public class JSONML {
/**
* Parse XML values and store them in a JSONArray.
* @param x The XMLTokener containing the source string.
* @param arrayForm true if array form, false if object form.
* @param ja The JSONArray that is containing the current tag or null
* if we are at the outermost level.
* @return A JSONArray if the value is the outermost tag, otherwise null.
* @throws JSONException
*/
// Recursive-descent JsonML parser. One invocation consumes the content of one
// element (or the whole document when ja == null) and returns either the
// close-tag name seen (to the recursive caller) or the finished root
// JSONArray/JSONObject (at the outermost level).
private static Object parse(
    XMLTokener x,
    boolean arrayForm,
    JSONArray ja
) throws JSONException {
    String attribute;
    char c;
    String closeTag = null;
    int i;
    JSONArray newja = null;       // array-form node under construction
    JSONObject newjo = null;      // object-form node / attribute map
    Object token;
    String tagName = null;

// Test for and skip past these forms:
//      <!-- ... -->
//      <![  ... ]]>
//      <!   ...   >
//      <?   ...  ?>

    while (true) {
        if (!x.more()) {
            throw x.syntaxError("Bad XML");
        }
        token = x.nextContent();
        if (token == XML.LT) {
            token = x.nextToken();
            if (token instanceof Character) {
                if (token == XML.SLASH) {

// Close tag </
// Return the tag name so the recursive caller can match it against its own.

                    token = x.nextToken();
                    if (!(token instanceof String)) {
                        throw new JSONException(
                                "Expected a closing name instead of '" +
                                token + "'.");
                    }
                    if (x.nextToken() != XML.GT) {
                        throw x.syntaxError("Misshaped close tag");
                    }
                    return token;
                } else if (token == XML.BANG) {

// <!  — comment, CDATA, or DTD-like meta section.

                    c = x.next();
                    if (c == '-') {
                        if (x.next() == '-') {
                            x.skipPast("-->");
                        } else {
                            x.back();
                        }
                    } else if (c == '[') {
                        token = x.nextToken();
                        if (token.equals("CDATA") && x.next() == '[') {
                            if (ja != null) {
                                ja.put(x.nextCDATA());
                            }
                        } else {
                            throw x.syntaxError("Expected 'CDATA['");
                        }
                    } else {
                        // Skip a <! ... > section, tracking nested angle brackets.
                        i = 1;
                        do {
                            token = x.nextMeta();
                            if (token == null) {
                                throw x.syntaxError("Missing '>' after '<!'.");
                            } else if (token == XML.LT) {
                                i += 1;
                            } else if (token == XML.GT) {
                                i -= 1;
                            }
                        } while (i > 0);
                    }
                } else if (token == XML.QUEST) {

// <?  — processing instruction, ignored.

                    x.skipPast("?>");
                } else {
                    throw x.syntaxError("Misshaped tag");
                }

// Open tag <

            } else {
                if (!(token instanceof String)) {
                    throw x.syntaxError("Bad tagName '" + token + "'.");
                }
                tagName = (String)token;
                newja = new JSONArray();
                newjo = new JSONObject();
                if (arrayForm) {
                    newja.put(tagName);
                    if (ja != null) {
                        ja.put(newja);
                    }
                } else {
                    newjo.put("tagName", tagName);
                    if (ja != null) {
                        ja.put(newjo);
                    }
                }
                token = null;
                for (;;) {
                    if (token == null) {
                        token = x.nextToken();
                    }
                    if (token == null) {
                        throw x.syntaxError("Misshaped tag");
                    }
                    if (!(token instanceof String)) {
                        break;
                    }

// attribute = value

                    attribute = (String)token;
                    // NOTE(review): output key below is "childNodes" but the
                    // reserved-name check tests "childNode" — confirm intent.
                    if (!arrayForm && ("tagName".equals(attribute) || "childNode".equals(attribute))) {
                        throw x.syntaxError("Reserved attribute.");
                    }
                    token = x.nextToken();
                    if (token == XML.EQ) {
                        token = x.nextToken();
                        if (!(token instanceof String)) {
                            throw x.syntaxError("Missing value");
                        }
                        newjo.accumulate(attribute, XML.stringToValue((String)token));
                        token = null;
                    } else {
                        // Valueless attribute maps to the empty string.
                        newjo.accumulate(attribute, "");
                    }
                }
                if (arrayForm && newjo.length() > 0) {
                    newja.put(newjo);
                }

// Empty tag <.../>

                if (token == XML.SLASH) {
                    if (x.nextToken() != XML.GT) {
                        throw x.syntaxError("Misshaped tag");
                    }
                    if (ja == null) {
                        if (arrayForm) {
                            return newja;
                        } else {
                            return newjo;
                        }
                    }

// Content, between <...> and </...>

                } else {
                    if (token != XML.GT) {
                        throw x.syntaxError("Misshaped tag");
                    }
                    // Recurse for this element's children; the returned value
                    // is the close-tag name, which must match our open tag.
                    closeTag = (String)parse(x, arrayForm, newja);
                    if (closeTag != null) {
                        if (!closeTag.equals(tagName)) {
                            throw x.syntaxError("Mismatched '" + tagName +
                                    "' and '" + closeTag + "'");
                        }
                        tagName = null;
                        if (!arrayForm && newja.length() > 0) {
                            newjo.put("childNodes", newja);
                        }
                        if (ja == null) {
                            if (arrayForm) {
                                return newja;
                            } else {
                                return newjo;
                            }
                        }
                    }
                }
            }
        } else {

// Plain text content between tags.

            if (ja != null) {
                ja.put(token instanceof String
                        ? XML.stringToValue((String)token)
                        : token);
            }
        }
    }
}
/**
* Convert a well-formed (but not necessarily valid) XML string into a
* JSONArray using the JsonML transform. Each XML tag is represented as
* a JSONArray in which the first element is the tag name. If the tag has
* attributes, then the second element will be JSONObject containing the
* name/value pairs. If the tag contains children, then strings and
* JSONArrays will represent the child tags.
* Comments, prologs, DTDs, and <code><[ [ ]]></code> are ignored.
* @param string The source string.
* @return A JSONArray containing the structured data from the XML string.
* @throws JSONException
*/
public static JSONArray toJSONArray(String string) throws JSONException {
    // Wrap the raw text in a tokener, then delegate to the tokener overload.
    XMLTokener tokener = new XMLTokener(string);
    return toJSONArray(tokener);
}
/**
* Convert a well-formed (but not necessarily valid) XML string into a
* JSONArray using the JsonML transform. Each XML tag is represented as
* a JSONArray in which the first element is the tag name. If the tag has
* attributes, then the second element will be JSONObject containing the
* name/value pairs. If the tag contains children, then strings and
* JSONArrays will represent the child content and tags.
* Comments, prologs, DTDs, and <code><[ [ ]]></code> are ignored.
* @param x An XMLTokener.
* @return A JSONArray containing the structured data from the XML string.
* @throws JSONException
*/
public static JSONArray toJSONArray(XMLTokener x) throws JSONException {
    // Array form: each element becomes [tagName, attributes?, ...children].
    Object root = parse(x, true, null);
    return (JSONArray) root;
}
/**
* Convert a well-formed (but not necessarily valid) XML string into a
* JSONObject using the JsonML transform. Each XML tag is represented as
* a JSONObject with a "tagName" property. If the tag has attributes, then
* the attributes will be in the JSONObject as properties. If the tag
* contains children, the object will have a "childNodes" property which
* will be an array of strings and JsonML JSONObjects.
* Comments, prologs, DTDs, and <code><[ [ ]]></code> are ignored.
* @param x An XMLTokener of the XML source text.
* @return A JSONObject containing the structured data from the XML string.
* @throws JSONException
*/
public static JSONObject toJSONObject(XMLTokener x) throws JSONException {
    // Object form: each element becomes {tagName, attributes..., childNodes}.
    Object root = parse(x, false, null);
    return (JSONObject) root;
}
/**
* Convert a well-formed (but not necessarily valid) XML string into a
* JSONObject using the JsonML transform. Each XML tag is represented as
* a JSONObject with a "tagName" property. If the tag has attributes, then
* the attributes will be in the JSONObject as properties. If the tag
* contains children, the object will have a "childNodes" property which
* will be an array of strings and JsonML JSONObjects.
* Comments, prologs, DTDs, and <code><[ [ ]]></code> are ignored.
* @param string The XML source text.
* @return A JSONObject containing the structured data from the XML string.
* @throws JSONException
*/
public static JSONObject toJSONObject(String string) throws JSONException {
    // Wrap the raw text in a tokener, then delegate to the tokener overload.
    XMLTokener tokener = new XMLTokener(string);
    return toJSONObject(tokener);
}
/**
* Reverse the JSONML transformation, making an XML text from a JSONArray.
* @param ja A JSONArray.
* @return An XML string.
* @throws JSONException
*/
/**
 * Reverse the JsonML transformation, making an XML text from a JSONArray.
 * The first element is the tag name; an optional JSONObject at index 1
 * supplies attributes; remaining elements are child content.
 * @param ja A JSONArray.
 * @return An XML string.
 * @throws JSONException if element 0 is missing or a tag/attribute name
 *         contains whitespace (see XML.noSpace).
 */
public static String toString(JSONArray ja) throws JSONException {
    int i;
    JSONObject jo;
    String key;
    Iterator keys;
    int length;
    Object object;
    // StringBuilder instead of legacy StringBuffer: the builder never leaves
    // this method, so StringBuffer's per-call synchronization is pure overhead.
    StringBuilder sb = new StringBuilder();
    String tagName;
    String value;

// Emit <tagName

    tagName = ja.getString(0);
    XML.noSpace(tagName);
    tagName = XML.escape(tagName);
    sb.append('<');
    sb.append(tagName);

    object = ja.opt(1);
    if (object instanceof JSONObject) {
        i = 2;
        jo = (JSONObject)object;

// Emit the attributes

        keys = jo.keys();
        while (keys.hasNext()) {
            key = keys.next().toString();
            XML.noSpace(key);
            value = jo.optString(key);
            if (value != null) {
                sb.append(' ');
                sb.append(XML.escape(key));
                sb.append('=');
                sb.append('"');
                sb.append(XML.escape(value));
                sb.append('"');
            }
        }
    } else {
        i = 1;
    }

// Emit content in body; no children means a self-closing tag.

    length = ja.length();
    if (i >= length) {
        sb.append('/');
        sb.append('>');
    } else {
        sb.append('>');
        do {
            object = ja.get(i);
            i += 1;
            if (object != null) {
                if (object instanceof String) {
                    sb.append(XML.escape(object.toString()));
                } else if (object instanceof JSONObject) {
                    sb.append(toString((JSONObject)object));
                } else if (object instanceof JSONArray) {
                    sb.append(toString((JSONArray)object));
                }
            }
        } while (i < length);
        sb.append('<');
        sb.append('/');
        sb.append(tagName);
        sb.append('>');
    }
    return sb.toString();
}
/**
* Reverse the JSONML transformation, making an XML text from a JSONObject.
* The JSONObject must contain a "tagName" property. If it has children,
* then it must have a "childNodes" property containing an array of objects.
* The other properties are attributes with string values.
* @param jo A JSONObject.
* @return An XML string.
* @throws JSONException
*/
/**
 * Reverse the JsonML transformation, making an XML text from a JSONObject.
 * The JSONObject must contain a "tagName" property; other properties become
 * attributes, and an optional "childNodes" array becomes the element body.
 * @param jo A JSONObject.
 * @return An XML string.
 * @throws JSONException if a tag/attribute name contains whitespace.
 */
public static String toString(JSONObject jo) throws JSONException {
    // StringBuilder instead of legacy StringBuffer: the builder never leaves
    // this method, so StringBuffer's per-call synchronization is pure overhead.
    StringBuilder sb = new StringBuilder();
    int i;
    JSONArray ja;
    String key;
    Iterator keys;
    int length;
    Object object;
    String tagName;
    String value;

// Emit <tagName

    tagName = jo.optString("tagName");
    // NOTE(review): optString typically returns "" (not null) for a missing
    // key, which would make this branch dead — confirm against this
    // package's JSONObject.optString and consider optString("tagName", null).
    if (tagName == null) {
        return XML.escape(jo.toString());
    }
    XML.noSpace(tagName);
    tagName = XML.escape(tagName);
    sb.append('<');
    sb.append(tagName);

// Emit the attributes (everything except the reserved keys).

    keys = jo.keys();
    while (keys.hasNext()) {
        key = keys.next().toString();
        if (!"tagName".equals(key) && !"childNodes".equals(key)) {
            XML.noSpace(key);
            value = jo.optString(key);
            if (value != null) {
                sb.append(' ');
                sb.append(XML.escape(key));
                sb.append('=');
                sb.append('"');
                sb.append(XML.escape(value));
                sb.append('"');
            }
        }
    }

// Emit content in body; no childNodes means a self-closing tag.

    ja = jo.optJSONArray("childNodes");
    if (ja == null) {
        sb.append('/');
        sb.append('>');
    } else {
        sb.append('>');
        length = ja.length();
        for (i = 0; i < length; i += 1) {
            object = ja.get(i);
            if (object != null) {
                if (object instanceof String) {
                    sb.append(XML.escape(object.toString()));
                } else if (object instanceof JSONObject) {
                    sb.append(toString((JSONObject)object));
                } else if (object instanceof JSONArray) {
                    sb.append(toString((JSONArray)object));
                } else {
                    sb.append(object.toString());
                }
            }
        }
        sb.append('<');
        sb.append('/');
        sb.append(tagName);
        sb.append('>');
    }
    return sb.toString();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.11.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.storm.generated;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.11.0)")
public class ExecutorInfo implements org.apache.storm.thrift.TBase<ExecutorInfo, ExecutorInfo._Fields>, java.io.Serializable, Cloneable, Comparable<ExecutorInfo> {
private static final org.apache.storm.thrift.protocol.TStruct STRUCT_DESC = new org.apache.storm.thrift.protocol.TStruct("ExecutorInfo");
private static final org.apache.storm.thrift.protocol.TField TASK_START_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("task_start", org.apache.storm.thrift.protocol.TType.I32, (short)1);
private static final org.apache.storm.thrift.protocol.TField TASK_END_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("task_end", org.apache.storm.thrift.protocol.TType.I32, (short)2);
private static final org.apache.storm.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new ExecutorInfoStandardSchemeFactory();
private static final org.apache.storm.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new ExecutorInfoTupleSchemeFactory();
private int task_start; // required
private int task_end; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.storm.thrift.TFieldIdEnum {
  TASK_START((short)1, "task_start"),
  TASK_END((short)2, "task_end");

  // Lookup table from Thrift field name to enum constant.
  private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();

  static {
    for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
      byName.put(field.getFieldName(), field);
    }
  }

  /**
   * Find the _Fields constant that matches fieldId, or null if its not found.
   */
  public static _Fields findByThriftId(int fieldId) {
    switch(fieldId) {
      case 1: // TASK_START
        return TASK_START;
      case 2: // TASK_END
        return TASK_END;
      default:
        return null;
    }
  }

  /**
   * Find the _Fields constant that matches fieldId, throwing an exception
   * if it is not found.
   */
  public static _Fields findByThriftIdOrThrow(int fieldId) {
    _Fields fields = findByThriftId(fieldId);
    if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
    return fields;
  }

  /**
   * Find the _Fields constant that matches name, or null if its not found.
   */
  public static _Fields findByName(java.lang.String name) {
    return byName.get(name);
  }

  private final short _thriftId;
  private final java.lang.String _fieldName;

  _Fields(short thriftId, java.lang.String fieldName) {
    _thriftId = thriftId;
    _fieldName = fieldName;
  }

  public short getThriftFieldId() {
    return _thriftId;
  }

  public java.lang.String getFieldName() {
    return _fieldName;
  }
}
// isset id assignments
private static final int __TASK_START_ISSET_ID = 0;
private static final int __TASK_END_ISSET_ID = 1;
private byte __isset_bitfield = 0;
public static final java.util.Map<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.TASK_START, new org.apache.storm.thrift.meta_data.FieldMetaData("task_start", org.apache.storm.thrift.TFieldRequirementType.REQUIRED,
new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.TASK_END, new org.apache.storm.thrift.meta_data.FieldMetaData("task_end", org.apache.storm.thrift.TFieldRequirementType.REQUIRED,
new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.I32)));
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.storm.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ExecutorInfo.class, metaDataMap);
}
/** Default constructor; both required fields start unset. */
public ExecutorInfo() {
}
/** All-fields constructor; marks both required fields as set. */
public ExecutorInfo(
  int task_start,
  int task_end)
{
  this();
  this.task_start = task_start;
  set_task_start_isSet(true);
  this.task_end = task_end;
  set_task_end_isSet(true);
}
/**
* Performs a deep copy on <i>other</i>.
*/
// Copies the isset bitfield as well, so unset fields stay unset in the copy.
public ExecutorInfo(ExecutorInfo other) {
  __isset_bitfield = other.__isset_bitfield;
  this.task_start = other.task_start;
  this.task_end = other.task_end;
}
/** Deep copy via the copy constructor (all fields are primitives). */
public ExecutorInfo deepCopy() {
  return new ExecutorInfo(this);
}
/** Resets both fields to 0 and marks them unset. */
@Override
public void clear() {
  set_task_start_isSet(false);
  this.task_start = 0;
  set_task_end_isSet(false);
  this.task_end = 0;
}
/** Getter for the required task_start field. */
public int get_task_start() {
  return this.task_start;
}
/** Sets task_start and marks it set. */
public void set_task_start(int task_start) {
  this.task_start = task_start;
  set_task_start_isSet(true);
}
/** Clears the isset bit for task_start (the stored int value is left as-is). */
public void unset_task_start() {
  __isset_bitfield = org.apache.storm.thrift.EncodingUtils.clearBit(__isset_bitfield, __TASK_START_ISSET_ID);
}
/** Returns true if field task_start is set (has been assigned a value) and false otherwise */
public boolean is_set_task_start() {
  // Reads the dedicated bit in __isset_bitfield.
  return org.apache.storm.thrift.EncodingUtils.testBit(__isset_bitfield, __TASK_START_ISSET_ID);
}
/** Sets or clears the isset bit for task_start. */
public void set_task_start_isSet(boolean value) {
  __isset_bitfield = org.apache.storm.thrift.EncodingUtils.setBit(__isset_bitfield, __TASK_START_ISSET_ID, value);
}
/** Getter for the required task_end field. */
public int get_task_end() {
  return this.task_end;
}
/** Sets task_end and marks it set. */
public void set_task_end(int task_end) {
  this.task_end = task_end;
  set_task_end_isSet(true);
}
/** Clears the isset bit for task_end (the stored int value is left as-is). */
public void unset_task_end() {
  __isset_bitfield = org.apache.storm.thrift.EncodingUtils.clearBit(__isset_bitfield, __TASK_END_ISSET_ID);
}
/** Returns true if field task_end is set (has been assigned a value) and false otherwise */
public boolean is_set_task_end() {
  // Reads the dedicated bit in __isset_bitfield.
  return org.apache.storm.thrift.EncodingUtils.testBit(__isset_bitfield, __TASK_END_ISSET_ID);
}
/** Sets or clears the isset bit for task_end. */
public void set_task_end_isSet(boolean value) {
  __isset_bitfield = org.apache.storm.thrift.EncodingUtils.setBit(__isset_bitfield, __TASK_END_ISSET_ID, value);
}
/** Generic field setter; a null value unsets the field, otherwise it is unboxed and set. */
public void setFieldValue(_Fields field, java.lang.Object value) {
  switch (field) {
  case TASK_START:
    if (value == null) {
      unset_task_start();
    } else {
      set_task_start((java.lang.Integer)value);
    }
    break;

  case TASK_END:
    if (value == null) {
      unset_task_end();
    } else {
      set_task_end((java.lang.Integer)value);
    }
    break;

  }
}
/** Generic field getter; returns the boxed int for the requested field. */
public java.lang.Object getFieldValue(_Fields field) {
  switch (field) {
  case TASK_START:
    return get_task_start();

  case TASK_END:
    return get_task_end();

  }
  // Unreachable for a valid _Fields constant; guards against future enum additions.
  throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
  if (field == null) {
    throw new java.lang.IllegalArgumentException();
  }

  switch (field) {
  case TASK_START:
    return is_set_task_start();
  case TASK_END:
    return is_set_task_end();
  }
  // Unreachable for a valid _Fields constant.
  throw new java.lang.IllegalStateException();
}
/** Type-checked bridge to {@link #equals(ExecutorInfo)}. */
@Override
public boolean equals(java.lang.Object that) {
  if (that == null)
    return false;
  if (that instanceof ExecutorInfo)
    return this.equals((ExecutorInfo)that);
  return false;
}
// Generated equals: both fields are required primitives, so the "present"
// booleans are constant-true and the comparison reduces to the two int checks.
public boolean equals(ExecutorInfo that) {
  if (that == null)
    return false;
  if (this == that)
    return true;

  boolean this_present_task_start = true;
  boolean that_present_task_start = true;
  if (this_present_task_start || that_present_task_start) {
    if (!(this_present_task_start && that_present_task_start))
      return false;
    if (this.task_start != that.task_start)
      return false;
  }

  boolean this_present_task_end = true;
  boolean that_present_task_end = true;
  if (this_present_task_end || that_present_task_end) {
    if (!(this_present_task_end && that_present_task_end))
      return false;
    if (this.task_end != that.task_end)
      return false;
  }

  return true;
}
/** Hash over both fields, consistent with {@link #equals(ExecutorInfo)}. */
@Override
public int hashCode() {
  int hashCode = 1;

  hashCode = hashCode * 8191 + task_start;

  hashCode = hashCode * 8191 + task_end;

  return hashCode;
}
// Orders by isset state first, then by field value: task_start, then task_end.
@Override
public int compareTo(ExecutorInfo other) {
  if (!getClass().equals(other.getClass())) {
    return getClass().getName().compareTo(other.getClass().getName());
  }

  int lastComparison = 0;

  lastComparison = java.lang.Boolean.valueOf(is_set_task_start()).compareTo(other.is_set_task_start());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_task_start()) {
    lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.task_start, other.task_start);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  lastComparison = java.lang.Boolean.valueOf(is_set_task_end()).compareTo(other.is_set_task_end());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_task_end()) {
    lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.task_end, other.task_end);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  return 0;
}
/** Maps a Thrift field id to its _Fields constant (null if unknown). */
public _Fields fieldForId(int fieldId) {
  return _Fields.findByThriftId(fieldId);
}
/** Deserializes this struct using the scheme matching the protocol. */
public void read(org.apache.storm.thrift.protocol.TProtocol iprot) throws org.apache.storm.thrift.TException {
  scheme(iprot).read(iprot, this);
}
/** Serializes this struct using the scheme matching the protocol. */
public void write(org.apache.storm.thrift.protocol.TProtocol oprot) throws org.apache.storm.thrift.TException {
  scheme(oprot).write(oprot, this);
}
// Generated toString; the 'first' flag is vestigial here since both fields
// are unconditionally appended.
@Override
public java.lang.String toString() {
  java.lang.StringBuilder sb = new java.lang.StringBuilder("ExecutorInfo(");
  boolean first = true;

  sb.append("task_start:");
  sb.append(this.task_start);
  first = false;
  if (!first) sb.append(", ");
  sb.append("task_end:");
  sb.append(this.task_end);
  first = false;
  sb.append(")");
  return sb.toString();
}
/** Throws if either REQUIRED field has not been set. */
public void validate() throws org.apache.storm.thrift.TException {
  // check for required fields
  if (!is_set_task_start()) {
    throw new org.apache.storm.thrift.protocol.TProtocolException("Required field 'task_start' is unset! Struct:" + toString());
  }

  if (!is_set_task_end()) {
    throw new org.apache.storm.thrift.protocol.TProtocolException("Required field 'task_end' is unset! Struct:" + toString());
  }

  // check for sub-struct validity
}
/** Java serialization hook: delegates to Thrift compact-protocol encoding. */
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
  try {
    write(new org.apache.storm.thrift.protocol.TCompactProtocol(new org.apache.storm.thrift.transport.TIOStreamTransport(out)));
  } catch (org.apache.storm.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}
/** Java deserialization hook: delegates to Thrift compact-protocol decoding. */
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
  try {
    // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
    __isset_bitfield = 0;
    read(new org.apache.storm.thrift.protocol.TCompactProtocol(new org.apache.storm.thrift.transport.TIOStreamTransport(in)));
  } catch (org.apache.storm.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}
/** Factory for the field-tagged (standard) serialization scheme. */
private static class ExecutorInfoStandardSchemeFactory implements org.apache.storm.thrift.scheme.SchemeFactory {
  public ExecutorInfoStandardScheme getScheme() {
    return new ExecutorInfoStandardScheme();
  }
}
/** Standard scheme: reads/writes explicit field headers, tolerating unknown fields. */
private static class ExecutorInfoStandardScheme extends org.apache.storm.thrift.scheme.StandardScheme<ExecutorInfo> {

  public void read(org.apache.storm.thrift.protocol.TProtocol iprot, ExecutorInfo struct) throws org.apache.storm.thrift.TException {
    org.apache.storm.thrift.protocol.TField schemeField;
    iprot.readStructBegin();
    while (true)
    {
      schemeField = iprot.readFieldBegin();
      if (schemeField.type == org.apache.storm.thrift.protocol.TType.STOP) {
        break;
      }
      switch (schemeField.id) {
        case 1: // TASK_START
          if (schemeField.type == org.apache.storm.thrift.protocol.TType.I32) {
            struct.task_start = iprot.readI32();
            struct.set_task_start_isSet(true);
          } else {
            // Type mismatch: skip the value for forward compatibility.
            org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 2: // TASK_END
          if (schemeField.type == org.apache.storm.thrift.protocol.TType.I32) {
            struct.task_end = iprot.readI32();
            struct.set_task_end_isSet(true);
          } else {
            org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        default:
          org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
      }
      iprot.readFieldEnd();
    }
    iprot.readStructEnd();
    // Required-field check happens after the full struct is consumed.
    struct.validate();
  }

  public void write(org.apache.storm.thrift.protocol.TProtocol oprot, ExecutorInfo struct) throws org.apache.storm.thrift.TException {
    struct.validate();

    oprot.writeStructBegin(STRUCT_DESC);
    oprot.writeFieldBegin(TASK_START_FIELD_DESC);
    oprot.writeI32(struct.task_start);
    oprot.writeFieldEnd();
    oprot.writeFieldBegin(TASK_END_FIELD_DESC);
    oprot.writeI32(struct.task_end);
    oprot.writeFieldEnd();
    oprot.writeFieldStop();
    oprot.writeStructEnd();
  }

}
/** Factory for the compact positional (tuple) serialization scheme. */
private static class ExecutorInfoTupleSchemeFactory implements org.apache.storm.thrift.scheme.SchemeFactory {
  public ExecutorInfoTupleScheme getScheme() {
    return new ExecutorInfoTupleScheme();
  }
}
/** Tuple scheme: both fields are required, so they are written positionally with no headers. */
private static class ExecutorInfoTupleScheme extends org.apache.storm.thrift.scheme.TupleScheme<ExecutorInfo> {

  @Override
  public void write(org.apache.storm.thrift.protocol.TProtocol prot, ExecutorInfo struct) throws org.apache.storm.thrift.TException {
    org.apache.storm.thrift.protocol.TTupleProtocol oprot = (org.apache.storm.thrift.protocol.TTupleProtocol) prot;
    oprot.writeI32(struct.task_start);
    oprot.writeI32(struct.task_end);
  }

  @Override
  public void read(org.apache.storm.thrift.protocol.TProtocol prot, ExecutorInfo struct) throws org.apache.storm.thrift.TException {
    org.apache.storm.thrift.protocol.TTupleProtocol iprot = (org.apache.storm.thrift.protocol.TTupleProtocol) prot;
    struct.task_start = iprot.readI32();
    struct.set_task_start_isSet(true);
    struct.task_end = iprot.readI32();
    struct.set_task_end_isSet(true);
  }
}
/** Selects the standard or tuple scheme matching the given protocol's scheme type. */
private static <S extends org.apache.storm.thrift.scheme.IScheme> S scheme(org.apache.storm.thrift.protocol.TProtocol proto) {
return (org.apache.storm.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
}
| |
package io.fabric8.kubernetes.api.model;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import io.sundr.builder.annotations.Buildable;
import lombok.EqualsAndHashCode;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
@JsonDeserialize(using = com.fasterxml.jackson.databind.JsonDeserializer.None.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({
    "apiVersion",
    "kind",
    "metadata",
    "args",
    "command",
    "env",
    "envFrom",
    "image",
    "imagePullPolicy",
    "lifecycle",
    "livenessProbe",
    "name",
    "ports",
    "readinessProbe",
    "resources",
    "securityContext",
    "startupProbe",
    "stdin",
    "stdinOnce",
    "targetContainerName",
    "terminationMessagePath",
    "terminationMessagePolicy",
    "tty",
    "volumeDevices",
    "volumeMounts",
    "workingDir"
})
@ToString
@EqualsAndHashCode
@Setter
@Accessors(prefix = {
    "_",
    ""
})
@Buildable(editableEnabled = false, validationEnabled = false, generateBuilderPackage = true, lazyCollectionInitEnabled = false, builderPackage = "io.fabric8.kubernetes.api.builder")
public class EphemeralContainer implements KubernetesResource {

    // Collection-valued properties default to empty lists and are omitted
    // from serialized output when empty (NON_EMPTY); scalar properties are
    // omitted when null (class-level NON_NULL).
    @JsonProperty("args")
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private List<String> args = new ArrayList<>();
    @JsonProperty("command")
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private List<String> command = new ArrayList<>();
    @JsonProperty("env")
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private List<EnvVar> env = new ArrayList<>();
    @JsonProperty("envFrom")
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private List<EnvFromSource> envFrom = new ArrayList<>();
    @JsonProperty("image")
    private String image;
    @JsonProperty("imagePullPolicy")
    private String imagePullPolicy;
    @JsonProperty("lifecycle")
    private Lifecycle lifecycle;
    @JsonProperty("livenessProbe")
    private Probe livenessProbe;
    @JsonProperty("name")
    private String name;
    @JsonProperty("ports")
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private List<ContainerPort> ports = new ArrayList<>();
    @JsonProperty("readinessProbe")
    private Probe readinessProbe;
    @JsonProperty("resources")
    private ResourceRequirements resources;
    @JsonProperty("securityContext")
    private SecurityContext securityContext;
    @JsonProperty("startupProbe")
    private Probe startupProbe;
    @JsonProperty("stdin")
    private Boolean stdin;
    @JsonProperty("stdinOnce")
    private Boolean stdinOnce;
    @JsonProperty("targetContainerName")
    private String targetContainerName;
    @JsonProperty("terminationMessagePath")
    private String terminationMessagePath;
    @JsonProperty("terminationMessagePolicy")
    private String terminationMessagePolicy;
    @JsonProperty("tty")
    private Boolean tty;
    @JsonProperty("volumeDevices")
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private List<VolumeDevice> volumeDevices = new ArrayList<>();
    @JsonProperty("volumeMounts")
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private List<VolumeMount> volumeMounts = new ArrayList<>();
    @JsonProperty("workingDir")
    private String workingDir;
    // Catch-all for JSON keys that have no dedicated property.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    /**
     * No-args constructor for use in serialization.
     */
    public EphemeralContainer() {
    }

    /**
     * All-properties constructor; each argument is assigned to the
     * like-named property without copying or validation.
     */
    public EphemeralContainer(List<String> args, List<String> command, List<EnvVar> env, List<EnvFromSource> envFrom, String image, String imagePullPolicy, Lifecycle lifecycle, Probe livenessProbe, String name, List<ContainerPort> ports, Probe readinessProbe, ResourceRequirements resources, SecurityContext securityContext, Probe startupProbe, Boolean stdin, Boolean stdinOnce, String targetContainerName, String terminationMessagePath, String terminationMessagePolicy, Boolean tty, List<VolumeDevice> volumeDevices, List<VolumeMount> volumeMounts, String workingDir) {
        super();
        this.args = args;
        this.command = command;
        this.env = env;
        this.envFrom = envFrom;
        this.image = image;
        this.imagePullPolicy = imagePullPolicy;
        this.lifecycle = lifecycle;
        this.livenessProbe = livenessProbe;
        this.name = name;
        this.ports = ports;
        this.readinessProbe = readinessProbe;
        this.resources = resources;
        this.securityContext = securityContext;
        this.startupProbe = startupProbe;
        this.stdin = stdin;
        this.stdinOnce = stdinOnce;
        this.targetContainerName = targetContainerName;
        this.terminationMessagePath = terminationMessagePath;
        this.terminationMessagePolicy = terminationMessagePolicy;
        this.tty = tty;
        this.volumeDevices = volumeDevices;
        this.volumeMounts = volumeMounts;
        this.workingDir = workingDir;
    }

    @JsonProperty("args")
    public List<String> getArgs() {
        return this.args;
    }

    @JsonProperty("args")
    public void setArgs(List<String> args) {
        this.args = args;
    }

    @JsonProperty("command")
    public List<String> getCommand() {
        return this.command;
    }

    @JsonProperty("command")
    public void setCommand(List<String> command) {
        this.command = command;
    }

    @JsonProperty("env")
    public List<EnvVar> getEnv() {
        return this.env;
    }

    @JsonProperty("env")
    public void setEnv(List<EnvVar> env) {
        this.env = env;
    }

    @JsonProperty("envFrom")
    public List<EnvFromSource> getEnvFrom() {
        return this.envFrom;
    }

    @JsonProperty("envFrom")
    public void setEnvFrom(List<EnvFromSource> envFrom) {
        this.envFrom = envFrom;
    }

    @JsonProperty("image")
    public String getImage() {
        return this.image;
    }

    @JsonProperty("image")
    public void setImage(String image) {
        this.image = image;
    }

    @JsonProperty("imagePullPolicy")
    public String getImagePullPolicy() {
        return this.imagePullPolicy;
    }

    @JsonProperty("imagePullPolicy")
    public void setImagePullPolicy(String imagePullPolicy) {
        this.imagePullPolicy = imagePullPolicy;
    }

    @JsonProperty("lifecycle")
    public Lifecycle getLifecycle() {
        return this.lifecycle;
    }

    @JsonProperty("lifecycle")
    public void setLifecycle(Lifecycle lifecycle) {
        this.lifecycle = lifecycle;
    }

    @JsonProperty("livenessProbe")
    public Probe getLivenessProbe() {
        return this.livenessProbe;
    }

    @JsonProperty("livenessProbe")
    public void setLivenessProbe(Probe livenessProbe) {
        this.livenessProbe = livenessProbe;
    }

    @JsonProperty("name")
    public String getName() {
        return this.name;
    }

    @JsonProperty("name")
    public void setName(String name) {
        this.name = name;
    }

    @JsonProperty("ports")
    public List<ContainerPort> getPorts() {
        return this.ports;
    }

    @JsonProperty("ports")
    public void setPorts(List<ContainerPort> ports) {
        this.ports = ports;
    }

    @JsonProperty("readinessProbe")
    public Probe getReadinessProbe() {
        return this.readinessProbe;
    }

    @JsonProperty("readinessProbe")
    public void setReadinessProbe(Probe readinessProbe) {
        this.readinessProbe = readinessProbe;
    }

    @JsonProperty("resources")
    public ResourceRequirements getResources() {
        return this.resources;
    }

    @JsonProperty("resources")
    public void setResources(ResourceRequirements resources) {
        this.resources = resources;
    }

    @JsonProperty("securityContext")
    public SecurityContext getSecurityContext() {
        return this.securityContext;
    }

    @JsonProperty("securityContext")
    public void setSecurityContext(SecurityContext securityContext) {
        this.securityContext = securityContext;
    }

    @JsonProperty("startupProbe")
    public Probe getStartupProbe() {
        return this.startupProbe;
    }

    @JsonProperty("startupProbe")
    public void setStartupProbe(Probe startupProbe) {
        this.startupProbe = startupProbe;
    }

    @JsonProperty("stdin")
    public Boolean getStdin() {
        return this.stdin;
    }

    @JsonProperty("stdin")
    public void setStdin(Boolean stdin) {
        this.stdin = stdin;
    }

    @JsonProperty("stdinOnce")
    public Boolean getStdinOnce() {
        return this.stdinOnce;
    }

    @JsonProperty("stdinOnce")
    public void setStdinOnce(Boolean stdinOnce) {
        this.stdinOnce = stdinOnce;
    }

    @JsonProperty("targetContainerName")
    public String getTargetContainerName() {
        return this.targetContainerName;
    }

    @JsonProperty("targetContainerName")
    public void setTargetContainerName(String targetContainerName) {
        this.targetContainerName = targetContainerName;
    }

    @JsonProperty("terminationMessagePath")
    public String getTerminationMessagePath() {
        return this.terminationMessagePath;
    }

    @JsonProperty("terminationMessagePath")
    public void setTerminationMessagePath(String terminationMessagePath) {
        this.terminationMessagePath = terminationMessagePath;
    }

    @JsonProperty("terminationMessagePolicy")
    public String getTerminationMessagePolicy() {
        return this.terminationMessagePolicy;
    }

    @JsonProperty("terminationMessagePolicy")
    public void setTerminationMessagePolicy(String terminationMessagePolicy) {
        this.terminationMessagePolicy = terminationMessagePolicy;
    }

    @JsonProperty("tty")
    public Boolean getTty() {
        return this.tty;
    }

    @JsonProperty("tty")
    public void setTty(Boolean tty) {
        this.tty = tty;
    }

    @JsonProperty("volumeDevices")
    public List<VolumeDevice> getVolumeDevices() {
        return this.volumeDevices;
    }

    @JsonProperty("volumeDevices")
    public void setVolumeDevices(List<VolumeDevice> volumeDevices) {
        this.volumeDevices = volumeDevices;
    }

    @JsonProperty("volumeMounts")
    public List<VolumeMount> getVolumeMounts() {
        return this.volumeMounts;
    }

    @JsonProperty("volumeMounts")
    public void setVolumeMounts(List<VolumeMount> volumeMounts) {
        this.volumeMounts = volumeMounts;
    }

    @JsonProperty("workingDir")
    public String getWorkingDir() {
        return this.workingDir;
    }

    @JsonProperty("workingDir")
    public void setWorkingDir(String workingDir) {
        this.workingDir = workingDir;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }
}
| |
package com.ra4king.opengl.arcsynthesis.gl33.chapter5.example1;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.opengl.GL15.*;
import static org.lwjgl.opengl.GL20.*;
import static org.lwjgl.opengl.GL30.*;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import org.lwjgl.BufferUtils;
import com.ra4king.opengl.GLProgram;
import com.ra4king.opengl.util.ShaderProgram;
/**
 * Arcsynthesis GL 3.3 tutorial, chapter 5, example 1 ("Overlap No Depth"):
 * draws two wedge-shaped objects from a single interleaved VBO using two
 * VAOs that point at different byte offsets, deliberately WITHOUT a depth
 * test so the second object always paints over the first.
 */
public class Example5_1 extends GLProgram {
public static void main(String[] args) {
new Example5_1().run(true);
}
private final float RIGHT_EXTENT = 0.8f;
private final float LEFT_EXTENT = -RIGHT_EXTENT;
private final float TOP_EXTENT = 0.2f;
private final float MIDDLE_EXTENT = 0;
private final float BOTTOM_EXTENT = -TOP_EXTENT;
private final float FRONT_EXTENT = -1.25f;
private final float REAR_EXTENT = -1.75f;
// NOTE: the RGB values do not match the constant names (GREEN_COLOR is a
// pale blue, BLUE_COLOR is a dark green); presumably carried over from the
// original tutorial data — kept as-is.
private final float[] GREEN_COLOR = { 0.75f, 0.75f, 1, 1 };
private final float[] BLUE_COLOR = { 0, 0.5f, 0, 1 };
private final float[] RED_COLOR = { 1, 0, 0, 1 };
private final float[] GREY_COLOR = { 0.8f, 0.8f, 0.8f, 1 };
private final float[] BROWN_COLOR = { 0.5f, 0.5f, 0, 1 };
// Layout: 18 positions (xyz) for object 1, 18 for object 2, then 18 colors
// (rgba) for object 1 and 18 for object 2 — all in one buffer; the VAOs
// below address the sections by byte offset.
private float data[] = {
// Object 1 positions
LEFT_EXTENT, TOP_EXTENT, REAR_EXTENT,
LEFT_EXTENT, MIDDLE_EXTENT, FRONT_EXTENT,
RIGHT_EXTENT, MIDDLE_EXTENT, FRONT_EXTENT,
RIGHT_EXTENT, TOP_EXTENT, REAR_EXTENT,
LEFT_EXTENT, BOTTOM_EXTENT, REAR_EXTENT,
LEFT_EXTENT, MIDDLE_EXTENT, FRONT_EXTENT,
RIGHT_EXTENT, MIDDLE_EXTENT, FRONT_EXTENT,
RIGHT_EXTENT, BOTTOM_EXTENT, REAR_EXTENT,
LEFT_EXTENT, TOP_EXTENT, REAR_EXTENT,
LEFT_EXTENT, MIDDLE_EXTENT, FRONT_EXTENT,
LEFT_EXTENT, BOTTOM_EXTENT, REAR_EXTENT,
RIGHT_EXTENT, TOP_EXTENT, REAR_EXTENT,
RIGHT_EXTENT, MIDDLE_EXTENT, FRONT_EXTENT,
RIGHT_EXTENT, BOTTOM_EXTENT, REAR_EXTENT,
LEFT_EXTENT, BOTTOM_EXTENT, REAR_EXTENT,
LEFT_EXTENT, TOP_EXTENT, REAR_EXTENT,
RIGHT_EXTENT, TOP_EXTENT, REAR_EXTENT,
RIGHT_EXTENT, BOTTOM_EXTENT, REAR_EXTENT,
// Object 2 positions
TOP_EXTENT, RIGHT_EXTENT, REAR_EXTENT,
MIDDLE_EXTENT, RIGHT_EXTENT, FRONT_EXTENT,
MIDDLE_EXTENT, LEFT_EXTENT, FRONT_EXTENT,
TOP_EXTENT, LEFT_EXTENT, REAR_EXTENT,
BOTTOM_EXTENT, RIGHT_EXTENT, REAR_EXTENT,
MIDDLE_EXTENT, RIGHT_EXTENT, FRONT_EXTENT,
MIDDLE_EXTENT, LEFT_EXTENT, FRONT_EXTENT,
BOTTOM_EXTENT, LEFT_EXTENT, REAR_EXTENT,
TOP_EXTENT, RIGHT_EXTENT, REAR_EXTENT,
MIDDLE_EXTENT, RIGHT_EXTENT, FRONT_EXTENT,
BOTTOM_EXTENT, RIGHT_EXTENT, REAR_EXTENT,
TOP_EXTENT, LEFT_EXTENT, REAR_EXTENT,
MIDDLE_EXTENT, LEFT_EXTENT, FRONT_EXTENT,
BOTTOM_EXTENT, LEFT_EXTENT, REAR_EXTENT,
BOTTOM_EXTENT, RIGHT_EXTENT, REAR_EXTENT,
TOP_EXTENT, RIGHT_EXTENT, REAR_EXTENT,
TOP_EXTENT, LEFT_EXTENT, REAR_EXTENT,
BOTTOM_EXTENT, LEFT_EXTENT, REAR_EXTENT,
// Object 1 colors
GREEN_COLOR[0], GREEN_COLOR[1], GREEN_COLOR[2], GREEN_COLOR[3],
GREEN_COLOR[0], GREEN_COLOR[1], GREEN_COLOR[2], GREEN_COLOR[3],
GREEN_COLOR[0], GREEN_COLOR[1], GREEN_COLOR[2], GREEN_COLOR[3],
GREEN_COLOR[0], GREEN_COLOR[1], GREEN_COLOR[2], GREEN_COLOR[3],
BLUE_COLOR[0], BLUE_COLOR[1], BLUE_COLOR[2], BLUE_COLOR[3],
BLUE_COLOR[0], BLUE_COLOR[1], BLUE_COLOR[2], BLUE_COLOR[3],
BLUE_COLOR[0], BLUE_COLOR[1], BLUE_COLOR[2], BLUE_COLOR[3],
BLUE_COLOR[0], BLUE_COLOR[1], BLUE_COLOR[2], BLUE_COLOR[3],
RED_COLOR[0], RED_COLOR[1], RED_COLOR[2], RED_COLOR[3],
RED_COLOR[0], RED_COLOR[1], RED_COLOR[2], RED_COLOR[3],
RED_COLOR[0], RED_COLOR[1], RED_COLOR[2], RED_COLOR[3],
GREY_COLOR[0], GREY_COLOR[1], GREY_COLOR[2], GREY_COLOR[3],
GREY_COLOR[0], GREY_COLOR[1], GREY_COLOR[2], GREY_COLOR[3],
GREY_COLOR[0], GREY_COLOR[1], GREY_COLOR[2], GREY_COLOR[3],
BROWN_COLOR[0], BROWN_COLOR[1], BROWN_COLOR[2], BROWN_COLOR[3],
BROWN_COLOR[0], BROWN_COLOR[1], BROWN_COLOR[2], BROWN_COLOR[3],
BROWN_COLOR[0], BROWN_COLOR[1], BROWN_COLOR[2], BROWN_COLOR[3],
BROWN_COLOR[0], BROWN_COLOR[1], BROWN_COLOR[2], BROWN_COLOR[3],
// Object 2 colors
RED_COLOR[0], RED_COLOR[1], RED_COLOR[2], RED_COLOR[3],
RED_COLOR[0], RED_COLOR[1], RED_COLOR[2], RED_COLOR[3],
RED_COLOR[0], RED_COLOR[1], RED_COLOR[2], RED_COLOR[3],
RED_COLOR[0], RED_COLOR[1], RED_COLOR[2], RED_COLOR[3],
BROWN_COLOR[0], BROWN_COLOR[1], BROWN_COLOR[2], BROWN_COLOR[3],
BROWN_COLOR[0], BROWN_COLOR[1], BROWN_COLOR[2], BROWN_COLOR[3],
BROWN_COLOR[0], BROWN_COLOR[1], BROWN_COLOR[2], BROWN_COLOR[3],
BROWN_COLOR[0], BROWN_COLOR[1], BROWN_COLOR[2], BROWN_COLOR[3],
BLUE_COLOR[0], BLUE_COLOR[1], BLUE_COLOR[2], BLUE_COLOR[3],
BLUE_COLOR[0], BLUE_COLOR[1], BLUE_COLOR[2], BLUE_COLOR[3],
BLUE_COLOR[0], BLUE_COLOR[1], BLUE_COLOR[2], BLUE_COLOR[3],
GREEN_COLOR[0], GREEN_COLOR[1], GREEN_COLOR[2], GREEN_COLOR[3],
GREEN_COLOR[0], GREEN_COLOR[1], GREEN_COLOR[2], GREEN_COLOR[3],
GREEN_COLOR[0], GREEN_COLOR[1], GREEN_COLOR[2], GREEN_COLOR[3],
GREY_COLOR[0], GREY_COLOR[1], GREY_COLOR[2], GREY_COLOR[3],
GREY_COLOR[0], GREY_COLOR[1], GREY_COLOR[2], GREY_COLOR[3],
GREY_COLOR[0], GREY_COLOR[1], GREY_COLOR[2], GREY_COLOR[3],
GREY_COLOR[0], GREY_COLOR[1], GREY_COLOR[2], GREY_COLOR[3],
};
// Shared by both objects: the same 24 indices are drawn with each VAO,
// whose attribute-pointer offsets select which object's vertices are read.
private final short[] indices = {
0, 2, 1,
3, 2, 0,
4, 5, 6,
6, 7, 4,
8, 9, 10,
11, 13, 12,
14, 16, 15,
17, 16, 14
};
private ShaderProgram program;
private int offsetUniform;
private int vao1, vao2;
private FloatBuffer perspectiveMatrix;
private int perspectiveMatrixUniform;
private float frustumScale = 1;
public Example5_1() {
super("Example 5.1 - Overlap No Depth", 500, 500, true);
}
@Override
public void init() {
glClearColor(0, 0, 0, 0);
program = new ShaderProgram(readFromFile("example5.1.vert"), readFromFile("example5.1.frag"));
offsetUniform = program.getUniformLocation("offset");
perspectiveMatrixUniform = program.getUniformLocation("perspectiveMatrix");
float zNear = 0.5f, zFar = 3;
// Hand-built column-major perspective projection matrix; only the five
// non-trivial entries are set, the rest stay zero.
perspectiveMatrix = BufferUtils.createFloatBuffer(16);
perspectiveMatrix.put(0, frustumScale);
perspectiveMatrix.put(5, frustumScale);
perspectiveMatrix.put(10, (zFar + zNear) / (zNear - zFar));
perspectiveMatrix.put(14, (2 * zFar * zNear) / (zNear - zFar));
perspectiveMatrix.put(11, -1);
program.begin();
glUniformMatrix4(perspectiveMatrixUniform, false, perspectiveMatrix);
program.end();
int vbo1 = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vbo1);
glBufferData(GL_ARRAY_BUFFER, (FloatBuffer)BufferUtils.createFloatBuffer(data.length).put(data).flip(), GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
int vbo2 = glGenBuffers();
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo2);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, (ShortBuffer)BufferUtils.createShortBuffer(indices.length).put(indices).flip(), GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
// VAO 1: positions at byte 0, colors right after all 36 positions
// (36 vertices * 3 floats * 4 bytes).
vao1 = glGenVertexArrays();
glBindVertexArray(vao1);
glBindBuffer(GL_ARRAY_BUFFER, vbo1);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(0, 3, GL_FLOAT, false, 0, 0);
glVertexAttribPointer(1, 4, GL_FLOAT, false, 0, 36 * 3 * 4);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo2);
// VAO 2: same VBO (GL_ARRAY_BUFFER binding to vbo1 is still in effect —
// that binding is not VAO state), but offsets point at object 2's
// positions (second half) and object 2's colors (second color half).
vao2 = glGenVertexArrays();
glBindVertexArray(vao2);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(0, 3, GL_FLOAT, false, 0, 36 / 2 * 3 * 4);
glVertexAttribPointer(1, 4, GL_FLOAT, false, 0, (36 * 3 * 4) + (36 / 2 * 4 * 4));
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo2);
glBindVertexArray(0);
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glFrontFace(GL_CW);
}
@Override
public void resized() {
super.resized();
// Keep the projection's horizontal scale aspect-correct after a resize.
perspectiveMatrix.put(0, frustumScale / ((float)getWidth() / getHeight()));
program.begin();
glUniformMatrix4(perspectiveMatrixUniform, false, perspectiveMatrix);
program.end();
}
@Override
public void render() {
// Intentionally clears only color, and no depth test is enabled — this
// example demonstrates overlap artifacts without a depth buffer.
glClear(GL_COLOR_BUFFER_BIT);
program.begin();
glBindVertexArray(vao1);
glUniform3f(offsetUniform, 0, 0, 0);
glDrawElements(GL_TRIANGLES, indices.length, GL_UNSIGNED_SHORT, 0);
glBindVertexArray(vao2);
glUniform3f(offsetUniform, 0, 0, -1);
glDrawElements(GL_TRIANGLES, indices.length, GL_UNSIGNED_SHORT, 0);
glBindVertexArray(0);
program.end();
}
}
| |
/*
* Copyright 2000-2005 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.tomcat;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.idea.tomcat.server.TomcatLocalModel;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.configurations.RuntimeConfigurationException;
import com.intellij.execution.process.OSProcessHandler;
import com.intellij.execution.util.EnvironmentVariable;
import com.intellij.javaee.oss.server.JavaeeParameters;
import com.intellij.javaee.oss.server.JavaeeStartupPolicy;
import com.intellij.javaee.run.localRun.ColoredCommandLineExecutableObject;
import com.intellij.javaee.run.localRun.ExecutableObject;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.projectRoots.JavaSdkType;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.EnvironmentUtil;
import com.intellij.util.containers.HashMap;
/**
 * Builds the startup/shutdown executables and environment for a local
 * Tomcat/TomEE server run from the IDE. Prefers the bundled catalina
 * script when present; otherwise launches bootstrap.jar directly with a
 * hand-assembled java command line.
 */
public class TomcatStartupPolicy extends JavaeeStartupPolicy<TomcatLocalModel>
{
private static final Logger LOG = Logger.getInstance("#org.jetbrains.idea.tomcat.TomcatStartupPolicy");
@NonNls
protected static final String TEMP_FILE_NAME = "temp";
@NonNls
protected static final String BIN_DIR = "bin";
@NonNls
private static final String CATALINA_TMPDIR_ENV_PROPERTY = "CATALINA_TMPDIR";
@NonNls
private static final String JAVA_HOME_ENV_PROPERTY = "JAVA_HOME";
@NonNls
private static final String JRE_HOME_ENV_PROPERTY = "JRE_HOME";
@NonNls
private static final String JAVA_VM_ENV_VARIABLE = "JAVA_OPTS";
@NonNls
private static final String JAR_PARAMETER = "-jar";
@NonNls
public static final String CLASSPATH_PARAMETER = "-cp";
@NonNls
public static final String RMI_HOST_JAVA_OPT = "java.rmi.server.hostname";
/**
 * Creates the startup executable ("catalina run" or an equivalent direct
 * java launch), injecting JMX-related JVM options when the model has JMX
 * enabled. Returns null if the model configuration is invalid.
 */
@Override
protected ExecutableObject getDefaultStartupScript(TomcatLocalModel serverModel, boolean debug)
{
try
{
return new ExecutableCreator(serverModel, "run", "start")
{
@Override
protected List<String> getCustomJavaOptions()
{
TomcatLocalModel tomcatModel = getTomcatModel();
if(tomcatModel.isUseJmx())
{
// Expose an unauthenticated local JMX endpoint on the model's JNDI port.
List<String> result = new ArrayList<>(Arrays.asList("-Dcom.sun.management.jmxremote=", "-Dcom.sun.management.jmxremote.port=" + tomcatModel.JNDI_PORT, "-Dcom.sun.management"
+ ".jmxremote.ssl=false", "-Dcom.sun.management.jmxremote.authenticate=false"));
// Pin RMI to loopback unless the user configured a hostname themselves.
if(tomcatModel.getVmArgument(RMI_HOST_JAVA_OPT) == null)
{
result.add("-D" + RMI_HOST_JAVA_OPT + "=127.0.0.1");
}
if(tomcatModel.isTomEE())
{
// Version-gated TomEE flags needed for remote EJB/serialization support.
if(tomcatModel.versionHigher(7, 0, 68))
{
result.add("-Dtomee.serialization.class.whitelist=");
result.add("-Dtomee.serialization.class.blacklist=-");
}
if(tomcatModel.versionHigher(8, 0, 28))
{
result.add("-Dtomee.remote.support=true");
result.add("-Dopenejb.system.apps=true");
}
}
return result;
}
else
{
return super.getCustomJavaOptions();
}
}
}.createExecutable();
}
catch(RuntimeConfigurationException e)
{
return null;
}
}
/**
 * Creates the shutdown executable ("catalina stop" or the direct java
 * equivalent). Returns null if the model configuration is invalid.
 */
@Override
protected ExecutableObject getDefaultShutdownScript(TomcatLocalModel serverModel, boolean debug)
{
try
{
return new ExecutableCreator(serverModel, "stop", "stop").createExecutable();
}
catch(RuntimeConfigurationException e)
{
return null;
}
}
// Parameter-based startup is not used by this policy; executables are
// built by ExecutableCreator instead.
@Override
protected void getStartupParameters(JavaeeParameters params, TomcatLocalModel model, boolean debug)
{
throw new UnsupportedOperationException();
}
@Override
protected void getShutdownParameters(JavaeeParameters params, TomcatLocalModel model, boolean debug)
{
throw new UnsupportedOperationException();
}
/**
 * Environment for the catalina scripts: CATALINA_HOME/BASE, a temp dir
 * when the user has none set, and JAVA_HOME/JRE_HOME resolved from the
 * model's JRE or inherited from the user's environment.
 *
 * NOTE(review): returns null (not an empty list) when the model is
 * misconfigured — callers must tolerate null; confirm before changing.
 */
@Override
protected List<EnvironmentVariable> getEnvironmentVariables(TomcatLocalModel tomcatModel)
{
try
{
ArrayList<EnvironmentVariable> vars = new ArrayList<>();
vars.add(new EnvironmentVariable("CATALINA_HOME", tomcatModel.getHomeDirectory(), true));
vars.add(new EnvironmentVariable("CATALINA_BASE", tomcatModel.getBaseDirectoryPath(), true));
String tmpDir = EnvironmentUtil.getValue(CATALINA_TMPDIR_ENV_PROPERTY);
if(tmpDir == null)
{
vars.add(new EnvironmentVariable(CATALINA_TMPDIR_ENV_PROPERTY, getCatalinaTempDirectory(tomcatModel), true));
}
String[] javaEnvVars = {
JAVA_HOME_ENV_PROPERTY,
JRE_HOME_ENV_PROPERTY
};
String jrePath = tomcatModel.getJrePath();
for(String varName : javaEnvVars)
{
setupJavaPath(vars, varName, jrePath);
}
return vars;
}
catch(RuntimeConfigurationException e)
{
LOG.error(e);
return null;
}
}
/**
 * Resolves the server's temp directory: prefers an existing "temp" under
 * the source base dir, then under the base dir, creating the latter if
 * neither exists.
 */
private static String getCatalinaTempDirectory(final TomcatLocalModel tomcatModel) throws RuntimeConfigurationException
{
File tempDir = new File(tomcatModel.getSourceBaseDirectoryPath(), TEMP_FILE_NAME);
if(!tempDir.exists())
{
tempDir = new File(tomcatModel.getBaseDirectoryPath(), TEMP_FILE_NAME);
if(!tempDir.exists())
{
FileUtil.createDirectory(tempDir);
}
}
return tempDir.getAbsolutePath();
}
/** OS-specific name of the catalina launcher script. */
@NonNls
public static String getDefaultCatalinaFileName()
{
return SystemInfo.isWindows ? "catalina.bat" : "catalina.sh";
}
// Uses the model's JRE when configured, otherwise falls back to the value
// already present in the user's environment (if any).
private static void setupJavaPath(List<EnvironmentVariable> vars, String varName, String jrePath)
{
if(jrePath != null)
{
vars.add(new EnvironmentVariable(varName, jrePath, true));
}
else
{
String envValue = EnvironmentUtil.getValue(varName);
if(envValue != null)
{
vars.add(new EnvironmentVariable(varName, envValue, true));
}
}
}
/**
 * Builds one runnable command: the catalina script when it exists in
 * CATALINA_HOME/bin, else a direct java invocation of bootstrap.jar.
 */
private static class ExecutableCreator
{
private final TomcatLocalModel myTomcatModel;
private final File myBinDir;
private final File myCatalinaScriptFile;
private final String myScriptCommand;
private final String myJavaCommand;
public ExecutableCreator(TomcatLocalModel model, @NonNls String scriptCommand, @NonNls String javaCommand) throws RuntimeConfigurationException
{
myScriptCommand = scriptCommand;
myJavaCommand = javaCommand;
myTomcatModel = model;
myBinDir = new File(new File(myTomcatModel.getHomeDirectory()), BIN_DIR);
myCatalinaScriptFile = new File(myBinDir, getDefaultCatalinaFileName());
}
protected final TomcatLocalModel getTomcatModel()
{
return myTomcatModel;
}
public ExecutableObject createExecutable() throws RuntimeConfigurationException
{
if(myCatalinaScriptFile.exists())
{
return createScriptExecutable();
}
else
{
return createJavaExecutable();
}
}
// Script path: extra JVM options are passed to catalina via JAVA_OPTS.
private ExecutableObject createScriptExecutable()
{
return new ColoredCommandLineExecutableObject(new String[]{
myCatalinaScriptFile.getAbsolutePath(),
myScriptCommand
}, null)
{
@Override
public OSProcessHandler createProcessHandler(String workingDirectory, Map<String, String> envVariables) throws ExecutionException
{
List<String> customJavaOptions = getCustomJavaOptions();
if(!customJavaOptions.isEmpty())
{
// Copy before mutating: the incoming map may be shared/immutable.
envVariables = new HashMap<>(envVariables);
String javaOptions = StringUtil.notNullize(envVariables.get(JAVA_VM_ENV_VARIABLE)) + " " + StringUtil.join(customJavaOptions, " ");
envVariables.put(JAVA_VM_ENV_VARIABLE, javaOptions);
}
return super.createProcessHandler(workingDirectory, envVariables);
}
};
}
// Direct path: assemble "java -Dcatalina.* ... [-cp|-jar] bootstrap" ourselves.
private ExecutableObject createJavaExecutable() throws RuntimeConfigurationException
{
final Sdk jre = getTomcatModel().getJre();
GeneralCommandLine commandLine = new GeneralCommandLine();
if(jre != null)
{
//FIXME [VISTALL] better support custom sdks
((JavaSdkType) jre.getSdkType()).setupCommandLine(commandLine, jre);
}
final @NonNls String vmExecutablePath = jre == null ? "java" : commandLine.getExePath();
List<String> args = new ArrayList<>();
args.addAll(Arrays.asList(vmExecutablePath, "-Dcatalina.base=" + myTomcatModel.getBaseDirectoryPath(), "-Dcatalina.home=" + myTomcatModel.getHomeDirectory(), "-Djava.io.tmpdir=" +
getCatalinaTempDirectory(myTomcatModel)));
args.addAll(getCustomJavaOptions());
final String bootstrapJarPath = new File(myBinDir, "bootstrap.jar").getAbsolutePath();
// Tomcat 7+ also needs tomcat-juli.jar on the classpath, so use -cp +
// main class instead of -jar.
if(myTomcatModel.isVersion7OrHigher())
{
args.addAll(Arrays.asList(CLASSPATH_PARAMETER, bootstrapJarPath + File.pathSeparator + new File(myBinDir, "tomcat-juli.jar").getAbsolutePath(), "org.apache.catalina.startup" + "" +
".Bootstrap"));
}
else
{
args.addAll(Arrays.asList(JAR_PARAMETER, bootstrapJarPath));
}
return new ColoredCommandLineExecutableObject(ArrayUtil.toStringArray(args), myJavaCommand)
{
protected GeneralCommandLine createCommandLine(String[] parameters, final Map<String, String> envVariables)
{
final String javaOptions = envVariables.get(JAVA_VM_ENV_VARIABLE);
if(javaOptions != null)
{
// Splice user JAVA_OPTS in front of the -jar/-cp marker so they
// act as JVM options rather than program arguments.
List<String> newParameters = new ArrayList<>();
for(String parameter : parameters)
{
if(JAR_PARAMETER.equals(parameter) || CLASSPATH_PARAMETER.equals(parameter))
{
newParameters.addAll(StringUtil.splitHonorQuotes(javaOptions, ' '));
}
newParameters.add(parameter);
}
parameters = ArrayUtil.toStringArray(newParameters);
}
return super.createCommandLine(parameters, envVariables);
}
};
}
// Overridden by the startup path to inject JMX/TomEE options.
protected List<String> getCustomJavaOptions()
{
return Collections.emptyList();
}
}
}
| |
package com.deveddy.clujbike.data.repository;
import com.deveddy.clujbike.data.repository.mapper.station.StationEntityRealmMapper;
import com.deveddy.clujbike.data.repository.mapper.station.StationRealmEntityMapper;
import com.deveddy.clujbike.data.repository.models.StationEntity;
import com.deveddy.clujbike.data.repository.models.StationRealm;
import com.deveddy.clujbike.data.repository.specifications.RealmSpecification;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InOrder;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import io.realm.Realm;
import io.realm.RealmQuery;
import rx.observers.TestSubscriber;
import rx.schedulers.Schedulers;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class StationRealmRepositoryTest {
private StationRealmRepository sut;
@Mock
StationEntityRealmMapper entityToRealmMapper;
@Mock
StationRealmEntityMapper realmToEntityMapper;
@Mock
RealmProvider realmProvider;
@Mock
Realm realm;
@Mock
RealmSpecification realmSpecification;
@Mock
StationRealm stationRealmItem;
@Mock
RealmQuery<StationRealm> query;
@Mock
StationRealm newStationRealm;
@Mock
StationRealm oldStationRealm;
@Mock
StationEntity stationEntity;
// Initialized in setUp(), not here: a field initializer runs when the test
// instance is constructed, BEFORE MockitoJUnitRunner injects the @Mock
// fields, so it would capture stationRealmItem while it is still null and
// the list would contain two nulls.
private List<StationRealm> stationRealmList;
/**
 * Creates the system under test, builds the mock-backed fixture list, and
 * stubs the Realm provider to hand out the mock Realm.
 */
@Before
public void setUp() throws Exception {
stationRealmList = Arrays.asList(
stationRealmItem,
stationRealmItem);
sut = new StationRealmRepository(realmProvider, entityToRealmMapper, realmToEntityMapper);
when(realmProvider.provide()).thenReturn(realm);
}
/**
 * add(): verifies the happy path runs one Realm transaction that maps and
 * inserts each entity in order, commits, and closes the Realm.
 */
@Test
public void givenItems_whenAddingToRepositorySuccessfully_thenExpectedMethodsAreCalled() {
List<StationEntity> stationEntityList = givenStationEntities();
setUpMapperFor(stationEntityList, stationRealmList);
sut.add(stationEntityList)
.subscribeOn(Schedulers.immediate())
.observeOn(Schedulers.immediate())
.subscribe(new TestSubscriber<StationEntity>());
// Order matters: begin -> (map, insert) per item -> commit -> close.
InOrder inOrder = inOrder(realm, entityToRealmMapper);
inOrder.verify(realm).beginTransaction();
inOrder.verify(entityToRealmMapper).from(stationEntityList.get(0));
inOrder.verify(realm).insert(stationRealmList.get(0));
inOrder.verify(entityToRealmMapper).from(stationEntityList.get(1));
inOrder.verify(realm).insert(stationRealmList.get(1));
inOrder.verify(realm).commitTransaction();
inOrder.verify(realm).close();
}
/**
 * add(): when insert throws, the transaction must be cancelled (not
 * committed), the Realm closed, and the error propagated to the subscriber.
 */
@Test
public void givenItems_whenAddingToRepositoryUnsuccessfully_thenThrowError() {
List<StationEntity> stationEntityList = givenStationEntities();
TestSubscriber testSubscriber = new TestSubscriber();
Error error = new Error("A wild error has occurred!");
setUpMapperFor(stationEntityList, stationRealmList);
// Make the first insert blow up inside the transaction.
doThrow(error).when(realm).insert(stationRealmList.get(0));
sut.add(stationEntityList)
.subscribeOn(Schedulers.immediate())
.observeOn(Schedulers.immediate())
.subscribe(testSubscriber);
InOrder inOrder = inOrder(realm);
inOrder.verify(realm).beginTransaction();
inOrder.verify(realm).cancelTransaction();
inOrder.verify(realm).close();
testSubscriber.assertError(error);
}
/**
 * update(): happy path maps the entity, copies it to Realm with
 * copyToRealmOrUpdate inside a committed transaction, and closes the Realm.
 */
@Test
public void givenItemAndSpecification_whenUpdatingItemSuccessfully_thenExpectedMethodsAreCalled() {
StationEntity stationEntity = givenStationEntity("Times Square", 15);
setUpMapperForItem(stationEntity, newStationRealm);
provideSpecificationQueryMockResult();
sut.update(stationEntity, realmSpecification)
.subscribeOn(Schedulers.immediate())
.observeOn(Schedulers.immediate())
.subscribe(new TestSubscriber<StationEntity>());
InOrder inOrder = inOrder(realm, entityToRealmMapper, realmSpecification);
inOrder.verify(realm).beginTransaction();
inOrder.verify(entityToRealmMapper).from(stationEntity);
inOrder.verify(realm).copyToRealmOrUpdate(newStationRealm);
inOrder.verify(realm).commitTransaction();
inOrder.verify(realm).close();
}
@Test
public void givenItemAndSpecification_whenUpdatingItemUnsuccessfully_thenThrowError() {
StationEntity stationEntity = givenStationEntity("Times Square", 15);
Error error = new Error("A wild error has occurred!");
TestSubscriber testSubscriber = new TestSubscriber();
setUpMapperForItem(stationEntity, newStationRealm);
provideSpecificationQueryMockResult();
doThrow(error).when(realm).copyToRealmOrUpdate(newStationRealm);
sut.update(stationEntity, realmSpecification)
.subscribeOn(Schedulers.immediate())
.observeOn(Schedulers.immediate())
.subscribe(testSubscriber);
InOrder inOrder = inOrder(realm, entityToRealmMapper);
inOrder.verify(realm).beginTransaction();
inOrder.verify(entityToRealmMapper).from(stationEntity);
inOrder.verify(realm).cancelTransaction();
inOrder.verify(realm).close();
testSubscriber.assertError(error);
}
@Test
public void givenSpecification_whenQueryingSuccessfully_thenExpectedMethodsAreCalled() {
provideSpecificationQueryMockResult();
sut.query(realmSpecification)
.subscribeOn(Schedulers.immediate())
.observeOn(Schedulers.immediate())
.subscribe(new TestSubscriber<>());
InOrder inOrder = inOrder(realm, realmToEntityMapper);
inOrder.verify(realm).beginTransaction();
inOrder.verify(realmToEntityMapper).from(oldStationRealm);
inOrder.verify(realm).commitTransaction();
inOrder.verify(realm).close();
}
@Test
public void givenSpecification_whenQueryingUnsuccessfully_thenThrowError() {
provideSpecificationQueryMockResult();
TestSubscriber testSubscriber = new TestSubscriber();
Error error = new Error("A wild error has occurred!");
doThrow(error).when(query).findFirst();
sut.query(realmSpecification)
.subscribeOn(Schedulers.immediate())
.observeOn(Schedulers.immediate())
.subscribe(testSubscriber);
InOrder inOrder = inOrder(realm);
inOrder.verify(realm).beginTransaction();
inOrder.verify(realm).cancelTransaction();
inOrder.verify(realm).close();
testSubscriber.assertError(error);
}
private StationEntity givenStationEntity(String name, int id) {
return StationEntity.builder()
.name(name)
.address("Manhattan")
.occupiedSpots(2)
.emptySpots(4)
.maximumNumberOfBikes(6)
.lastSyncDate(new Date(System.currentTimeMillis()))
.idStatus(0)
.statusFunctional(true)
.statusAvailable(true)
.latitude(34.33f)
.longitude(34.44f)
.valid(true)
.customValid(true)
.id(id)
.build();
}
private List<StationEntity> givenStationEntities() {
List<StationEntity> items = new ArrayList<>();
items.add(givenStationEntity("Union Square", 1));
items.add(givenStationEntity("Herald Square", 2));
return items;
}
private void setUpMapperForItem(StationEntity stationEntity, StationRealm stationRealm) {
when(entityToRealmMapper.from(stationEntity)).thenReturn(stationRealm);
}
private void setUpMapperFor(List<StationEntity> stationEntityList,
List<StationRealm> stationRealmList) {
for (int i = 0; i < stationEntityList.size(); i++) {
setUpMapperForItem(stationEntityList.get(i), stationRealmList.get(i));
}
}
private void provideSpecificationQueryMockResult() {
when(realmSpecification.toRealmQuery(realm)).thenReturn(query);
when(query.findFirst()).thenReturn(oldStationRealm);
}
}
| |
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.flags;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.annotation.Config;
import org.robolectric.annotation.LooperMode;
import org.chromium.base.FeatureList;
import org.chromium.base.task.test.ShadowPostTask;
import org.chromium.base.test.BaseRobolectricTestRunner;
import org.chromium.chrome.browser.flags.CachedFlagsSafeMode.Behavior;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
* Unit Tests for the Safe Mode mechanism for {@link CachedFeatureFlags}.
*
* Tests the public API {@link CachedFeatureFlags} rather than the implementation
* {@link CachedFlagsSafeMode}.
*/
@RunWith(BaseRobolectricTestRunner.class)
@Config(shadows = {ShadowPostTask.class})
@LooperMode(LooperMode.Mode.LEGACY)
public class CachedFeatureFlagsSafeModeUnitTest {
    private static final String CRASHY_FEATURE = "CrashyFeature";
    private static final String OK_FEATURE = "OkFeature";
    // Original defaults, restored in tearDown after being swapped out in setUp.
    Map<String, Boolean> mDefaultsSwapped;
    @Before
    public void setUp() {
        CachedFeatureFlags.resetFlagsForTesting();
        // Both features default to false for every test.
        Map<String, Boolean> defaults = makeFeatureMap(false, false);
        mDefaultsSwapped = CachedFeatureFlags.swapDefaultsForTesting(defaults);
    }
    @After
    public void tearDown() {
        CachedFeatureFlags.resetFlagsForTesting();
        CachedFeatureFlags.swapDefaultsForTesting(mDefaultsSwapped);
        FeatureList.setTestFeatures(null);
        CachedFlagsSafeMode.clearDiskForTesting();
    }
    @Test
    public void testTwoCrashesInARow_engageSafeMode() {
        startRun();
        // Crash streak is 0. Do not engage Safe Mode.
        // There are no safe values.
        // There are no cached flag values, so the defaults false/false are used.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertFalse(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertFalse(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endCleanRun(false, true);
        // Safe values became false/false.
        // Cached values became false/true.
        startRun();
        // Crash streak is 0. Do not engage Safe Mode.
        // Safe values are false/false.
        // Cached flag values are false/true, from previous run.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertFalse(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertTrue(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endCleanRun(true, true);
        // Safe values became false/true.
        // Cached values became true(crashy)/true.
        startRun();
        // Crash streak is 0. Do not engage Safe Mode.
        // Safe values are false/true.
        // Cached values remain true(crashy)/true and are used.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertTrue(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertTrue(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endCrashyRun();
        // Cached values remain true(crashy)/true.
        startRun();
        // Crash streak is 1. Do not engage Safe Mode.
        // Safe values are false/true.
        // Cached values remain true(crashy)/true and are used.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertTrue(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertTrue(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endCrashyRun();
        // Cached values remain true(crashy)/true.
        startRun();
        // Crash streak is 2. Engage Safe Mode.
        // Safe values are false/true, and are used during this run.
        // Cached values remain true(crashy)/true, but are not used because Safe Mode is engaged.
        assertEquals(Behavior.ENGAGED_WITH_SAFE_VALUES,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        // TODO(crbug.com/1217708): Assert cached flags values are false/true.
        endCleanRun(false, false);
        // Cached values became false/false, cached from native.
        startRun();
        // Crash streak is 0. Do not engage Safe Mode.
        // Safe values are false/true still.
        // Cached values false/false are used, cached from native last run.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        // TODO(crbug.com/1217708): Assert cached flags values are false/false.
    }
    @Test
    public void testTwoCrashesInterrupted_normalMode() {
        startRun();
        // Crash streak is 0. Do not engage Safe Mode.
        // There are no safe values.
        // There are no cached flag values, so the defaults false/false are used.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertFalse(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertFalse(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endCleanRun(true, true);
        // Safe values became false/false.
        // Cached values became true(flaky)/true.
        startRun();
        // Crash streak is 0. Do not engage Safe Mode.
        // Safe values are false/false.
        // Cached flag values are true(flaky)/true.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertTrue(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertTrue(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endCrashyRun();
        // Cached values remain true(crashy)/true.
        startRun();
        // Crash streak is 1. Do not engage Safe Mode.
        // Safe values are false/false.
        // Cached flag values are true(flaky)/true.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        // Cached flag values are the flaky ones cached from native.
        assertTrue(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertTrue(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endCleanRun(true, true);
        // Safe values became true(flaky)/true.
        // Cached values remain true(flaky)/true.
        startRun();
        // Crash streak is 0, do not engage, use flaky values.
        // Safe values are true(flaky)/true.
        // Cached flag values are true(flaky)/true.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertTrue(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertTrue(CachedFeatureFlags.isEnabled(OK_FEATURE));
    }
    /**
     * Tests that decrementing the crash streak to account for an aborted run prevents Safe Mode
     * from engaging.
     */
    @Test
    public void testTwoFREs_normalMode() {
        startRun();
        // Crash streak is 0. Do not engage Safe Mode.
        // There are no safe values.
        // There are no cached flag values, so the defaults false/false are used.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertFalse(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertFalse(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endFirstRunWithKill();
        startRun();
        // Crash streak is 0. Do not engage Safe Mode.
        // There are no safe values.
        // There are no cached flag values, so the defaults false/false are used.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertFalse(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertFalse(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endFirstRunWithKill();
        startRun();
        // Crash streak is 0, do not engage, use flaky values.
        // There are no safe values.
        // There are no cached flag values, so the defaults false/false are used.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertFalse(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertFalse(CachedFeatureFlags.isEnabled(OK_FEATURE));
    }
    @Test
    public void testTwoCrashesInARow_engageSafeModeWithoutSafeValues() {
        // Simulate a cache without writing safe values. This happens before Safe Mode was
        // implemented and will become rare as clients start writing safe values.
        // Cache a crashy value.
        FeatureList.setTestFeatures(makeFeatureMap(true, true));
        CachedFeatureFlags.cacheNativeFlags(Arrays.asList(CRASHY_FEATURE, OK_FEATURE));
        CachedFeatureFlags.resetFlagsForTesting();
        // Cached values became true(crashy)/true.
        startRun();
        // Crash streak is 0. Do not engage Safe Mode.
        // There are no safe values.
        // Cached values are true(crashy)/true.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertTrue(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertTrue(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endCrashyRun();
        // Cached values remain true(crashy)/true.
        startRun();
        // Crash streak is 1. Do not engage Safe Mode.
        // There are no safe values.
        // Cached values are true(crashy)/true.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertTrue(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertTrue(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endCrashyRun();
        // Cached values remain true(crashy)/true.
        startRun();
        // Crash streak is 2. Engage Safe Mode without safe values.
        // There are no safe values.
        // Cached values are true(crashy)/true, but the default values false/false are returned
        // since Safe Mode is falling back to default.
        assertEquals(Behavior.ENGAGED_WITHOUT_SAFE_VALUES,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        // TODO(crbug.com/1217708): Assert cached flags values are false/false.
    }
    @Test
    public void testTwoCrashesInARow_engageSafeModeIgnoringOutdated() {
        startRun();
        // Crash streak is 0. Do not engage Safe Mode.
        // There are no safe values.
        // There are no cached flag values, so the defaults false/false are used.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertFalse(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertFalse(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endCleanRun(false, true);
        // Safe values became false/false.
        // Cached values became false/true.
        startRun();
        // Crash streak is 0. Do not engage Safe Mode.
        // Safe values are false/false.
        // Cached flag values are false/true, from previous run.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertFalse(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertTrue(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endCleanRun(true, true);
        // Safe values became false/true.
        // Cached values became true(crashy)/true.
        // Pretend safe values are from an older version
        CachedFlagsSafeMode.getSafeValuePreferences()
                .edit()
                .putString(CachedFlagsSafeMode.PREF_SAFE_VALUES_VERSION, "1.0.0.0")
                .apply();
        startRun();
        // Crash streak is 0. Do not engage Safe Mode.
        // Safe values are false/true, but from another version.
        // Cached values are true(crashy)/true.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertTrue(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertTrue(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endCrashyRun();
        // Cached values remain true(crashy)/true.
        startRun();
        // Crash streak is 1. Do not engage Safe Mode.
        // Safe values are false/true, but from another version.
        // Cached values are true(crashy)/true.
        assertEquals(Behavior.NOT_ENGAGED_BELOW_THRESHOLD,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        assertTrue(CachedFeatureFlags.isEnabled(CRASHY_FEATURE));
        assertTrue(CachedFeatureFlags.isEnabled(OK_FEATURE));
        endCrashyRun();
        // Cached values remain true(crashy)/true.
        startRun();
        // Crash streak is 2. Engage Safe Mode with obsolete safe values.
        // Safe values are false/true, but from another version.
        // Cached values are true(crashy)/true, but the default values false/false are returned
        // since Safe Mode is falling back to default.
        assertEquals(Behavior.ENGAGED_IGNORING_OUTDATED_SAFE_VALUES,
                CachedFeatureFlags.getSafeModeBehaviorForTesting());
        // TODO(crbug.com/1217708): Assert cached flags values are false/false.
    }
    /** Simulates the browser starting up: reads a flag, then reaches the start checkpoint. */
    private void startRun() {
        CachedFeatureFlags.isEnabled(CRASHY_FEATURE);
        CachedFeatureFlags.onStartOrResumeCheckpoint();
    }
    /** Simulates a run aborted during the First Run Experience (pause, then process kill). */
    private void endFirstRunWithKill() {
        CachedFeatureFlags.onPauseCheckpoint();
        CachedFeatureFlags.resetFlagsForTesting();
    }
    /** Simulates a crash: the run ends without ever reaching the end checkpoint. */
    private void endCrashyRun() {
        CachedFeatureFlags.resetFlagsForTesting();
    }
    /** Simulates a clean run that caches the given native flag values and writes safe values. */
    private void endCleanRun(boolean crashyFeatureValue, boolean okFeatureValue) {
        FeatureList.setTestFeatures(makeFeatureMap(crashyFeatureValue, okFeatureValue));
        CachedFeatureFlags.cacheNativeFlags(Arrays.asList(CRASHY_FEATURE, OK_FEATURE));
        CachedFeatureFlags.onEndCheckpoint();
        // Async task writing values should have run synchronously because of ShadowPostTask.
        assertTrue(CachedFlagsSafeMode.getSafeValuePreferences().contains(
                "Chrome.Flags.CachedFlag.CrashyFeature"));
        CachedFeatureFlags.resetFlagsForTesting();
    }
    /** Builds a feature map with the given values for the crashy and OK features. */
    private HashMap<String, Boolean> makeFeatureMap(
            boolean crashyFeatureValue, boolean okFeatureValue) {
        // Plain map instead of double-brace initialization, which creates an anonymous
        // HashMap subclass holding a hidden reference to the enclosing test instance.
        HashMap<String, Boolean> features = new HashMap<>();
        features.put(CRASHY_FEATURE, crashyFeatureValue);
        features.put(OK_FEATURE, okFeatureValue);
        return features;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.hdfs2;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.TypeConverter;
import org.apache.camel.util.IOHelper;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
public class HdfsWritableFactories {
    /**
     * Converts between plain Java values and Hadoop {@link Writable}s. Implementations
     * report the serialized size of the value through the {@code size} holder.
     */
    interface HdfsWritableFactory {
        Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size);
        Object read(Writable writable, Holder<Integer> size);
    }
    /** Maps the absence of a value to {@link NullWritable}; size is always 0. */
    public static final class HdfsNullWritableFactory implements HdfsWritableFactory {
        @Override
        public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
            size.value = 0;
            return NullWritable.get();
        }
        @Override
        public Object read(Writable writable, Holder<Integer> size) {
            size.value = 0;
            return null;
        }
    }
    /** Converts a single byte; serialized size is 1. */
    public static final class HdfsByteWritableFactory implements HdfsWritableFactory {
        private static final int SIZE = 1;
        @Override
        public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
            size.value = SIZE;
            ByteWritable writable = new ByteWritable();
            writable.set(typeConverter.convertTo(Byte.class, value));
            return writable;
        }
        @Override
        public Object read(Writable writable, Holder<Integer> size) {
            size.value = SIZE;
            return ((ByteWritable) writable).get();
        }
    }
    /** Converts a boolean; serialized size is 1. */
    public static final class HdfsBooleanWritableFactory implements HdfsWritableFactory {
        private static final int SIZE = 1;
        @Override
        public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
            size.value = SIZE;
            BooleanWritable writable = new BooleanWritable();
            writable.set(typeConverter.convertTo(Boolean.class, value));
            return writable;
        }
        @Override
        public Object read(Writable writable, Holder<Integer> size) {
            size.value = SIZE;
            return ((BooleanWritable) writable).get();
        }
    }
    /** Converts a {@link ByteBuffer}'s backing array to/from {@link BytesWritable}. */
    public static final class HdfsBytesWritableFactory implements HdfsWritableFactory {
        @Override
        public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
            BytesWritable writable = new BytesWritable();
            ByteBuffer bb = (ByteBuffer) value;
            // Fetch the backing array once instead of three separate array() calls.
            // NOTE(review): array() ignores the buffer's position/limit and throws for
            // read-only or direct buffers -- confirm callers always pass heap buffers.
            byte[] bytes = bb.array();
            writable.set(bytes, 0, bytes.length);
            size.value = bytes.length;
            return writable;
        }
        @Override
        public Object read(Writable writable, Holder<Integer> size) {
            size.value = ((BytesWritable) writable).getLength();
            ByteBuffer bb = ByteBuffer.allocate(size.value);
            bb.put(((BytesWritable) writable).getBytes(), 0, size.value);
            return bb;
        }
    }
    /** Converts a double; serialized size is 8. */
    public static final class HdfsDoubleWritableFactory implements HdfsWritableFactory {
        private static final int SIZE = 8;
        @Override
        public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
            size.value = SIZE;
            DoubleWritable writable = new DoubleWritable();
            writable.set(typeConverter.convertTo(Double.class, value));
            return writable;
        }
        @Override
        public Object read(Writable writable, Holder<Integer> size) {
            size.value = SIZE;
            return ((DoubleWritable) writable).get();
        }
    }
    /** Converts a float; serialized size is 4. */
    public static final class HdfsFloatWritableFactory implements HdfsWritableFactory {
        private static final int SIZE = 4;
        @Override
        public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
            size.value = SIZE;
            FloatWritable writable = new FloatWritable();
            writable.set(typeConverter.convertTo(Float.class, value));
            return writable;
        }
        @Override
        public Object read(Writable writable, Holder<Integer> size) {
            size.value = SIZE;
            return ((FloatWritable) writable).get();
        }
    }
    /** Converts an int; serialized size is 4. */
    public static final class HdfsIntWritableFactory implements HdfsWritableFactory {
        private static final int SIZE = 4;
        @Override
        public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
            size.value = SIZE;
            IntWritable writable = new IntWritable();
            writable.set(typeConverter.convertTo(Integer.class, value));
            return writable;
        }
        @Override
        public Object read(Writable writable, Holder<Integer> size) {
            size.value = SIZE;
            return ((IntWritable) writable).get();
        }
    }
    /** Converts a long; serialized size is 8. */
    public static final class HdfsLongWritableFactory implements HdfsWritableFactory {
        private static final int SIZE = 8;
        @Override
        public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
            size.value = SIZE;
            LongWritable writable = new LongWritable();
            writable.set(typeConverter.convertTo(Long.class, value));
            return writable;
        }
        @Override
        public Object read(Writable writable, Holder<Integer> size) {
            size.value = SIZE;
            return ((LongWritable) writable).get();
        }
    }
    /** Converts a String to/from Hadoop {@link Text}; size is the UTF-8 byte length. */
    public static final class HdfsTextWritableFactory implements HdfsWritableFactory {
        @Override
        public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
            Text writable = new Text();
            writable.set(typeConverter.convertTo(String.class, value));
            size.value = writable.getBytes().length;
            return writable;
        }
        @Override
        public Object read(Writable writable, Holder<Integer> size) {
            size.value = ((Text) writable).getLength();
            return writable.toString();
        }
    }
    /** Serializes an arbitrary object by draining it as an InputStream into bytes. */
    public static final class HdfsObjectWritableFactory implements HdfsWritableFactory {
        @Override
        public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
            InputStream is = null;
            try {
                is = typeConverter.convertTo(InputStream.class, value);
                ByteArrayOutputStream bos = new ByteArrayOutputStream();
                IOUtils.copyBytes(is, bos, HdfsConstants.DEFAULT_BUFFERSIZE, false);
                // Snapshot once: toByteArray() copies the whole buffer on every call,
                // so the original three calls performed three full copies.
                byte[] bytes = bos.toByteArray();
                BytesWritable writable = new BytesWritable();
                writable.set(bytes, 0, bytes.length);
                size.value = bytes.length;
                return writable;
            } catch (IOException ex) {
                throw new RuntimeCamelException(ex);
            } finally {
                // Always release the source stream, even on failure.
                IOHelper.close(is);
            }
        }
        @Override
        public Object read(Writable writable, Holder<Integer> size) {
            // Reading back an arbitrary object is not supported; report an empty value.
            size.value = 0;
            return null;
        }
    }
}
| |
package com.helpplusapp.amit.helpplus;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.design.widget.Snackbar;
import android.support.v4.app.Fragment;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.InputType;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.afollestad.materialdialogs.MaterialDialog;
import com.firebase.client.Firebase;
import com.firebase.ui.database.FirebaseRecyclerAdapter;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ServerValue;
import com.helpplusapp.amit.helpplus.model.Constants;
import com.helpplusapp.amit.helpplus.model.Tags;
import com.twitter.sdk.android.core.TwitterCore;
import com.twitter.sdk.android.core.TwitterSession;
import com.twitter.sdk.android.tweetcomposer.TweetComposer;
import java.util.HashMap;
import io.github.codefalling.recyclerviewswipedismiss.SwipeDismissRecyclerViewTouchListener;
/**
* A simple {@link Fragment} subclass.
*/
public class TagsFragment extends Fragment {
    public TagsFragment() {
        // Required empty public constructor
    }
    private DatabaseReference mFirebaseDatabaseReference;
    private FirebaseAuth mFirebaseAuth;
    private FirebaseUser mFirebaseUser;
    public ProgressBar mProgressBar;
    private RecyclerView mTagsRecyclerView;
    private View mEmptyView;
    private LinearLayoutManager mLinearLayoutManager;
    private Context mContext;
    // Last known number of tags; restored from SharedPreferences in onCreateView to
    // decide whether the empty view should be shown.
    int mItems = 0;
    private FirebaseRecyclerAdapter<Tags, TagsViewHolder> mFirebaseAdapter;
    /** Holds the single TextView that renders one tag row. */
    public static class TagsViewHolder extends RecyclerView.ViewHolder {
        public TextView tagTextView;
        public TagsViewHolder(View v) {
            super(v);
            tagTextView = (TextView) itemView.findViewById(R.id.tagTextView);
        }
    }
    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Firebase.setAndroidContext(getContext());
        mContext = getContext();
        // Initialize Firebase Auth and the signed-in user whose tags are shown.
        mFirebaseAuth = FirebaseAuth.getInstance();
        mFirebaseUser = mFirebaseAuth.getCurrentUser();
        setHasOptionsMenu(true);
    }
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_tags, container, false);
        mProgressBar = (ProgressBar) view.findViewById(R.id.progressBar);
        mEmptyView = view.findViewById(R.id.recyclerview_tags_empty);
        mTagsRecyclerView = (RecyclerView) view.findViewById(R.id.tagsRecyclerView);
        mLinearLayoutManager = new LinearLayoutManager(getContext());
        mFirebaseDatabaseReference = FirebaseDatabase.getInstance().getReference();
        // Adapter bound to the signed-in user's "tags" node; rows update live.
        mFirebaseAdapter = new FirebaseRecyclerAdapter<Tags, TagsViewHolder>(
                Tags.class,
                R.layout.item_message,
                TagsViewHolder.class,
                mFirebaseDatabaseReference.child("users").child(mFirebaseUser.getUid()).child("tags")
        ) {
            @Override
            protected void populateViewHolder(TagsViewHolder viewHolder, Tags tags, int position) {
                mProgressBar.setVisibility(ProgressBar.INVISIBLE);
                viewHolder.tagTextView.setText(tags.getTagname());
            }
        };
        // Reset the persisted count. apply() avoids synchronous disk I/O on the main
        // thread (commit() blocks); the in-memory value updates immediately, so the
        // getInt() below still observes this write.
        SharedPreferences.Editor editor = mContext.getSharedPreferences(Constants.TAGS_COUNT_PREF, Context.MODE_PRIVATE).edit();
        editor.putInt(Constants.TAGS_COUNT, 0);
        editor.apply();
        mFirebaseAdapter.registerAdapterDataObserver(new RecyclerView.AdapterDataObserver() {
            @Override
            public void onItemRangeInserted(int positionStart, int itemCount) {
                super.onItemRangeInserted(positionStart, itemCount);
                // Persist the item count so the empty view can be toggled on re-entry.
                SharedPreferences.Editor editor = mContext.getSharedPreferences(Constants.TAGS_COUNT_PREF, Context.MODE_PRIVATE).edit();
                editor.putInt(Constants.TAGS_COUNT, itemCount);
                editor.apply();
            }
        });
        mTagsRecyclerView.setLayoutManager(mLinearLayoutManager);
        mTagsRecyclerView.setAdapter(mFirebaseAdapter);
        SharedPreferences sharedPref = mContext.getSharedPreferences(
                Constants.TAGS_COUNT_PREF, Context.MODE_PRIVATE);
        // BUG FIX: the return value of getInt() was previously discarded, leaving mItems
        // at 0 and showing the empty view even when tags exist.
        mItems = sharedPref.getInt(Constants.TAGS_COUNT, mItems);
        if (mItems == 0) {
            mProgressBar.setVisibility(ProgressBar.INVISIBLE);
            mEmptyView.setVisibility(View.VISIBLE);
            Log.d("Visibility", Integer.toString(mEmptyView.getVisibility()) + mItems);
        } else {
            mEmptyView.setVisibility(View.GONE);
        }
        // Swipe dismisses (deletes) a tag; a tap opens the tweet composer with the tag text.
        SwipeDismissRecyclerViewTouchListener listener = new SwipeDismissRecyclerViewTouchListener.Builder(
                mTagsRecyclerView,
                new SwipeDismissRecyclerViewTouchListener.DismissCallbacks() {
                    @Override
                    public boolean canDismiss(int position) {
                        return true;
                    }
                    @Override
                    public void onDismiss(View view) {
                        // NOTE(review): getChildPosition() is deprecated; switch to
                        // getChildAdapterPosition() when upgrading the support library.
                        int id = mTagsRecyclerView.getChildPosition(view);
                        // Build the message BEFORE removing the item, so we do not read
                        // an entry that the adapter may have already dropped.
                        Snackbar snackbar = Snackbar
                                .make(view, String.format("%s deleted", mFirebaseAdapter.getItem(id).getTagname()), Snackbar.LENGTH_SHORT);
                        mFirebaseAdapter.getRef(id).removeValue();
                        mFirebaseAdapter.notifyDataSetChanged();
                        snackbar.show();
                    }
                })
                .setIsVertical(false)
                .setItemClickCallback(new SwipeDismissRecyclerViewTouchListener.OnItemClickCallBack() {
                    @Override
                    public void onClick(int position) {
                        // Compose a tweet pre-filled with the tapped tag.
                        TweetComposer.Builder builder = new TweetComposer.Builder(getContext())
                                .text(mFirebaseAdapter.getItem(position).getTagname() + " #HelpPlus ");
                        builder.show();
                        Log.d("Item clicked at", Integer.toString(position));
                    }
                })
                .create();
        mTagsRecyclerView.setOnTouchListener(listener);
        return view;
    }
    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        inflater.inflate(R.menu.menu_tags_fragment, menu);
        super.onCreateOptionsMenu(menu, inflater);
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (item.getItemId() == R.id.action_add_tag) {
            // Prompt for a new tag (2-20 chars) and store it under the current user
            // with a server-side creation timestamp.
            new MaterialDialog.Builder(getContext())
                    .title(R.string.addtag_title_text)
                    .content("Hashtag or Screen text")
                    .inputType(InputType.TYPE_CLASS_TEXT)
                    .inputRangeRes(2, 20, R.color.tw__composer_red)
                    .input(R.string.tags_input_hint, R.string.input_prefill, new MaterialDialog.InputCallback() {
                        @Override
                        public void onInput(MaterialDialog dialog, CharSequence input) {
                            HashMap<String, Object> timestampCreated = new HashMap<>();
                            timestampCreated.put("timestamp", ServerValue.TIMESTAMP);
                            Tags tags = new Tags(input.toString(), timestampCreated);
                            mFirebaseDatabaseReference.child("users").child(mFirebaseUser.getUid()).child("tags").push().setValue(tags);
                        }
                    }).show();
            return true;
        }
        // Delegate items this fragment does not own instead of claiming them.
        return super.onOptionsItemSelected(item);
    }
    @Override
    public void onDestroy() {
        super.onDestroy();
        // Detach the adapter's Firebase listeners to avoid leaking them with the fragment.
        if (mFirebaseAdapter != null) {
            mFirebaseAdapter.cleanup();
        }
    }
}
| |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.remote.server;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.Platform;
import org.openqa.selenium.StubDriver;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.remote.BrowserType;
import org.openqa.selenium.remote.DesiredCapabilities;
@RunWith(JUnit4.class)
public class DriverFactoryTest {

  // Factory under test; rebuilt before each test for isolation.
  private DefaultDriverFactory factory;

  @Before
  public void setUp() throws Exception {
    factory = new DefaultDriverFactory();
  }

  // Registering a capabilities -> driver mapping makes it discoverable.
  @Test
  public void testShouldBeAbleToRegisterNewDrivers() {
    Capabilities capabilities = DesiredCapabilities.htmlUnit();
    assertFalse(factory.hasMappingFor(capabilities));
    factory.registerDriver(capabilities, DriverOne.class);
    assertTrue(factory.hasMappingFor(capabilities));
  }

  @Test
  public void testShouldReturnMatchIfOneFieldMatchesAndOnlyOneDriverIsRegistered() {
    DesiredCapabilities template = new DesiredCapabilities();
    template.setBrowserName("foo");
    template.setVersion("1.0");
    template.setPlatform(Platform.getCurrent());
    factory.registerDriver(template, DriverOne.class);

    // Only the browser name matches, but with a single registered driver
    // that is still the best match.
    DesiredCapabilities example = new DesiredCapabilities();
    example.setBrowserName(template.getBrowserName());
    Class<? extends WebDriver> result = factory.getBestMatchFor(example);
    assertEquals(DriverOne.class, result);
  }

  @Test
  public void testShouldReturnDriverWhereTheMostCapabilitiesMatch() {
    DesiredCapabilities first = new DesiredCapabilities();
    first.setBrowserName("foo");
    first.setVersion("1.0");

    DesiredCapabilities second = new DesiredCapabilities();
    second.setBrowserName("bar"); // Different name
    second.setVersion("1.0");

    factory.registerDriver(first, DriverOne.class);
    factory.registerDriver(second, DriverTwo.class);

    DesiredCapabilities example = new DesiredCapabilities();

    example.setBrowserName("foo");
    Class<? extends WebDriver> result = factory.getBestMatchFor(example);
    assertEquals(DriverOne.class, result);

    example.setBrowserName("bar");
    result = factory.getBestMatchFor(example);
    assertEquals(DriverTwo.class, result);
  }

  @Test
  public void testShouldReturnDriverWhereTheMostCapabilitiesMatch_lotsOfRegisteredDrivers() {
    // Local stand-ins for real driver classes; only class identity matters.
    abstract class Chrome implements WebDriver {}
    abstract class Firefox implements WebDriver {}
    abstract class HtmlUnit implements WebDriver {}
    abstract class Ie implements WebDriver {}
    abstract class Opera implements WebDriver {}

    factory.registerDriver(DesiredCapabilities.chrome(), Chrome.class);
    factory.registerDriver(DesiredCapabilities.firefox(), Firefox.class);
    factory.registerDriver(DesiredCapabilities.htmlUnit(), HtmlUnit.class);
    factory.registerDriver(DesiredCapabilities.internetExplorer(), Ie.class);
    factory.registerDriver(DesiredCapabilities.opera(), Opera.class);

    DesiredCapabilities desiredCapabilities = new DesiredCapabilities();
    desiredCapabilities.setBrowserName(BrowserType.IE);
    desiredCapabilities.setVersion("");
    desiredCapabilities.setJavascriptEnabled(true);
    desiredCapabilities.setPlatform(Platform.ANY);

    assertEquals(Ie.class, factory.getBestMatchFor(desiredCapabilities));
  }

  // Ties are broken in favour of the most recent registration.
  @Test
  public void testShouldReturnMostRecentlyAddedDriverWhenAllCapabilitiesAreEqual() {
    Capabilities capabilities = DesiredCapabilities.firefox();
    factory.registerDriver(capabilities, DriverOne.class);
    factory.registerDriver(capabilities, DriverTwo.class);
    Class<? extends WebDriver> result = factory.getBestMatchFor(capabilities);
    assertEquals(DriverTwo.class, result);
  }

  @Test
  public void testShouldConsiderPlatform() {
    DesiredCapabilities windows = new DesiredCapabilities("browser", "v1", Platform.WINDOWS);
    DesiredCapabilities linux = new DesiredCapabilities("browser", "v1", Platform.LINUX);

    factory.registerDriver(windows, DriverOne.class);
    factory.registerDriver(linux, DriverTwo.class);

    assertEquals(DriverOne.class, factory.getBestMatchFor(windows));
    assertEquals(DriverTwo.class, factory.getBestMatchFor(linux));
  }

  @Test
  public void testShouldMatchAgainstAnyPlatformWhenRequestingAny() {
    DesiredCapabilities windowsVista = new DesiredCapabilities("browser", "v1", Platform.VISTA);
    DesiredCapabilities windowsXp = new DesiredCapabilities("browser", "v1", Platform.XP);
    DesiredCapabilities anyWindows = new DesiredCapabilities("browser", "v1", Platform.ANY);

    factory.registerDriver(windowsVista, DriverOne.class);

    assertEquals(DriverOne.class, factory.getBestMatchFor(windowsVista));
    assertEquals(DriverOne.class, factory.getBestMatchFor(anyWindows));
    assertEquals("Should always get a match if a driver has been registered",
        DriverOne.class, factory.getBestMatchFor(windowsXp));
  }

  @Test
  public void testShouldFailFastWhenMatchingAndNoDriversHaveBeenRegistered() {
    try {
      factory.getBestMatchFor(DesiredCapabilities.chrome());
      fail("Should have thrown.");
    } catch (IllegalStateException expected) {
      // Expected: nothing has been registered yet.
    }
  }

  @Test
  public void testShouldConsiderJavascriptCapabilities() {
    DesiredCapabilities nojavascript = new DesiredCapabilities("browser", "v1", Platform.LINUX);
    nojavascript.setJavascriptEnabled(false);
    DesiredCapabilities javascript = new DesiredCapabilities("browser", "v1", Platform.LINUX);
    javascript.setJavascriptEnabled(true);

    factory.registerDriver(nojavascript, DriverOne.class);
    factory.registerDriver(javascript, DriverTwo.class);

    assertEquals(DriverOne.class, factory.getBestMatchFor(nojavascript));
    assertEquals(DriverTwo.class, factory.getBestMatchFor(javascript));
  }

  @Test
  public void testShouldCallAConstructorTakingACapabilitiesArgInPreferenceToANoArgOne() {
    DesiredCapabilities caps = new DesiredCapabilities();
    caps.setBrowserName("example");
    factory.registerDriver(caps, CapabilitiesDriver.class);

    CapabilitiesDriver driver = (CapabilitiesDriver) factory.newInstance(caps);

    assertEquals(caps, driver.getCapabilities());
  }

  // Marker driver types used purely as registration targets.
  public static abstract class DriverOne implements WebDriver {}
  public static abstract class DriverTwo implements WebDriver {}

  // Driver that records the Capabilities it was constructed with, so tests
  // can verify which constructor the factory invoked.
  public static class CapabilitiesDriver extends StubDriver {
    private Capabilities caps;

    public CapabilitiesDriver() {
    }

    public CapabilitiesDriver(Capabilities caps) {
      this.caps = caps;
    }

    public Capabilities getCapabilities() {
      return caps;
    }
  }
}
| |
/*
Derby - Class org.apache.derby.impl.jdbc.authentication.AuthenticationServiceBase
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.jdbc.authentication;
import org.apache.derby.authentication.UserAuthenticator;
import org.apache.derby.iapi.reference.Property;
import org.apache.derby.iapi.jdbc.AuthenticationService;
import org.apache.derby.iapi.reference.Limits;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.services.context.ContextService;
import org.apache.derby.iapi.services.daemon.Serviceable;
import org.apache.derby.iapi.services.monitor.ModuleSupportable;
import org.apache.derby.iapi.services.monitor.ModuleControl;
import org.apache.derby.iapi.services.monitor.Monitor;
import org.apache.derby.iapi.store.access.AccessFactory;
import org.apache.derby.iapi.services.property.PropertyFactory;
import org.apache.derby.iapi.store.access.TransactionController;
import org.apache.derby.iapi.services.property.PropertySetCallback;
import org.apache.derby.shared.common.sanity.SanityManager;
import org.apache.derby.iapi.reference.Attribute;
import org.apache.derby.iapi.services.property.PropertyUtil;
import org.apache.derby.iapi.util.StringUtil;
import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;
import org.apache.derby.iapi.sql.dictionary.DataDictionary;
import org.apache.derby.iapi.sql.dictionary.PasswordHasher;
import org.apache.derby.iapi.sql.dictionary.UserDescriptor;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.security.SecureRandom;
import java.util.Dictionary;
import java.util.Properties;
import org.apache.derby.iapi.reference.SQLState;
/**
* <p>
* This is the authentication service base class.
* </p>
* <p>
* There can be 1 Authentication Service for the whole Derby
* system and/or 1 authentication per database.
* In a near future, we intend to allow multiple authentication services
* per system and/or per database.
* </p>
*
* <p>
* It should be extended by the specialized authentication services.
* </p>
*
* <p><strong>IMPORTANT NOTE:</strong></p>
*
* <p>
* User passwords are hashed using a message digest algorithm
* if they're stored in the database. They are not hashed
* if they were defined at the system level.
* </p>
*
* <p>
* The passwords can be hashed using two different schemes:
* </p>
*
* <ul>
* <li>The SHA-1 authentication scheme, which was the only available scheme
* in Derby 10.5 and earlier. This scheme uses the SHA-1 message digest
* algorithm.</li>
* <li>The configurable hash authentication scheme, which allows the users to
* specify which message digest algorithm to use.</li>
* </ul>
*
* <p>
* In order to use the configurable hash authentication scheme, the users have
* to set the {@code derby.authentication.builtin.algorithm} property (on
* system level or database level) to the name of an algorithm that's available
* in one of the security providers registered on the system. If this property
* is not set, or if it's set to NULL or an empty string, the SHA-1
* authentication scheme is used.
* </p>
*
* <p>
* Which scheme to use is decided when a password is about to be stored in the
* database. One database may therefore contain passwords stored using
* different schemes. In order to determine which scheme to use when comparing
* a user's credentials with those stored in the database, the stored password
* is prefixed with an identifier that tells which scheme is being used.
* Passwords stored using the SHA-1 authentication scheme are prefixed with
* {@link PasswordHasher#ID_PATTERN_SHA1_SCHEME}. Passwords that are stored using the
* configurable hash authentication scheme are prefixed with
* {@link PasswordHasher#ID_PATTERN_CONFIGURABLE_HASH_SCHEME} and suffixed with the name of
* the message digest algorithm.
* </p>
*/
public abstract class AuthenticationServiceBase
implements AuthenticationService, ModuleControl, ModuleSupportable, PropertySetCallback {
// Scheme performing the actual credential checks; installed by the
// specialized subclass via setAuthenticationService().
protected UserAuthenticator authenticationScheme;

// required to retrieve service properties
private AccessFactory store;

/**
    Trace flag to trace authentication operations
*/
public static final String AuthenticationTrace =
    SanityManager.DEBUG ? "AuthenticationTrace" : null;

/**
    Userid with Strong password substitute DRDA security mechanism
*/
protected static final int SECMEC_USRSSBPWD = 8;

//
// constructor
//

// No-arg constructor; real initialization happens in boot().
public AuthenticationServiceBase() {
}
/**
 * Install the user-authentication scheme this service delegates to.
 * The specialized subclass is the principal caller.
 *
 * @param aScheme the scheme performing credential checks; must not be null
 */
protected void setAuthenticationService(UserAuthenticator aScheme) {
    // specialized class is the principal caller.
    this.authenticationScheme = aScheme;

    if (SanityManager.DEBUG)
    {
        SanityManager.ASSERT(this.authenticationScheme != null,
            "There is no authentication scheme for that service!");

        if (SanityManager.DEBUG_ON(AuthenticationTrace)) {
            // Trace which service/scheme pair is now active.
            java.io.PrintWriter iDbgStream =
                SanityManager.GET_DEBUG_STREAM();
            iDbgStream.println("Authentication Service: [" +
                this.toString() + "]");
            iDbgStream.println("Authentication Scheme : [" +
                this.authenticationScheme.toString() + "]");
        }
    }
}
/*
** Methods of module control - To be overridden
*/

/**
    Start this module. In this case, nothing needs to be done.
    @see org.apache.derby.iapi.services.monitor.ModuleControl#boot
    @exception StandardException upon failure to load/boot
    the expected authentication service.
*/
public void boot(boolean create, Properties properties)
    throws StandardException
{
    //
    // we expect the Access factory to be available since we're
    // at boot stage.
    //
    store = (AccessFactory)
        Monitor.getServiceModule(this, AccessFactory.MODULE);
    // register to be notified upon db properties changes
    // _only_ if we're on a database context of course :)
    // (pf is null when booting at the system level, where there is no
    // per-database property factory)
    PropertyFactory pf = (PropertyFactory)
        Monitor.getServiceModule(this, org.apache.derby.iapi.reference.Module.PropertyFactory);
    if (pf != null)
        pf.addPropertySetNotification(this);
}
/**
 * Stop this module.
 * @see org.apache.derby.iapi.services.monitor.ModuleControl#stop
 */
public void stop() {
    // nothing special to be done yet.
}
/*
** Methods of AuthenticationService
*/

/**
 * Authenticate a user inside JBMS. This is an overload method.
 *
 * We're passed-in a Properties object containing user credentials information
 * (as well as database name if user needs to be validated for a certain
 * database access).
 *
 * @param databaseName database the user wants to access, may be null
 * @param userInfo connection properties carrying the credentials
 * @return {@code true} if the installed scheme accepts the credentials
 *
 * @see
 * org.apache.derby.iapi.jdbc.AuthenticationService#authenticate
 */
public boolean authenticate(String databaseName, Properties userInfo) throws java.sql.SQLException
{
    // No credentials supplied at all: reject immediately.
    if (userInfo == (Properties) null)
        return false;

    String userName = userInfo.getProperty(Attribute.USERNAME_ATTR);
    // Reject user names longer than the identifier limit instead of
    // passing them to the scheme.
    if ((userName != null) && userName.length() > Limits.MAX_IDENTIFIER_LENGTH) {
        return false;
    }

    if (SanityManager.DEBUG)
    {
        if (SanityManager.DEBUG_ON(AuthenticationTrace)) {
            java.io.PrintWriter iDbgStream =
                SanityManager.GET_DEBUG_STREAM();
            iDbgStream.println(
                " - Authentication request: user [" +
                userName + "]"+ ", database [" +
                databaseName + "]");
            // The following will print the stack trace of the
            // authentication request to the log.
            //Throwable t = new Throwable();
            //istream.println("Authentication Request Stack trace:");
            //t.printStackTrace(istream.getPrintWriter());
        }
    }
    // Delegate the actual credential check to the installed scheme.
    return this.authenticationScheme.authenticateUser(userName,
        userInfo.getProperty(Attribute.PASSWORD_ATTR),
        databaseName,
        userInfo
        );
}
public String getSystemCredentialsDatabaseName() { return null; }
/**
 * Returns a property if it was set at the database or
 * system level. Treated as SERVICE property by default.
 *
 * @param key the property name
 * @return a property string value, or {@code null} if unset or on error.
 **/
public String getProperty(String key) {

    String propertyValue = null;
    TransactionController tc = null;

    try {

        tc = getTransaction();

        propertyValue =
            PropertyUtil.getServiceProperty(tc,
                key,
                (String) null);
        if (tc != null) {
            tc.commit();
            tc = null;
        }

    } catch (StandardException se) {
        // Do nothing and just return
        // NOTE(review): if getServiceProperty() throws, the transaction is
        // neither committed nor otherwise released here — confirm whether
        // the context manager reclaims it.
    }

    return propertyValue;
}
/**
 * <p>
 * Get a transaction for performing authentication at the database level.
 * </p>
 *
 * @return a transaction from the access factory, or {@code null} when this
 *         service is not bound to a database (system-level authentication)
 */
protected TransactionController getTransaction()
    throws StandardException
{
    if ( store == null ) { return null; }
    else
    {
        return store.getTransaction( ContextService.getFactory().getCurrentContextManager() );
    }
}
/**
 * Get all the database properties.
 * @return the database properties, or {@code null} if there is no
 * access factory
 */
Properties getDatabaseProperties() throws StandardException {
    Properties props = null;
    TransactionController tc = getTransaction();
    if (tc != null) {
        try {
            props = tc.getProperties();
        } finally {
            // Always release the transaction, even if getProperties() fails.
            tc.commit();
        }
    }
    return props;
}
/**
 * <p>
 * Get the name of the database if we are performing authentication at the
 * database level.
 * </p>
 *
 * @return the service name derived from the access factory, or {@code null}
 *         when this service is not bound to a database (system level)
 */
protected String getServiceName()
{
    return (store == null) ? null : Monitor.getServiceName( store );
}
/**
 * Look up a property stored at the database level only.
 *
 * @param key the property name
 * @return the database property value, or {@code null} if unset or on error
 */
public String getDatabaseProperty(String key) {
    String propertyValue = null;
    TransactionController tc = null;

    try {

        if (store != null)
            tc = store.getTransaction(
                ContextService.getFactory().getCurrentContextManager());

        propertyValue =
            PropertyUtil.getDatabaseProperty(tc, key);

        if (tc != null) {
            tc.commit();
            tc = null;
        }

    } catch (StandardException se) {
        // Do nothing and just return
    }

    return propertyValue;
}
/**
 * Look up a property at the JVM system level, unless the database has been
 * restricted to database-only properties via
 * {@code derby.database.propertiesOnly}.
 *
 * @param key the property name
 * @return the system property value, or {@code null} when system-level
 *         lookups are disabled
 */
public String getSystemProperty(String key) {

    String propertiesOnly =
        this.getDatabaseProperty(Property.DATABASE_PROPERTIES_ONLY);

    if (Boolean.valueOf(propertiesOnly).booleanValue()) {
        return null;
    }

    return PropertyUtil.getSystemProperty(key);
}
/*
** Methods of PropertySetCallback
*/

/** @see PropertySetCallback#init */
public void init(boolean dbOnly, Dictionary p) {
    // not called yet ...
}
/**
 * Validate a property change before it is applied.
 *
 * @see PropertySetCallback#validate
 *
 * @return {@code true} when this callback takes ownership of the key
 *         (user password properties, which get remapped), {@code false}
 *         otherwise
 * @exception StandardException if the new value is rejected
 */
public boolean validate(String key, Serializable value, Dictionary p)
    throws StandardException
{
    // user password properties need to be remapped. nothing else needs remapping.
    if ( key.startsWith(org.apache.derby.iapi.reference.Property.USER_PROPERTY_PREFIX) ) { return true; }

    String stringValue = (String) value;
    boolean settingToNativeLocal = Property.AUTHENTICATION_PROVIDER_NATIVE_LOCAL.equals( stringValue );

    if ( Property.AUTHENTICATION_PROVIDER_PARAMETER.equals( key ) )
    {
        // NATIVE + LOCAL is the only value of this property which can be persisted
        if (
            ( stringValue != null ) &&
            ( stringValue.startsWith( Property.AUTHENTICATION_PROVIDER_NATIVE ) )&&
            !settingToNativeLocal
            )
        {
            throw StandardException.newException( SQLState.PROPERTY_DBO_LACKS_CREDENTIALS );
        }

        // once set to NATIVE authentication, you can't change it
        String oldValue = (String) p.get( Property.AUTHENTICATION_PROVIDER_PARAMETER );
        if ( (oldValue != null) && oldValue.startsWith( Property.AUTHENTICATION_PROVIDER_NATIVE ) )
        {
            throw StandardException.newException( SQLState.PROPERTY_CANT_UNDO_NATIVE );
        }

        // can't turn on NATIVE + LOCAL authentication unless the DBO's credentials are already stored.
        // this should prevent setting NATIVE + LOCAL authentication in pre-10.9 databases too
        // because you can't store credentials in a pre-10.9 database.
        if ( settingToNativeLocal )
        {
            DataDictionary dd = getDataDictionary();
            String dbo = dd.getAuthorizationDatabaseOwner();
            UserDescriptor userCredentials = dd.getUser( dbo );

            if ( userCredentials == null )
            {
                throw StandardException.newException( SQLState.PROPERTY_DBO_LACKS_CREDENTIALS );
            }
        }
    }

    // Reject unparseable password-lifetime values.
    if ( Property.AUTHENTICATION_NATIVE_PASSWORD_LIFETIME.equals( key ) )
    {
        if ( parsePasswordLifetime( stringValue ) == null )
        {
            throw StandardException.newException
                ( SQLState.BAD_PASSWORD_LIFETIME, Property.AUTHENTICATION_NATIVE_PASSWORD_LIFETIME );
        }
    }

    // Reject unparseable password-expiration thresholds.
    if ( Property.AUTHENTICATION_PASSWORD_EXPIRATION_THRESHOLD.equals( key ) )
    {
        if ( parsePasswordThreshold( stringValue ) == null )
        {
            throw StandardException.newException
                ( SQLState.BAD_PASSWORD_LIFETIME, Property.AUTHENTICATION_PASSWORD_EXPIRATION_THRESHOLD );
        }
    }

    return false;
}
/**
 * Parse the value of the password lifetime property.
 *
 * @param passwordLifetimeString string form of the lifetime; may be null
 * @return the lifetime (negative values are clamped to 0), or {@code null}
 *         if the string is not a valid long
 */
protected Long parsePasswordLifetime( String passwordLifetimeString )
{
    try {
        long passwordLifetime = Long.parseLong( passwordLifetimeString );

        if ( passwordLifetime < 0L ) { passwordLifetime = 0L; }

        // Long.valueOf() uses the boxed cache; the Long(long) constructor
        // is deprecated. Long.parseLong(null) throws NumberFormatException,
        // so narrowing the catch preserves the old null-handling.
        return Long.valueOf( passwordLifetime );
    } catch (NumberFormatException e) { return null; }
}
/**
 * Parse the value of the password expiration threshold property.
 *
 * @param expirationThresholdString string form of the threshold; may be null
 * @return the threshold, or {@code null} if the string is missing,
 *         malformed, or not strictly positive
 */
protected Double parsePasswordThreshold( String expirationThresholdString )
{
    try {
        double expirationThreshold = Double.parseDouble( expirationThresholdString );

        // Thresholds must be strictly positive to be meaningful.
        if ( expirationThreshold <= 0 ) { return null; }

        // Double.valueOf() replaces the deprecated Double(double) ctor.
        return Double.valueOf( expirationThreshold );
    } catch (Exception e) {
        // Broad catch kept deliberately: Double.parseDouble(null) throws
        // NullPointerException, not NumberFormatException.
        return null;
    }
}
/**
 * @see PropertySetCallback#apply
 */
public Serviceable apply(String key,Serializable value,Dictionary p)
{
    // No post-commit work needed for authentication properties.
    return null;
}
/**
 * Remap a property value before it is stored: hashes
 * {@code derby.user.*} passwords unless LDAP authentication is configured.
 *
 * @see PropertySetCallback#map
 * @exception StandardException Thrown on error.
 */
public Serializable map(String key, Serializable value, Dictionary p)
    throws StandardException
{
    // We only care for "derby.user." property changes
    // at the moment.
    if (!key.startsWith(org.apache.derby.iapi.reference.Property.USER_PROPERTY_PREFIX)) return null;

    // We do not hash 'derby.user.<userName>' password if
    // the configured authentication service is LDAP as the
    // same property could be used to store LDAP user full DN (X500).
    // In performing this check we only consider database properties
    // not system, service or application properties.
    String authService =
        (String)p.get(org.apache.derby.iapi.reference.Property.AUTHENTICATION_PROVIDER_PARAMETER);

    if ((authService != null) &&
        (StringUtil.SQLEqualsIgnoreCase(authService, org.apache.derby.iapi.reference.Property.AUTHENTICATION_PROVIDER_LDAP)))
        return null;

    // Ok, we can hash this password in the db
    String userPassword = (String) value;

    if (userPassword != null) {
        // hash (digest) the password
        // the caller will retrieve the new value
        String userName =
            key.substring(Property.USER_PROPERTY_PREFIX.length());
        userPassword =
            hashUsingDefaultAlgorithm(userName, userPassword, p);
    }

    return userPassword;
}
// Class implementation

/**
 * Decide whether this service must authenticate connections: either
 * {@code derby.connection.requireAuthentication} is set to true in the
 * given property set, or NATIVE authentication is enabled.
 *
 * @param properties the property set to consult
 * @return {@code true} if connections must be authenticated
 */
protected final boolean requireAuthentication(Properties properties) {

    String requireAuthentication = PropertyUtil.getPropertyFromSet(
        properties,
        org.apache.derby.iapi.reference.Property.REQUIRE_AUTHENTICATION_PARAMETER
        );

    // NATIVE authentication does not require that you set
    // REQUIRE_AUTHENTICATION_PARAMETER, so fall through to that check.
    return Boolean.valueOf(requireAuthentication).booleanValue()
        || PropertyUtil.nativeAuthenticationEnabled( properties );
}
/**
 * <p>
 * This method hashes a clear user password using a
 * Single Hash algorithm such as SHA-1 (SHA equivalent)
 * (it is a 160 bits digest)
 * </p>
 *
 * <p>
 * The digest is returned as an object string.
 * </p>
 *
 * <p>
 * This method is only used by the SHA-1 authentication scheme.
 * </p>
 *
 * @param plainTxtUserPassword Plain text user password
 *
 * @return hashed user password (digest) as a String object
 *         or {@code null} if the plaintext password is {@code null}
 */
protected String hashPasswordSHA1Scheme(String plainTxtUserPassword)
{
    if (plainTxtUserPassword == null)
        return null;

    MessageDigest algorithm = null;
    try
    {
        algorithm = MessageDigest.getInstance("SHA-1");
    } catch (NoSuchAlgorithmException nsae)
    {
        // Ignore as we checked already during service boot-up
        // NOTE(review): if that boot-time check were ever removed, the
        // reset() below would throw a NullPointerException.
    }

    algorithm.reset();
    byte[] bytePasswd = null;
    // Expand the password via the legacy nibble encoding before digesting.
    bytePasswd = toHexByte(plainTxtUserPassword);
    algorithm.update(bytePasswd);
    byte[] hashedVal = algorithm.digest();
    // The prefix identifies the SHA-1 scheme when the stored value is
    // later parsed.
    String hexString = PasswordHasher.ID_PATTERN_SHA1_SCHEME +
        StringUtil.toHexString(hashedVal, 0, hashedVal.length);
    return (hexString);
}
/**
 * <p>
 * Convert a string into a byte array in hex format.
 * </p>
 *
 * <p>
 * For each character (b) two bytes are generated, the first byte
 * represents the high nibble (4 bits) in hexadecimal ({@code b & 0xf0}),
 * the second byte represents the low nibble ({@code b & 0x0f}).
 * </p>
 *
 * <p>
 * The character at {@code str.charAt(0)} is represented by the first two
 * bytes in the returned String.
 * </p>
 *
 * <p>
 * New code is encouraged to use {@code String.getBytes(String)} or similar
 * methods instead, since this method does not preserve all bits for
 * characters whose codepoint exceeds 8 bits. This method is preserved for
 * compatibility with the SHA-1 authentication scheme.
 * </p>
 *
 * @param str string
 * @return the byte[] (with hexadecimal format) form of the string (str)
 */
private static byte[] toHexByte(String str)
{
    byte[] data = new byte[str.length() * 2];

    for (int i = 0; i < str.length(); i++)
    {
        char ch = str.charAt(i);
        int high_nibble = (ch & 0xf0) >>> 4;
        int low_nibble = (ch & 0x0f);
        // NOTE(review): these indices overlap across iterations (i and i+1
        // rather than 2*i and 2*i+1), so each iteration overwrites the
        // previous low nibble and the tail of the array stays zero. It
        // looks like an indexing bug, but it MUST NOT be "fixed": stored
        // SHA-1 password hashes were computed from exactly these bytes, and
        // changing them would invalidate existing credentials.
        data[i] = (byte)high_nibble;
        data[i+1] = (byte)low_nibble;
    }
    return data;
}
/**
 * <p>
 * Hash a password using the default message digest algorithm for this
 * system before it's stored in the database.
 * </p>
 *
 * <p>
 * If the data dictionary supports the configurable hash authentication
 * scheme, and the property {@code derby.authentication.builtin.algorithm}
 * is a non-empty string, the password will be hashed using the
 * algorithm specified by that property. Otherwise, we fall back to the new
 * authentication scheme based on SHA-1. The algorithm used is encoded in
 * the returned token so that the code that validates a user's credentials
 * knows which algorithm to use.
 * </p>
 *
 * @param user the user whose password to hash
 * @param password the plain text password
 * @param props database properties
 * @return a digest of the user name and password formatted as a string,
 *         or {@code null} if {@code password} is {@code null}
 * @throws StandardException if the specified algorithm is not supported
 */
String hashUsingDefaultAlgorithm(String user,
                                 String password,
                                 Dictionary props)
    throws StandardException
{
    if ( password == null ) { return null; }

    // A non-null hasher means the configurable-hash scheme is available;
    // otherwise fall back to the legacy SHA-1 scheme.
    PasswordHasher hasher = getDataDictionary().makePasswordHasher( props );

    if ( hasher != null ) { return hasher.hashAndEncode( user, password ); }
    else { return hashPasswordSHA1Scheme(password); }
}
/**
 * Find the data dictionary for the current connection.
 *
 * @return the {@code DataDictionary} for the current connection
 */
private static DataDictionary getDataDictionary() {
    // Resolved from the thread's current context, so this only works while
    // a language connection context is in place.
    LanguageConnectionContext lcc = (LanguageConnectionContext)
        ContextService.getContext(LanguageConnectionContext.CONTEXT_ID);
    return lcc.getDataDictionary();
}
/**
 * Strong Password Substitution (USRSSBPWD).
 *
 * This method generates a password substitute to authenticate a client
 * which is using a DRDA security mechanism such as SECMEC_USRSSBPWD.
 *
 * Depending how the user is defined in Derby and if BUILTIN
 * is used, the stored password can be in clear-text (system level)
 * or encrypted (hashed - *not decryptable*)) (database level) - If the
 * user has authenticated at the network level via SECMEC_USRSSBPWD, it
 * means we're presented with a password substitute and we need to
 * generate a substitute password coming from the store to compare with
 * the one passed-in.
 *
 * The substitution algorithm used is the same as the one used in the
 * SHA-1 authentication scheme ({@link PasswordHasher#ID_PATTERN_SHA1_SCHEME}), so in
 * the case of database passwords stored using that scheme, we can simply
 * compare the received hash with the stored hash. If the configurable
 * hash authentication scheme {@link PasswordHasher#ID_PATTERN_CONFIGURABLE_HASH_SCHEME}
 * is used, we have no way to find out if the received hash matches the
 * stored password, since we cannot decrypt the hashed passwords and
 * re-apply another hash algorithm. Therefore, strong password substitution
 * only works if the database-level passwords are stored with the SHA-1
 * scheme.
 *
 * NOTE: A lot of this logic could be shared with the DRDA decryption
 * and client encryption managers - This will be done _once_
 * code sharing along with its rules are defined between the
 * Derby engine, client and network code (PENDING).
 *
 * Substitution algorithm works as follow:
 *
 * PW_TOKEN = SHA-1(PW, ID)
 * The password (PW) and user name (ID) can be of any length greater
 * than or equal to 1 byte.
 * The client generates a 20-byte password substitute (PW_SUB) as follows:
 * PW_SUB = SHA-1(PW_TOKEN, RDr, RDs, ID, PWSEQs)
 *
 * w/ (RDs) as the random client seed and (RDr) as the server one.
 *
 * See PWDSSB - Strong Password Substitution Security Mechanism
 * (DRDA Vol.3 - P.650)
 *
 * @param userName user to generate the substitute for
 * @param password stored password (clear text for system-level users,
 *        already hashed for database-level users)
 * @param info connection properties carrying the client/server seeds
 * @param databaseUser whether the password came from the database store
 * @return a substituted password.
 */
protected String substitutePassword(
            String userName,
            String password,
            Properties info,
            boolean databaseUser) {

    MessageDigest messageDigest = null;

    // PWSEQs's 8-byte value constant - See DRDA Vol 3
    byte SECMEC_USRSSBPWD_PWDSEQS[] = {
            (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00,
            (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x01
        };

    // Generated password substitute
    byte[] passwordSubstitute;

    try
    {
        messageDigest = MessageDigest.getInstance("SHA-1");
    } catch (NoSuchAlgorithmException nsae)
    {
        // Ignore as we checked already during service boot-up
    }
    // IMPORTANT NOTE: As the password is stored single-hashed in the
    // database, it is impossible for us to decrypt the password and
    // recompute a substitute to compare with one generated on the source
    // side - Hence, we have to generate a password substitute.
    // In other words, we cannot figure what the original password was -
    // Strong Password Substitution (USRSSBPWD) cannot be supported for
    // targets which can't access or decrypt passwords on their side.
    //
    messageDigest.reset();

    byte[] bytePasswd = null;
    byte[] userBytes = toHexByte(userName);

    if (SanityManager.DEBUG)
    {
        // We must have a source and target seed
        SanityManager.ASSERT(
            (((String) info.getProperty(Attribute.DRDA_SECTKN_IN) != null) &&
            ((String) info.getProperty(Attribute.DRDA_SECTKN_OUT) != null)),
            "Unexpected: Requester or server seed not available");
    }

    // Retrieve source (client) and target 8-byte seeds
    String sourceSeedstr = info.getProperty(Attribute.DRDA_SECTKN_IN);
    String targetSeedstr = info.getProperty(Attribute.DRDA_SECTKN_OUT);

    byte[] sourceSeed_ =
        StringUtil.fromHexString(sourceSeedstr, 0, sourceSeedstr.length());
    byte[] targetSeed_ =
        StringUtil.fromHexString(targetSeedstr, 0, targetSeedstr.length());

    String hexString = null;
    // If user is at the database level, we don't hash the password
    // as it is already hashed (BUILTIN scheme) - we only do the
    // BUILTIN hashing if the user is defined at the system level
    // only - this is required beforehands so that we can do the password
    // substitute generation right afterwards.
    if (!databaseUser)
    {
        bytePasswd = toHexByte(password);
        messageDigest.update(bytePasswd);
        byte[] hashedVal = messageDigest.digest();
        hexString = PasswordHasher.ID_PATTERN_SHA1_SCHEME +
            StringUtil.toHexString(hashedVal, 0, hashedVal.length);
    }
    else
    {
        // Already hashed from the database store
        // NOTE: If the password was stored with the configurable hash
        // authentication scheme, the stored password will have been hashed
        // with a different algorithm than the hashed password sent from
        // the client. Since there's no way to decrypt the stored password
        // and rehash it with the algorithm that the client uses, we are
        // not able to compare the passwords, and the connection attempt
        // will fail.
        hexString = password;
    }

    // Generate the password substitute now

    // Generate some 20-byte password token
    // (digest order matters: user bytes then encoded password)
    messageDigest.update(userBytes);
    messageDigest.update(toHexByte(hexString));
    byte[] passwordToken = messageDigest.digest();

    // Now we generate the 20-byte password substitute
    // (order per DRDA: token, server seed, client seed, user, PWSEQs)
    messageDigest.update(passwordToken);
    messageDigest.update(targetSeed_);
    messageDigest.update(sourceSeed_);
    messageDigest.update(userBytes);
    messageDigest.update(SECMEC_USRSSBPWD_PWDSEQS);

    passwordSubstitute = messageDigest.digest();

    return StringUtil.toHexString(passwordSubstitute, 0,
                                  passwordSubstitute.length);
}
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.client.file;
import alluxio.AlluxioURI;
import alluxio.Constants;
import alluxio.annotation.PublicApi;
import alluxio.client.file.options.CreateDirectoryOptions;
import alluxio.client.file.options.CreateFileOptions;
import alluxio.client.file.options.DeleteOptions;
import alluxio.client.file.options.ExistsOptions;
import alluxio.client.file.options.FreeOptions;
import alluxio.client.file.options.GetStatusOptions;
import alluxio.client.file.options.InStreamOptions;
import alluxio.client.file.options.ListStatusOptions;
import alluxio.client.file.options.LoadMetadataOptions;
import alluxio.client.file.options.MountOptions;
import alluxio.client.file.options.OpenFileOptions;
import alluxio.client.file.options.OutStreamOptions;
import alluxio.client.file.options.RenameOptions;
import alluxio.client.file.options.SetAclOptions;
import alluxio.client.file.options.SetAttributeOptions;
import alluxio.client.file.options.UnmountOptions;
import alluxio.exception.AlluxioException;
import alluxio.exception.DirectoryNotEmptyException;
import alluxio.exception.ExceptionMessage;
import alluxio.exception.FileAlreadyExistsException;
import alluxio.exception.FileDoesNotExistException;
import alluxio.exception.InvalidPathException;
import alluxio.exception.status.AlluxioStatusException;
import alluxio.exception.status.AlreadyExistsException;
import alluxio.exception.status.FailedPreconditionException;
import alluxio.exception.status.InvalidArgumentException;
import alluxio.exception.status.NotFoundException;
import alluxio.exception.status.UnavailableException;
import alluxio.master.MasterInquireClient;
import alluxio.security.authorization.AclEntry;
import alluxio.uri.Authority;
import alluxio.wire.LoadMetadataType;
import alluxio.wire.MountPointInfo;
import alluxio.wire.SetAclAction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import javax.annotation.concurrent.ThreadSafe;
/**
 * Default implementation of the {@link FileSystem} interface. Developers can extend this class
 * instead of implementing the interface. This implementation reads and writes data through
 * {@link FileInStream} and {@link FileOutStream}. This class is thread safe.
 *
 * <p>Every master RPC follows the same pattern: acquire a {@link FileSystemMasterClient} from
 * the context, perform the call, translate low-level {@link AlluxioStatusException}s into the
 * checked exceptions declared by {@link FileSystem}, and release the client in a finally block.
 * {@link UnavailableException} is always rethrown as-is so callers can distinguish a master
 * outage from a semantic failure.
 */
@PublicApi
@ThreadSafe
public class BaseFileSystem implements FileSystem {
  private static final Logger LOG = LoggerFactory.getLogger(BaseFileSystem.class);

  /** Context through which master clients are acquired/released and streams are created. */
  protected final FileSystemContext mFileSystemContext;

  /**
   * @param context file system context
   * @return a {@link BaseFileSystem}
   */
  public static BaseFileSystem get(FileSystemContext context) {
    return new BaseFileSystem(context);
  }

  /**
   * Constructs a new base file system.
   *
   * @param context file system context
   */
  protected BaseFileSystem(FileSystemContext context) {
    mFileSystemContext = context;
  }

  @Override
  public void createDirectory(AlluxioURI path)
      throws FileAlreadyExistsException, InvalidPathException, IOException, AlluxioException {
    createDirectory(path, CreateDirectoryOptions.defaults());
  }

  @Override
  public void createDirectory(AlluxioURI path, CreateDirectoryOptions options)
      throws FileAlreadyExistsException, InvalidPathException, IOException, AlluxioException {
    checkUri(path);
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      masterClient.createDirectory(path, options);
      LOG.debug("Created directory {}, options: {}", path.getPath(), options);
    } catch (AlreadyExistsException e) {
      throw new FileAlreadyExistsException(e.getMessage());
    } catch (InvalidArgumentException e) {
      throw new InvalidPathException(e.getMessage());
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  @Override
  public FileOutStream createFile(AlluxioURI path)
      throws FileAlreadyExistsException, InvalidPathException, IOException, AlluxioException {
    return createFile(path, CreateFileOptions.defaults());
  }

  @Override
  public FileOutStream createFile(AlluxioURI path, CreateFileOptions options)
      throws FileAlreadyExistsException, InvalidPathException, IOException, AlluxioException {
    checkUri(path);
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    URIStatus status;
    try {
      masterClient.createFile(path, options);
      // Do not sync before this getStatus, since the UFS file is expected to not exist.
      GetStatusOptions opts = GetStatusOptions.defaults();
      opts.setLoadMetadataType(LoadMetadataType.Never);
      opts.getCommonOptions().setSyncIntervalMs(-1);
      status = masterClient.getStatus(path, opts);
      LOG.debug("Created file {}, options: {}", path.getPath(), options);
    } catch (AlreadyExistsException e) {
      throw new FileAlreadyExistsException(e.getMessage());
    } catch (InvalidArgumentException e) {
      throw new InvalidPathException(e.getMessage());
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
    OutStreamOptions outStreamOptions = options.toOutStreamOptions();
    outStreamOptions.setUfsPath(status.getUfsPath());
    outStreamOptions.setMountId(status.getMountId());
    outStreamOptions.setAcl(status.getAcl());
    try {
      return new FileOutStream(path, outStreamOptions, mFileSystemContext);
    } catch (Exception e) {
      // The master-side file was already created above; remove it so a failed stream
      // construction does not leave an orphaned, unwritable entry behind.
      delete(path);
      throw e;
    }
  }

  @Override
  public void delete(AlluxioURI path)
      throws DirectoryNotEmptyException, FileDoesNotExistException, IOException, AlluxioException {
    delete(path, DeleteOptions.defaults());
  }

  @Override
  public void delete(AlluxioURI path, DeleteOptions options)
      throws DirectoryNotEmptyException, FileDoesNotExistException, IOException, AlluxioException {
    checkUri(path);
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      masterClient.delete(path, options);
      LOG.debug("Deleted {}, options: {}", path.getPath(), options);
    } catch (FailedPreconditionException e) {
      // A little sketchy, but this should be the only case that throws FailedPrecondition.
      throw new DirectoryNotEmptyException(e.getMessage());
    } catch (NotFoundException e) {
      throw new FileDoesNotExistException(e.getMessage());
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  @Override
  public boolean exists(AlluxioURI path)
      throws InvalidPathException, IOException, AlluxioException {
    return exists(path, ExistsOptions.defaults());
  }

  @Override
  public boolean exists(AlluxioURI path, ExistsOptions options)
      throws InvalidPathException, IOException, AlluxioException {
    checkUri(path);
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      // TODO(calvin): Make this more efficient
      masterClient.getStatus(path, options.toGetStatusOptions());
      return true;
    } catch (NotFoundException e) {
      return false;
    } catch (InvalidArgumentException e) {
      // The server will throw this when a prefix of the path is a file.
      // TODO(andrew): Change the server so that a prefix being a file means the path does not exist
      return false;
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  @Override
  public void free(AlluxioURI path)
      throws FileDoesNotExistException, IOException, AlluxioException {
    free(path, FreeOptions.defaults());
  }

  @Override
  public void free(AlluxioURI path, FreeOptions options)
      throws FileDoesNotExistException, IOException, AlluxioException {
    checkUri(path);
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      masterClient.free(path, options);
      LOG.debug("Freed {}, options: {}", path.getPath(), options);
    } catch (NotFoundException e) {
      throw new FileDoesNotExistException(e.getMessage());
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  @Override
  public URIStatus getStatus(AlluxioURI path)
      throws FileDoesNotExistException, IOException, AlluxioException {
    return getStatus(path, GetStatusOptions.defaults());
  }

  @Override
  public URIStatus getStatus(AlluxioURI path, GetStatusOptions options)
      throws FileDoesNotExistException, IOException, AlluxioException {
    checkUri(path);
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      return masterClient.getStatus(path, options);
    } catch (NotFoundException e) {
      throw new FileDoesNotExistException(ExceptionMessage.PATH_DOES_NOT_EXIST.getMessage(path));
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  @Override
  public List<URIStatus> listStatus(AlluxioURI path)
      throws FileDoesNotExistException, IOException, AlluxioException {
    return listStatus(path, ListStatusOptions.defaults());
  }

  @Override
  public List<URIStatus> listStatus(AlluxioURI path, ListStatusOptions options)
      throws FileDoesNotExistException, IOException, AlluxioException {
    checkUri(path);
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    // TODO(calvin): Fix the exception handling in the master
    try {
      return masterClient.listStatus(path, options);
    } catch (NotFoundException e) {
      throw new FileDoesNotExistException(ExceptionMessage.PATH_DOES_NOT_EXIST.getMessage(path));
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  /**
   * {@inheritDoc}
   *
   * @deprecated since version 1.1 and will be removed in version 2.0
   */
  @Deprecated
  @Override
  public void loadMetadata(AlluxioURI path)
      throws FileDoesNotExistException, IOException, AlluxioException {
    loadMetadata(path, LoadMetadataOptions.defaults());
  }

  /**
   * {@inheritDoc}
   *
   * @deprecated since version 1.1 and will be removed in version 2.0
   */
  @Deprecated
  @Override
  public void loadMetadata(AlluxioURI path, LoadMetadataOptions options)
      throws FileDoesNotExistException, IOException, AlluxioException {
    checkUri(path);
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      masterClient.loadMetadata(path, options);
      LOG.debug("Loaded metadata {}, options: {}", path.getPath(), options);
    } catch (NotFoundException e) {
      throw new FileDoesNotExistException(e.getMessage());
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  @Override
  public void mount(AlluxioURI alluxioPath, AlluxioURI ufsPath)
      throws IOException, AlluxioException {
    mount(alluxioPath, ufsPath, MountOptions.defaults());
  }

  @Override
  public void mount(AlluxioURI alluxioPath, AlluxioURI ufsPath, MountOptions options)
      throws IOException, AlluxioException {
    checkUri(alluxioPath);
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      // TODO(calvin): Make this fail on the master side
      masterClient.mount(alluxioPath, ufsPath, options);
      // Parameterized logging, consistent with the rest of this class; avoids eager
      // string concatenation when the INFO level is disabled.
      LOG.info("Mount {} to {}", ufsPath, alluxioPath.getPath());
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  @Override
  public Map<String, MountPointInfo> getMountTable() throws IOException, AlluxioException {
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      return masterClient.getMountTable();
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  @Override
  public List<String> getSyncPathList() throws IOException, AlluxioException {
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      return masterClient.getSyncPathList();
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  @Override
  public FileInStream openFile(AlluxioURI path)
      throws FileDoesNotExistException, IOException, AlluxioException {
    return openFile(path, OpenFileOptions.defaults());
  }

  @Override
  public FileInStream openFile(AlluxioURI path, OpenFileOptions options)
      throws FileDoesNotExistException, IOException, AlluxioException {
    checkUri(path);
    URIStatus status = getStatus(path);
    // Directories cannot be opened for reading; surface this as a "does not exist" error.
    if (status.isFolder()) {
      throw new FileDoesNotExistException(
          ExceptionMessage.CANNOT_READ_DIRECTORY.getMessage(status.getName()));
    }
    InStreamOptions inStreamOptions = options.toInStreamOptions(status);
    return new FileInStream(status, inStreamOptions, mFileSystemContext);
  }

  @Override
  public void rename(AlluxioURI src, AlluxioURI dst)
      throws FileDoesNotExistException, IOException, AlluxioException {
    rename(src, dst, RenameOptions.defaults());
  }

  @Override
  public void rename(AlluxioURI src, AlluxioURI dst, RenameOptions options)
      throws FileDoesNotExistException, IOException, AlluxioException {
    checkUri(src);
    checkUri(dst);
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      // TODO(calvin): Update this code on the master side.
      masterClient.rename(src, dst, options);
      LOG.debug("Renamed {} to {}, options: {}", src.getPath(), dst.getPath(), options);
    } catch (NotFoundException e) {
      throw new FileDoesNotExistException(e.getMessage());
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  @Override
  public void setAcl(AlluxioURI path, SetAclAction action, List<AclEntry> entries)
      throws FileDoesNotExistException, IOException, AlluxioException {
    setAcl(path, action, entries, SetAclOptions.defaults());
  }

  @Override
  public void setAcl(AlluxioURI path, SetAclAction action, List<AclEntry> entries,
      SetAclOptions options) throws FileDoesNotExistException, IOException, AlluxioException {
    checkUri(path);
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      masterClient.setAcl(path, action, entries, options);
      LOG.debug("Set ACL for {}, entries: {} options: {}", path.getPath(), entries, options);
    } catch (NotFoundException e) {
      throw new FileDoesNotExistException(e.getMessage());
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  @Override
  public void setAttribute(AlluxioURI path)
      throws FileDoesNotExistException, IOException, AlluxioException {
    setAttribute(path, SetAttributeOptions.defaults());
  }

  @Override
  public void setAttribute(AlluxioURI path, SetAttributeOptions options)
      throws FileDoesNotExistException, IOException, AlluxioException {
    checkUri(path);
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      masterClient.setAttribute(path, options);
      LOG.debug("Set attributes for {}, options: {}", path.getPath(), options);
    } catch (NotFoundException e) {
      throw new FileDoesNotExistException(e.getMessage());
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  /**
   * Starts the active syncing process on an Alluxio path.
   *
   * @param path the path to sync
   */
  @Override
  public void startSync(AlluxioURI path)
      throws FileDoesNotExistException, IOException, AlluxioException {
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      masterClient.startSync(path);
      LOG.debug("Start syncing for {}", path.getPath());
    } catch (NotFoundException e) {
      throw new FileDoesNotExistException(e.getMessage());
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  /**
   * Stops the active syncing process on an Alluxio path.
   *
   * @param path the path to stop syncing
   */
  @Override
  public void stopSync(AlluxioURI path)
      throws FileDoesNotExistException, IOException, AlluxioException {
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      masterClient.stopSync(path);
      LOG.debug("Stop syncing for {}", path.getPath());
    } catch (NotFoundException e) {
      throw new FileDoesNotExistException(e.getMessage());
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  @Override
  public void unmount(AlluxioURI path) throws IOException, AlluxioException {
    unmount(path, UnmountOptions.defaults());
  }

  @Override
  public void unmount(AlluxioURI path, UnmountOptions options)
      throws IOException, AlluxioException {
    checkUri(path);
    FileSystemMasterClient masterClient = mFileSystemContext.acquireMasterClient();
    try {
      // NOTE(review): options is only logged, never sent to the master — presumably the
      // unmount RPC takes no options; confirm against FileSystemMasterClient.
      masterClient.unmount(path);
      LOG.debug("Unmounted {}, options: {}", path.getPath(), options);
    } catch (UnavailableException e) {
      throw e;
    } catch (AlluxioStatusException e) {
      throw e.toAlluxioException();
    } finally {
      mFileSystemContext.releaseMasterClient(masterClient);
    }
  }

  /**
   * Checks an {@link AlluxioURI} for scheme and authority information. Warn the user and throw an
   * exception if necessary.
   *
   * @param uri the URI to validate
   * @throws IllegalArgumentException if the scheme is neither absent nor a recognized Alluxio
   *         scheme, or if the URI's authority does not match the configured master authority
   */
  private static void checkUri(AlluxioURI uri) {
    if (uri.hasScheme()) {
      String warnMsg = "The URI scheme \"{}\" is ignored and not required in URIs passed to"
          + " the Alluxio Filesystem client.";
      switch (uri.getScheme()) {
        case Constants.SCHEME:
          LOG.warn(warnMsg, Constants.SCHEME);
          break;
        case Constants.SCHEME_FT:
          LOG.warn(warnMsg, Constants.SCHEME_FT);
          break;
        default:
          throw new IllegalArgumentException(
              String.format("Scheme %s:// in AlluxioURI is invalid. Schemes in filesystem"
                  + " operations are ignored. \"alluxio://\" or no scheme at all is valid.",
                  uri.getScheme()));
      }
    }
    if (uri.hasAuthority()) {
      LOG.warn("The URI authority (hostname and port) is ignored and not required in URIs passed "
          + "to the Alluxio Filesystem client.");
      /* Even if we choose to log the warning, check if the Configuration host matches what the
       * user passes. If not, throw an exception letting the user know they don't match.
       */
      Authority configured = MasterInquireClient.Factory.create().getConnectDetails().toAuthority();
      if (!configured.equals(uri.getAuthority())) {
        throw new IllegalArgumentException(
            String.format("The URI authority %s does not match the configured " + "value of %s.",
                uri.getAuthority(), configured));
      }
    }
  }
}
| |
/**
* Copyright 2011-2013 FoundationDB, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* The original from which this derives bore the following: */
/*
Derby - Class org.apache.derby.impl.sql.compile.BinaryOperatorNode
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.foundationdb.sql.parser;
import com.foundationdb.sql.StandardException;
import com.foundationdb.sql.types.ValueClassName;
/**
* A BinaryOperatorNode represents a built-in binary operator as defined by
* the ANSI/ISO SQL standard. This covers operators like +, -, *, /, =, <, etc.
* Java operators are not represented here: the JSQL language allows Java
* methods to be called from expressions, but not Java operators.
*
*/
public class BinaryOperatorNode extends ValueNode
{
protected String operator;
protected String methodName;
public static enum OperatorType {
PLUS, MINUS, TIMES, DIVIDE, CONCATENATE, MOD, DIV,
EQ, NE, GT, GE, LT, LE, AND, OR, LIKE, LTRIM, TRIM, RTRIM,
BITAND, BITOR, BITXOR, LEFT_SHIFT, RIGHT_SHIFT,
LEFT, RIGHT
}
protected ValueNode leftOperand;
protected ValueNode rightOperand;
protected String leftInterfaceType;
protected String rightInterfaceType;
protected String resultInterfaceType;
/**
* Initializer for a BinaryOperatorNode
*
* @param leftOperand The left operand of the node
* @param rightOperand The right operand of the node
* @param operator The name of the operator
* @param methodName The name of the method to call for this operator
* @param leftInterfaceType The name of the interface for the left operand
* @param rightInterfaceType The name of the interface for the right operand
*/
public void init(Object leftOperand, Object rightOperand,
Object operator, Object methodName,
Object leftInterfaceType, Object rightInterfaceType) {
this.leftOperand = (ValueNode)leftOperand;
this.rightOperand = (ValueNode)rightOperand;
this.operator = (String)operator;
this.methodName = (String)methodName;
this.leftInterfaceType = (String)leftInterfaceType;
this.rightInterfaceType = (String)rightInterfaceType;
}
public void init(Object leftOperand, Object rightOperand,
Object leftInterfaceType, Object rightInterfaceType) {
this.leftOperand = (ValueNode)leftOperand;
this.rightOperand = (ValueNode)rightOperand;
this.leftInterfaceType = (String)leftInterfaceType;
this.rightInterfaceType = (String)rightInterfaceType;
}
/**
* Fill this node with a deep copy of the given node.
*/
public void copyFrom(QueryTreeNode node) throws StandardException {
super.copyFrom(node);
BinaryOperatorNode other = (BinaryOperatorNode)node;
this.operator = other.operator;
this.methodName = other.methodName;
this.leftOperand = (ValueNode)
getNodeFactory().copyNode(other.leftOperand, getParserContext());
this.rightOperand = (ValueNode)
getNodeFactory().copyNode(other.rightOperand, getParserContext());
this.leftInterfaceType = other.leftInterfaceType;
this.rightInterfaceType = other.rightInterfaceType;
this.resultInterfaceType = other.resultInterfaceType;
}
/**
* Convert this object to a String. See comments in QueryTreeNode.java
* for how this should be done for tree printing.
*
* @return This object as a String
*/
public String toString() {
return "operator: " + operator + "\n" +
"methodName: " + methodName + "\n" +
super.toString();
}
/**
* Set the operator.
*
* @param operator The operator.
*/
void setOperator(String operator) {
this.operator = operator;
}
public String getOperator() {
return operator;
}
/**
* Set the methodName.
*
* @param methodName The methodName.
*/
void setMethodName(String methodName) {
this.methodName = methodName;
}
public String getMethodName() {
return methodName;
}
/**
* Set the interface type for the left and right arguments.
* Used when we don't know the interface type until
* later in binding.
*/
public void setLeftRightInterfaceType(String iType) {
leftInterfaceType = iType;
rightInterfaceType = iType;
}
/**
* Prints the sub-nodes of this object. See QueryTreeNode.java for
* how tree printing is supposed to work.
*
* @param depth The depth of this node in the tree
*/
public void printSubNodes(int depth) {
super.printSubNodes(depth);
if (leftOperand != null) {
printLabel(depth, "leftOperand: ");
leftOperand.treePrint(depth + 1);
}
if (rightOperand != null) {
printLabel(depth, "rightOperand: ");
rightOperand.treePrint(depth + 1);
}
}
/**
* Set the leftOperand to the specified ValueNode
*
* @param newLeftOperand The new leftOperand
*/
public void setLeftOperand(ValueNode newLeftOperand) {
leftOperand = newLeftOperand;
}
/**
* Get the leftOperand
*
* @return The current leftOperand.
*/
public ValueNode getLeftOperand() {
return leftOperand;
}
/**
* Set the rightOperand to the specified ValueNode
*
* @param newRightOperand The new rightOperand
*/
public void setRightOperand(ValueNode newRightOperand) {
rightOperand = newRightOperand;
}
/**
* Get the rightOperand
*
* @return The current rightOperand.
*/
public ValueNode getRightOperand() {
return rightOperand;
}
/**
* Return whether or not this expression tree represents a constant expression.
*
* @return Whether or not this expression tree represents a constant expression.
*/
public boolean isConstantExpression() {
return (leftOperand.isConstantExpression() &&
rightOperand.isConstantExpression());
}
/**
* Accept the visitor for all visitable children of this node.
*
* @param v the visitor
*
* @exception StandardException on error
*/
void acceptChildren(Visitor v) throws StandardException {
super.acceptChildren(v);
if (leftOperand != null) {
leftOperand = (ValueNode)leftOperand.accept(v);
}
if (rightOperand != null) {
rightOperand = (ValueNode)rightOperand.accept(v);
}
}
/**
* @inheritDoc
*/
protected boolean isEquivalent(ValueNode o) throws StandardException {
if (!isSameNodeType(o)) {
return false;
}
BinaryOperatorNode other = (BinaryOperatorNode)o;
return methodName.equals(other.methodName) &&
leftOperand.isEquivalent(other.leftOperand) &&
rightOperand.isEquivalent(other.rightOperand);
}
}
| |
/*******************************************************************************
* Copyright (c) 2015 Jeff Martin.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser General Public
* License v3.0 which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl.html
*
* Contributors:
* Jeff Martin - initial API and implementation
******************************************************************************/
package cuchaz.enigma.analysis.visitor;
import com.strobel.assembler.metadata.TypeDefinition;
import com.strobel.decompiler.languages.java.ast.Annotation;
import com.strobel.decompiler.languages.java.ast.AnonymousObjectCreationExpression;
import com.strobel.decompiler.languages.java.ast.ArrayCreationExpression;
import com.strobel.decompiler.languages.java.ast.ArrayInitializerExpression;
import com.strobel.decompiler.languages.java.ast.ArraySpecifier;
import com.strobel.decompiler.languages.java.ast.AssertStatement;
import com.strobel.decompiler.languages.java.ast.AssignmentExpression;
import com.strobel.decompiler.languages.java.ast.AstNode;
import com.strobel.decompiler.languages.java.ast.BinaryOperatorExpression;
import com.strobel.decompiler.languages.java.ast.BlockStatement;
import com.strobel.decompiler.languages.java.ast.BreakStatement;
import com.strobel.decompiler.languages.java.ast.CaseLabel;
import com.strobel.decompiler.languages.java.ast.CastExpression;
import com.strobel.decompiler.languages.java.ast.CatchClause;
import com.strobel.decompiler.languages.java.ast.ClassOfExpression;
import com.strobel.decompiler.languages.java.ast.Comment;
import com.strobel.decompiler.languages.java.ast.CompilationUnit;
import com.strobel.decompiler.languages.java.ast.ComposedType;
import com.strobel.decompiler.languages.java.ast.ConditionalExpression;
import com.strobel.decompiler.languages.java.ast.ConstructorDeclaration;
import com.strobel.decompiler.languages.java.ast.ContinueStatement;
import com.strobel.decompiler.languages.java.ast.DoWhileStatement;
import com.strobel.decompiler.languages.java.ast.EmptyStatement;
import com.strobel.decompiler.languages.java.ast.EnumValueDeclaration;
import com.strobel.decompiler.languages.java.ast.ExpressionStatement;
import com.strobel.decompiler.languages.java.ast.FieldDeclaration;
import com.strobel.decompiler.languages.java.ast.ForEachStatement;
import com.strobel.decompiler.languages.java.ast.ForStatement;
import com.strobel.decompiler.languages.java.ast.GotoStatement;
import com.strobel.decompiler.languages.java.ast.IAstVisitor;
import com.strobel.decompiler.languages.java.ast.Identifier;
import com.strobel.decompiler.languages.java.ast.IdentifierExpression;
import com.strobel.decompiler.languages.java.ast.IfElseStatement;
import com.strobel.decompiler.languages.java.ast.ImportDeclaration;
import com.strobel.decompiler.languages.java.ast.IndexerExpression;
import com.strobel.decompiler.languages.java.ast.InstanceInitializer;
import com.strobel.decompiler.languages.java.ast.InstanceOfExpression;
import com.strobel.decompiler.languages.java.ast.InvocationExpression;
import com.strobel.decompiler.languages.java.ast.JavaTokenNode;
import com.strobel.decompiler.languages.java.ast.Keys;
import com.strobel.decompiler.languages.java.ast.LabelStatement;
import com.strobel.decompiler.languages.java.ast.LabeledStatement;
import com.strobel.decompiler.languages.java.ast.LambdaExpression;
import com.strobel.decompiler.languages.java.ast.LocalTypeDeclarationStatement;
import com.strobel.decompiler.languages.java.ast.MemberReferenceExpression;
import com.strobel.decompiler.languages.java.ast.MethodDeclaration;
import com.strobel.decompiler.languages.java.ast.MethodGroupExpression;
import com.strobel.decompiler.languages.java.ast.NewLineNode;
import com.strobel.decompiler.languages.java.ast.NullReferenceExpression;
import com.strobel.decompiler.languages.java.ast.ObjectCreationExpression;
import com.strobel.decompiler.languages.java.ast.PackageDeclaration;
import com.strobel.decompiler.languages.java.ast.ParameterDeclaration;
import com.strobel.decompiler.languages.java.ast.ParenthesizedExpression;
import com.strobel.decompiler.languages.java.ast.PrimitiveExpression;
import com.strobel.decompiler.languages.java.ast.ReturnStatement;
import com.strobel.decompiler.languages.java.ast.SimpleType;
import com.strobel.decompiler.languages.java.ast.SuperReferenceExpression;
import com.strobel.decompiler.languages.java.ast.SwitchSection;
import com.strobel.decompiler.languages.java.ast.SwitchStatement;
import com.strobel.decompiler.languages.java.ast.SynchronizedStatement;
import com.strobel.decompiler.languages.java.ast.TextNode;
import com.strobel.decompiler.languages.java.ast.ThisReferenceExpression;
import com.strobel.decompiler.languages.java.ast.ThrowStatement;
import com.strobel.decompiler.languages.java.ast.TryCatchStatement;
import com.strobel.decompiler.languages.java.ast.TypeDeclaration;
import com.strobel.decompiler.languages.java.ast.TypeParameterDeclaration;
import com.strobel.decompiler.languages.java.ast.TypeReferenceExpression;
import com.strobel.decompiler.languages.java.ast.UnaryOperatorExpression;
import com.strobel.decompiler.languages.java.ast.VariableDeclarationStatement;
import com.strobel.decompiler.languages.java.ast.VariableInitializer;
import com.strobel.decompiler.languages.java.ast.WhileStatement;
import com.strobel.decompiler.languages.java.ast.WildcardType;
import com.strobel.decompiler.patterns.Pattern;
import cuchaz.enigma.analysis.SourceIndex;
import cuchaz.enigma.mapping.entry.ClassEntry;
public class SourceIndexVisitor implements IAstVisitor<SourceIndex,Void> {
/**
 * Records the declaration of a type and delegates traversal of its members to a
 * {@link SourceIndexClassVisitor} scoped to that class.
 */
@Override
public Void visitTypeDeclaration(TypeDeclaration node, SourceIndex index) {
    TypeDefinition definition = node.getUserData(Keys.TYPE_DEFINITION);
    ClassEntry entry = new ClassEntry(definition.getInternalName());
    index.addDeclaration(node.getNameToken(), entry);
    return node.acceptVisitor(new SourceIndexClassVisitor(entry), index);
}
/**
 * Visits every direct child of {@code node} with this visitor.
 *
 * @return always {@code null}
 */
protected Void recurse(AstNode node, SourceIndex index) {
    node.getChildren().forEach(child -> child.acceptVisitor(this, index));
    return null;
}
// No index entry produced here directly; descend into the method's children.
@Override
public Void visitMethodDeclaration(MethodDeclaration node, SourceIndex index) {
return recurse(node, index);
}
// No index entry produced here directly; descend into the constructor's children.
@Override
public Void visitConstructorDeclaration(ConstructorDeclaration node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitFieldDeclaration(FieldDeclaration node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitEnumValueDeclaration(EnumValueDeclaration node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitParameterDeclaration(ParameterDeclaration node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitInvocationExpression(InvocationExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitMemberReferenceExpression(MemberReferenceExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitSimpleType(SimpleType node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitIdentifierExpression(IdentifierExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitComment(Comment node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitPatternPlaceholder(AstNode node, Pattern pattern, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitTypeReference(TypeReferenceExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitJavaTokenNode(JavaTokenNode node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitIdentifier(Identifier node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitNullReferenceExpression(NullReferenceExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitThisReferenceExpression(ThisReferenceExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitSuperReferenceExpression(SuperReferenceExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitClassOfExpression(ClassOfExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitBlockStatement(BlockStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitExpressionStatement(ExpressionStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitBreakStatement(BreakStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitContinueStatement(ContinueStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitDoWhileStatement(DoWhileStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitEmptyStatement(EmptyStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitIfElseStatement(IfElseStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitLabelStatement(LabelStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitLabeledStatement(LabeledStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitReturnStatement(ReturnStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitSwitchStatement(SwitchStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitSwitchSection(SwitchSection node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitCaseLabel(CaseLabel node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitThrowStatement(ThrowStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitCatchClause(CatchClause node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitAnnotation(Annotation node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitNewLine(NewLineNode node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitVariableDeclaration(VariableDeclarationStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitVariableInitializer(VariableInitializer node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitText(TextNode node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitImportDeclaration(ImportDeclaration node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitInitializerBlock(InstanceInitializer node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitTypeParameterDeclaration(TypeParameterDeclaration node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitCompilationUnit(CompilationUnit node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitPackageDeclaration(PackageDeclaration node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitArraySpecifier(ArraySpecifier node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitComposedType(ComposedType node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitWhileStatement(WhileStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitPrimitiveExpression(PrimitiveExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitCastExpression(CastExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitBinaryOperatorExpression(BinaryOperatorExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitInstanceOfExpression(InstanceOfExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitIndexerExpression(IndexerExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitUnaryOperatorExpression(UnaryOperatorExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitConditionalExpression(ConditionalExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitArrayInitializerExpression(ArrayInitializerExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitObjectCreationExpression(ObjectCreationExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitArrayCreationExpression(ArrayCreationExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitAssignmentExpression(AssignmentExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitForStatement(ForStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitForEachStatement(ForEachStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitTryCatchStatement(TryCatchStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitGotoStatement(GotoStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitParenthesizedExpression(ParenthesizedExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitSynchronizedStatement(SynchronizedStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitAnonymousObjectCreationExpression(AnonymousObjectCreationExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitWildcardType(WildcardType node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitMethodGroupExpression(MethodGroupExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitAssertStatement(AssertStatement node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitLambdaExpression(LambdaExpression node, SourceIndex index) {
return recurse(node, index);
}
@Override
public Void visitLocalTypeDeclarationStatement(LocalTypeDeclarationStatement node, SourceIndex index) {
return recurse(node, index);
}
}
| |
/*
Copyright 2006 Jerry Huxtable
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package thirdparty.jhlabs.image;
import thirdparty.jhlabs.math.FFT;
import java.awt.image.BufferedImage;
/**
 * A filter which use FFTs to simulate lens blur on an image.
 *
 * <p>The image is processed in overlapping tiles: each tile (padded by the
 * blur radius on every side) is convolved with a polygonal aperture kernel by
 * multiplication in frequency space, then the un-padded interior is written
 * back to the destination image.
 */
public class LensBlurFilter extends AbstractBufferedImageOp {
    private float radius = 10;          // blur radius in pixels
    private float bloom = 2;            // gain applied to channels above bloomThreshold
    private float bloomThreshold = 255; // channel value above which bloom kicks in
    private int sides = 5;              // sides of the polygonal aperture (0 = circular)
    /**
     * Set the radius of the kernel, and hence the amount of blur.
     *
     * @param radius the radius of the blur in pixels.
     * @see #getRadius
     */
    public void setRadius(float radius) {
        this.radius = radius;
    }
    /**
     * Get the radius of the kernel.
     *
     * @return the radius
     * @see #setRadius
     */
    public float getRadius() {
        return radius;
    }
    /**
     * Set the number of sides of the aperture.
     *
     * @param sides the number of sides
     * @see #getSides
     */
    public void setSides(int sides) {
        this.sides = sides;
    }
    /**
     * Get the number of sides of the aperture.
     *
     * @return the number of sides
     * @see #setSides
     */
    public int getSides() {
        return sides;
    }
    /**
     * Set the bloom factor.
     *
     * @param bloom the bloom factor
     * @see #getBloom
     */
    public void setBloom(float bloom) {
        this.bloom = bloom;
    }
    /**
     * Get the bloom factor.
     *
     * @return the bloom factor
     * @see #setBloom
     */
    public float getBloom() {
        return bloom;
    }
    /**
     * Set the bloom threshold.
     *
     * @param bloomThreshold the bloom threshold
     * @see #getBloomThreshold
     */
    public void setBloomThreshold(float bloomThreshold) {
        this.bloomThreshold = bloomThreshold;
    }
    /**
     * Get the bloom threshold.
     *
     * @return the bloom threshold
     * @see #setBloomThreshold
     */
    public float getBloomThreshold() {
        return bloomThreshold;
    }
    /**
     * Apply the lens blur. If {@code dst} is null a new TYPE_INT_ARGB image
     * of the same size as {@code src} is created and returned.
     */
    public BufferedImage filter(BufferedImage src, BufferedImage dst) {
        int width = src.getWidth();
        int height = src.getHeight();
        int rows = 1, cols = 1;
        int log2rows = 0, log2cols = 0;
        int iradius = (int) Math.ceil(radius);
        int tileWidth = 128;
        int tileHeight = tileWidth;
        // NOTE(review): adjustedWidth/adjustedHeight are computed but never
        // used below — presumably left over from an earlier version.
        int adjustedWidth = (int) (width + iradius * 2);
        int adjustedHeight = (int) (height + iradius * 2);
        // Choose a tile size: larger tiles for larger radii, but never
        // bigger than the padded image itself.
        tileWidth = iradius < 32 ? Math.min(128, width + 2 * iradius) : Math.min(256, width + 2 * iradius);
        tileHeight = iradius < 32 ? Math.min(128, height + 2 * iradius) : Math.min(256, height + 2 * iradius);
        if (dst == null)
            dst = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
        // Round the tile dimensions up to powers of two for the FFT.
        while (rows < tileHeight) {
            rows *= 2;
            log2rows++;
        }
        while (cols < tileWidth) {
            cols *= 2;
            log2cols++;
        }
        int w = cols;
        int h = rows;
        tileWidth = w;
        tileHeight = h;//FIXME-tileWidth, w, and cols are always all the same
        FFT fft = new FFT(Math.max(log2rows, log2cols));
        int[] rgb = new int[w * h];
        // Each float[2][] pair holds (real, imaginary) planes for the FFT:
        // mask = aperture kernel, ar = alpha+red, gb = green+blue.
        float[][] mask = new float[2][w * h];
        float[][] gb = new float[2][w * h];
        float[][] ar = new float[2][w * h];
        // Create the kernel
        double polyAngle = Math.PI / sides;
        double polyScale = 1.0f / Math.cos(polyAngle);
        double r2 = radius * radius;
        float angle = 0;
        double rangle = Math.toRadians(angle);
        float total = 0;
        int i = 0;
        for (int y = 0; y < h; y++) {
            for (int x = 0; x < w; x++) {
                double dx = x - w / 2f;
                double dy = y - h / 2f;
                double r = dx * dx + dy * dy;
                double f = r < r2 ? 1 : 0;
                if (f != 0) {
                    r = Math.sqrt(r);
                    if (sides != 0) {
                        // Clip the disc to a regular polygon with 'sides' sides.
                        double a = Math.atan2(dy, dx) + rangle;
                        a = ImageMath.mod(a, polyAngle * 2) - polyAngle;
                        f = Math.cos(a) * polyScale;
                    } else
                        f = 1;
                    // Note the precedence: (f * r) < radius.
                    f = f * r < radius ? 1 : 0;
                }
                total += (float) f;
                mask[0][i] = (float) f;
                mask[1][i] = 0;
                i++;
            }
        }
        // Normalize the kernel
        i = 0;
        for (int y = 0; y < h; y++) {
            for (int x = 0; x < w; x++) {
                mask[0][i] /= total;
                i++;
            }
        }
        // Transform the kernel into frequency space once; it is reused for
        // every tile.
        fft.transform2D(mask[0], mask[1], w, h, true);
        // Tiles advance by (tile size - 2*iradius) so the blurred interiors
        // abut exactly; the iradius border of each tile is discarded.
        for (int tileY = -iradius; tileY < height; tileY += tileHeight - 2 * iradius) {
            for (int tileX = -iradius; tileX < width; tileX += tileWidth - 2 * iradius) {
                // System.out.println("Tile: "+tileX+" "+tileY+" "+tileWidth+" "+tileHeight);
                // Clip the tile to the image bounds
                int tx = tileX, ty = tileY, tw = tileWidth, th = tileHeight;
                int fx = 0, fy = 0;
                if (tx < 0) {
                    tw += tx;
                    fx -= tx;
                    tx = 0;
                }
                if (ty < 0) {
                    th += ty;
                    fy -= ty;
                    ty = 0;
                }
                if (tx + tw > width)
                    tw = width - tx;
                if (ty + th > height)
                    th = height - ty;
                src.getRGB(tx, ty, tw, th, rgb, fy * w + fx, w);
                // Create a float array from the pixels. Any pixels off the edge of the source image get duplicated from the edge.
                i = 0;
                for (int y = 0; y < h; y++) {
                    int imageY = y + tileY;
                    int j;
                    if (imageY < 0)
                        j = fy;
                    // NOTE(review): looks like this should be >= height (the
                    // last valid row is height-1) — confirm against upstream.
                    else if (imageY > height)
                        j = fy + th - 1;
                    else
                        j = y;
                    j *= w;
                    for (int x = 0; x < w; x++) {
                        int imageX = x + tileX;
                        int k;
                        if (imageX < 0)
                            k = fx;
                        // NOTE(review): same suspected off-by-one as above
                        // (should presumably be >= width).
                        else if (imageX > width)
                            k = fx + tw - 1;
                        else
                            k = x;
                        k += j;
                        ar[0][i] = ((rgb[k] >> 24) & 0xff);
                        float r = ((rgb[k] >> 16) & 0xff);
                        float g = ((rgb[k] >> 8) & 0xff);
                        float b = (rgb[k] & 0xff);
                        // Bloom...
                        if (r > bloomThreshold)
                            r *= bloom;
                        // r = bloomThreshold + (r-bloomThreshold) * bloom;
                        if (g > bloomThreshold)
                            g *= bloom;
                        // g = bloomThreshold + (g-bloomThreshold) * bloom;
                        if (b > bloomThreshold)
                            b *= bloom;
                        // b = bloomThreshold + (b-bloomThreshold) * bloom;
                        // Pack alpha+red into one complex plane and
                        // green+blue into the other, halving the FFT work.
                        ar[1][i] = r;
                        gb[0][i] = g;
                        gb[1][i] = b;
                        i++;
                        k++; // NOTE(review): dead increment; k is reassigned next iteration
                    }
                }
                // Transform into frequency space
                fft.transform2D(ar[0], ar[1], cols, rows, true);
                fft.transform2D(gb[0], gb[1], cols, rows, true);
                // Multiply the transformed pixels by the transformed kernel
                i = 0;
                for (int y = 0; y < h; y++) {
                    for (int x = 0; x < w; x++) {
                        // Complex multiply: (re+im*i) * (rem+imm*i).
                        float re = ar[0][i];
                        float im = ar[1][i];
                        float rem = mask[0][i];
                        float imm = mask[1][i];
                        ar[0][i] = re * rem - im * imm;
                        ar[1][i] = re * imm + im * rem;
                        re = gb[0][i];
                        im = gb[1][i];
                        gb[0][i] = re * rem - im * imm;
                        gb[1][i] = re * imm + im * rem;
                        i++;
                    }
                }
                // Transform back
                fft.transform2D(ar[0], ar[1], cols, rows, false);
                fft.transform2D(gb[0], gb[1], cols, rows, false);
                // Convert back to RGB pixels, with quadrant remapping
                // NOTE(review): row_flip is derived from w and col_flip from
                // h (and the outer loop below runs y < w); this only matches
                // its naming when w == h — verify for non-square tiles.
                int row_flip = w >> 1;
                int col_flip = h >> 1;
                int index = 0;
                //FIXME-don't bother converting pixels off image edges
                for (int y = 0; y < w; y++) {
                    int ym = y ^ row_flip;
                    int yi = ym * cols;
                    for (int x = 0; x < w; x++) {
                        int xm = yi + (x ^ col_flip);
                        int a = (int) ar[0][xm];
                        int r = (int) ar[1][xm];
                        int g = (int) gb[0][xm];
                        int b = (int) gb[1][xm];
                        // Clamp high pixels due to blooming
                        if (r > 255)
                            r = 255;
                        if (g > 255)
                            g = 255;
                        if (b > 255)
                            b = 255;
                        int argb = (a << 24) | (r << 16) | (g << 8) | b;
                        rgb[index++] = argb;
                    }
                }
                // Clip to the output image
                tx = tileX + iradius;
                ty = tileY + iradius;
                tw = tileWidth - 2 * iradius;
                th = tileHeight - 2 * iradius;
                if (tx + tw > width)
                    tw = width - tx;
                if (ty + th > height)
                    th = height - ty;
                dst.setRGB(tx, ty, tw, th, rgb, iradius * w + iradius, w);
            }
        }
        return dst;
    }
    public String toString() {
        return "Blur/Lens Blur...";
    }
}
| |
package org.lantern.http;
import io.netty.handler.codec.http.HttpHeaders;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import javax.security.auth.login.CredentialException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smackx.packet.VCard;
import org.lantern.LanternUtils;
import org.lantern.oauth.LanternGoogleOAuth2Credentials;
import org.lantern.state.ModelUtils;
import org.lantern.state.StaticSettings;
import org.littleshoot.commom.xmpp.XmppUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
import com.google.inject.Singleton;
//import eu.medsea.mimeutil.MimeType;
//import eu.medsea.mimeutil.MimeUtil2;
/**
 * Servlet for sending photo data for a given user.
 *
 * <p>The XMPP vcard avatar lookup is currently disabled; the bundled default
 * avatar is served for every request (see the commented-out block in
 * {@link #doGet}).
 */
@Singleton
public final class PhotoServlet extends HttpServlet {
    private static final Logger log = LoggerFactory.getLogger(PhotoServlet.class);
    /** Shared Google Talk connection for vcard lookups; created lazily under CONNECTION_LOCK. */
    private static XMPPConnection conn;
    private static final int CACHE_DURATION_IN_S = 60 * 60 * 24; // 1 day
    // Use a long multiplier so the arithmetic is done in 64 bits.
    private static final long CACHE_DURATION_IN_MS = CACHE_DURATION_IN_S * 1000L;
    /**
     * Generated serial ID.
     */
    private static final long serialVersionUID = -8442913539662036158L;
    /** Cache of fetched vcards keyed by email. Guarded by itself — servlets are multithreaded. */
    private static final Map<String, VCard> cache = new HashMap<String, VCard>();
    /** Bytes of the bundled default avatar, loaded once at construction. */
    private final byte[] noImage = loadNoImage();
    //private static final MimeUtil2 mimeUtil = new MimeUtil2();
    private static final Object CONNECTION_LOCK = new Object();
    private final ModelUtils modelUtils;
    @Inject
    public PhotoServlet(final ModelUtils modelUtils) {
        this.modelUtils = modelUtils;
        /*
        mimeUtil.registerMimeDetector(
            "eu.medsea.mimeutil.detector.MagicMimeMimeDetector");
        */
        //Connection.DEBUG_ENABLED = true;
    }
    /**
     * Serves the avatar for the {@code email} request parameter. Rejects
     * requests whose referer is missing or not the local UI endpoint, then
     * writes the image bytes with one-day cache headers.
     */
    @Override
    protected void doGet(final HttpServletRequest req,
        final HttpServletResponse resp) throws ServletException,
        IOException {
        LanternUtils.addCSPHeader(resp);
        final String referer = req.getHeader(HttpHeaders.Names.REFERER);
        log.debug("Referer is: {}", referer);
        final String localEndpoint = StaticSettings.getLocalEndpoint();
        // getHeader returns null when the header is absent; treat a missing
        // referer the same as a non-local one instead of throwing an NPE.
        if (referer == null || !referer.startsWith(localEndpoint)) {
            sendError(resp, HttpStatus.SC_BAD_REQUEST,
                "referer must be localhost");
            return;
        }
        log.debug("Got photo request: {}", req.getRequestURI());
        final String email = req.getParameter("email");
        final byte[] imageData;
        if (StringUtils.isBlank(email)) {
            sendError(resp, HttpStatus.SC_BAD_REQUEST, "email required");
            return;
        }
        if (email.equals("default")) {
            log.debug("Serving default photo!!");
            imageData = noImage;
        } else {
            // Vcard lookup disabled — always serve the default image for now.
            imageData = noImage;
            /*
            // In theory here we could hit another Google API to avoid
            // shoving all this data through XMPP, although it probably doesn't
            // matter much -- a TCP pipe is a TCP pipe after all.
            byte[] raw = null;
            try {
                raw = getVCard(email).getAvatar();
            } catch (final CredentialException e) {
                sendError(resp, HttpStatus.SC_UNAUTHORIZED,
                    "Could not authorize Google Talk connection");
                return;
            } catch (final XMPPException e) {
                log.debug("Exception accessing vcard for "+email);
            }
            if (raw == null) {
                imageData = noImage;
            } else {
                imageData = raw;
                //final Collection<MimeType> types = mimeUtil.getMimeTypes(imageData);
                //if (types != null && !types.isEmpty()) {
                //    final String ct = types.iterator().next().toString();
                //    resp.setContentType(ct);
                //    log.debug("Set content type to {}", ct);
                //}
            }
            */
        }
        resp.addHeader(HttpHeaders.Names.CACHE_CONTROL,
            "max-age=" + CACHE_DURATION_IN_S);
        resp.setDateHeader(HttpHeaders.Names.EXPIRES,
            System.currentTimeMillis() + CACHE_DURATION_IN_MS);
        resp.setContentLength(imageData.length);
        resp.getOutputStream().write(imageData);
        //resp.getOutputStream().close();
    }
    /**
     * Returns the vcard for the given email, consulting the shared cache
     * first. The network fetch is performed outside the cache lock so slow
     * XMPP calls do not block other readers.
     *
     * @throws NullPointerException if {@code email} is blank
     */
    public VCard getVCard(final String email)
        throws CredentialException, XMPPException, IOException {
        if (StringUtils.isBlank(email)) {
            //sendError(resp, HttpStatus.SC_BAD_REQUEST, "email required");
            throw new NullPointerException("No email!");
        }
        // HashMap is not thread-safe and servlets handle concurrent requests,
        // so all cache access is synchronized on the map itself.
        synchronized (cache) {
            final VCard cached = cache.get(email);
            if (cached != null) {
                return cached;
            }
        }
        final VCard vcard = XmppUtils.getVCard(establishConnection(), email);
        synchronized (cache) {
            cache.put(email, vcard);
        }
        return vcard;
    }
    /**
     * Loads the bundled default avatar, checking the installed location
     * first and falling back to the development layout. Returns an empty
     * array (never null) if neither file can be read.
     */
    private byte[] loadNoImage() {
        final File none;
        final File installed = new File("lantern-ui/img/default-avatar.png");//default-profile-image.png");
        if (installed.isFile()) {
            none = installed;
        } else {
            none = new File("lantern-ui/app/img/default-avatar.png");
        }
        InputStream is = null;
        try {
            is = new FileInputStream(none);
            return IOUtils.toByteArray(is);
        } catch (final IOException e) {
            log.error("No default profile image?", e);
        } finally {
            IOUtils.closeQuietly(is);
        }
        return new byte[0];
    }
    private XMPPConnection establishConnection() throws CredentialException,
        XMPPException, IOException {
        // The browser will send a bunch of requests for photos, and we don't
        // want to hammer the Google Talk servers, so we synchronize to
        // create a single connection.
        synchronized (CONNECTION_LOCK) {
            if (conn != null && conn.isConnected()) {
                return conn;
            }
            final LanternGoogleOAuth2Credentials cred =
                this.modelUtils.newGoogleOauthCreds("vcard-connection");
            conn = XmppUtils.simpleGoogleTalkConnection(cred);
            return conn;
        }
    }
    /** Sends an error response, logging rather than propagating any IO failure. */
    private void sendError(final HttpServletResponse resp, final int errorCode,
        final String msg) {
        try {
            resp.sendError(errorCode, msg);
        } catch (final IOException e) {
            log.debug("Could not send response", e);
        }
    }
    /** POST is intentionally a no-op; photos are read-only. */
    @Override
    protected void doPost(final HttpServletRequest req,
        final HttpServletResponse resp) throws ServletException,
        IOException {
    }
}
| |
/*
* This file is part of choco-solver, http://choco-solver.org/
*
* Copyright (c) 2019, IMT Atlantique. All rights reserved.
*
* Licensed under the BSD 4-clause license.
*
* See LICENSE file in the project root for full license information.
*/
package org.chocosolver.solver.constraints.extension.binary;
import org.chocosolver.solver.constraints.extension.Tuples;
import org.chocosolver.solver.exception.ContradictionException;
import org.chocosolver.solver.variables.IntVar;
import org.chocosolver.solver.variables.events.IntEventType;
import org.chocosolver.solver.variables.events.PropagatorEventType;
import org.chocosolver.util.iterators.DisposableValueIterator;
import org.chocosolver.util.objects.setDataStructures.iterable.IntIterableBitSet;
import java.util.Arrays;
/**
 * Binary AC-3rm (arc consistency with residual supports) propagator for a
 * table constraint over two integer variables. For every value it remembers
 * the last support found ("residue") and only searches again when that
 * residue leaves the other domain; Deepak's condition skips values that
 * still must have a support given how little the other domain has shrunk.
 * <br/>
 *
 * @author Charles Prud'homme, Hadrien Cambazard
 * @since 24/04/2014
 */
public class PropBinAC3rm extends PropBinCSP {
    private int[] currentSupport0; // residual support in v1 for each value of v0
    private int[] currentSupport1; // residual support in v0 for each value of v1
    private int offset0; // v0's initial lower bound (array index offset)
    private int offset1; // v1's initial lower bound (array index offset)
    private int[] initS0; //initial number of supports of each value of x0
    private int[] initS1; //initial number of supports of each value of x1
    private int minS0; //value with minimum number of supports for v0
    private int minS1; //value with minimum number of supports for v1
    private int initDomSize0; // domain size of v0 at initial propagation
    private int initDomSize1; // domain size of v1 at initial propagation
    private final IntIterableBitSet vrms; // scratch set of values to remove
    public PropBinAC3rm(IntVar x, IntVar y, Tuples tuples) {
        this(x, y, new CouplesBitSetTable(tuples, x, y));
    }
    private PropBinAC3rm(IntVar x, IntVar y, CouplesBitSetTable table) {
        super(x, y, table);
        vrms = new IntIterableBitSet();
    }
    @Override
    public void propagate(int evtmask) throws ContradictionException {
        // On the first (full) propagation, build the support structures.
        if (PropagatorEventType.isFullPropagation(evtmask)) {
            initProp();
        }
        reviseV0();
        reviseV1();
    }
    @Override
    public void propagate(int idxVarInProp, int mask) throws ContradictionException {
        if (IntEventType.isInstantiate(mask)) {
            onInstantiationOf(idxVarInProp);
        } else if (idxVarInProp == 0) {
            // v0 changed: values of v1 may have lost their support.
            reviseV1();
        } else {
            reviseV0();
        }
    }
    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    /**
     * Counts, for each value of v0 and v1, its initial number of supports in
     * the other domain, scanning at most {@code b} values of v0 and {@code a}
     * values of v1, and records the minima in minS0/minS1.
     */
    private void fastInitNbSupports(int a, int b) {
        DisposableValueIterator itv0 = v0.getValueIterator(true);
        int cpt1 = 0;
        while (itv0.hasNext()) {
            int val0 = itv0.next();
            cpt1++;
            DisposableValueIterator itv1 = v1.getValueIterator(true);
            int cpt2 = 0;
            while (itv1.hasNext()) {
                cpt2++;
                int val1 = itv1.next();
                if (relation.isConsistent(val0, val1)) {
                    initS0[val0 - offset0]++;
                    initS1[val1 - offset1]++;
                }
                if (cpt2 >= a) break;
            }
            itv1.dispose();
            if (cpt1 >= b) break;
        }
        itv0.dispose();
        minS0 = Integer.MAX_VALUE;
        minS1 = Integer.MAX_VALUE;
        for (int i = 0; i < initS0.length; i++) {
            if (initS0[i] < minS0) minS0 = initS0[i];
        }
        for (int i = 0; i < initS1.length; i++) {
            if (initS1[i] < minS1) minS1 = initS1[i];
        }
    }
    // Deepak's condition: value y of v1 can only have lost all its supports
    // if v0 has lost at least initS1[y] values.
    private boolean testDeepakConditionV1(int y, int v0Size) {
        return initS1[y - offset1] <= (initDomSize0 - v0Size);
    }
    private boolean testDeepakConditionV0(int x, int v1Size) {
        return initS0[x - offset0] <= (initDomSize1 - v1Size);
    }
    private int getSupportV1(int y) {
        return currentSupport1[y - offset1];
    }
    private int getSupportV0(int x) {
        return currentSupport0[x - offset0];
    }
    /**
     * updates the support for all values in the domain of v1, and remove unsupported values for v1
     *
     * @throws ContradictionException
     */
    private void reviseV1() throws ContradictionException {
        int v0Size = v0.getDomainSize();
        // Global shortcut: if even the least-supported value of v1 cannot
        // have lost all supports yet, nothing to do.
        if (minS1 <= (initDomSize0 - v0Size)) {
            DisposableValueIterator itv1 = v1.getValueIterator(true);
            vrms.clear();
            vrms.setOffset(v1.getLB());
            try {
                while (itv1.hasNext()) {
                    int y = itv1.next();
                    if (testDeepakConditionV1(y, v0Size)) { //initS1[y - offset1] <= (initDomSize0 - v0Size)) {
                        // Only search if the cached residual support is gone.
                        if (!v0.contains(getSupportV1(y))) {
                            boolean found = false;
                            int support = 0;
                            DisposableValueIterator itv0 = v0.getValueIterator(true);
                            while (!found && itv0.hasNext()) {
                                support = itv0.next();
                                if (relation.isConsistent(support, y)) found = true;
                            }
                            itv0.dispose();
                            if (found) {
                                storeSupportV1(support, y);
                            } else {
                                vrms.add(y);
                            }
                        }
                    }
                }
                v1.removeValues(vrms, this);
            } finally {
                itv1.dispose();
            }
        }
    }
    /**
     * updates the support for all values in the domain of v0, and remove unsupported values for v0
     *
     * @throws ContradictionException
     */
    private void reviseV0() throws ContradictionException {
        int v1Size = v1.getDomainSize();
        if (minS0 <= (initDomSize1 - v1Size)) {
            DisposableValueIterator itv0 = v0.getValueIterator(true);
            vrms.clear();
            vrms.setOffset(v0.getLB());
            try {
                while (itv0.hasNext()) {
                    int x = itv0.next();
                    if (testDeepakConditionV0(x, v1Size)) { //initS0[x - offset0] <= (initDomSize1 - v1Size)) {
                        if (!v1.contains(getSupportV0(x))) {
                            boolean found = false;
                            int support = 0;
                            DisposableValueIterator itv1 = v1.getValueIterator(true);
                            while (!found && itv1.hasNext()) {
                                support = itv1.next();
                                if (relation.isConsistent(x, support)) found = true;
                            }
                            itv1.dispose();
                            if (found) {
                                storeSupportV0(support, x);
                            } else {
                                vrms.add(x);
                            }
                        }
                    }
                }
                v0.removeValues(vrms, this);
            } finally {
                itv0.dispose();
            }
        }
    }
    // Supports are stored symmetrically: a found pair (x, support) is a
    // residue for both directions.
    private void storeSupportV0(int support, int x) {
        currentSupport0[x - offset0] = support;
        currentSupport1[support - offset1] = x;
    }
    private void storeSupportV1(int support, int y) {
        currentSupport1[y - offset1] = support;
        currentSupport0[support - offset0] = y;
    }
    /**
     * First propagation: allocates the support arrays, counts initial
     * supports, then removes every value of v0 and v1 that has no support
     * at all, seeding the residues along the way.
     */
    private void initProp() throws ContradictionException {
        offset1 = v1.getLB();
        offset0 = v0.getLB();
        currentSupport0 = new int[v0.getUB() - v0.getLB() + 1];
        currentSupport1 = new int[v1.getUB() - v1.getLB() + 1];
        initS0 = new int[v0.getUB() - v0.getLB() + 1];
        initS1 = new int[v1.getUB() - v1.getLB() + 1];
        initDomSize0 = v0.getDomainSize();
        initDomSize1 = v1.getDomainSize();
        Arrays.fill(currentSupport0, -1);
        Arrays.fill(currentSupport1, -1);
        //double cardprod = v0.getDomainSize() * v1.getDomainSize();
        //if (cardprod <= 7000)
        fastInitNbSupports(Integer.MAX_VALUE, Integer.MAX_VALUE);
        //else fastInitNbSupports(80,80);
        DisposableValueIterator itv0 = v0.getValueIterator(true);
        vrms.clear();
        vrms.setOffset(v0.getLB());
        int support = 0;
        boolean found = false;
        try {
            while (itv0.hasNext()) {
                DisposableValueIterator itv1 = v1.getValueIterator(true);
                int val0 = itv0.next();
                while (itv1.hasNext()) {
                    int val1 = itv1.next();
                    if (relation.isConsistent(val0, val1)) {
                        support = val1;
                        found = true;
                        break;
                    }
                }
                itv1.dispose();
                if (!found) {
                    vrms.add(val0);
                } else {
                    storeSupportV0(support, val0);
                }
                found = false;
            }
            v0.removeValues(vrms, this);
        } finally {
            itv0.dispose();
        }
        found = false;
        DisposableValueIterator itv1 = v1.getValueIterator(true);
        vrms.clear();
        vrms.setOffset(v1.getLB());
        try {
            while (itv1.hasNext()) {
                itv0 = v0.getValueIterator(true);
                int val1 = itv1.next();
                while (itv0.hasNext()) {
                    int val0 = itv0.next();
                    if (relation.isConsistent(val0, val1)) {
                        support = val0;
                        found = true;
                        break;
                    }
                }
                itv0.dispose();
                if (!found) {
                    vrms.add(val1);
                } else {
                    storeSupportV1(support, val1);
                }
                found = false;
            }
            v1.removeValues(vrms, this);
        } finally {
            itv1.dispose();
        }
        //propagate();
    }
    /**
     * One variable was instantiated: remove every value of the other
     * variable that is inconsistent with the instantiated value.
     */
    private void onInstantiationOf(int idx) throws ContradictionException {
        if (idx == 0) {
            int value = v0.getValue();
            DisposableValueIterator iterator = v1.getValueIterator(true);
            vrms.clear();
            vrms.setOffset(v1.getLB());
            try {
                while (iterator.hasNext()) {
                    int val = iterator.next();
                    if (!relation.isConsistent(value, val)) {
                        vrms.add(val);
                    }
                }
                v1.removeValues(vrms, this);
            } finally {
                iterator.dispose();
            }
        } else {
            int value = v1.getValue();
            DisposableValueIterator iterator = v0.getValueIterator(true);
            vrms.clear();
            vrms.setOffset(v0.getLB());
            try {
                while (iterator.hasNext()) {
                    int val = iterator.next();
                    if (!relation.isConsistent(val, value)) {
                        vrms.add(val);
                    }
                }
                v0.removeValues(vrms, this);
            } finally {
                iterator.dispose();
            }
        }
    }
}
| |
package org.apache.flex.forks.velocity.io;
/*
* Copyright 2000-2001,2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.io.Writer;
/**
* Implementation of a fast Writer. It was originally taken from JspWriter
 * and modified to have less synchronization going on.
*
* @author <a href="mailto:jvanzyl@apache.org">Jason van Zyl</a>
* @author <a href="mailto:jon@latchkey.com">Jon S. Stevens</a>
* @author Anil K. Vijendran
* @version $Id: VelocityWriter.java,v 1.8.4.1 2004/03/03 23:22:54 geirm Exp $
*/
public final class VelocityWriter extends Writer
{
/**
* constant indicating that the Writer is not buffering output
*/
public static final int NO_BUFFER = 0;
/**
* constant indicating that the Writer is buffered and is using the
* implementation default buffer size
*/
public static final int DEFAULT_BUFFER = -1;
/**
* constant indicating that the Writer is buffered and is unbounded;
* this is used in BodyContent
*/
public static final int UNBOUNDED_BUFFER = -2;
protected int bufferSize;
protected boolean autoFlush;
private Writer writer;
private char cb[];
private int nextChar;
private static int defaultCharBufferSize = 8 * 1024;
private boolean flushed = false;
/**
* Create a buffered character-output stream that uses a default-sized
* output buffer.
*
* @param response A Servlet Response
*/
public VelocityWriter(Writer writer)
{
this(writer, defaultCharBufferSize, true);
}
/**
* private constructor.
*/
private VelocityWriter(int bufferSize, boolean autoFlush)
{
this.bufferSize = bufferSize;
this.autoFlush = autoFlush;
}
/**
* This method returns the size of the buffer used by the JspWriter.
*
* @return the size of the buffer in bytes, or 0 is unbuffered.
*/
public int getBufferSize() { return bufferSize; }
/**
* This method indicates whether the JspWriter is autoFlushing.
*
* @return if this JspWriter is auto flushing or throwing IOExceptions on
* buffer overflow conditions
*/
public boolean isAutoFlush() { return autoFlush; }
/**
* Create a new buffered character-output stream that uses an output
* buffer of the given size.
*
* @param response A Servlet Response
* @param sz Output-buffer size, a positive integer
*
* @exception IllegalArgumentException If sz is <= 0
*/
public VelocityWriter(Writer writer, int sz, boolean autoFlush)
{
this(sz, autoFlush);
if (sz < 0)
throw new IllegalArgumentException("Buffer size <= 0");
this.writer = writer;
cb = sz == 0 ? null : new char[sz];
nextChar = 0;
}
private final void init( Writer writer, int sz, boolean autoFlush )
{
this.writer= writer;
if( sz > 0 && ( cb == null || sz > cb.length ) )
cb=new char[sz];
nextChar = 0;
this.autoFlush=autoFlush;
this.bufferSize=sz;
}
/**
* Flush the output buffer to the underlying character stream, without
* flushing the stream itself. This method is non-private only so that it
* may be invoked by PrintStream.
*/
private final void flushBuffer() throws IOException
{
if (bufferSize == 0)
return;
flushed = true;
if (nextChar == 0)
return;
writer.write(cb, 0, nextChar);
nextChar = 0;
}
/**
* Discard the output buffer.
*/
public final void clear()
{
nextChar = 0;
}
private final void bufferOverflow() throws IOException
{
throw new IOException("overflow");
}
/**
* Flush the stream.
*
*/
public final void flush() throws IOException
{
flushBuffer();
if (writer != null)
{
writer.flush();
}
}
/**
* Close the stream.
*
*/
public final void close() throws IOException {
if (writer == null)
return;
flush();
}
/**
* @return the number of bytes unused in the buffer
*/
public final int getRemaining()
{
return bufferSize - nextChar;
}
/**
* Write a single character.
*
*/
public final void write(int c) throws IOException
{
if (bufferSize == 0)
{
writer.write(c);
}
else
{
if (nextChar >= bufferSize)
if (autoFlush)
flushBuffer();
else
bufferOverflow();
cb[nextChar++] = (char) c;
}
}
/**
* Our own little min method, to avoid loading
* <code>java.lang.Math</code> if we've run out of file
* descriptors and we're trying to print a stack trace.
*/
private final int min(int a, int b)
{
return (a < b ? a : b);
}
/**
* Write a portion of an array of characters.
*
* <p> Ordinarily this method stores characters from the given array into
* this stream's buffer, flushing the buffer to the underlying stream as
* needed. If the requested length is at least as large as the buffer,
* however, then this method will flush the buffer and write the characters
* directly to the underlying stream. Thus redundant
* <code>DiscardableBufferedWriter</code>s will not copy data unnecessarily.
*
* @param cbuf A character array
* @param off Offset from which to start reading characters
* @param len Number of characters to write
*
*/
public final void write(char cbuf[], int off, int len)
throws IOException
{
if (bufferSize == 0)
{
writer.write(cbuf, off, len);
return;
}
if (len == 0)
{
return;
}
if (len >= bufferSize)
{
/* If the request length exceeds the size of the output buffer,
flush the buffer and then write the data directly. In this
way buffered streams will cascade harmlessly. */
if (autoFlush)
flushBuffer();
else
bufferOverflow();
writer.write(cbuf, off, len);
return;
}
int b = off, t = off + len;
while (b < t)
{
int d = min(bufferSize - nextChar, t - b);
System.arraycopy(cbuf, b, cb, nextChar, d);
b += d;
nextChar += d;
if (nextChar >= bufferSize)
if (autoFlush)
flushBuffer();
else
bufferOverflow();
}
}
/**
* Write an array of characters. This method cannot be inherited from the
* Writer class because it must suppress I/O exceptions.
*/
public final void write(char buf[]) throws IOException
{
write(buf, 0, buf.length);
}
/**
* Write a portion of a String.
*
* @param s String to be written
* @param off Offset from which to start reading characters
* @param len Number of characters to be written
*
*/
public final void write(String s, int off, int len) throws IOException
{
if (bufferSize == 0)
{
writer.write(s, off, len);
return;
}
int b = off, t = off + len;
while (b < t)
{
int d = min(bufferSize - nextChar, t - b);
s.getChars(b, b + d, cb, nextChar);
b += d;
nextChar += d;
if (nextChar >= bufferSize)
if (autoFlush)
flushBuffer();
else
bufferOverflow();
}
}
/**
* Write a string. This method cannot be inherited from the Writer class
* because it must suppress I/O exceptions.
*/
public final void write(String s) throws IOException
{
write(s, 0, s.length());
}
/**
* resets this class so that it can be reused
*
*/
public final void recycle( Writer writer)
{
this.writer = writer;
flushed = false;
clear();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.near;
import java.io.Externalizable;
import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.UUID;
import javax.cache.Cache;
import javax.cache.expiry.ExpiryPolicy;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.CacheEntryPredicate;
import org.apache.ignite.internal.processors.cache.CacheEntryPredicateAdapter;
import org.apache.ignite.internal.processors.cache.GridCacheClearAllRunnable;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheEntryEx;
import org.apache.ignite.internal.processors.cache.GridCacheEntryRemovedException;
import org.apache.ignite.internal.processors.cache.GridCacheLocalConcurrentMap;
import org.apache.ignite.internal.processors.cache.GridCacheMapEntry;
import org.apache.ignite.internal.processors.cache.GridCacheMapEntryFactory;
import org.apache.ignite.internal.processors.cache.GridCachePreloader;
import org.apache.ignite.internal.processors.cache.IgniteCacheExpiryPolicy;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.cache.distributed.GridDistributedCacheAdapter;
import org.apache.ignite.internal.processors.cache.distributed.dht.CacheGetFuture;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtCacheAdapter;
import org.apache.ignite.internal.processors.cache.transactions.IgniteInternalTx;
import org.apache.ignite.internal.processors.cache.transactions.IgniteTxLocalEx;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.util.future.GridFinishedFuture;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.P1;
import org.apache.ignite.internal.util.typedef.internal.A;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Common logic for near caches.
*/
public abstract class GridNearCacheAdapter<K, V> extends GridDistributedCacheAdapter<K, V> {
    /** */
    private static final long serialVersionUID = 0L;

    /** Peek modes used to test whether a key lives in the near cache only. */
    private static final CachePeekMode[] NEAR_PEEK_MODE = {CachePeekMode.NEAR};

    /**
     * Empty constructor required for {@link Externalizable}.
     */
    protected GridNearCacheAdapter() {
        // No-op.
    }

    /**
     * @param ctx Context.
     */
    protected GridNearCacheAdapter(GridCacheContext<K, V> ctx) {
        super(ctx);
    }

    /** {@inheritDoc} */
    @Override public void start() throws IgniteCheckedException {
        if (map == null) {
            map = new GridCacheLocalConcurrentMap(
                ctx,
                entryFactory(),
                ctx.config().getNearConfiguration().getNearStartSize());
        }
    }

    /**
     * @return Entry factory producing near-cache entries.
     */
    private GridCacheMapEntryFactory entryFactory() {
        return new GridCacheMapEntryFactory() {
            @Override public GridCacheMapEntry create(
                GridCacheContext ctx,
                AffinityTopologyVersion topVer,
                KeyCacheObject key
            ) {
                return new GridNearCacheEntry(ctx, key);
            }
        };
    }

    /**
     * @return DHT cache.
     */
    public abstract GridDhtCacheAdapter<K, V> dht();

    /** {@inheritDoc} */
    @Override public void forceKeyCheck() {
        super.forceKeyCheck();

        dht().forceKeyCheck();
    }

    /** {@inheritDoc} */
    @Override public void onReconnected() {
        map = new GridCacheLocalConcurrentMap(
            ctx,
            entryFactory(),
            ctx.config().getNearConfiguration().getNearStartSize());
    }

    /** {@inheritDoc} */
    @Override public boolean isNear() {
        return true;
    }

    /** {@inheritDoc} */
    @Override public GridCachePreloader preloader() {
        return dht().preloader();
    }

    /** {@inheritDoc} */
    @Override public GridCacheMapEntry entryEx(KeyCacheObject key, AffinityTopologyVersion topVer) {
        GridNearCacheEntry entry = null;

        // Retry until the near entry can be initialized from its DHT counterpart
        // without being concurrently removed.
        while (true) {
            try {
                entry = (GridNearCacheEntry)super.entryEx(key, topVer);

                entry.initializeFromDht(topVer);

                return entry;
            }
            catch (GridCacheEntryRemovedException ignore) {
                if (log.isDebugEnabled())
                    log.debug("Got removed near entry while initializing from DHT entry (will retry): " + entry);
            }
        }
    }

    /**
     * @param key Key.
     * @param topVer Topology version.
     * @return Entry.
     */
    public GridNearCacheEntry entryExx(KeyCacheObject key, AffinityTopologyVersion topVer) {
        return (GridNearCacheEntry)entryEx(key, topVer);
    }

    /**
     * @param key Key.
     * @return Entry.
     */
    @Nullable public GridNearCacheEntry peekExx(KeyCacheObject key) {
        return (GridNearCacheEntry)peekEx(key);
    }

    /** {@inheritDoc} */
    @Override public boolean isLocked(K key) {
        return super.isLocked(key) || dht().isLocked(key);
    }

    /**
     * @param key Key.
     * @return If near entry is locked.
     */
    public boolean isLockedNearOnly(K key) {
        return super.isLocked(key);
    }

    /**
     * @param keys Keys.
     * @return If near entries for given keys are locked.
     */
    public boolean isAllLockedNearOnly(Iterable<? extends K> keys) {
        A.notNull(keys, "keys");

        for (K key : keys)
            if (!isLockedNearOnly(key))
                return false;

        return true;
    }

    /**
     * @param tx Transaction.
     * @param keys Keys to load.
     * @param forcePrimary Force primary flag.
     * @param subjId Subject ID.
     * @param taskName Task name.
     * @param deserializeBinary Deserialize binary flag.
     * @param recovery Recovery flag.
     * @param expiryPlc Expiry policy.
     * @param skipVal Skip value flag.
     * @param skipStore Skip store flag.
     * @param canRemap Can remap flag.
     * @param needVer Need version.
     * @return Loaded values.
     */
    public IgniteInternalFuture<Map<K, V>> loadAsync(
        @Nullable IgniteInternalTx tx,
        @Nullable Collection<KeyCacheObject> keys,
        boolean forcePrimary,
        @Nullable UUID subjId,
        String taskName,
        boolean deserializeBinary,
        boolean recovery,
        @Nullable ExpiryPolicy expiryPlc,
        boolean skipVal,
        boolean skipStore,
        boolean canRemap,
        boolean needVer
    ) {
        if (F.isEmpty(keys))
            return new GridFinishedFuture<>(Collections.<K, V>emptyMap());

        IgniteTxLocalEx txx = (tx != null && tx.local()) ? (IgniteTxLocalEx)tx : null;

        final IgniteCacheExpiryPolicy expiry = expiryPolicy(expiryPlc);

        GridNearGetFuture<K, V> fut = new GridNearGetFuture<>(ctx,
            keys,
            !skipStore,
            forcePrimary,
            txx,
            subjId,
            taskName,
            deserializeBinary,
            expiry,
            skipVal,
            canRemap,
            needVer,
            false,
            recovery);

        // init() will register future for responses if future has remote mappings.
        fut.init(null);

        return fut;
    }

    /** {@inheritDoc} */
    @Override public void localLoadCache(IgniteBiPredicate<K, V> p, Object[] args) throws IgniteCheckedException {
        dht().localLoadCache(p, args);
    }

    /** {@inheritDoc} */
    @Override public void localLoad(Collection<? extends K> keys, ExpiryPolicy plc, boolean keepBinary) throws IgniteCheckedException {
        dht().localLoad(keys, plc, keepBinary);
    }

    /** {@inheritDoc} */
    @Override public IgniteInternalFuture<?> localLoadCacheAsync(IgniteBiPredicate<K, V> p, Object[] args) {
        return dht().localLoadCacheAsync(p, args);
    }

    /**
     * @param nodeId Sender ID.
     * @param res Response.
     */
    protected void processGetResponse(UUID nodeId, GridNearGetResponse res) {
        CacheGetFuture fut = (CacheGetFuture)ctx.mvcc().future(res.futureId());

        if (fut == null) {
            if (log.isDebugEnabled())
                log.debug("Failed to find future for get response [sender=" + nodeId + ", res=" + res + ']');

            return;
        }

        fut.onResult(nodeId, res);
    }

    /** {@inheritDoc} */
    @Override public int size() {
        return dht().size();
    }

    /** {@inheritDoc} */
    @Override public long sizeLong() {
        // Use sizeLong() on the DHT cache: dht().size() returns int and would
        // truncate for very large caches, and would be inconsistent with
        // primarySizeLong() below which delegates to the long variant.
        return nearEntries().size() + dht().sizeLong();
    }

    /** {@inheritDoc} */
    @Override public int primarySize() {
        return dht().primarySize();
    }

    /** {@inheritDoc} */
    @Override public long primarySizeLong() {
        return dht().primarySizeLong();
    }

    /** {@inheritDoc} */
    @Override public int nearSize() {
        return nearEntries().size();
    }

    /**
     * @return Entries that are currently valid and visible in the near cache.
     */
    public Set<Cache.Entry<K, V>> nearEntries() {
        final AffinityTopologyVersion topVer = ctx.shared().exchange().readyAffinityVersion();

        return super.entrySet(new CacheEntryPredicateAdapter() {
            @Override public boolean apply(GridCacheEntryEx entry) {
                GridNearCacheEntry nearEntry = (GridNearCacheEntry)entry;

                return !nearEntry.deleted() && nearEntry.visitable(CU.empty0()) && nearEntry.valid(topVer);
            }
        });
    }

    /** {@inheritDoc} */
    @Override public Set<Cache.Entry<K, V>> entrySet(@Nullable final CacheEntryPredicate... filter) {
        CacheEntryPredicate p = new CacheEntryPredicateAdapter() {
            @Override public boolean apply(GridCacheEntryEx ex) {
                if (ex instanceof GridCacheMapEntry)
                    return ((GridCacheMapEntry)ex).visitable(filter);
                else
                    return !ex.deleted() && F.isAll(ex, filter);
            }
        };

        return new EntrySet(super.entrySet(p), dht().entrySet(p));
    }

    /** {@inheritDoc} */
    @Override public boolean evict(K key) {
        // Use unary 'and' to make sure that both sides execute.
        return super.evict(key) & dht().evict(key);
    }

    /** {@inheritDoc} */
    @Override public void evictAll(Collection<? extends K> keys) {
        super.evictAll(keys);

        dht().evictAll(keys);
    }

    /** {@inheritDoc} */
    @Override public boolean clearLocally(K key) {
        // Unary 'or' so that both near and DHT sides are cleared.
        return super.clearLocally(key) | dht().clearLocally(key);
    }

    /** {@inheritDoc} */
    @Override public void clearLocallyAll(Set<? extends K> keys, boolean srv, boolean near, boolean readers) {
        super.clearLocallyAll(keys, srv, near, readers);

        dht().clearLocallyAll(keys, srv, near, readers);
    }

    /** {@inheritDoc} */
    @Override public long offHeapEntriesCount() {
        return dht().offHeapEntriesCount();
    }

    /** {@inheritDoc} */
    @Override public long offHeapAllocatedSize() {
        return dht().offHeapAllocatedSize();
    }

    /** {@inheritDoc} */
    @Override public boolean isIgfsDataCache() {
        return dht().isIgfsDataCache();
    }

    /** {@inheritDoc} */
    @Override public long igfsDataSpaceUsed() {
        return dht().igfsDataSpaceUsed();
    }

    /** {@inheritDoc} */
    @Override public void onIgfsDataSizeChanged(long delta) {
        dht().onIgfsDataSizeChanged(delta);
    }

    /** {@inheritDoc} */
    @Override public boolean isMongoDataCache() {
        return dht().isMongoDataCache();
    }

    /** {@inheritDoc} */
    @Override public boolean isMongoMetaCache() {
        return dht().isMongoMetaCache();
    }

    /** {@inheritDoc} */
    @Override public List<GridCacheClearAllRunnable<K, V>> splitClearLocally(boolean srv, boolean near,
        boolean readers) {
        assert configuration().getNearConfiguration() != null;

        if (ctx.affinityNode()) {
            GridCacheVersion obsoleteVer = ctx.versions().next();

            List<GridCacheClearAllRunnable<K, V>> dhtJobs = dht().splitClearLocally(srv, near, readers);

            List<GridCacheClearAllRunnable<K, V>> res = new ArrayList<>(dhtJobs.size());

            // Wrap each DHT job so the near entries are cleared alongside.
            for (GridCacheClearAllRunnable<K, V> dhtJob : dhtJobs)
                res.add(new GridNearCacheClearAllRunnable<>(this, obsoleteVer, dhtJob));

            return res;
        }
        else
            return super.splitClearLocally(srv, near, readers);
    }

    /**
     * Wrapper for entry set combining the near and DHT views.
     */
    private class EntrySet extends AbstractSet<Cache.Entry<K, V>> {
        /** Near entry set. */
        private Set<Cache.Entry<K, V>> nearSet;

        /** Dht entry set. */
        private Set<Cache.Entry<K, V>> dhtSet;

        /**
         * @param nearSet Near entry set.
         * @param dhtSet Dht entry set.
         */
        private EntrySet(Set<Cache.Entry<K, V>> nearSet, Set<Cache.Entry<K, V>> dhtSet) {
            assert nearSet != null;
            assert dhtSet != null;

            this.nearSet = nearSet;
            this.dhtSet = dhtSet;
        }

        /** {@inheritDoc} */
        @NotNull @Override public Iterator<Cache.Entry<K, V>> iterator() {
            // DHT entries already visible in the near cache are skipped to
            // avoid yielding the same key twice.
            return new EntryIterator(nearSet.iterator(),
                F.iterator0(dhtSet, false, new P1<Cache.Entry<K, V>>() {
                    @Override public boolean apply(Cache.Entry<K, V> e) {
                        try {
                            return GridNearCacheAdapter.super.localPeek(e.getKey(), NEAR_PEEK_MODE, null) == null;
                        }
                        catch (IgniteCheckedException ex) {
                            throw new IgniteException(ex);
                        }
                    }
                }));
        }

        /** {@inheritDoc} */
        @Override public int size() {
            return F.size(iterator());
        }
    }

    /**
     * Entry set iterator: exhausts the near iterator first, then the DHT one.
     */
    private class EntryIterator implements Iterator<Cache.Entry<K, V>> {
        /** */
        private Iterator<Cache.Entry<K, V>> dhtIter;

        /** */
        private Iterator<Cache.Entry<K, V>> nearIter;

        /** */
        private Iterator<Cache.Entry<K, V>> currIter;

        /** */
        private Cache.Entry<K, V> currEntry;

        /**
         * @param nearIter Near set iterator.
         * @param dhtIter Dht set iterator.
         */
        private EntryIterator(Iterator<Cache.Entry<K, V>> nearIter, Iterator<Cache.Entry<K, V>> dhtIter) {
            assert nearIter != null;
            assert dhtIter != null;

            this.nearIter = nearIter;
            this.dhtIter = dhtIter;

            currIter = nearIter;
        }

        /** {@inheritDoc} */
        @Override public boolean hasNext() {
            return nearIter.hasNext() || dhtIter.hasNext();
        }

        /** {@inheritDoc} */
        @Override public Cache.Entry<K, V> next() {
            if (!hasNext())
                throw new NoSuchElementException();

            if (!currIter.hasNext())
                currIter = dhtIter;

            return currEntry = currIter.next();
        }

        /** {@inheritDoc} */
        @Override public void remove() {
            if (currEntry == null)
                throw new IllegalStateException();

            assert currIter != null;

            currIter.remove();

            try {
                GridNearCacheAdapter.this.getAndRemove(currEntry.getKey());
            }
            catch (IgniteCheckedException e) {
                throw new IgniteException(e);
            }
        }
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridNearCacheAdapter.class, this);
    }
}
| |
package org.apache.mina.transport.rawsocket;
import org.apache.mina.core.session.AbstractIoSessionConfig;
import org.apache.mina.core.session.IoSessionConfig;
import org.jnetpcap.Pcap;
import org.jnetpcap.protocol.JProtocol;
public class DefaultRawSessionConfig extends AbstractIoSessionConfig {
    /** Detail log switch (global). */
    public static boolean verbose = false;

    /** The connect timeout in millis. */
    private long connectTimeoutInMillis;

    /** The err buffer. */
    private StringBuilder errBuffer = new StringBuilder();

    /** The pcap capture filter expression. */
    private String filter;

    /** Default link-layer frame type (Ethernet). */
    public static final int DEFAULT_FRAME_TYPE = JProtocol.ETHERNET_ID;

    /** The local binding Ethernet address. */
    private EthAddress lcladdr;

    /** The loop count; -1 means loop forever. */
    private int loop = -1;

    /** Interface/session name. */
    private String name;

    /** Whether packet capture is required. */
    private boolean needCapture = true;

    /** The netmask. */
    private int netmask = 0xFFFFFF00;

    /** The pcap filter optimize flag (0 = off). */
    private int optimize = 0;

    /** The promiscuous-mode flag. */
    private int promisc = Pcap.MODE_NON_PROMISCUOUS;

    /** The protocol id. */
    private int protocolId;

    /** The snapshot length in bytes. */
    private int snaplen = 2000;

    /** The capture read timeout in millis. */
    private int timeout = 1000;

    /** The user. */
    private String user = "ate";

    /** Optional pcap dump file name. */
    private String dumperFilename;

    /** Creates a config with all defaults; local address must be set separately. */
    public DefaultRawSessionConfig() {
    }

    /**
     * Creates a config bound to the given local Ethernet address.
     *
     * @param lcladdr the local binding address
     */
    public DefaultRawSessionConfig(EthAddress lcladdr) {
        this.lcladdr = lcladdr;
    }

    /**
     * Gets the connect timeout millis.
     *
     * @return the connect timeout millis
     */
    public final long getConnectTimeoutMillis() {
        return connectTimeoutInMillis;
    }

    /**
     * Gets the dumper filename.
     *
     * @return the pcap dump file name, or null if dumping is disabled
     */
    public String getDumperFilename() {
        return dumperFilename;
    }

    /**
     * Gets the err buffer.
     *
     * @return the err buffer
     */
    public StringBuilder getErrBuffer() {
        return errBuffer;
    }

    /**
     * Gets the filter.
     *
     * @return the filter
     */
    public String getFilter() {
        return filter;
    }

    /**
     * Gets the local mac addr.
     *
     * @return the local mac addr
     */
    public EthAddress getLocalBindingAddr() {
        return lcladdr;
    }

    /**
     * Gets the loop.
     *
     * @return the loop
     */
    public int getLoop() {
        return loop;
    }

    /**
     * Gets the mac addr by ip.
     *
     * @param ip the ip
     * @return the mac addr by ip
     */
    public EthAddress getMacAddrByIp(String ip) {
        return EthAddress.get_addr_by_ip(ip);
    }

    /**
     * Gets the session name.
     *
     * @return the name
     */
    public String getName() {
        return name;
    }

    /**
     * Gets the netmask.
     *
     * @return the netmask
     */
    public int getNetmask() {
        return netmask;
    }

    /**
     * Gets the optimize.
     *
     * @return the optimize
     */
    public int getOptimize() {
        return optimize;
    }

    /**
     * Gets the promisc.
     *
     * @return the promisc
     */
    public int getPromisc() {
        return promisc;
    }

    /**
     * Gets the protocol id.
     *
     * @return the protocol id
     */
    public int getProtocolId() {
        return protocolId;
    }

    /**
     * Gets the snaplen.
     *
     * @return the snaplen
     */
    public int getSnaplen() {
        return snaplen;
    }

    /**
     * Gets the timeout.
     *
     * @return the timeout
     */
    public int getTimeout() {
        return timeout;
    }

    /**
     * Gets the user.
     *
     * @return the user
     */
    public String getUser() {
        return user;
    }

    /**
     * Checks if is need capture.
     *
     * @return true, if is need capture
     */
    public boolean isNeedCapture() {
        return needCapture;
    }

    /**
     * Sets the dumper filename.
     *
     * @param dumperFilename the new dumper filename
     */
    public void setDumperFilename(String dumperFilename) {
        this.dumperFilename = dumperFilename;
    }

    /**
     * Sets the err buffer.
     *
     * @param errBuffer the new err buffer
     */
    public void setErrBuffer(StringBuilder errBuffer) {
        this.errBuffer = errBuffer;
    }

    /**
     * Sets the filter.
     *
     * @param filter the new filter
     */
    public void setFilter(String filter) {
        this.filter = filter;
    }

    /**
     * Sets the local mac addr.
     *
     * @param macaddr the new local mac addr; must not be null
     * @throws IllegalArgumentException if macaddr is null
     */
    public void setLocalEthAddr(EthAddress macaddr) {
        // IllegalArgumentException (a RuntimeException subtype) is the
        // standard type for a violated argument precondition.
        if (macaddr == null)
            throw new IllegalArgumentException("setLocalEthAddr: address must not be null");
        this.lcladdr = macaddr;
    }

    /**
     * Sets the loop.
     *
     * @param loop the new loop
     */
    public void setLoop(int loop) {
        this.loop = loop;
    }

    /**
     * Sets the session name.
     *
     * @param name the new name
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Sets the need capture.
     *
     * @param needCapture the new need capture
     */
    public void setNeedCapture(boolean needCapture) {
        this.needCapture = needCapture;
    }

    /**
     * Sets the netmask.
     *
     * @param netmask the new netmask
     */
    public void setNetmask(int netmask) {
        this.netmask = netmask;
    }

    /**
     * Sets the optimize.
     *
     * @param optimize the new optimize
     */
    public void setOptimize(int optimize) {
        this.optimize = optimize;
    }

    /**
     * Sets the promisc.
     *
     * @param promisc the new promisc
     */
    public void setPromisc(int promisc) {
        this.promisc = promisc;
    }

    /**
     * Sets the protocol id.
     *
     * @param protocolId the new protocol id
     */
    public void setProtocolId(int protocolId) {
        this.protocolId = protocolId;
    }

    /**
     * Sets the snaplen.
     *
     * @param snaplen the new snaplen
     */
    public void setSnaplen(int snaplen) {
        this.snaplen = snaplen;
    }

    /**
     * Sets the timeout.
     *
     * @param timeout the new timeout
     */
    public void setTimeout(int timeout) {
        this.timeout = timeout;
    }

    /**
     * Sets the user.
     *
     * @param user the new user
     */
    public void setUser(String user) {
        this.user = user;
    }

    /* (non-Javadoc)
     * @see org.apache.mina.core.session.AbstractIoSessionConfig#doSetAll(org.apache.mina.core.session.IoSessionConfig)
     */
    @Override
    protected void doSetAll(IoSessionConfig config) {
        if (config instanceof DefaultRawSessionConfig) {
            DefaultRawSessionConfig cfg = (DefaultRawSessionConfig) config;

            // NOTE(review): lcladdr, name, dumperFilename, errBuffer and the
            // connect timeout are intentionally NOT copied here — confirm this
            // is deliberate before relying on doSetAll for full cloning.
            setSnaplen(cfg.getSnaplen());
            setPromisc(cfg.getPromisc());
            setTimeout(cfg.getTimeout());
            setNeedCapture(cfg.isNeedCapture());
            setProtocolId(cfg.getProtocolId());
            setFilter(cfg.getFilter());
            setUser(cfg.getUser());
            setLoop(cfg.getLoop());
            setOptimize(cfg.getOptimize());
            setNetmask(cfg.getNetmask());
        }
    }
}
| |
package qa.edu.qu.cse.cmps312.location;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import android.content.Context;
import android.graphics.Color;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.widget.TextView;
public class LocationManagerActivity extends AppCompatActivity {
    private static final long ONE_MIN = 1000 * 60;
    private static final long FIVE_MIN = ONE_MIN * 5;
    /** How long to keep listening for updates before cancelling (30 s). */
    private static final long MEASURE_TIME = 1000 * 30;
    /** Minimum interval between location updates (10 s). */
    private static final long POLLING_FREQ = 1000 * 10;
    /** Accuracy (meters) good enough to stop listening. */
    private static final float MIN_ACCURACY = 25.0f;
    /** Worst acceptable accuracy (meters) for a cached last-known fix. */
    private static final float MIN_LAST_READ_ACCURACY = 500.0f;
    /** Minimum movement (meters) between updates. */
    private static final float MIN_DISTANCE = 10.0f;

    // workaround for bug with LocationManager's "locations" getTime() function
    // You must find your own skew from Logcat if you want to use the emulator
    // Some emulator instances don't have that bug, so keep as 0 first
    private static final long SKEW = 0L;

    // Views for display location information
    private TextView mAccuracyView;
    private TextView mTimeView;
    private TextView mLatView;
    private TextView mLngView;

    // Current best location estimate
    private Location mBestReading;

    // Reference to the LocationManager and LocationListener
    private LocationManager mLocationManager;
    private LocationListener mLocationListener;

    private final String TAG = "LocationManager";

    // True until the first location update arrives; used to recolor the views once.
    private boolean mFirstUpdate = true;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_location_manager);

        mAccuracyView = (TextView) findViewById(R.id.accuracy_view);
        mTimeView = (TextView) findViewById(R.id.time_view);
        mLatView = (TextView) findViewById(R.id.lat_view);
        mLngView = (TextView) findViewById(R.id.lng_view);

        // Acquire reference to the LocationManager
        if (null == (mLocationManager = (LocationManager) getSystemService(Context.LOCATION_SERVICE))) {
            finish();
            // finish() does not stop execution of the current method; return
            // explicitly so the code below never dereferences a null manager.
            return;
        }

        // Get best last location measurement
        mBestReading = bestLastKnownLocation(MIN_LAST_READ_ACCURACY, FIVE_MIN);

        // Display last reading information
        if (null != mBestReading) {
            updateDisplay(mBestReading);
        } else {
            mAccuracyView.setText(R.string.no_init_readings);
        }

        mLocationListener = new LocationListener() {
            // Called back when location changes
            public void onLocationChanged(Location location) {
                ensureColor();

                // Determine whether new location is better than current best
                // estimate
                try {
                    Log.d(TAG, "Skew is: " + (System.currentTimeMillis() - location.getTime()));
                    Log.d(TAG, "Provider is: " + location.getProvider());

                    if (null == mBestReading
                            || location.getAccuracy() <= mBestReading.getAccuracy()) {
                        // Update best estimate
                        mBestReading = location;
                        mBestReading.setTime(mBestReading.getTime() + SKEW);

                        // Update display
                        updateDisplay(location);

                        // Good enough — stop listening to save battery.
                        if (mBestReading.getAccuracy() < MIN_ACCURACY)
                            mLocationManager.removeUpdates(mLocationListener);
                    }
                } catch (SecurityException e) {}
            }

            public void onStatusChanged(String provider, int status,
                                        Bundle extras) {
                // NA
            }

            public void onProviderEnabled(String provider) {
                // NA
            }

            public void onProviderDisabled(String provider) {
                // NA
            }
        };
    }

    @Override
    protected void onResume() {
        super.onResume();

        // Determine whether initial reading is
        // "good enough". If not, register for
        // further location updates
        if (null == mBestReading
                || mBestReading.getAccuracy() > MIN_LAST_READ_ACCURACY
                || mBestReading.getTime() < System.currentTimeMillis()
                - ONE_MIN) {
            Log.d(TAG, "Location needs updating");
            try {
                // Register for network location updates
                if (null != mLocationManager
                        .getProvider(LocationManager.NETWORK_PROVIDER)) {
                    mLocationManager.requestLocationUpdates(
                            LocationManager.NETWORK_PROVIDER, POLLING_FREQ,
                            MIN_DISTANCE, mLocationListener);
                }

                // Register for GPS location updates
                if (null != mLocationManager
                        .getProvider(LocationManager.GPS_PROVIDER)) {
                    mLocationManager.requestLocationUpdates(
                            LocationManager.GPS_PROVIDER, POLLING_FREQ,
                            MIN_DISTANCE, mLocationListener);
                }

                // Stop listening after MEASURE_TIME regardless of accuracy.
                new Handler().postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        Log.i(TAG, "location updates cancelled");
                        try {
                            mLocationManager.removeUpdates(mLocationListener);
                        } catch (SecurityException e) {}
                    }
                }, MEASURE_TIME);
            } catch (SecurityException e) {
                //code should never reach here
            }
        } else {
            Log.d(TAG, "Location is good enough");
        }
    }

    // Unregister location listeners
    @Override
    protected void onPause() {
        super.onPause();
        try {
            mLocationManager.removeUpdates(mLocationListener);
        } catch (SecurityException e) {}
    }

    // Get the last known location from all providers
    // return best reading that is as accurate as minAccuracy and
    // was taken no longer then minAge milliseconds ago. If none,
    // return null.
    private Location bestLastKnownLocation(float minAccuracy, long maxAge) {
        Location bestResult = null;
        float bestAccuracy = Float.MAX_VALUE;
        long bestAge = Long.MIN_VALUE;

        List<String> matchingProviders = mLocationManager.getAllProviders();
        for (String provider : matchingProviders) {
            try {
                Location location = mLocationManager.getLastKnownLocation(provider);
                if (location != null) {
                    location.setTime(location.getTime() + SKEW);
                    float accuracy = location.getAccuracy();
                    long time = location.getTime();

                    // NOTE(review): selection is by accuracy only; 'bestAge'
                    // tracks the timestamp of the most-accurate fix, so an
                    // old-but-accurate fix can win and then be rejected below.
                    if (accuracy < bestAccuracy) {
                        bestResult = location;
                        bestAccuracy = accuracy;
                        bestAge = time;
                    }
                }
            } catch (SecurityException e) {
            }
        }

        // Return best reading or null
        if (bestAccuracy > minAccuracy
                || (System.currentTimeMillis() - bestAge) > maxAge) {
            return null;
        } else {
            return bestResult;
        }
    }

    // Update display. Fix: latitude goes in the lat view and longitude in the
    // lng view — the original had the two swapped (lat view showed longitude).
    private void updateDisplay(Location location) {
        mAccuracyView.setText(String.format("%s %.2f", getString(R.string.accuracy), location.getAccuracy()));

        mTimeView.setText(String.format("%s %s", getString(R.string.time),
                getFormattedTime(location.getTime())));

        mLatView.setText(String.format("%s %.2f", getString(R.string.latitude), location.getLatitude()));

        mLngView.setText(String.format("%s %.2f", getString(R.string.longitude), location.getLongitude()));
    }

    // Format an epoch-millis timestamp in the device's default zone/locale.
    private String getFormattedTime(long time) {
        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss", Locale.getDefault());
        simpleDateFormat.setTimeZone(TimeZone.getDefault());
        return simpleDateFormat.format(time);
    }

    // Switch the text views from their initial color once, on the first update.
    private void ensureColor() {
        if (mFirstUpdate) {
            setTextViewColor(Color.GRAY);
            mFirstUpdate = false;
        }
    }

    private void setTextViewColor(int color) {
        mAccuracyView.setTextColor(color);
        mTimeView.setTextColor(color);
        mLatView.setTextColor(color);
        mLngView.setTextColor(color);
    }
}
| |
/*
* ConnectBot: simple, powerful, open-source SSH client for Android
* Copyright 2010 Kenny Root, Jeffrey Sharkey
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.connectbot.service;
import java.io.IOException;
import org.connectbot.bean.SelectionArea;
import org.connectbot.util.PreferenceConstants;
import android.content.SharedPreferences;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.content.res.Configuration;
import android.preference.PreferenceManager;
import android.text.ClipboardManager;
import android.util.Log;
import android.view.KeyCharacterMap;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnKeyListener;
import de.mud.terminal.VDUBuffer;
import de.mud.terminal.vt320;
/**
 * Translates Android key events into terminal input for one session: tracks
 * sticky/locked modifier state, maps number-row keys to function keys, drives
 * copy-mode navigation, and writes the resulting bytes to the remote host via
 * the bridge's transport.
 *
 * @author kenny
 */
@SuppressWarnings("deprecation") // for ClipboardManager
public class TerminalKeyListener implements OnKeyListener, OnSharedPreferenceChangeListener {
    private static final String TAG = "CB.OnKeyListener";

    // Constants for our private tracking of modifier state.
    // Each modifier has two bits: *_ON is momentary (applies to the next key
    // only) and *_LOCK (always *_ON << 1 — metaPress() relies on that) stays
    // set until toggled off again.
    public final static int OUR_CTRL_ON = 0x01;
    public final static int OUR_CTRL_LOCK = 0x02;
    public final static int OUR_ALT_ON = 0x04;
    public final static int OUR_ALT_LOCK = 0x08;
    public final static int OUR_SHIFT_ON = 0x10;
    public final static int OUR_SHIFT_LOCK = 0x20;
    // Pseudo-modifiers for the "right/left modifiers are slash and tab" keymodes.
    private final static int OUR_SLASH = 0x40;
    private final static int OUR_TAB = 0x80;
    // All the transient key codes — cleared after the next key press.
    private final static int OUR_TRANSIENT = OUR_CTRL_ON | OUR_ALT_ON
            | OUR_SHIFT_ON | OUR_SLASH | OUR_TAB;
    // The bit mask of momentary and lock states for each modifier.
    private final static int OUR_CTRL_MASK = OUR_CTRL_ON | OUR_CTRL_LOCK;
    private final static int OUR_ALT_MASK = OUR_ALT_ON | OUR_ALT_LOCK;
    private final static int OUR_SHIFT_MASK = OUR_SHIFT_ON | OUR_SHIFT_LOCK;
    // backport constants from api level 11 (Honeycomb) so this builds on
    // older SDKs while still recognising the newer physical keys
    private final static int KEYCODE_ESCAPE = 111;
    private final static int KEYCODE_CTRL_LEFT = 113;
    private final static int KEYCODE_CTRL_RIGHT = 114;
    private final static int KEYCODE_INSERT = 124;
    private final static int KEYCODE_FORWARD_DEL = 112;
    private final static int KEYCODE_MOVE_HOME = 122;
    private final static int KEYCODE_MOVE_END = 123;
    private final static int KEYCODE_PAGE_DOWN = 93;
    private final static int KEYCODE_PAGE_UP = 92;
    // Honeycomb META_CTRL_* flag values, backported for pre-11 builds.
    private final static int HC_META_CTRL_ON = 0x1000;
    private final static int HC_META_CTRL_LEFT_ON = 0x2000;
    private final static int HC_META_CTRL_RIGHT_ON = 0x4000;
    private final static int HC_META_CTRL_MASK = HC_META_CTRL_ON | HC_META_CTRL_RIGHT_ON
            | HC_META_CTRL_LEFT_ON;
    private final static int HC_META_ALT_MASK = KeyEvent.META_ALT_ON | KeyEvent.META_ALT_LEFT_ON
            | KeyEvent.META_ALT_RIGHT_ON;

    private final TerminalManager manager;   // owning service
    private final TerminalBridge bridge;     // session whose transport we write to
    private final VDUBuffer buffer;          // vt320 screen buffer receiving key events
    private String keymode = null;           // current keymode preference value
    private final boolean deviceHasHardKeyboard;
    private boolean shiftedNumbersAreFKeysOnHardKeyboard;
    private boolean controlNumbersAreFKeysOnSoftKeyboard;
    private boolean volumeKeysChangeFontSize;
    private int stickyMetas;                 // which OUR_*_ON bits may stick (from prefs)
    private int ourMetaState = 0;            // current OUR_* modifier bitfield
    private int mDeadKey = 0;                // pending dead-key combining accent, if any
    // TODO add support for the new API.
    private ClipboardManager clipboard = null; // may be null until attached by the view
    private boolean selectingForCopy = false;  // true while d-pad drives copy selection
    private final SelectionArea selectionArea;
    private String encoding;                 // charset used to encode typed characters
    private final SharedPreferences prefs;
/**
 * Creates a key listener bound to one terminal session and starts watching
 * the shared preferences that affect key handling.
 *
 * @param manager  service owning all terminal sessions (supplies prefs/resources)
 * @param bridge   bridge whose transport receives the translated bytes
 * @param buffer   vt320 screen buffer that receives decoded key events
 * @param encoding character set used to encode typed characters for the host
 */
public TerminalKeyListener(TerminalManager manager,
        TerminalBridge bridge,
        VDUBuffer buffer,
        String encoding) {
    this.manager = manager;
    this.bridge = bridge;
    this.buffer = buffer;
    this.encoding = encoding;
    selectionArea = new SelectionArea();
    prefs = PreferenceManager.getDefaultSharedPreferences(manager);
    prefs.registerOnSharedPreferenceChangeListener(this);
    // A physical QWERTY keyboard changes how modifiers and F-keys are interpreted.
    deviceHasHardKeyboard = (manager.res.getConfiguration().keyboard
            == Configuration.KEYBOARD_QWERTY);
    updatePrefs();
}
/**
 * Handle onKey() events coming down from a {@link org.connectbot.TerminalView} above us.
 * Modify the keys to make more sense to a host then pass it to the transport.
 *
 * <p>Handles, in order: special key-up events for the slash/tab keymodes,
 * volume-key font sizing, multi-character IME input, modifier presses,
 * copy-mode navigation, modified-number function keys, paste and font
 * shortcuts, printable characters (with dead-key composition), and finally
 * terminal special keys (arrows, home/end, page up/down, ...).
 *
 * @param v       view that received the event (unused)
 * @param keyCode Android key code
 * @param event   full key event
 * @return true if the event was consumed
 */
public boolean onKey(View v, int keyCode, KeyEvent event) {
    try {
        // skip keys if we aren't connected yet or have been disconnected
        if (bridge.isDisconnected() || bridge.transport == null)
            return false;
        final boolean interpretAsHardKeyboard = deviceHasHardKeyboard &&
                !manager.hardKeyboardHidden;
        final boolean rightModifiersAreSlashAndTab = interpretAsHardKeyboard &&
                PreferenceConstants.KEYMODE_RIGHT.equals(keymode);
        final boolean leftModifiersAreSlashAndTab = interpretAsHardKeyboard &&
                PreferenceConstants.KEYMODE_LEFT.equals(keymode);
        final boolean shiftedNumbersAreFKeys = shiftedNumbersAreFKeysOnHardKeyboard &&
                interpretAsHardKeyboard;
        final boolean controlNumbersAreFKeys = controlNumbersAreFKeysOnSoftKeyboard &&
                !interpretAsHardKeyboard;
        // Ignore all key-up events except for the special keys:
        // releasing an unused right/left ALT or SHIFT in the slash/tab
        // keymodes emits '/' or TAB respectively.
        if (event.getAction() == KeyEvent.ACTION_UP) {
            if (rightModifiersAreSlashAndTab) {
                if (keyCode == KeyEvent.KEYCODE_ALT_RIGHT
                        && (ourMetaState & OUR_SLASH) != 0) {
                    ourMetaState &= ~OUR_TRANSIENT;
                    bridge.transport.write('/');
                    return true;
                } else if (keyCode == KeyEvent.KEYCODE_SHIFT_RIGHT
                        && (ourMetaState & OUR_TAB) != 0) {
                    ourMetaState &= ~OUR_TRANSIENT;
                    bridge.transport.write(0x09);
                    return true;
                }
            } else if (leftModifiersAreSlashAndTab) {
                if (keyCode == KeyEvent.KEYCODE_ALT_LEFT
                        && (ourMetaState & OUR_SLASH) != 0) {
                    ourMetaState &= ~OUR_TRANSIENT;
                    bridge.transport.write('/');
                    return true;
                } else if (keyCode == KeyEvent.KEYCODE_SHIFT_LEFT
                        && (ourMetaState & OUR_TAB) != 0) {
                    ourMetaState &= ~OUR_TRANSIENT;
                    bridge.transport.write(0x09);
                    return true;
                }
            }
            return false;
        }
        //Log.i("CBKeyDebug", KeyEventUtil.describeKeyEvent(keyCode, event));
        if (volumeKeysChangeFontSize) {
            if (keyCode == KeyEvent.KEYCODE_VOLUME_UP) {
                bridge.increaseFontSize();
                return true;
            } else if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
                bridge.decreaseFontSize();
                return true;
            }
        }
        bridge.resetScrollPosition();
        // Handle potentially multi-character IME input.
        if (keyCode == KeyEvent.KEYCODE_UNKNOWN &&
                event.getAction() == KeyEvent.ACTION_MULTIPLE) {
            byte[] input = event.getCharacters().getBytes(encoding);
            bridge.transport.write(input);
            return true;
        }
        /// Handle alt and shift keys if they aren't repeating
        if (event.getRepeatCount() == 0) {
            if (rightModifiersAreSlashAndTab) {
                switch (keyCode) {
                case KeyEvent.KEYCODE_ALT_RIGHT:
                    ourMetaState |= OUR_SLASH;
                    return true;
                case KeyEvent.KEYCODE_SHIFT_RIGHT:
                    ourMetaState |= OUR_TAB;
                    return true;
                case KeyEvent.KEYCODE_SHIFT_LEFT:
                    metaPress(OUR_SHIFT_ON);
                    return true;
                case KeyEvent.KEYCODE_ALT_LEFT:
                    metaPress(OUR_ALT_ON);
                    return true;
                }
            } else if (leftModifiersAreSlashAndTab) {
                switch (keyCode) {
                case KeyEvent.KEYCODE_ALT_LEFT:
                    ourMetaState |= OUR_SLASH;
                    return true;
                case KeyEvent.KEYCODE_SHIFT_LEFT:
                    ourMetaState |= OUR_TAB;
                    return true;
                case KeyEvent.KEYCODE_SHIFT_RIGHT:
                    metaPress(OUR_SHIFT_ON);
                    return true;
                case KeyEvent.KEYCODE_ALT_RIGHT:
                    metaPress(OUR_ALT_ON);
                    return true;
                }
            } else {
                switch (keyCode) {
                case KeyEvent.KEYCODE_ALT_LEFT:
                case KeyEvent.KEYCODE_ALT_RIGHT:
                    metaPress(OUR_ALT_ON);
                    return true;
                case KeyEvent.KEYCODE_SHIFT_LEFT:
                case KeyEvent.KEYCODE_SHIFT_RIGHT:
                    metaPress(OUR_SHIFT_ON);
                    return true;
                }
            }
            if (keyCode == KEYCODE_CTRL_LEFT || keyCode == KEYCODE_CTRL_RIGHT) {
                metaPress(OUR_CTRL_ON);
                return true;
            }
        }
        // D-pad center either confirms/performs a copy-mode selection or
        // acts as a sticky CTRL key (double-press sends ESC).
        if (keyCode == KeyEvent.KEYCODE_DPAD_CENTER) {
            if (selectingForCopy) {
                if (selectionArea.isSelectingOrigin())
                    selectionArea.finishSelectingOrigin();
                else {
                    if (clipboard != null) {
                        // copy selected area to clipboard
                        String copiedText = selectionArea.copyFrom(buffer);
                        clipboard.setText(copiedText);
                        // XXX STOPSHIP
//                      manager.notifyUser(manager.getString(
//                              R.string.console_copy_done,
//                              copiedText.length()));
                        selectingForCopy = false;
                        selectionArea.reset();
                    }
                }
            } else {
                if ((ourMetaState & OUR_CTRL_ON) != 0) {
                    sendEscape();
                    ourMetaState &= ~OUR_CTRL_ON;
                } else
                    metaPress(OUR_CTRL_ON, true);
            }
            bridge.redraw();
            return true;
        }
        // Merge our sticky modifier state into the event's meta state, then
        // clear any one-shot modifiers now that they are being consumed.
        int derivedMetaState = event.getMetaState();
        if ((ourMetaState & OUR_SHIFT_MASK) != 0)
            derivedMetaState |= KeyEvent.META_SHIFT_ON;
        if ((ourMetaState & OUR_ALT_MASK) != 0)
            derivedMetaState |= KeyEvent.META_ALT_ON;
        if ((ourMetaState & OUR_CTRL_MASK) != 0)
            derivedMetaState |= HC_META_CTRL_ON;
        if ((ourMetaState & OUR_TRANSIENT) != 0) {
            ourMetaState &= ~OUR_TRANSIENT;
            bridge.redraw();
        }
        // Test for modified numbers becoming function keys
        if (shiftedNumbersAreFKeys && (derivedMetaState & KeyEvent.META_SHIFT_ON) != 0) {
            if (sendFunctionKey(keyCode))
                return true;
        }
        if (controlNumbersAreFKeys && (derivedMetaState & HC_META_CTRL_ON) != 0) {
            if (sendFunctionKey(keyCode))
                return true;
        }
        // CTRL-SHIFT-V to paste.
        // FIX: guard against a null clipboard (it is attached later by the
        // view; every other use checks null first) to avoid an NPE that was
        // previously swallowed by the catch (NullPointerException) below.
        if (keyCode == KeyEvent.KEYCODE_V
                && (derivedMetaState & HC_META_CTRL_ON) != 0
                && (derivedMetaState & KeyEvent.META_SHIFT_ON) != 0
                && clipboard != null
                && clipboard.hasText()) {
            bridge.injectString(clipboard.getText().toString());
            return true;
        }
        // CTRL-SHIFT-= / CTRL-+ grows the font, CTRL-- shrinks it.
        if ((keyCode == KeyEvent.KEYCODE_EQUALS
                && (derivedMetaState & HC_META_CTRL_ON) != 0
                && (derivedMetaState & KeyEvent.META_SHIFT_ON) != 0)
                || (keyCode == KeyEvent.KEYCODE_PLUS
                && (derivedMetaState & HC_META_CTRL_ON) != 0)) {
            bridge.increaseFontSize();
            return true;
        }
        if (keyCode == KeyEvent.KEYCODE_MINUS && (derivedMetaState & HC_META_CTRL_ON) != 0) {
            bridge.decreaseFontSize();
            return true;
        }
        // Ask the system to use the keymap to give us the unicode character for this key,
        // with our derived modifier state applied.
        int uchar = event.getUnicodeChar(derivedMetaState & ~HC_META_CTRL_MASK);
        int ucharWithoutAlt = event.getUnicodeChar(
                derivedMetaState & ~(HC_META_ALT_MASK | HC_META_CTRL_MASK));
        if (uchar == 0) {
            // Keymap doesn't know the key with alt on it, so just go with the unmodified version
            uchar = ucharWithoutAlt;
        } else if (uchar != ucharWithoutAlt) {
            // The alt key was used to modify the character returned; therefore, drop the alt
            // modifier from the state so we don't end up sending alt+key.
            derivedMetaState &= ~HC_META_ALT_MASK;
        }
        // Remove shift from the modifier state as it has already been used by getUnicodeChar.
        derivedMetaState &= ~KeyEvent.META_SHIFT_ON;
        // Dead-key handling: remember a combining accent and compose it with
        // the next key press.
        if ((uchar & KeyCharacterMap.COMBINING_ACCENT) != 0) {
            mDeadKey = uchar & KeyCharacterMap.COMBINING_ACCENT_MASK;
            return true;
        }
        if (mDeadKey != 0) {
            uchar = KeyCharacterMap.getDeadChar(mDeadKey, keyCode);
            mDeadKey = 0;
        }
        // If we have a defined non-control character
        if (uchar >= 0x20) {
            if ((derivedMetaState & HC_META_CTRL_ON) != 0)
                uchar = keyAsControl(uchar);
            if ((derivedMetaState & KeyEvent.META_ALT_ON) != 0)
                sendEscape();
            if (uchar < 0x80)
                bridge.transport.write(uchar);
            else
                // TODO write encoding routine that doesn't allocate each time
                bridge.transport.write(new String(Character.toChars(uchar))
                        .getBytes(encoding));
            return true;
        }
        // look for special chars
        switch (keyCode) {
        case KEYCODE_ESCAPE:
            sendEscape();
            return true;
        case KeyEvent.KEYCODE_TAB:
            bridge.transport.write(0x09);
            return true;
        case KeyEvent.KEYCODE_CAMERA:
            // check to see which shortcut the camera button triggers
            String camera = manager.prefs.getString(
                    PreferenceConstants.CAMERA,
                    PreferenceConstants.CAMERA_CTRLA_SPACE);
            if (PreferenceConstants.CAMERA_CTRLA_SPACE.equals(camera)) {
                bridge.transport.write(0x01);
                bridge.transport.write(' ');
            } else if (PreferenceConstants.CAMERA_CTRLA.equals(camera)) {
                bridge.transport.write(0x01);
            } else if (PreferenceConstants.CAMERA_ESC.equals(camera)) {
                ((vt320) buffer).keyTyped(vt320.KEY_ESCAPE, ' ', 0);
            } else if (PreferenceConstants.CAMERA_ESC_A.equals(camera)) {
                ((vt320) buffer).keyTyped(vt320.KEY_ESCAPE, ' ', 0);
                bridge.transport.write('a');
            }
            break;
        case KeyEvent.KEYCODE_DEL:
            ((vt320) buffer).keyPressed(vt320.KEY_BACK_SPACE, ' ',
                    getStateForBuffer());
            return true;
        case KeyEvent.KEYCODE_ENTER:
            ((vt320) buffer).keyTyped(vt320.KEY_ENTER, ' ', 0);
            return true;
        case KeyEvent.KEYCODE_DPAD_LEFT:
            // In copy mode the d-pad moves the selection instead of the cursor.
            if (selectingForCopy) {
                selectionArea.decrementColumn();
                bridge.redraw();
            } else {
                ((vt320) buffer).keyPressed(vt320.KEY_LEFT, ' ',
                        getStateForBuffer());
                bridge.tryKeyVibrate();
            }
            return true;
        case KeyEvent.KEYCODE_DPAD_UP:
            if (selectingForCopy) {
                selectionArea.decrementRow();
                bridge.redraw();
            } else {
                ((vt320) buffer).keyPressed(vt320.KEY_UP, ' ',
                        getStateForBuffer());
                bridge.tryKeyVibrate();
            }
            return true;
        case KeyEvent.KEYCODE_DPAD_DOWN:
            if (selectingForCopy) {
                selectionArea.incrementRow();
                bridge.redraw();
            } else {
                ((vt320) buffer).keyPressed(vt320.KEY_DOWN, ' ',
                        getStateForBuffer());
                bridge.tryKeyVibrate();
            }
            return true;
        case KeyEvent.KEYCODE_DPAD_RIGHT:
            if (selectingForCopy) {
                selectionArea.incrementColumn();
                bridge.redraw();
            } else {
                ((vt320) buffer).keyPressed(vt320.KEY_RIGHT, ' ',
                        getStateForBuffer());
                bridge.tryKeyVibrate();
            }
            return true;
        case KEYCODE_INSERT:
            ((vt320) buffer).keyPressed(vt320.KEY_INSERT, ' ',
                    getStateForBuffer());
            return true;
        case KEYCODE_FORWARD_DEL:
            ((vt320) buffer).keyPressed(vt320.KEY_DELETE, ' ',
                    getStateForBuffer());
            return true;
        case KEYCODE_MOVE_HOME:
            ((vt320) buffer).keyPressed(vt320.KEY_HOME, ' ',
                    getStateForBuffer());
            return true;
        case KEYCODE_MOVE_END:
            ((vt320) buffer).keyPressed(vt320.KEY_END, ' ',
                    getStateForBuffer());
            return true;
        case KEYCODE_PAGE_UP:
            ((vt320) buffer).keyPressed(vt320.KEY_PAGE_UP, ' ',
                    getStateForBuffer());
            return true;
        case KEYCODE_PAGE_DOWN:
            ((vt320) buffer).keyPressed(vt320.KEY_PAGE_DOWN, ' ',
                    getStateForBuffer());
            return true;
        }
    } catch (IOException e) {
        Log.e(TAG, "Problem while trying to handle an onKey() event", e);
        try {
            bridge.transport.flush();
        } catch (IOException ioe) {
            Log.d(TAG, "Our transport was closed, dispatching disconnect event");
            bridge.dispatchDisconnect(false);
        }
    } catch (NullPointerException npe) {
        // Last-resort guard: key events can race connection setup.
        Log.d(TAG, "Input before connection established ignored.");
        return true;
    }
    return false;
}
/**
 * Maps a printable character to the control character that CTRL+key should
 * send: letters map to 0x01-0x1A, '@'..'_' to 0x00-0x1F, space to NUL and
 * '?' to DEL. Anything else is returned unchanged.
 *
 * @param key character code to translate
 * @return translated control code, or {@code key} if no mapping applies
 */
public int keyAsControl(int key) {
    // CTRL-a .. CTRL-z
    if (key >= 'a' && key <= 'z')
        return key - 'a' + 1;
    // CTRL-A .. CTRL-_
    if (key >= 'A' && key <= '_')
        return key - 'A' + 1;
    // CTRL-space sends NUL
    if (key == ' ')
        return 0x00;
    // CTRL-? sends DEL
    if (key == '?')
        return 0x7F;
    return key;
}
/** Delivers a single ESC key press to the terminal buffer. */
public void sendEscape() {
    ((vt320) buffer).keyTyped(vt320.KEY_ESCAPE, ' ', 0);
}

/**
 * Writes a TAB (0x09) directly to the transport. On I/O failure, tries to
 * flush; if the flush also fails the transport is considered dead and a
 * disconnect is dispatched.
 */
public void sendTab() {
    try {
        bridge.transport.write(0x09);
    } catch (IOException e) {
        Log.e(TAG, "Problem while trying to send TAB press.", e);
        try {
            bridge.transport.flush();
        } catch (IOException ioe) {
            Log.d(TAG, "Our transport was closed, dispatching disconnect event");
            bridge.dispatchDisconnect(false);
        }
    }
}

/**
 * Delivers an arbitrary vt320 key code to the terminal buffer with the
 * current modifier state applied.
 *
 * @param key vt320 key constant (e.g. {@code vt320.KEY_UP})
 */
public void sendPressedKey(int key) {
    ((vt320) buffer).keyPressed(key, ' ', getStateForBuffer());
}
/**
 * Maps a number-row key code to the matching vt320 function key
 * (1-9 map to F1-F9, 0 maps to F10) and delivers it to the terminal buffer.
 *
 * @param keyCode Android key code to translate
 * @return true if the key was consumed as a function key, false otherwise
 */
private boolean sendFunctionKey(int keyCode) {
    // Function-key table: slot 0 is F1, ..., slot 9 is F10.
    final int[] fKeys = {
            vt320.KEY_F1, vt320.KEY_F2, vt320.KEY_F3, vt320.KEY_F4, vt320.KEY_F5,
            vt320.KEY_F6, vt320.KEY_F7, vt320.KEY_F8, vt320.KEY_F9, vt320.KEY_F10,
    };
    final int slot;
    if (keyCode >= KeyEvent.KEYCODE_1 && keyCode <= KeyEvent.KEYCODE_9) {
        // KEYCODE_1..KEYCODE_9 are contiguous, so this indexes F1..F9.
        slot = keyCode - KeyEvent.KEYCODE_1;
    } else if (keyCode == KeyEvent.KEYCODE_0) {
        slot = 9; // zero is out of sequence: it means F10
    } else {
        return false; // not a number-row key
    }
    ((vt320) buffer).keyPressed(fKeys[slot], ' ', 0);
    return true;
}
/**
 * Handle meta key presses where the key can be locked on.
 * <p>
 * 1st press: next key to have meta state<br />
 * 2nd press: meta state is locked on<br />
 * 3rd press: disable meta state
 *
 * @param code one of the OUR_*_ON modifier bits (the lock bit is always
 *             {@code code << 1})
 * @param forceSticky when true, engage the one-shot state even if this
 *             modifier is not configured as sticky in preferences
 */
public void metaPress(int code, boolean forceSticky) {
    if ((ourMetaState & (code << 1)) != 0) {
        // 3rd press: currently locked -> clear the lock entirely
        ourMetaState &= ~(code << 1);
    } else if ((ourMetaState & code) != 0) {
        // 2nd press: promote one-shot state to locked
        ourMetaState &= ~code;
        ourMetaState |= code << 1;
    } else if (forceSticky || (stickyMetas & code) != 0) {
        // 1st press: arm the one-shot state (only if sticky is allowed)
        ourMetaState |= code;
    } else {
        // skip redraw
        return;
    }
    bridge.redraw();
}
/** Convenience overload of {@link #metaPress(int, boolean)} without forced stickiness. */
public void metaPress(int code) {
    metaPress(code, false);
}

/**
 * Sets the keymode preference value (e.g. right/left modifiers as slash/tab)
 * used by {@code onKey}.
 *
 * @param keymode keymode preference string, may be null
 */
public void setTerminalKeyMode(String keymode) {
    this.keymode = keymode;
}
/**
 * Converts our internal modifier bitfield into the vt320 modifier flags
 * expected by the terminal buffer.
 *
 * @return combination of vt320.KEY_CONTROL / KEY_SHIFT / KEY_ALT bits
 */
private int getStateForBuffer() {
    int state = 0;
    // Each mask covers both the one-shot and locked variant of the modifier.
    if ((ourMetaState & OUR_SHIFT_MASK) != 0)
        state |= vt320.KEY_SHIFT;
    if ((ourMetaState & OUR_ALT_MASK) != 0)
        state |= vt320.KEY_ALT;
    if ((ourMetaState & OUR_CTRL_MASK) != 0)
        state |= vt320.KEY_CONTROL;
    return state;
}
/** @return the current OUR_* modifier bitfield. */
public int getMetaState() {
    return ourMetaState;
}

/** @return the pending dead-key combining accent, or 0 if none. */
public int getDeadKey() {
    return mDeadKey;
}

/**
 * Attaches the clipboard used for copy and CTRL-SHIFT-V paste.
 *
 * @param clipboard system clipboard manager; may be null until the view attaches
 */
public void setClipboardManager(ClipboardManager clipboard) {
    this.clipboard = clipboard;
}
/**
 * Reloads the cached key-handling settings whenever one of the preferences
 * this listener depends on changes.
 *
 * @param sharedPreferences preference store that fired the change (unused)
 * @param key name of the preference that changed
 */
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences,
        String key) {
    final String[] watched = {
            PreferenceConstants.KEYMODE,
            PreferenceConstants.SHIFT_FKEYS,
            PreferenceConstants.CTRL_FKEYS,
            PreferenceConstants.VOLUME_FONT,
            PreferenceConstants.STICKY_MODIFIERS,
    };
    for (String pref : watched) {
        if (pref.equals(key)) {
            updatePrefs();
            return;
        }
    }
}
/**
 * Reads the key-handling preferences into local fields: keymode,
 * shifted/control number F-keys, volume-key font sizing, and which
 * modifiers are allowed to be sticky.
 */
private void updatePrefs() {
    keymode = prefs.getString(PreferenceConstants.KEYMODE, PreferenceConstants.KEYMODE_NONE);
    shiftedNumbersAreFKeysOnHardKeyboard =
            prefs.getBoolean(PreferenceConstants.SHIFT_FKEYS, false);
    controlNumbersAreFKeysOnSoftKeyboard =
            prefs.getBoolean(PreferenceConstants.CTRL_FKEYS, false);
    volumeKeysChangeFontSize = prefs.getBoolean(PreferenceConstants.VOLUME_FONT, true);
    String stickyModifiers = prefs.getString(PreferenceConstants.STICKY_MODIFIERS,
            PreferenceConstants.NO);
    // "alt" -> only ALT is sticky; "yes" -> all modifiers; anything else -> none.
    if (PreferenceConstants.ALT.equals(stickyModifiers)) {
        stickyMetas = OUR_ALT_ON;
    } else if (PreferenceConstants.YES.equals(stickyModifiers)) {
        stickyMetas = OUR_SHIFT_ON | OUR_CTRL_ON | OUR_ALT_ON;
    } else {
        stickyMetas = 0;
    }
}
/**
 * Switches the character set used to encode typed characters for the host.
 *
 * @param encoding charset name understood by {@code String#getBytes(String)}
 */
public void setCharset(String encoding) {
    this.encoding = encoding;
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3/intent.proto
package com.google.cloud.dialogflow.cx.v3;
/**
 * The request message for [Intents.DeleteIntent][google.cloud.dialogflow.cx.v3.Intents.DeleteIntent].
 *
 * <p>Generated protobuf message; do not hand-edit the logic.
 *
 * Protobuf type {@code google.cloud.dialogflow.cx.v3.DeleteIntentRequest}
 */
public final class DeleteIntentRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.DeleteIntentRequest)
    DeleteIntentRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use DeleteIntentRequest.newBuilder() to construct.
  private DeleteIntentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: initializes the single field to its proto3 default.
  private DeleteIntentRequest() {
    name_ = "";
  }
  // Called reflectively by the protobuf runtime to create instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new DeleteIntentRequest();
  }
  // Fields seen on the wire that are not part of this message's schema.
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }
  /**
   * Wire-parsing constructor: reads tag/value pairs from the stream until
   * end-of-message, preserving unrecognized fields.
   */
  private DeleteIntentRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0: // end of stream/message
            done = true;
            break;
          case 10: // field 1 (name), wire type 2 (length-delimited string)
            {
              java.lang.String s = input.readStringRequireUtf8();
              name_ = s;
              break;
            }
          default:
            {
              // Unknown field: keep it so reserialization round-trips.
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  /** Descriptor for this message type, from the generated file descriptor. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.cx.v3.IntentProto
        .internal_static_google_cloud_dialogflow_cx_v3_DeleteIntentRequest_descriptor;
  }

  /** Maps descriptor fields to the generated accessors via reflection. */
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.cx.v3.IntentProto
        .internal_static_google_cloud_dialogflow_cx_v3_DeleteIntentRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest.class,
            com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest.Builder.class);
  }
  public static final int NAME_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; converted lazily and cached below.
  private volatile java.lang.Object name_;
  /**
   * Required. The name of the intent to delete.
   * Format: `projects/<Project ID>/locations/<Location ID>/agents/<Agent
   * ID>/intents/<Intent ID>`.
   *
   * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access: decode the ByteString once and cache the result.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /**
   * Required. The name of the intent to delete (see {@link #getName()}).
   *
   * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode the String once and cache the result.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Tri-state cache for isInitialized(): -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  /** Proto3 message with no required submessages: always initialized. */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /** Serializes the message: field 1 (if non-empty) followed by unknown fields. */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    unknownFields.writeTo(output);
  }

  /** Computes (and memoizes) the serialized byte size of this message. */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /** Field-wise equality: compares name and unknown fields. */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest other =
        (com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest) obj;
    if (!getName().equals(other.getName())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  /** Memoized hash consistent with equals(), mixing descriptor, name and unknown fields. */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard generated parse entry points. All overloads delegate to PARSER;
  // the stream-based ones additionally wrap protobuf I/O errors per the
  // GeneratedMessageV3 helpers.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // ---------------------------------------------------------------------------
  // Builder factory methods.
  // ---------------------------------------------------------------------------
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** @return a fresh builder seeded from the default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** @return a builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(
      com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless merge when converting the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Builder for {@code google.cloud.dialogflow.cx.v3.DeleteIntentRequest},
   * the request message for
   * [Intents.DeleteIntent][google.cloud.dialogflow.cx.v3.Intents.DeleteIntent].
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.DeleteIntentRequest)
      com.google.cloud.dialogflow.cx.v3.DeleteIntentRequestOrBuilder {
    /** Descriptor for the message this builder produces. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.cx.v3.IntentProto
          .internal_static_google_cloud_dialogflow_cx_v3_DeleteIntentRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.cx.v3.IntentProto
          .internal_static_google_cloud_dialogflow_cx_v3_DeleteIntentRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest.class,
              com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest.Builder.class);
    }
    // Construct using com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // No sub-builders for this message; nothing to eagerly initialize.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }
    /** Resets every field to its proto3 default. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      name_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.cx.v3.IntentProto
          .internal_static_google_cloud_dialogflow_cx_v3_DeleteIntentRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest.getDefaultInstance();
    }
    /** Builds the message, throwing if any required invariants are unmet. */
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest build() {
      com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    /** Builds the message without the initialization check. */
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest buildPartial() {
      com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest result =
          new com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest(this);
      result.name_ = name_;
      onBuilt();
      return result;
    }
    // Boilerplate overrides that narrow the superclass return type to Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    /** Merges another message, dispatching to the typed overload when possible. */
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest) {
        return mergeFrom((com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    /** Merges set (non-default) fields and unknown fields from {@code other}. */
    public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest other) {
      if (other == com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    /** Proto3: builders are always initialized. */
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    /** Parses from a stream, merging whatever was read even on failure. */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep the partially parsed message so the merge in finally{} applies it.
        parsedMessage =
            (com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Holds either a String or a ByteString; converted lazily (see accessors).
    private java.lang.Object name_ = "";
    /**
     * Required. The name of the intent to delete.
     * Format: `projects/<Project ID>/locations/<Location ID>/agents/<Agent
     * ID>/intents/<Intent ID>`.
     *
     * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        // First String access: decode the ByteString once and cache the result.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * Required. The name of the intent to delete (see {@link #getName()}).
     *
     * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        // First bytes access: encode the String once and cache the result.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
 *
 *
 * <pre>
 * Required. The name of the intent to delete.
 * Format: `projects/<Project ID>/locations/<Location ID>/agents/<Agent
 * ID>/intents/<Intent ID>`.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The name to set.
 * @return This builder for chaining.
 */
public Builder setName(java.lang.String value) {
  // Generated setters are null-hostile; absence is represented by the empty string.
  if (value == null) {
    throw new NullPointerException();
  }
  name_ = value;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. The name of the intent to delete.
 * Format: `projects/<Project ID>/locations/<Location ID>/agents/<Agent
 * ID>/intents/<Intent ID>`.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearName() {
  // Resets to the proto3 default (the default instance's name is the empty string).
  name_ = getDefaultInstance().getName();
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. The name of the intent to delete.
 * Format: `projects/<Project ID>/locations/<Location ID>/agents/<Agent
 * ID>/intents/<Intent ID>`.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The bytes for name to set.
 * @return This builder for chaining.
 */
public Builder setNameBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject bytes that are not valid UTF-8 up front, before caching them in name_.
  checkByteStringIsUtf8(value);
  name_ = value;
  onChanged();
  return this;
}
// Unknown-field handling is inherited; these overrides exist only to keep the
// generated Builder API final and to delegate to the base implementation.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.DeleteIntentRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.DeleteIntentRequest)
// Shared immutable default instance; all unset message references resolve to this object.
private static final com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest();
}
/** Returns the singleton default (all-fields-default) instance of this message type. */
public static com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless singleton parser; parsePartialFrom delegates to the message's stream constructor.
private static final com.google.protobuf.Parser<DeleteIntentRequest> PARSER =
    new com.google.protobuf.AbstractParser<DeleteIntentRequest>() {
      @java.lang.Override
      public DeleteIntentRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DeleteIntentRequest(input, extensionRegistry);
      }
    };
/** Static accessor for the singleton parser of this message type. */
public static com.google.protobuf.Parser<DeleteIntentRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DeleteIntentRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
package com.capitalone.dashboard.client.story;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.net.URI;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import com.capitalone.dashboard.model.Team;
import org.bson.types.ObjectId;
import org.codehaus.jettison.json.JSONArray;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import com.atlassian.jira.rest.client.api.domain.BasicProject;
import com.atlassian.jira.rest.client.api.domain.BasicVotes;
import com.atlassian.jira.rest.client.api.domain.BasicWatchers;
import com.atlassian.jira.rest.client.api.domain.Comment;
import com.atlassian.jira.rest.client.api.domain.Issue;
import com.atlassian.jira.rest.client.api.domain.IssueField;
import com.atlassian.jira.rest.client.api.domain.IssueType;
import com.atlassian.jira.rest.client.api.domain.Status;
import com.atlassian.jira.rest.client.api.domain.TimeTracking;
import com.atlassian.jira.rest.client.api.domain.User;
import com.capitalone.dashboard.client.JiraClient;
import com.capitalone.dashboard.model.Feature;
import com.capitalone.dashboard.model.FeatureCollector;
import com.capitalone.dashboard.model.FeatureStatus;
import com.capitalone.dashboard.repository.FeatureCollectorRepository;
import com.capitalone.dashboard.repository.FeatureRepository;
import com.capitalone.dashboard.repository.TeamRepository;
import com.capitalone.dashboard.util.CoreFeatureSettings;
import com.capitalone.dashboard.util.FeatureCollectorConstants;
import com.capitalone.dashboard.util.FeatureSettings;
/**
 * Unit tests for {@code StoryDataClientImpl}, the Jira story/feature collector client.
 *
 * <p>Collaborators (repositories and the Jira client) are mocked with Mockito; each test
 * stubs {@code jiraClient.getIssues(...)} with hand-built {@link Issue} fixtures and then
 * verifies the {@link Feature} entities the client saves via an {@link ArgumentCaptor}.
 */
@RunWith(MockitoJUnitRunner.class)
public class StoryDataClientImplTests {
    /** Fixed collector id so saved features can be asserted against a known value. */
    private static final ObjectId JIRA_COLLECTORID = new ObjectId("ABCDEF0123456789ABCDEF01");
    // Shared Jira domain fixtures used by every createIssue(...) call.
    private static final BasicProject PROJECT1 = new BasicProject(URI.create("http://my.jira.com/rest/api/2/project/100"), "project1", Long.valueOf(100L), "projectname1");
    private static final User USER1 = new User(URI.create("http://my.jira.com/rest/api/2/user?username=billy"), "Billy", "Billy Bob", "Billy@foo.com", null, getAvatarUris(), null);
    private static final IssueType ISSUETYPE1 = new IssueType(URI.create("http://my.jira.com/rest/api/2/issuetype/10"), Long.valueOf(10), "Story", false, "issuetype10", null);
    private static final Status STATUS_TODO = new Status(URI.create("http://my.jira.com/rest/api/2/status/21"), Long.valueOf(21), "OPEN", "OPEN", null);
    private static final Status STATUS_IN_PROGRESS = new Status(URI.create("http://my.jira.com/rest/api/2/status/22"), Long.valueOf(22), "IN PROGRESS", "IN PROGRESS", null);
    private static final Status STATUS_DONE = new Status(URI.create("http://my.jira.com/rest/api/2/status/23"), Long.valueOf(23), "CLOSED", "CLOSED", null);

    CoreFeatureSettings coreFeatureSettings;
    FeatureSettings featureSettings;
    @Mock FeatureRepository featureRepo;
    @Mock TeamRepository teamRepo;
    @Mock FeatureCollectorRepository featureCollectorRepository;
    @Mock JiraClient jiraClient;
    @Captor ArgumentCaptor<List<Feature>> captor;

    StoryDataClientImpl storyDataClient;

    /** Builds real settings objects, the client under test, and the common mock stubbings. */
    @Before
    public final void init() {
        coreFeatureSettings = new CoreFeatureSettings();
        featureSettings = new FeatureSettings();
        // Status buckets used to map Jira statuses to FeatureStatus values.
        coreFeatureSettings.setTodoStatuses(Arrays.asList("OPEN"));
        coreFeatureSettings.setDoingStatuses(Arrays.asList("IN PROGRESS"));
        coreFeatureSettings.setDoneStatuses(Arrays.asList("CLOSED"));
        featureSettings.setJiraIssueTypeNames(new String[] {"Story"});
        // Custom-field ids the client reads off each issue.
        featureSettings.setJiraSprintDataFieldName("custom_sprint");
        featureSettings.setJiraEpicIdFieldName("custom_epic");
        featureSettings.setJiraStoryPointsFieldName("custom_storypoints");
        featureSettings.setJiraTeamFieldName("custom_teamname");
        featureSettings.setDeltaStartDate("2016-03-01T00:00:00.000000");
        featureSettings.setPageSize(25);
        featureSettings.setJiraBaseUrl("https://jira.atlassian.com/");
        featureSettings.setJiraQueryEndpoint("rest/api/latest/");
        storyDataClient = new StoryDataClientImpl(coreFeatureSettings, featureSettings, featureRepo, featureCollectorRepository, teamRepo, jiraClient);
        FeatureCollector jira = new FeatureCollector();
        jira.setId(JIRA_COLLECTORID);
        Mockito.when(featureCollectorRepository.findByName(Mockito.eq(FeatureCollectorConstants.JIRA))).thenReturn(jira);
        Mockito.when(jiraClient.getPageSize()).thenReturn(25);
    }

    /** Single page of results: one story with sprint, story-point and team custom fields. */
    @Test
    public void testUpdateStoryInformation_NoPage() {
        // This is actually how the data comes back from jira
        String sprintRaw = "com.atlassian.greenhopper.service.sprint.Sprint@2189d27[id=2144,rapidViewId=1645,state=OPEN,name=Sprint 18,startDate=2016-05-31T14:06:46.350-04:00,endDate=2016-06-16T17:06:00.000-04:00,completeDate=2016-06-20T14:21:57.131-04:00,sequence=2144]";
        String sprintRaw2 = "com.atlassian.greenhopper.service.sprint.Sprint@2189d27[id=2144,rapidViewId=1645,state=OPEN,name=Sprint 17,startDate=2016-04-31T14:06:46.350-04:00,endDate=2016-05-31T17:06:00.000-04:00,completeDate=2016-05-31T14:21:57.131-04:00,sequence=2144]";
        JSONArray jsonA = new JSONArray();
        jsonA.put(sprintRaw);
        jsonA.put(sprintRaw2);
        List<Issue> jiraClientResponse = Arrays.asList(
                createIssue(1001, 10000000, STATUS_TODO, createTimeTracking(5 * 60, 4 * 60, 1 * 60),
                        Arrays.asList(createField("custom_sprint", "List", jsonA),
                                createField("custom_storypoints", "Integer", 3),
                                createField("custom_teamname", "String", "1534")))
        );
        Mockito.when(jiraClient.getIssues(Mockito.anyLong(), Mockito.eq(0))).thenReturn(jiraClientResponse);
        Team scopeOwner = new Team("", "");
        scopeOwner.setName("warriors");
        scopeOwner.setTeamId("1534");
        Mockito.when(teamRepo.findByTeamId(Mockito.anyString())).thenReturn(scopeOwner);
        int cnt = storyDataClient.updateStoryInformation();
        Mockito.verify(featureRepo).save(captor.capture());
        assertEquals(1, cnt);
        Feature feature1 = captor.getAllValues().get(0).get(0);
        assertEquals(JIRA_COLLECTORID, feature1.getCollectorId());
        assertEquals("1001", feature1.getsId());
        // processFeatureData
        assertEquals("key1001", feature1.getsNumber());
        assertEquals("summary1001", feature1.getsName());
        assertEquals(FeatureStatus.BACKLOG.getStatus(), feature1.getsStatus());
        assertEquals(FeatureStatus.BACKLOG.getStatus(), feature1.getsState());
        assertEquals("3", feature1.getsEstimate());
        assertEquals(Integer.valueOf(5 * 60), feature1.getsEstimateTime());
        assertEquals("False", feature1.getIsDeleted());
        assertEquals("100", feature1.getsProjectID());
        assertEquals("projectname1", feature1.getsProjectName());
        assertNotNull(feature1.getsProjectBeginDate());
        assertNotNull(feature1.getsProjectEndDate());
        assertNotNull(feature1.getsProjectChangeDate());
        assertNotNull(feature1.getsProjectState());
        assertEquals("False", feature1.getsProjectIsDeleted());
        assertNotNull(feature1.getsProjectPath());
        assertEquals("1534", feature1.getsTeamID());
        assertEquals("warriors", feature1.getsTeamName());
        assertNotNull(feature1.getsTeamChangeDate());
        assertNotNull(feature1.getsTeamAssetState());
        assertEquals("False", feature1.getsTeamIsDeleted());
        assertEquals("Active", feature1.getsOwnersState().iterator().next());
        // Fix: this assertion was accidentally duplicated on the following line.
        assertEquals(Collections.<String>emptyList(), feature1.getsOwnersChangeDate());
        // processSprintData — the client must pick the most recent sprint (Sprint 18).
        assertEquals("2144", feature1.getsSprintID());
        assertEquals("Sprint 18", feature1.getsSprintName());
        assertEquals(dateLocal("2016-05-31T14:06:46.350-04:00") + "0000", feature1.getsSprintBeginDate());
        assertEquals(dateLocal("2016-06-16T17:06:00.000-04:00") + "0000", feature1.getsSprintEndDate());
        assertEquals("OPEN", feature1.getsSprintAssetState());
        assertNotNull(feature1.getsSprintChangeDate());
        assertEquals("False", feature1.getsSprintIsDeleted());
        // processAssigneeData
        assertEquals(Arrays.asList("Billy"), feature1.getsOwnersShortName());
        assertEquals(Arrays.asList("Billy"), feature1.getsOwnersUsername());
        assertEquals(Arrays.asList("Billy"), feature1.getsOwnersID());
        assertEquals(Arrays.asList("Billy Bob"), feature1.getsOwnersFullName());
        // epic data test elsewhere
    }

    /** Paged results: three issues over a page size of 2 must produce two save() batches. */
    @Test
    public void testUpdateStoryInformation_WithPage() {
        featureSettings.setPageSize(2);
        Mockito.when(jiraClient.getPageSize()).thenReturn(2);
        // This is actually how the data comes back from jira
        String sprintRaw = "com.atlassian.greenhopper.service.sprint.Sprint@2189d27[id=2144,rapidViewId=1645,state=OPEN,name=Sprint 18,startDate=2016-05-31T14:06:46.350-04:00,endDate=2016-06-16T17:06:00.000-04:00,completeDate=2016-06-20T14:21:57.131-04:00,sequence=2144]";
        JSONArray jsonA = new JSONArray();
        jsonA.put(sprintRaw);
        List<Issue> jiraClientResponse = Arrays.asList(
                createIssue(1001, 10000000, STATUS_TODO, createTimeTracking(5 * 60, 4 * 60, 1 * 60), Arrays.asList(createField("custom_sprint", "List", jsonA))),
                createIssue(1002, 10000000, STATUS_TODO, createTimeTracking(5 * 60, 4 * 60, 1 * 60), Arrays.asList(createField("custom_sprint", "List", jsonA))),
                createIssue(1003, 10000000, STATUS_DONE, createTimeTracking(5 * 60, 4 * 60, 1 * 60), Arrays.asList(createField("custom_sprint", "List", jsonA)))
        );
        // First page returns two issues, second page the remaining one.
        Mockito.when(jiraClient.getIssues(Mockito.anyLong(), Mockito.eq(0))).thenReturn(jiraClientResponse.subList(0, 2));
        Mockito.when(jiraClient.getIssues(Mockito.anyLong(), Mockito.eq(2))).thenReturn(jiraClientResponse.subList(2, 3));
        int cnt = storyDataClient.updateStoryInformation();
        Mockito.verify(featureRepo, Mockito.times(2)).save(captor.capture());
        assertEquals(3, cnt);
        Feature feature1 = captor.getAllValues().get(0).get(0);
        assertEquals(JIRA_COLLECTORID, feature1.getCollectorId());
        assertEquals("1001", feature1.getsId());
        Feature feature2 = captor.getAllValues().get(0).get(1);
        assertEquals(JIRA_COLLECTORID, feature2.getCollectorId());
        assertEquals("1002", feature2.getsId());
        Feature feature3 = captor.getAllValues().get(1).get(0);
        assertEquals(JIRA_COLLECTORID, feature3.getCollectorId());
        assertEquals("1003", feature3.getsId());
    }

    /** A story referencing an epic must have the epic's data resolved via jiraClient.getEpic. */
    @Test
    public void testUpdateStoryInformation_WithEpic() {
        // This is actually how the data comes back from jira
        List<Issue> jiraClientResponse = Arrays.asList(
                createIssue(1001, 10000000, STATUS_DONE, createTimeTracking(5 * 60, 4 * 60, 1 * 60),
                        Arrays.asList(createField("custom_epic", "String", "1002")))
        );
        Issue jiraClientEpicResponse = createIssue(1002, 1467739128322L, STATUS_IN_PROGRESS, null, null);
        Mockito.when(jiraClient.getIssues(Mockito.anyLong(), Mockito.eq(0))).thenReturn(jiraClientResponse);
        Mockito.when(jiraClient.getEpic(Mockito.eq("1002"))).thenReturn(jiraClientEpicResponse);
        int cnt = storyDataClient.updateStoryInformation();
        Mockito.verify(featureRepo).save(captor.capture());
        assertEquals(1, cnt);
        Feature feature1 = captor.getAllValues().get(0).get(0);
        assertEquals(JIRA_COLLECTORID, feature1.getCollectorId());
        assertEquals("1001", feature1.getsId());
        assertEquals("1002", feature1.getsEpicID());
        assertEquals("key1002", feature1.getsEpicNumber());
        assertEquals("summary1002", feature1.getsEpicName());
        // Epic begin/end derive from the creation/due dates built by createIssue (+/- 1e9 ms).
        assertEquals(dateLocal("2016-06-24T03:32:08.322-00:00") + "0000", feature1.getsEpicBeginDate());
        assertEquals(dateLocal("2016-07-17T07:05:28.322-00:00") + "0000", feature1.getsEpicEndDate());
        assertEquals("IN PROGRESS", feature1.getsEpicAssetState());
        assertNotNull(feature1.getsEpicType());
        assertNotNull(feature1.getsEpicChangeDate());
        assertEquals("False", feature1.getsEpicIsDeleted());
    }

    /**
     * Builds a fully-populated Jira {@link Issue} fixture.
     *
     * @param id          numeric issue id; also used to derive key/summary/description
     * @param updateDate  epoch millis for the update date; creation is 1e9 ms earlier and
     *                    the due date 1e9 ms later
     * @param status      Jira status fixture to attach
     * @param timeTracking optional time-tracking fixture (may be null)
     * @param issueFields  optional custom fields (may be null)
     * @return the constructed issue
     */
    private Issue createIssue(long id, long updateDate, Status status, TimeTracking timeTracking, Collection<IssueField> issueFields) {
        String idStr = Long.toString(id);
        Issue rt = new Issue(
                "summary" + idStr, // summary
                URI.create("http://my.jira.com/rest/api/2/issue/" + idStr), // self
                "key" + idStr, // key
                Long.valueOf(id), // id
                PROJECT1, // project
                ISSUETYPE1, // issueType
                status, // status
                "description" + idStr, // description
                null, // priority
                null, // resolution
                Collections.emptyList(), // attachments
                USER1, // reporter
                USER1, // assignee
                new DateTime(updateDate - 1000000000, DateTimeZone.UTC), // creationDate
                new DateTime(updateDate, DateTimeZone.UTC), // updateDate
                new DateTime(updateDate + 1000000000, DateTimeZone.UTC), // dueDate
                Collections.emptyList(), // affectedVersions
                Collections.emptyList(), // fixVersions
                Collections.emptyList(), // components
                timeTracking, // timeTracking
                issueFields, // issueFields,
                Arrays.asList(Comment.valueOf("A comment")), // comments
                null, // transitionUri
                null, // issueLinks
                new BasicVotes(null, 0, false), // votes
                Collections.emptyList(), // worklogs
                new BasicWatchers(null, false, 0), // watchers
                null, // expandos
                null, // subtasks
                null, // changelog
                null, // operations
                new HashSet<>(Arrays.asList("label" + idStr)) // labels
        );
        return rt;
    }

    /** Builds a custom IssueField whose display name is derived from its id. */
    private IssueField createField(String id, String type, Object value) {
        return new IssueField(id, "name" + id, type, value);
    }

    /** Builds a TimeTracking fixture (all values in minutes, any may be null). */
    private TimeTracking createTimeTracking(Integer originalEstimateMinutes, Integer remainingEstimateMinutes, Integer timeSpentMinutes) {
        return new TimeTracking(originalEstimateMinutes, remainingEstimateMinutes, timeSpentMinutes);
    }

    /** Minimal avatar map required by the {@link User} constructor. */
    private static Map<String, URI> getAvatarUris() {
        Map<String, URI> rt = new HashMap<>();
        rt.put(User.S48_48, URI.create("http://foobar.com"));
        return rt;
    }

    /**
     * Re-renders an ISO date string in the JVM's local zone with millisecond precision,
     * matching the format the client stores on Feature date fields (caller appends "0000").
     */
    private String dateLocal(String date) {
        DateTime dt = ISODateTimeFormat.dateOptionalTimeParser().parseDateTime(date);
        return ISODateTimeFormat.dateHourMinuteSecondMillis().print(dt);
    }
}
| |
//======================================================================================
// Copyright 5AM Solutions Inc, Yale University
//
// Distributed under the OSI-approved BSD 3-Clause License.
// See http://ncip.github.com/caarray/LICENSE.txt for details.
//======================================================================================
package gov.nih.nci.caarray.services.external.v1_0.search;
import gov.nih.nci.caarray.external.v1_0.AbstractCaArrayEntity;
import gov.nih.nci.caarray.external.v1_0.CaArrayEntityReference;
import gov.nih.nci.caarray.external.v1_0.data.File;
import gov.nih.nci.caarray.external.v1_0.data.QuantitationType;
import gov.nih.nci.caarray.external.v1_0.experiment.Experiment;
import gov.nih.nci.caarray.external.v1_0.experiment.Person;
import gov.nih.nci.caarray.external.v1_0.query.AnnotationSetRequest;
import gov.nih.nci.caarray.external.v1_0.query.BiomaterialKeywordSearchCriteria;
import gov.nih.nci.caarray.external.v1_0.query.BiomaterialSearchCriteria;
import gov.nih.nci.caarray.external.v1_0.query.ExampleSearchCriteria;
import gov.nih.nci.caarray.external.v1_0.query.ExperimentSearchCriteria;
import gov.nih.nci.caarray.external.v1_0.query.FileSearchCriteria;
import gov.nih.nci.caarray.external.v1_0.query.HybridizationSearchCriteria;
import gov.nih.nci.caarray.external.v1_0.query.KeywordSearchCriteria;
import gov.nih.nci.caarray.external.v1_0.query.LimitOffset;
import gov.nih.nci.caarray.external.v1_0.query.QuantitationTypeSearchCriteria;
import gov.nih.nci.caarray.external.v1_0.query.SearchResult;
import gov.nih.nci.caarray.external.v1_0.sample.AnnotationSet;
import gov.nih.nci.caarray.external.v1_0.sample.Biomaterial;
import gov.nih.nci.caarray.external.v1_0.sample.Hybridization;
import gov.nih.nci.caarray.external.v1_0.vocabulary.Category;
import gov.nih.nci.caarray.external.v1_0.vocabulary.Term;
import gov.nih.nci.caarray.services.external.v1_0.InvalidInputException;
import gov.nih.nci.caarray.services.external.v1_0.InvalidReferenceException;
import gov.nih.nci.caarray.services.external.v1_0.UnsupportedCategoryException;
import java.util.List;
import javax.ejb.Remote;
/**
 * Remote service for search and data enumeration. Used by the grid service, and can also be used directly by EJB
 * clients.
 *
 * Several methods in this service accept a LimitOffset parameter to allow the client to request a subset of the results
 * that would otherwise be matched by the provided criteria. For these methods, there may also be a maximum number of
 * results that the system is willing to return for that query, regardless of the limit requested by the client. This
 * maximum is not specified in the method definition (and varies between the methods), but will be indicated in the
 * return value. The actual number of results returned for these methods will then be the smaller of { maximum system
 * threshold, limit requested by client in the LimitOffset parameter, actual number of results available (taking into
 * account the offset specified) }.
 *
 * @author dkokotov
 */
@Remote
public interface SearchService {
    /**
     * The JNDI name to look up this Remote EJB under.
     */
    String JNDI_NAME = "caarray/external/v1_0/SearchServiceBean";
    /**
     * Retrieve list of Person entities that are Principal Investigators on at least one experiment in the system. A
     * Person is considered a Principal Investigator if he/she is an experiment contact on an experiment with a set of
     * roles that includes the "investigator" term from the MGED ontology.
     *
     * @return the list of Person entities that are principal investigators on at least one experiment in the system.
     */
    List<Person> getAllPrincipalInvestigators();
    /**
     * Retrieve the list of all categories of characteristics, either in the entire system, or for a given experiment.
     * This list always includes the following "standard" categories:
     * <ul>
     * <li>MGED Ontology : OrganismPart
     * <li>MGED Ontology : DiseaseState
     * <li>MGED Ontology : MaterialType
     * <li>MGED Ontology : CellType
     * <li>MGED Ontology : LabelCompound
     * <li>caArray Ontology : ExternalId
     * </ul>
     * In addition, if an experiment is specified, then it includes all categories from any characteristics belonging
     * to any of the biomaterials in that experiment. If an experiment is not specified, then it includes all
     * categories from any characteristics belonging to any of the biomaterials in the entire system.
     *
     * @param experimentRef if not null, then only categories of characteristics of biomaterials in the given experiment
     *            are returned, otherwise categories of all characteristics in the system are returned.
     * @return the list of Category entities as described above.
     * @throws InvalidReferenceException if the given reference does not identify an existing experiment in the system.
     */
    List<Category> getAllCharacteristicCategories(CaArrayEntityReference experimentRef)
            throws InvalidReferenceException;
    /**
     * Retrieve the list of all terms belonging to given category in the system.
     *
     * @param categoryRef reference identifying the category
     * @param valuePrefix if not null, only include terms whose value starts with given prefix, using case insensitive
     *            matching
     * @return the terms in the given category, possibly filtered for the given prefix
     * @throws InvalidReferenceException if the given reference does not identify an existing category in the system.
     */
    List<Term> getTermsForCategory(CaArrayEntityReference categoryRef, String valuePrefix)
            throws InvalidReferenceException;
    /**
     * Search for experiments satisfying the given search criteria.
     *
     * @param criteria the search criteria.
     * @param limitOffset an optional parameter specifying the number of results to return, and the offset of the first
     *            result to return within the overall result set. May be left null to indicate the entire result set is
     *            requested.
     * @return a SearchResult with the matching experiments and metadata on the subset of matching results actually
     *         returned. This may be smaller than the requested number of results - see the class level Javadoc for
     *         details.
     * @throws InvalidReferenceException if any references within the given criteria are not valid, e.g. refer to
     *             entities that do not exist or are not of the correct types
     * @throws UnsupportedCategoryException if the search criteria includes an annotation criterion with a category
     *             other than disease state, cell type, material type, tissue site.
     */
    SearchResult<Experiment> searchForExperiments(ExperimentSearchCriteria criteria, LimitOffset limitOffset)
            throws InvalidReferenceException, UnsupportedCategoryException;
    /**
     * Search for experiments matching the given keyword criteria. The following fields are used to match the
     * keyword
     *
     * <ul>
     * <li>Experiment title
     * <li>Experiment description
     * <li>Experiment public identifier
     * <li>Experiment array provider name
     * <li>Experiment array designs' names
     * <li>Experiment organism's common and scientific names
     * <li>Experiment samples' names
     * <li>Experiment sources' disease state's values
     * </ul>
     *
     * @param criteria the keyword criteria to search for.
     * @param limitOffset an optional parameter specifying the number of results to return, and the offset of the first
     *            result to return within the overall result set. May be left null to indicate the entire result set is
     *            requested.
     * @return a SearchResult with the matching experiments and metadata on the subset of matching results actually
     *         returned. This may be smaller than the requested number of results - see the class level Javadoc for
     *         details.
     */
    SearchResult<Experiment> searchForExperimentsByKeyword(KeywordSearchCriteria criteria, LimitOffset limitOffset);
    /**
     * Search for biomaterials satisfying the given search criteria.
     *
     * @param criteria the search criteria
     * @param limitOffset an optional parameter specifying the number of results to return, and the offset of the first
     *            result to return within the overall result set. May be left null to indicate the entire result set is
     *            requested.
     * @return a SearchResult with the matching biomaterials and metadata on the subset of matching results actually
     *         returned. This may be smaller than the requested number of results - see the class level Javadoc for
     *         details.
     * @throws InvalidReferenceException if any references within the given criteria are not valid, e.g. refer to
     *             entities that do not exist or are not of the correct types
     * @throws UnsupportedCategoryException if the search criteria includes an annotation criterion with a category
     *             other than disease state, cell type, material type, tissue site.
     */
    SearchResult<Biomaterial> searchForBiomaterials(BiomaterialSearchCriteria criteria, LimitOffset limitOffset)
            throws InvalidReferenceException, UnsupportedCategoryException;
    /**
     * Search for biomaterials matching the given keyword criteria. The following fields are used to match the keyword
     *
     * <ul>
     * <li>Biomaterial name
     * <li>Biomaterial external id
     * <li>Biomaterial disease state's value
     * <li>Biomaterial tissue site's value
     * <li>Biomaterial organism's common and scientific names
     * <li>Experiment organism's common and scientific names (if biomaterial organism is not set)
     * </ul>
     *
     * @param criteria the keyword criteria to search for; this identifies the string to look for, and the types of
     *            biomaterials to include in the results.
     * @param limitOffset an optional parameter specifying the number of results to return, and the offset of the first
     *            result to return within the overall result set. May be left null to indicate the entire result set is
     *            requested.
     * @return a SearchResult with the matching biomaterials and metadata on the subset of matching results actually
     *         returned. This may be smaller than the requested number of results - see the class level Javadoc for
     *         details.
     */
    SearchResult<Biomaterial> searchForBiomaterialsByKeyword(BiomaterialKeywordSearchCriteria criteria,
            LimitOffset limitOffset);
    /**
     * Search for hybridizations satisfying the given search criteria.
     *
     * @param criteria the search criteria
     * @param limitOffset an optional parameter specifying the number of results to return, and the offset of the first
     *            result to return within the overall result set. May be left null to indicate the entire result set is
     *            requested.
     * @return a SearchResult with the matching hybridizations and metadata on the subset of matching results actually
     *         returned. This may be smaller than the requested number of results - see the class level Javadoc for
     *         details.
     * @throws InvalidReferenceException if any references within the given criteria are not valid, e.g. refer to
     *             entities that do not exist or are not of the correct types
     */
    SearchResult<Hybridization> searchForHybridizations(HybridizationSearchCriteria criteria, LimitOffset limitOffset)
            throws InvalidReferenceException;
    /**
     * Search for files satisfying the given search criteria. Note that the File instances returned by this search only
     * contain file metadata; to retrieve the actual file contents, use the file retrieval methods in DataService.
     *
     * @param criteria the search criteria.
     * @param limitOffset an optional parameter specifying the number of results to return, and the offset of the first
     *            result to return within the overall result set. May be left null to indicate the entire result set is
     *            requested.
     * @return a SearchResult with the matching files and metadata on the subset of matching results actually
     *         returned. This may be smaller than the requested number of results - see the class level Javadoc for
     *         details.
     * @throws InvalidReferenceException if any references within the given criteria are not valid, e.g. refer to
     *             entities that do not exist or are not of the correct types
     * @see DataService
     */
    SearchResult<File> searchForFiles(FileSearchCriteria criteria, LimitOffset limitOffset)
            throws InvalidReferenceException;
    /**
     * Returns a list of quantitation types satisfying the given search criteria.
     *
     * @param criteria the search criteria. The criteria must, at a minimum, include a reference to a Hybridization.
     * @return the list of QuantitationType matching criteria.
     * @throws InvalidReferenceException if any references within the given criteria are not valid, e.g. refer to
     *             entities that do not exist or are not of the correct types
     * @throws InvalidInputException if the search criteria does not have a non-null Hybridization reference
     */
    List<QuantitationType> searchForQuantitationTypes(QuantitationTypeSearchCriteria criteria)
            throws InvalidReferenceException, InvalidInputException;
    /**
     * Search for entities based on a specified example. Searches by example use the root example entities, as well as
     * its directly associated entities, to construct the query. Entities with association chains of more than 1 link to
     * the root example entities are ignored. The ExampleSearchCriteria class also allows the caller to specify how to
     * treat empty and zero-valued properties, and how string comparisons should be done.
     *
     * @param <T> type of the example entity
     * @param criteria the criteria specifying the example entity, as well as rules defining how candidate entities are
     *            matched against the example
     * @param limitOffset an optional parameter specifying the number of results to return, and the offset of the first
     *            result to return within the overall result set. May be left null to indicate the entire result set is
     *            requested.
     * @return a SearchResult with the matching entities and metadata on the subset of matching results actually
     *         returned. This may be smaller than the requested number of results - see the class level Javadoc for
     *         details.
     * @throws InvalidInputException if a null example is given
     */
    <T extends AbstractCaArrayEntity> SearchResult<T> searchByExample(ExampleSearchCriteria<T> criteria,
            LimitOffset limitOffset) throws InvalidInputException;
    /**
     * Returns an annotation set matching the given request. This annotation set consists of the values of
     * Characteristics with categories specified in the request across the experiment nodes (biomaterials and/or
     * hybridizations) specified in the request.
     * <p>
     *
     * The annotation set will include an AnnotationColumn for each ExperimentGraphNode included in the request; each
     * AnnotationColumn will include an AnnotationValueSet for each Category included in the request. The
     * AnnotationValueSet for a given experiment node and characteristic category is calculated as follows:
     *
     * <ul>
     * <li>If the node has characteristics with the category directly, then the returned set consists of the values of
     * all such characteristics</li>
     * <li>Otherwise, the returned set is given by applying this algorithm recursively to the direct predecessors of
     * this node in the chain, and union-ing the resulting values.
     * </ul>
     *
     * @param request the annotation set request
     * @return the annotation set.
     * @throws InvalidReferenceException if any references within the given criteria are not valid, e.g. refer to
     *             entities that do not exist or are not of the correct types
     */
    AnnotationSet getAnnotationSet(AnnotationSetRequest request) throws InvalidReferenceException;
}
| |
package com.smcpartners.shape.shared.utils;
import java.time.*;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalAdjusters;
import java.util.Date;
/**
 * Utility wrapper around {@code java.time} for the {@code java.util.Date} conversions
 * used throughout the application.
 * <p>
 * All conversions use the JVM's default time zone ({@link ZoneId#systemDefault()}),
 * so results depend on the host configuration.
 * <p>
 * Created by johndestefano on 9/12/15.
 */
public class DateAndTimeUtils {

    /** Formatter for long-form dates, e.g. "January 10, 2024". */
    private static final DateTimeFormatter df1 = DateTimeFormatter.ofPattern("MMMM dd, yyyy");

    /** Formatter for short dates, e.g. "01-10-2024". */
    private static final DateTimeFormatter df2 = DateTimeFormatter.ofPattern("MM-dd-yyyy");

    /**
     * Formatter for date-time strings, e.g. "01-10-2024 03:15:42".
     * NOTE(review): "hh" is the 12-hour clock and no AM/PM marker is emitted, so the
     * produced strings are ambiguous; "HH" (24-hour) was probably intended. Not changed
     * here because it would alter the strings consumers already receive — confirm first.
     */
    private static final DateTimeFormatter df3 = DateTimeFormatter.ofPattern("MM-dd-yyyy hh:mm:ss");

    /**
     * Gets the Monday of the ISO (Monday-first) week that contains the given date.
     *
     * @param dt any date within the target week
     * @return a Date at midnight (system zone) on that week's Monday
     * @throws Exception on conversion failure
     */
    public static Date getMondayOfWeekWithDate(Date dt) throws Exception {
        return getDayofWeekForContainedDate(dt, DayOfWeek.MONDAY);
    }

    /**
     * Gets the Sunday of the ISO (Monday-first) week that contains the given date.
     * Because Sunday is the last ISO day, this is the Sunday on or after {@code dt}.
     *
     * @param dt any date within the target week
     * @return a Date at midnight (system zone) on that week's Sunday
     * @throws Exception on conversion failure
     */
    public static Date getSundayofWeekWithDate(Date dt) throws Exception {
        return getDayofWeekForContainedDate(dt, DayOfWeek.SUNDAY);
    }

    /**
     * Finds the requested day of week within the ISO (Monday-first) week containing
     * {@code dt}. If {@code dt} already falls on {@code dow}, the same day is returned.
     * Otherwise the date moves forward when {@code dow} is later in the ISO week than
     * {@code dt}'s day, and backward when it is earlier. The result is at start of day.
     *
     * @param dt  the reference date
     * @param dow the day of week to locate
     * @return a Date at midnight (system zone) on the located day
     * @throws Exception on conversion failure
     */
    public static Date getDayofWeekForContainedDate(Date dt, DayOfWeek dow) throws Exception {
        LocalDate dtLocalDate = dateToSystemLocalDate(dt);
        if (isDayOfWeek(dtLocalDate, dow)) {
            return localDateToSystemAdjustedStartOfDayDate(dtLocalDate);
        }
        // ISO numbering: MONDAY == 1 ... SUNDAY == 7, so "later in the week" means
        // a larger value; pick the adjuster direction accordingly.
        LocalDate adjusted = dow.getValue() > dtLocalDate.getDayOfWeek().getValue()
                ? dtLocalDate.with(TemporalAdjusters.next(dow))
                : dtLocalDate.with(TemporalAdjusters.previous(dow));
        return localDateToSystemAdjustedStartOfDayDate(adjusted);
    }

    /**
     * Finds the next occurrence of the given day of week strictly after the given date.
     *
     * @param d   the starting date (exclusive)
     * @param dow the day of week to find
     * @return the next LocalDate falling on {@code dow}
     * @throws Exception on conversion failure
     */
    public static LocalDate calcNextDayOfWeek(LocalDate d, DayOfWeek dow) throws Exception {
        return d.with(TemporalAdjusters.next(dow));
    }

    /**
     * Finds the next occurrence of the given day of week strictly after the given
     * date-time (the time component is discarded).
     *
     * @param ldt the starting date-time
     * @param dow the day of week to find
     * @return the next LocalDate falling on {@code dow}
     * @throws Exception on conversion failure
     */
    public static LocalDate calcNextDayOfWeekFromLDT(LocalDateTime ldt, DayOfWeek dow) throws Exception {
        return calcNextDayOfWeek(ldt.toLocalDate(), dow);
    }

    /**
     * Truncates the given Date to midnight (start of day) in the system default zone.
     *
     * @param dt the date to truncate
     * @return a Date at midnight on the same calendar day
     * @throws Exception on conversion failure
     */
    public static Date adjustToMidnightForSystem(Date dt) throws Exception {
        LocalDate ld = dateToSystemLocalDate(dt);
        return getSystemDefaultDate(ld.atStartOfDay());
    }

    /**
     * Checks whether the given LocalDate falls on the given DayOfWeek.
     *
     * @param d   the date to check
     * @param dow the day of week to compare against
     * @return true when the date's day of week equals {@code dow}
     * @throws Exception never thrown; kept for interface compatibility with callers
     */
    public static boolean isDayOfWeek(LocalDate d, DayOfWeek dow) throws Exception {
        // DayOfWeek is an enum, so identity comparison is exact.
        return d.getDayOfWeek() == dow;
    }

    /**
     * Converts a LocalDateTime to a java.util.Date using the system default zone.
     *
     * @param ldt the date-time to convert
     * @return the equivalent Date instant
     * @throws Exception on conversion failure
     */
    public static Date getSystemDefaultDate(LocalDateTime ldt) throws Exception {
        return Date.from(ldt.atZone(ZoneId.systemDefault()).toInstant());
    }

    /**
     * Converts a java.util.Date to a LocalDate in the system default time zone.
     *
     * @param d the date to convert
     * @return the calendar date the instant falls on in the system zone
     * @throws Exception on conversion failure
     */
    public static LocalDate dateToSystemLocalDate(Date d) throws Exception {
        return d.toInstant().atZone(ZoneId.systemDefault()).toLocalDate();
    }

    /**
     * Converts a LocalDate to a java.util.Date at midnight (start of day) of the
     * same day in the system default zone.
     *
     * @param d the date to convert
     * @return a Date at midnight on {@code d}
     * @throws Exception on conversion failure
     */
    public static Date localDateToSystemAdjustedStartOfDayDate(LocalDate d) throws Exception {
        return getSystemDefaultDate(d.atStartOfDay());
    }

    /**
     * Extracts the date portion of a LocalDateTime.
     *
     * @param ldt the date-time
     * @return the LocalDate component
     * @throws Exception never thrown; kept for interface compatibility with callers
     */
    public static LocalDate localDateTimeToLocalDate(LocalDateTime ldt) throws Exception {
        return ldt.toLocalDate();
    }

    /**
     * Subtracts the given number of days from the date and adjusts the result to
     * midnight (start of day).
     *
     * @param ld   the starting date
     * @param days number of days to subtract
     * @return the resulting date-time at start of day
     * @throws Exception on conversion failure
     */
    public static LocalDateTime minusDaysAdjustedToStartOfDay(LocalDate ld, int days) throws Exception {
        return ld.minusDays(days).atStartOfDay();
    }

    /**
     * Adds the given number of days to the date.
     *
     * @param d    the starting date
     * @param days number of days to add (may be negative)
     * @return the resulting LocalDate
     * @throws Exception on conversion failure
     */
    public static LocalDate addDaysToDate(Date d, int days) throws Exception {
        return dateToSystemLocalDate(d).plusDays(days);
    }

    /**
     * Returns the given Date plus the given number of days, at start of day.
     *
     * @param dt   the starting date
     * @param days number of days to add (may be negative)
     * @return the resulting Date at midnight in the system zone
     * @throws Exception on conversion failure
     */
    public static Date plusDays(Date dt, int days) throws Exception {
        return localDateToSystemAdjustedStartOfDayDate(addDaysToDate(dt, days));
    }

    /**
     * Returns the given Date plus the given number of weeks, at start of day.
     *
     * @param dt    the starting date
     * @param weeks number of weeks to add (may be negative)
     * @return the resulting Date at midnight in the system zone
     * @throws Exception on conversion failure
     */
    public static Date plusWeeks(Date dt, int weeks) throws Exception {
        LocalDate ld = dateToSystemLocalDate(dt).plusWeeks(weeks);
        return localDateToSystemAdjustedStartOfDayDate(ld);
    }

    /**
     * Returns the day-of-week name (e.g. "MONDAY") for the given date.
     *
     * @param dt the date
     * @return the {@link DayOfWeek#name()} of the date in the system zone
     * @throws Exception on conversion failure
     */
    public static String getDayNameForDate(Date dt) throws Exception {
        return dateToSystemLocalDate(dt).getDayOfWeek().name();
    }

    /**
     * Formats the given date using the long-form pattern "MMMM dd, yyyy" (df1).
     *
     * @param dt the date to format
     * @return the formatted string
     * @throws Exception on conversion failure
     */
    public static String getLongDateString(Date dt) throws Exception {
        return df1.format(dateToSystemLocalDate(dt));
    }

    /**
     * Parses a date string in "MM-dd-yyyy" format (df2) into a Date at midnight.
     *
     * @param dtStr the date string, e.g. "01-10-2024"
     * @return the parsed Date at start of day in the system zone
     * @throws Exception if the string does not match the expected format
     */
    public static Date getDateFromFormat(String dtStr) throws Exception {
        return localDateToSystemAdjustedStartOfDayDate(LocalDate.parse(dtStr, df2));
    }

    /**
     * Converts a Date to a date-time string in "MM-dd-yyyy hh:mm:ss" format (df3).
     * See the df3 note regarding the ambiguous 12-hour pattern.
     *
     * @param date the date to format
     * @return the formatted date-time string
     * @throws Exception on conversion failure
     */
    public static String convertDateToDTString(Date date) throws Exception {
        LocalDateTime ldt = date.toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime();
        return df3.format(ldt);
    }

    /**
     * Returns the number of full weeks between two dates (d2 - d1), using the
     * system default zone. Partial weeks are truncated toward zero.
     *
     * @param d1 the start date
     * @param d2 the end date
     * @return the number of complete weeks between the two instants
     * @throws Exception on conversion failure
     */
    public static long getFullWeeksBetween(Date d1, Date d2) throws Exception {
        // BUGFIX(cleanup): removed an unused local ("ldt1") that duplicated the
        // start-date conversion below.
        LocalDateTime startDate = LocalDateTime.ofInstant(Instant.ofEpochMilli(d1.getTime()), ZoneId.systemDefault());
        LocalDateTime endDate = LocalDateTime.ofInstant(Instant.ofEpochMilli(d2.getTime()), ZoneId.systemDefault());
        return ChronoUnit.WEEKS.between(startDate, endDate);
    }
}
| |
package es.lnsd.citikey.discovery;
import android.app.ActionBar;
import android.app.Activity;
import android.bluetooth.BluetoothAdapter;
import android.content.ComponentName;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.IBinder;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.Toolbar;
import java.util.List;
import javax.inject.Inject;
import butterknife.Bind;
import butterknife.ButterKnife;
import es.lnsd.bluetooth.BluetoothAdapterCallback;
import es.lnsd.bluetooth.BluetoothLeHolder;
import es.lnsd.citikey.App;
import es.lnsd.citikey.R;
import es.lnsd.citikey.di.ActivityComponent;
import es.lnsd.citikey.di.DaggerActivityComponent;
import es.lnsd.citikey.di.modules.ActivityModule;
import es.lnsd.citikey.di.modules.PresenterModule;
import es.lnsd.citikey.discovery.view.DiscoveryView;
import es.lnsd.citikey.discovery.view.adapters.BeaconListAdapter;
import es.lnsd.citikey.discovery.view.presenters.DiscoveryPresenter;
import es.lnsd.citikey.core.model.Beacon;
import es.lnsd.citikey.service.BeaconService;
import es.lnsd.citikey.service.listeners.OnBatchBeaconsDetectedListener;
import es.lnsd.citikey.util.AlertDialogHelper;
import es.lnsd.mvp.core.BaseActivity;
/**
 * Activity that scans for nearby BLE beacons via {@link BeaconService} and shows
 * them in a RecyclerView. Binds to the service while visible (onResume/onPause),
 * and prompts the user to enable Bluetooth when it is off.
 */
public class DiscoveryActivity extends BaseActivity<DiscoveryPresenter> implements DiscoveryView,
        BluetoothAdapterCallback.OnStateTurnedOffListener,
        OnBatchBeaconsDetectedListener {

    public static final int ACTIVITY_REQUEST_CODE = 0x03;

    /** Dagger component for this activity; built lazily in getPresenter(). */
    private static ActivityComponent component;

    @Inject DiscoveryPresenter presenter;

    //region Views
    @Bind(R.id.discovery_toolbar) Toolbar toolbar;
    @Bind(R.id.beacons_listview) RecyclerView beaconListview;
    private BeaconListAdapter beaconAdapter;
    //endregion

    //region Beacon service
    /** Fields **/
    private static BeaconService beaconService;
    private static boolean isServiceBound = false;

    /** Defines callbacks for service binding, passed to bindService() */
    private ServiceConnection mConnection = new ServiceConnection() {
        @Override
        public void onServiceConnected(ComponentName className, IBinder service) {
            // We've bound to BeaconService, cast the IBinder and get BeaconService instance
            BeaconService.BeaconServiceBinder binder = (BeaconService.BeaconServiceBinder) service;
            beaconService = binder.getService();
            isServiceBound = true;
            // Attach listener to the service so we receive Bluetooth-off and
            // beacon-batch callbacks while bound.
            beaconService.setOnBluetoothTurnedOffCallback(DiscoveryActivity.this);
            beaconService.setOnBatchBeaconsDetectedListener(DiscoveryActivity.this);
        }

        @Override
        public void onServiceDisconnected(ComponentName name) {
            beaconService = null;
            isServiceBound = false;
        }
    };

    /** Binds to BeaconService if not already bound. */
    public void bindToBeaconService() {
        if (!isServiceBound) {
            Intent intent = new Intent(this, BeaconService.class);
            bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
        }
    }

    /** Unbinds from BeaconService if currently bound. */
    public void unbindBeaconService() {
        if (isServiceBound) {
            unbindService(mConnection);
            isServiceBound = false;
        }
    }

    /** Replaces the list contents with the newly detected batch of beacons. */
    @Override
    public void onBatchBeaconsDetected(List<Beacon> beaconList) {
        beaconAdapter.setDataset(beaconList);
    }
    //endregion

    /**
     * Lazily builds the Dagger component and injects this activity's presenter
     * on first use; subsequent calls return the injected instance.
     */
    @Override
    protected DiscoveryPresenter getPresenter() {
        if (presenter == null) {
            component = DaggerActivityComponent.builder()
                    .baseComponent(App.getBaseComponent())
                    .activityModule(new ActivityModule(this))
                    .presenterModule(new PresenterModule(this))
                    .build();
            component.inject(this);
        }
        return presenter;
    }

    public static ActivityComponent getComponent() {
        return component;
    }

    //region Activity Lifecycle
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        setContentView(R.layout.discovery_activity);
        ButterKnife.bind(this);
        super.onCreate(savedInstanceState);
        setActionBar(toolbar);
        ActionBar actionBar = getActionBar();
        if (actionBar != null) {
            // Show the Up button in the action bar.
            actionBar.setDisplayHomeAsUpEnabled(true);
        }
        //region Discovered beacons listview
        beaconAdapter = new BeaconListAdapter();
        beaconListview.setAdapter(beaconAdapter);
        // BUGFIX: setLayoutManager was previously called twice with two separate
        // LinearLayoutManager instances; one call is sufficient.
        beaconListview.setLayoutManager(new LinearLayoutManager(this));
        // Use this setting to improve performance if you know that changes
        // in content do not change the layout size of the RecyclerView
        beaconListview.setHasFixedSize(true);
        //endregion
    }

    @Override
    protected void onResume() {
        super.onResume();
        if (!BluetoothLeHolder.isBluetoothEnabled()) {
            requestEnableBluetooth();
        }
        // Bind to the service while the activity is in the foreground.
        if (!isServiceBound) {
            bindToBeaconService();
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        // Unbind from the service when leaving the foreground.
        if (isServiceBound) {
            unbindBeaconService();
        }
    }

    @Override
    public void onBackPressed() {
        setResult(Activity.RESULT_OK);
        super.onBackPressed();
        overridePendingTransition(R.anim.pull_in_left, R.anim.push_out_right);
    }
    //endregion

    //region Bluetooth request
    private static final int REQUEST_ENABLE_BLUETOOTH = 0x00;
    private boolean isRequestOngoing = false;

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == REQUEST_ENABLE_BLUETOOTH) {
            if (resultCode != Activity.RESULT_OK) {
                showBluetoothNotEnabledDialog();
            } else {
                isRequestOngoing = false;
                if (!isServiceBound) {
                    bindToBeaconService();
                }
            }
        }
    }

    /** Launches the system "enable Bluetooth" dialog, at most once at a time. */
    public void requestEnableBluetooth() {
        if (!isRequestOngoing) {
            Intent intent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
            startActivityForResult(intent, REQUEST_ENABLE_BLUETOOTH);
            isRequestOngoing = true;
        }
    }

    @Override
    public void onBluetoothAdapterTurnedOff() {
        if (!BluetoothLeHolder.isBluetoothEnabled()) {
            requestEnableBluetooth();
        }
    }
    //endregion

    //region Toolbar menu
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.discovery_menu, menu);
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.action_debug:
                presenter.onDebugMenuClick();
                return true;
            case android.R.id.home:
                onBackPressed();
                // BUGFIX: previously fell through to the default branch, so an
                // already-handled Up press was also forwarded to super.
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }
    //endregion

    //region Dialog helpers
    /** Informs the user BLE is unsupported, then cancels and finishes. */
    public void showNotBleCapableDialog() {
        DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                setResult(Activity.RESULT_CANCELED);
                finish();
            }
        };
        AlertDialogHelper.showAlertDialog(this, R.string.ble_not_supported, R.string.ble_not_supported_message, listener);
    }

    /** Informs the user Bluetooth is required, then cancels and finishes. */
    public void showBluetoothNotEnabledDialog() {
        DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                setResult(Activity.RESULT_CANCELED);
                finish();
            }
        };
        AlertDialogHelper.showAlertDialog(this, R.string.bluetooth_required, R.string.bluetooth_required_message, listener);
    }
    //endregion
}
| |
/*
* Copyright (c) 2009, James Leigh All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of the openrdf.org nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*/
package org.openrdf.repository.object.compiler;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.openrdf.model.Model;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ContextStatementImpl;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.model.vocabulary.OWL;
import org.openrdf.model.vocabulary.RDF;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.RDFParserRegistry;
import org.openrdf.rio.helpers.StatementCollector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Reads ontologies and schemas into memory from remote sources.
*
* @author James Leigh
*
*/
public class OntologyLoader {

    private Logger logger = LoggerFactory.getLogger(OntologyLoader.class);

    /** Aggregated statements from every ontology loaded so far. */
    private Model model;

    /** context -> prefix -> namespace */
    private Map<URI, Map<String, String>> namespaces = new HashMap<URI, Map<String,String>>();

    /** URLs already fetched through owl:imports, used to avoid re-loading. */
    private List<URL> imported = new ArrayList<URL>();

    private ValueFactory vf = ValueFactoryImpl.getInstance();

    public OntologyLoader(Model model) {
        this.model = model;
    }

    public List<URL> getImported() {
        return imported;
    }

    public Model getModel() {
        return model;
    }

    /** context -> prefix -> namespace */
    public Map<URI, Map<String, String>> getNamespaces() {
        return namespaces;
    }

    /**
     * Loads each URL into the model, using the URL itself as the context URI
     * for every statement parsed from it.
     */
    public void loadOntologies(List<URL> urls) throws RDFParseException,
            IOException {
        for (URL url : urls) {
            loadOntology(url, null, vf.createURI(url.toExternalForm()));
        }
    }

    /**
     * Resolves owl:imports transitively: collects import targets that are not
     * yet present in the model and not already fetched, loads them, then recurses
     * until no new imports are discovered.
     */
    public void followImports() throws RDFParseException, IOException {
        List<URL> urls = new ArrayList<URL>();
        for (Value obj : model.filter(null, OWL.IMPORTS, null).objects()) {
            if (obj instanceof URI) {
                URI uri = (URI) obj;
                // Skip targets already loaded as a context or already declared
                // as an owl:Ontology in the model.
                if (!model.contains(null, null, null, uri)
                        && !model.contains(uri, RDF.TYPE, OWL.ONTOLOGY)) {
                    URL url = new URL(uri.stringValue());
                    if (!imported.contains(url)) {
                        urls.add(url);
                    }
                }
            }
        }
        if (!urls.isEmpty()) {
            // Record before loading so the recursion terminates even if a
            // loaded document re-imports one of these URLs.
            imported.addAll(urls);
            for (URL url : urls) {
                String uri = url.toExternalForm();
                loadOntology(url, null, vf.createURI(uri));
            }
            followImports();
        }
    }

    /**
     * Fetches one ontology document and parses it into the model, stamping every
     * statement with the given context URI and recording namespace declarations
     * per context. The format is taken from 'override' when given, otherwise
     * negotiated from the file name and the response content type.
     * IOException/SecurityException are logged and swallowed (best-effort load);
     * parse errors are rethrown with the URL added to the message.
     */
    private void loadOntology(URL url, RDFFormat override, final URI uri)
            throws IOException, RDFParseException {
        try {
            URLConnection conn = url.openConnection();
            if (override == null) {
                conn.setRequestProperty("Accept", getAcceptHeader());
            } else {
                conn
                        .setRequestProperty("Accept", override
                                .getDefaultMIMEType());
            }
            RDFFormat format = override;
            if (override == null) {
                // Default to RDF/XML, then refine from the file name and the
                // server-reported MIME type.
                format = RDFFormat.RDFXML;
                format = RDFFormat.forFileName(url.toString(), format);
                format = RDFFormat.forMIMEType(conn.getContentType(), format);
            }
            RDFParserRegistry registry = RDFParserRegistry.getInstance();
            RDFParser parser = registry.get(format).getParser();
            parser.setRDFHandler(new StatementCollector(model, model
                    .getNamespaces()) {
                @Override
                public void handleStatement(Statement st) {
                    // Re-wrap each statement with the ontology's context URI.
                    Resource s = st.getSubject();
                    URI p = st.getPredicate();
                    Value o = st.getObject();
                    super
                            .handleStatement(new ContextStatementImpl(s, p, o,
                                    uri));
                }

                @Override
                public void handleNamespace(String prefix, String ns)
                        throws RDFHandlerException {
                    // Record the prefix mapping under this ontology's context.
                    Map<String, String> map = namespaces.get(uri);
                    if (map == null) {
                        namespaces
                                .put(uri, map = new HashMap<String, String>());
                    }
                    map.put(prefix, ns);
                    super.handleNamespace(prefix, ns);
                }
            });
            InputStream in = conn.getInputStream();
            try {
                parser.parse(in, url.toExternalForm());
            } catch (RDFHandlerException e) {
                // StatementCollector does not throw; anything here is a bug.
                throw new AssertionError(e);
            } catch (RDFParseException e) {
                if (override == null && format.equals(RDFFormat.NTRIPLES)) {
                    // sometimes text/plain is used for rdf+xml
                    loadOntology(url, RDFFormat.RDFXML, uri);
                } else {
                    throw e;
                }
            } finally {
                in.close();
            }
        } catch (RDFParseException e) {
            logger.warn("Could not load {} {}", url, e.getMessage());
            String msg = e.getMessage() + " in " + url;
            throw new RDFParseException(msg, e.getLineNumber(), e.getColumnNumber());
        } catch (IOException e) {
            // Best-effort: unreachable documents are logged, not fatal.
            logger.warn("Could not load {} {}", url, e.getMessage());
        } catch (SecurityException e) {
            logger.warn("Could not load {} {}", url, e.getMessage());
        }
    }

    /**
     * Builds an HTTP Accept header listing all registered RDF MIME types, with
     * RDF/XML deliberately down-weighted (q=0.2) relative to the alternatives.
     */
    private String getAcceptHeader() {
        StringBuilder sb = new StringBuilder();
        String preferred = RDFFormat.RDFXML.getDefaultMIMEType();
        sb.append(preferred).append(";q=0.2");
        Set<RDFFormat> rdfFormats = RDFParserRegistry.getInstance().getKeys();
        for (RDFFormat format : rdfFormats) {
            for (String type : format.getMIMETypes()) {
                if (!preferred.equals(type)) {
                    sb.append(", ").append(type);
                }
            }
        }
        return sb.toString();
    }
}
| |
/*
This file is part of AstC2C.
Copyright (c) STMicroelectronics, 2013.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of STMicroelectronics nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Authors: Thierry Lepley
*/
/* OpenCL C integer vector types */
package ir.types.ocl;
import ir.types.Type;
import ir.types.c.IntegerScalar;
import java.util.HashMap;
import java.util.HashSet;
public class IntegerVector extends Vector {
// Singleton instances for every OpenCL integer vector type.
// Valid OpenCL vector sizes are 2, 3, 4, 8 and 16 elements.

// 'char' vector types
public static final IntegerVector Tschar2 = new IntegerVector(IntegerScalar.Tschar,2);
public static final IntegerVector Tschar3 = new IntegerVector(IntegerScalar.Tschar,3);
public static final IntegerVector Tschar4 = new IntegerVector(IntegerScalar.Tschar,4);
public static final IntegerVector Tschar8 = new IntegerVector(IntegerScalar.Tschar,8);
public static final IntegerVector Tschar16 = new IntegerVector(IntegerScalar.Tschar,16);
public static final IntegerVector Tuchar2 = new IntegerVector(IntegerScalar.Tuchar,2);
public static final IntegerVector Tuchar3 = new IntegerVector(IntegerScalar.Tuchar,3);
public static final IntegerVector Tuchar4 = new IntegerVector(IntegerScalar.Tuchar,4);
public static final IntegerVector Tuchar8 = new IntegerVector(IntegerScalar.Tuchar,8);
public static final IntegerVector Tuchar16 = new IntegerVector(IntegerScalar.Tuchar,16);

// 'short' vector types
public static final IntegerVector Tsshort2 = new IntegerVector(IntegerScalar.Tsshort,2);
public static final IntegerVector Tsshort3 = new IntegerVector(IntegerScalar.Tsshort,3);
public static final IntegerVector Tsshort4 = new IntegerVector(IntegerScalar.Tsshort,4);
public static final IntegerVector Tsshort8 = new IntegerVector(IntegerScalar.Tsshort,8);
public static final IntegerVector Tsshort16 = new IntegerVector(IntegerScalar.Tsshort,16);
public static final IntegerVector Tushort2 = new IntegerVector(IntegerScalar.Tushort,2);
public static final IntegerVector Tushort3 = new IntegerVector(IntegerScalar.Tushort,3);
public static final IntegerVector Tushort4 = new IntegerVector(IntegerScalar.Tushort,4);
public static final IntegerVector Tushort8 = new IntegerVector(IntegerScalar.Tushort,8);
public static final IntegerVector Tushort16 = new IntegerVector(IntegerScalar.Tushort,16);

// 'int' vector types
public static final IntegerVector Tsint2 = new IntegerVector(IntegerScalar.Tsint,2);
public static final IntegerVector Tsint3 = new IntegerVector(IntegerScalar.Tsint,3);
public static final IntegerVector Tsint4 = new IntegerVector(IntegerScalar.Tsint,4);
public static final IntegerVector Tsint8 = new IntegerVector(IntegerScalar.Tsint,8);
public static final IntegerVector Tsint16 = new IntegerVector(IntegerScalar.Tsint,16);
public static final IntegerVector Tuint2 = new IntegerVector(IntegerScalar.Tuint,2);
public static final IntegerVector Tuint3 = new IntegerVector(IntegerScalar.Tuint,3);
public static final IntegerVector Tuint4 = new IntegerVector(IntegerScalar.Tuint,4);
public static final IntegerVector Tuint8 = new IntegerVector(IntegerScalar.Tuint,8);
public static final IntegerVector Tuint16 = new IntegerVector(IntegerScalar.Tuint,16);

// 'long' vector types
public static final IntegerVector Tslong2 = new IntegerVector(IntegerScalar.Tslong,2);
public static final IntegerVector Tslong3 = new IntegerVector(IntegerScalar.Tslong,3);
public static final IntegerVector Tslong4 = new IntegerVector(IntegerScalar.Tslong,4);
public static final IntegerVector Tslong8 = new IntegerVector(IntegerScalar.Tslong,8);
public static final IntegerVector Tslong16 = new IntegerVector(IntegerScalar.Tslong,16);
public static final IntegerVector Tulong2 = new IntegerVector(IntegerScalar.Tulong,2);
public static final IntegerVector Tulong3 = new IntegerVector(IntegerScalar.Tulong,3);
public static final IntegerVector Tulong4 = new IntegerVector(IntegerScalar.Tulong,4);
public static final IntegerVector Tulong8 = new IntegerVector(IntegerScalar.Tulong,8);
public static final IntegerVector Tulong16 = new IntegerVector(IntegerScalar.Tulong,16);

// Lookup tables indexed by element count (valid indexes 0..16); entries for
// sizes other than 2, 3, 4, 8 and 16 are null. There is no automatic
// promotion for vectors in OCL.
private static final IntegerVector charArray[] ={null,null,Tschar2 ,Tschar3 ,Tschar4 ,null,null,null ,
        Tschar8, null,null,null,null,null,null,null ,Tschar16};
private static final IntegerVector ucharArray[] ={null,null,Tuchar2 ,Tuchar3 ,Tuchar4 ,null,null,null ,
        Tuchar8, null,null,null,null,null,null,null ,Tuchar16};
private static final IntegerVector shortArray[] ={null,null,Tsshort2,Tsshort3,Tsshort4,null,null,null ,
        Tsshort8,null,null,null,null,null,null,null ,Tsshort16};
private static final IntegerVector ushortArray[]={null,null,Tushort2,Tushort3,Tushort4,null,null,null ,
        Tushort8,null,null,null,null,null,null,null ,Tushort16};
private static final IntegerVector intArray[] ={null,null,Tsint2 ,Tsint3 ,Tsint4 ,null,null,null ,
        Tsint8, null,null,null,null,null,null,null ,Tsint16};
private static final IntegerVector uintArray[] ={null,null,Tuint2 ,Tuint3 ,Tuint4 ,null,null,null ,
        Tuint8, null,null,null,null,null,null,null ,Tuint16};
private static final IntegerVector longArray[] ={null,null,Tslong2 ,Tslong3 ,Tslong4 ,null,null,null ,
        Tslong8, null,null,null,null,null,null,null ,Tslong16};
private static final IntegerVector ulongArray[] ={null,null,Tulong2 ,Tulong3 ,Tulong4 ,null,null,null ,
        Tulong8, null,null,null,null,null,null,null ,Tulong16};
//------------------------------------------------------------------
// getUnsignedVersion
//
// Returns the unsigned vector type with the same base-type width and
// the same number of elements as this vector.
//------------------------------------------------------------------
public IntegerVector getUnsignedVersion() {
    final int n = getNbElements();
    switch (baseType.getBaseType()) {
    case CHAR:
        return getUcharVector(n);
    case SHORT_INT:
        return getUshortVector(n);
    case INT:
        return getUintVector(n);
    case LONG_INT:
        return getUlongVector(n);
    default:
        // Should never happen: all integer base types are covered above
        return null;
    }
}
// Per-base-type lookup accessors. 'n' is the element count; the result is
// null for element counts that are not valid OpenCL vector sizes.
// NOTE(review): these index the tables directly, so callers must pass
// 0 <= n <= 16 or an ArrayIndexOutOfBoundsException is raised — confirm
// all call sites range-check first (getVectorType/getEquivalentType do).
static public IntegerVector getScharVector(int n) {
    return charArray[n];
}
static public IntegerVector getUcharVector(int n) {
    return ucharArray[n];
}
static public IntegerVector getSshortVector(int n) {
    return shortArray[n];
}
static public IntegerVector getUshortVector(int n) {
    return ushortArray[n];
}
static public IntegerVector getSintVector(int n) {
    return intArray[n];
}
static public IntegerVector getUintVector(int n) {
    return uintArray[n];
}
static public IntegerVector getSlongVector(int n) {
    return longArray[n];
}
static public IntegerVector getUlongVector(int n) {
    return ulongArray[n];
}
//------------------------------------------------------------------
// getVectorType
//
// Returns the vector type with base type 't' and 'n' elements, or
// null when 'n' is out of the 0..16 lookup range or 't' is not a
// recognized integer scalar type.
//------------------------------------------------------------------
static public IntegerVector getVectorType(IntegerScalar t, int n) {
    // Guard the lookup-table range first
    if ((n < 0) || (n > 16)) {
        return null;
    }
    if (t == IntegerScalar.Tschar) {
        return getScharVector(n);
    }
    if (t == IntegerScalar.Tuchar) {
        return getUcharVector(n);
    }
    if (t == IntegerScalar.Tsshort) {
        return getSshortVector(n);
    }
    if (t == IntegerScalar.Tushort) {
        return getUshortVector(n);
    }
    if (t == IntegerScalar.Tsint) {
        return getSintVector(n);
    }
    if (t == IntegerScalar.Tuint) {
        return getUintVector(n);
    }
    if (t == IntegerScalar.Tslong) {
        return getSlongVector(n);
    }
    if (t == IntegerScalar.Tulong) {
        return getUlongVector(n);
    }
    // Internal error: unknown integer scalar type
    return null;
}
//==================================================================
// Private data
//==================================================================

// Scalar element type of this vector (e.g. Tsint for an int4).
IntegerScalar baseType;

//==================================================================
// Private Constructor
//==================================================================

// Private: instances are only the static singletons declared above.
private IntegerVector(IntegerScalar base_type, int n) {
    super(n);
    baseType=base_type;
}

//==================================================================
// Type class generic methods
//==================================================================
public boolean isIntegralVector() {return true;}
public boolean isUnsignedIntegerVector() {return baseType.isUnsigned();}
public boolean isSignedIntegerVector() {return baseType.isSigned();}
public boolean isCharVector() {return hasCharElements();}
public boolean isShortVector() {return hasShortElements();}
public boolean isIntVector() {return hasIntElements();}
public boolean isLongVector() {return hasLongElements();}

//==================================================================
// Getters
//==================================================================

//------------------------------------------------------------------
// getBaseType
//
// Returns base type of the integer vector type
//------------------------------------------------------------------
public IntegerScalar getBaseType() {
    return baseType;
}

// Element-type predicates, delegated to the scalar base type.
public boolean hasCharElements() {return baseType.isChar();}
public boolean hasShortElements() {return baseType.isShort();}
public boolean hasIntElements() {return baseType.isInt();}
public boolean hasLongElements() {return baseType.isLong();}

//------------------------------------------------------------------
// getSignProperty
//
// Returns the sign property of the integer vector type
// (delegated to the scalar base type)
//------------------------------------------------------------------
public boolean isSigned() {
    return(baseType.isSigned());
}
public boolean isUnsigned() {
    return(baseType.isUnsigned());
}
//------------------------------------------------------------------
// getEquivalentType
//
// Returns the type with the same base type but 'n' elements:
// the scalar base type itself for n==1, the matching vector for a
// valid vector size, and null for any other n.
//------------------------------------------------------------------
public Type getEquivalentType(int n) {
    // Guard the lookup-table range first
    if ((n < 0) || (n > 16)) {
        return null;
    }
    // A one-element "vector" degenerates to the scalar base type
    if (n == 1) {
        return baseType;
    }
    if (baseType == IntegerScalar.Tschar) {
        return getScharVector(n);
    }
    if (baseType == IntegerScalar.Tuchar) {
        return getUcharVector(n);
    }
    if (baseType == IntegerScalar.Tsshort) {
        return getSshortVector(n);
    }
    if (baseType == IntegerScalar.Tushort) {
        return getUshortVector(n);
    }
    if (baseType == IntegerScalar.Tsint) {
        return getSintVector(n);
    }
    if (baseType == IntegerScalar.Tuint) {
        return getUintVector(n);
    }
    if (baseType == IntegerScalar.Tslong) {
        return getSlongVector(n);
    }
    if (baseType == IntegerScalar.Tulong) {
        return getUlongVector(n);
    }
    // Internal error: unknown integer scalar base type
    return null;
}
//==================================================================
// Signature management (for arguments of function prototypes)
//==================================================================
//------------------------------------------------------------------
// getSignature
//
// Returns a string corresponding to the signature of the type
// (for function mangling)
//------------------------------------------------------------------
public String getSignature() {
return "V" + getNbElements() + getBaseType().getSignature();
}
  //==================================================================
  // Conversion Management
  //==================================================================
  //------------------------------------------------------------------
  // promote:
  //
  // Integral promotion hook. OCL performs no automatic promotion of
  // vector types, so the type promotes to itself.
  //------------------------------------------------------------------
  public Type promote() {
    return this;
  }
//==================================================================
// Target Specific information
//==================================================================
public int sizeof() {
int n=getNbElements();
if (n==3) {
// vec3 aligned on 4-elem boundary
n=4;
}
return n*baseType.sizeof();
}
  // In OCL, vectors are aligned on their own (padded) size, so the
  // alignment simply delegates to sizeof().
  public int alignof() {
    return sizeof();
  }
//==================================================================
// Verbose functions
//==================================================================
//------------------------------------------------------------------
// toStringInternal:
//
// Returns string a textual representation of the type. Use 'ts' and
// 'cs' to avoid displaying multiple times the same type
// (and avoid cycles)
//------------------------------------------------------------------
public String toStringInternal(HashSet<Type> ts, HashMap<Type,Integer> cs) {
return super.toStringInternal(ts,cs)+
" of "+baseType.toStringInternal(ts,cs);
}
//------------------------------------------------------------------
// dump :
//
// Returns the original type syntax
//------------------------------------------------------------------
public String dump() {
return baseType.dump()+getNbElements();
}
  //------------------------------------------------------------------
  // dumpBaseType :
  //
  // Returns the original source syntax of the vector's element
  // (base) type only, without the element count.
  //------------------------------------------------------------------
  public String dumpBaseType() {
    return baseType.dump();
  }
}
| |
/*
* Copyright (c) 2016 Constant Contact, Inc. All Rights Reserved.
* Boston, MA 02451, USA
* Phone: (781) 472-8100
* Fax: (781) 472-8101
* This software is the confidential and proprietary information
* of Constant Contact, Inc. created for Constant Contact, Inc.
* You shall not disclose such Confidential Information and shall use
* it only in accordance with the terms of the license agreement
* you entered into with Constant Contact, Inc.
*/
package com.constantcontact.v2.contacts;
import org.apache.commons.lang3.SerializationUtils;
import org.junit.Test;
import java.util.Date;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
/**
*/
/**
 * Unit tests for {@link Contact}: verifies getter/setter round-tripping,
 * Java serialization, and the equals/hashCode contract.
 *
 * <p>The fully-populated fixture is built once in
 * {@link #createPopulatedContact()} so all three tests share exactly the
 * same field values (previously the setup was duplicated in each test).
 */
public class ContactTest {
    private static final String ID = "123ABC";
    private static final Date DATE = new Date(0);
    private static final String PHONE_CELL = "111-111-1111";
    private static final String PHONE_WORK = "222-222-2222";
    private static final String PHONE_FAX = "333-333-3333";
    private static final String PHONE_HOME = "444-444-4444";
    private static final String FIRST_NAME = "Max";
    private static final String LAST_NAME = "Power";
    private static final String COMPANY = "Nucular Power Plant";
    private static final String JOB_TITLE = "Safety Inspector";
    private static final String DEPARTMENT = "Sector G";
    private static final String SOURCE_DETAILS = "Nowhere";
    private static final String SOURCE = "None";
    private static final String PREFIX = "Dr";
    private static final ContactStatus STATUS = ContactStatus.ACTIVE;
    private static final Address[] ADDRESSES = new Address[]{AddressTest.createAddress()};
    private static final ContactListMetaData[] CONTACT_LISTS = new ContactListMetaData[]{ContactListMetaDataTest
            .createContactListMetaData()};
    private static final CustomField[] CUSTOM_FIELDS = new CustomField[]{CustomFieldTest.createCustomField()};
    private static final EmailAddress[] EMAIL_ADDRESSES = new EmailAddress[]{EmailAddressTest.createEmailAddress()};
    private static final Note[] NOTES = new Note[]{NoteTest.createNote()};

    /**
     * Builds a Contact with every field set from the test constants.
     * Shared by all tests so the fixture stays consistent.
     */
    private static Contact createPopulatedContact() {
        Contact contact = new Contact();
        contact.setId(ID);
        contact.setCreatedDate(DATE);
        contact.setInsertDate(DATE);
        contact.setModifiedDate(DATE);
        contact.setLastUpdateDate(DATE);
        contact.setCellPhone(PHONE_CELL);
        contact.setWorkPhone(PHONE_WORK);
        contact.setFax(PHONE_FAX);
        contact.setHomePhone(PHONE_HOME);
        contact.setFirstName(FIRST_NAME);
        contact.setLastName(LAST_NAME);
        contact.setCompanyName(COMPANY);
        contact.setJobTitle(JOB_TITLE);
        contact.setDepartmentName(DEPARTMENT);
        contact.setSourceDetails(SOURCE_DETAILS);
        contact.setSource(SOURCE);
        contact.setPrefixName(PREFIX);
        contact.setStatus(STATUS);
        contact.setAddresses(ADDRESSES);
        contact.setContactLists(CONTACT_LISTS);
        contact.setEmailAddresses(EMAIL_ADDRESSES);
        contact.setNotes(NOTES);
        contact.setCustomFields(CUSTOM_FIELDS);
        return contact;
    }

    @Test
    public void expectThatGettingAndSettingValues_WillReturnSame() {
        runAssertions(createPopulatedContact());
    }

    @Test
    public void expectThatSerializing_WillRetainValues() {
        // Round-trip through Java serialization and verify nothing is lost.
        Contact out = SerializationUtils.roundtrip(createPopulatedContact());
        runAssertions(out);
    }

    @Test
    public void testEqualsAndHash() {
        // Two independently-built but identically-populated contacts must be
        // equal and must agree on hashCode (equals/hashCode contract).
        Contact contact1 = createPopulatedContact();
        Contact contact2 = createPopulatedContact();
        assertThat(contact1.equals(contact2), is(true));
        assertThat(contact1.hashCode() == contact2.hashCode(), is(true));
    }

    /** Asserts every field of the given contact matches the test constants. */
    private void runAssertions(Contact contact) {
        assertThat(contact.getId(), is(ID));
        assertThat(contact.getCreatedDate(), is(DATE));
        assertThat(contact.getInsertDate(), is(DATE));
        assertThat(contact.getModifiedDate(), is(DATE));
        assertThat(contact.getLastUpdateDate(), is(DATE));
        assertThat(contact.getCellPhone(), is(PHONE_CELL));
        assertThat(contact.getWorkPhone(), is(PHONE_WORK));
        assertThat(contact.getFax(), is(PHONE_FAX));
        assertThat(contact.getHomePhone(), is(PHONE_HOME));
        assertThat(contact.getFirstName(), is(FIRST_NAME));
        assertThat(contact.getLastName(), is(LAST_NAME));
        assertThat(contact.getCompanyName(), is(COMPANY));
        assertThat(contact.getJobTitle(), is(JOB_TITLE));
        assertThat(contact.getDepartmentName(), is(DEPARTMENT));
        assertThat(contact.getSourceDetails(), is(SOURCE_DETAILS));
        assertThat(contact.getSource(), is(SOURCE));
        assertThat(contact.getPrefixName(), is(PREFIX));
        assertThat(contact.getStatus(), is(STATUS));
        AddressTest.runAssertions(contact.getAddresses()[0]);
        ContactListMetaDataTest.runAssertions(contact.getContactLists()[0]);
        EmailAddressTest.runAssertions(contact.getEmailAddresses()[0]);
        NoteTest.runAssertions(contact.getNotes()[0]);
        CustomFieldTest.runAssertions(contact.getCustomFields()[0]);
    }
}
| |
/*
* Copyright 2015 Schedo Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ncode.android.apps.schedo.provider;
import android.accounts.Account;
import android.app.SearchManager;
import android.content.ContentResolver;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.provider.BaseColumns;
import com.ncode.android.apps.schedo.provider.ScheduleContract.*;
import com.ncode.android.apps.schedo.sync.ConferenceDataHandler;
import com.ncode.android.apps.schedo.sync.SyncHelper;
import com.ncode.android.apps.schedo.util.AccountUtils;
import static com.ncode.android.apps.schedo.util.LogUtils.*;
/**
* Helper for managing {@link SQLiteDatabase} that stores data for
* {@link ScheduleProvider}.
*/
public class ScheduleDatabase extends SQLiteOpenHelper {
    // Log tag for this class.
    private static final String TAG = makeLogTag(ScheduleDatabase.class);
    // On-disk file name of the SQLite database.
    private static final String DATABASE_NAME = "schedule.db";
    // NOTE: carefully update onUpgrade() when bumping database versions to make
    // sure user data is saved.
    private static final int VER_2014_RELEASE_A = 122; // app version 2.0.0, 2.0.1
    private static final int VER_2014_RELEASE_C = 207; // app version 2.1.x
    private static final int VER_2015_RELEASE_A = 100; //app version 1.0.0
    // NOTE(review): CUR_DATABASE_VERSION (100) is LOWER than the 2014 versions
    // (122/207). For installs whose DB is at a 2014 version, SQLiteOpenHelper
    // would take the downgrade path (onDowngrade), not onUpgrade() -- confirm
    // this renumbering is intentional for this fork.
    private static final int CUR_DATABASE_VERSION = VER_2015_RELEASE_A;
    // Kept so onUpgrade() can look up the active account and cancel syncs.
    private final Context mContext;
interface Tables {
String BLOCKS = "blocks";
String TAGS = "tags";
String ROOMS = "rooms";
String SESSIONS = "sessions";
String MY_SCHEDULE = "myschedule";
String SPEAKERS = "speakers";
String EVENTS = "events";
String EVENTS_VIDEOS = "events_videos";
String EVENTS_TAGS = "events_tags";
String SESSIONS_TAGS = "sessions_tags";
String SESSIONS_SPEAKERS = "sessions_speakers";
String ANNOUNCEMENTS = "announcements";
String MAPMARKERS = "mapmarkers";
String MAPTILES = "mapoverlays";
String HASHTAGS = "hashtags";
String FEEDBACK = "feedback";
String EXPERTS = "experts";
String PEOPLE_IVE_MET = "people_ive_met";
String VIDEOS = "videos";
String PARTNERS = "partners";
String SESSIONS_SEARCH = "sessions_search";
String SEARCH_SUGGEST = "search_suggest";
String SESSIONS_JOIN_MYSCHEDULE = "sessions "
+ "LEFT OUTER JOIN myschedule ON sessions.session_id=myschedule.session_id "
+ "AND myschedule.account_name=? ";
String EVENTS_JOIN_SESSIONS_MYSCHEDULE = "events "
+ "LEFT OUTER JOIN sessions ON events.event_id=sessions.event_id "
+ "LEFT OUTER JOIN myschedule ON sessions.session_id=myschedule.session_id "
+ "AND myschedule.account_name=? ";
String SESSIONS_JOIN_ROOMS_TAGS = "sessions "
+ "LEFT OUTER JOIN myschedule ON sessions.session_id=myschedule.session_id "
+ "AND myschedule.account_name=? "
+ "LEFT OUTER JOIN rooms ON sessions.room_id=rooms.room_id "
+ "LEFT OUTER JOIN sessions_tags ON sessions.session_id=sessions_tags.session_id";
String EVENTS_JOIN_SESSIONS_ROOMS_TAGS = "events "
+ "LEFT OUTER JOIN sessions ON events.event_id=sessions.event_id "
+ "LEFT OUTER JOIN myschedule ON sessions.session_id=myschedule.session_id "
+ "AND myschedule.account_name=? "
+ "LEFT OUTER JOIN rooms ON sessions.room_id=rooms.room_id "
+ "LEFT OUTER JOIN sessions_tags ON sessions.session_id=sessions_tags.session_id";
String SESSIONS_JOIN_ROOMS_TAGS_FEEDBACK_MYSCHEDULE = "sessions "
+ "LEFT OUTER JOIN myschedule ON sessions.session_id=myschedule.session_id "
+ "AND myschedule.account_name=? AND sessions.event_id=? " //check if this is OK
+ "LEFT OUTER JOIN rooms ON sessions.room_id=rooms.room_id "
+ "LEFT OUTER JOIN sessions_tags ON sessions.session_id=sessions_tags.session_id "
+ "LEFT OUTER JOIN feedback ON sessions.session_id=feedback.session_id";
String SESSIONS_JOIN_ROOMS = "sessions "
+ "LEFT OUTER JOIN myschedule ON sessions.session_id=myschedule.session_id "
+ "AND myschedule.account_name=? "
+ "LEFT OUTER JOIN rooms ON sessions.room_id=rooms.room_id";
String SESSIONS_SPEAKERS_JOIN_SPEAKERS = "sessions_speakers "
+ "LEFT OUTER JOIN speakers ON sessions_speakers.speaker_id=speakers.speaker_id";
String SESSIONS_TAGS_JOIN_TAGS = "sessions_tags "
+ "LEFT OUTER JOIN tags ON sessions_tags.tag_id=tags.tag_id";
String SESSIONS_SPEAKERS_JOIN_SESSIONS_ROOMS = "sessions_speakers "
+ "LEFT OUTER JOIN sessions ON sessions_speakers.session_id=sessions.session_id "
+ "LEFT OUTER JOIN rooms ON sessions.room_id=rooms.room_id";
String SESSIONS_SEARCH_JOIN_SESSIONS_ROOMS = "sessions_search "
+ "LEFT OUTER JOIN sessions ON sessions_search.session_id=sessions.session_id "
+ "LEFT OUTER JOIN myschedule ON sessions.session_id=myschedule.session_id "
+ "AND myschedule.account_name=? "
+ "LEFT OUTER JOIN rooms ON sessions.room_id=rooms.room_id";
// When tables get deprecated, add them to this list (so they get correctly deleted
// on database upgrades)
interface DeprecatedTables {
String TRACKS = "tracks";
String SESSIONS_TRACKS = "sessions_tracks";
String SANDBOX = "sandbox";
};
}
    /** Names of database triggers; created in onCreate(), dropped on destructive upgrade. */
    private interface Triggers {
        // Deletes from dependent tables when corresponding sessions are deleted.
        String SESSIONS_TAGS_DELETE = "sessions_tags_delete";
        String SESSIONS_SPEAKERS_DELETE = "sessions_speakers_delete";
        String SESSIONS_MY_SCHEDULE_DELETE = "sessions_myschedule_delete";
        String SESSIONS_FEEDBACK_DELETE = "sessions_feedback_delete";
        // When triggers get deprecated, add them to this list (so they get correctly deleted
        // on database upgrades)
        interface DeprecatedTriggers {
            String SESSIONS_TRACKS_DELETE = "sessions_tracks_delete";
        };
    }
    /** Column names for the events&lt;-&gt;tags link table. */
    public interface EventsTags {
        String EVENT_ID = "event_id";
        String TAG_ID = "tag_id";
    }
    /** Column names for the events&lt;-&gt;videos link table. */
    public interface EventsVideos {
        String EVENT_ID = "event_id";
        String VIDEO_ID = "video_id";
    }
    /** Column names for the sessions&lt;-&gt;speakers link table. */
    public interface SessionsSpeakers {
        String SESSION_ID = "session_id";
        String SPEAKER_ID = "speaker_id";
    }
    /** Column names for the sessions&lt;-&gt;tags link table. */
    public interface SessionsTags {
        String SESSION_ID = "session_id";
        String TAG_ID = "tag_id";
    }
    /** Columns of the full-text search table. */
    interface SessionsSearchColumns {
        String SESSION_ID = "session_id";
        String BODY = "body";
    }
    /** Fully-qualified field names (and the FTS insert target). */
    private interface Qualified {
        // Insert target "sessions_search(session_id,body)" used by
        // updateSessionSearchIndex(); not a qualified column reference.
        String SESSIONS_SEARCH = Tables.SESSIONS_SEARCH + "(" + SessionsSearchColumns.SESSION_ID
                + "," + SessionsSearchColumns.BODY + ")";
        String SESSIONS_TAGS_SESSION_ID = Tables.SESSIONS_TAGS + "."
                + SessionsTags.SESSION_ID;
        String SESSIONS_SPEAKERS_SESSION_ID = Tables.SESSIONS_SPEAKERS+ "."
                + SessionsSpeakers.SESSION_ID;
        String SESSIONS_SPEAKERS_SPEAKER_ID = Tables.SESSIONS_SPEAKERS+ "."
                + SessionsSpeakers.SPEAKER_ID;
        String SPEAKERS_SPEAKER_ID = Tables.SPEAKERS + "." + ScheduleContract.Speakers.SPEAKER_ID;
        String FEEDBACK_SESSION_ID = Tables.FEEDBACK + "." + FeedbackColumns.SESSION_ID;
    }
    /**
     * {@code REFERENCES} clauses appended to column definitions in onCreate().
     * NOTE(review): these are foreign-key clauses; SQLite only enforces them
     * when the {@code foreign_keys} pragma is enabled -- confirm whether
     * enforcement is expected here.
     */
    private interface References {
        String BLOCK_ID = "REFERENCES " + Tables.BLOCKS + "(" + ScheduleContract.Blocks.BLOCK_ID + ")";
        String TAG_ID = "REFERENCES " + Tables.TAGS + "(" + Tags.TAG_ID + ")";
        String ROOM_ID = "REFERENCES " + Tables.ROOMS + "(" + Rooms.ROOM_ID + ")";
        String SESSION_ID = "REFERENCES " + Tables.SESSIONS + "(" + Sessions.SESSION_ID + ")";
        String VIDEO_ID = "REFERENCES " + Tables.VIDEOS + "(" + Videos.VIDEO_ID + ")";
        String SPEAKER_ID = "REFERENCES " + Tables.SPEAKERS + "(" + Speakers.SPEAKER_ID + ")";
        String EVENT_ID = "REFERENCES " + Tables.EVENTS + "(" + Events.EVENT_ID + ")";
    }
    /**
     * Creates the helper for {@code schedule.db} at {@link #CUR_DATABASE_VERSION}.
     *
     * @param context kept so onUpgrade() can resolve the active account
     */
    public ScheduleDatabase(Context context) {
        super(context, DATABASE_NAME, null, CUR_DATABASE_VERSION);
        mContext = context;
    }
    /**
     * Creates the full schema from scratch: entity tables, link tables, the
     * full-text-search table, and the session-deletion triggers. Finishes by
     * calling upgradeAtoC() so a fresh install also gets the release-C tables.
     * Must stay in sync with the destructive-upgrade path in onUpgrade().
     */
    @Override
    public void onCreate(SQLiteDatabase db) {
        // Schedule blocks (time slots).
        db.execSQL("CREATE TABLE " + Tables.BLOCKS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + BlocksColumns.BLOCK_ID + " TEXT NOT NULL,"
                + BlocksColumns.BLOCK_TITLE + " TEXT NOT NULL,"
                + BlocksColumns.BLOCK_START + " INTEGER NOT NULL,"
                + BlocksColumns.BLOCK_END + " INTEGER NOT NULL,"
                + BlocksColumns.BLOCK_TYPE + " TEXT,"
                + BlocksColumns.BLOCK_SUBTITLE + " TEXT,"
                + "UNIQUE (" + BlocksColumns.BLOCK_ID + ") ON CONFLICT REPLACE)");
        // Tags (category, ordering and display color).
        db.execSQL("CREATE TABLE " + Tables.TAGS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + TagsColumns.TAG_ID + " TEXT NOT NULL,"
                + TagsColumns.TAG_CATEGORY + " TEXT NOT NULL,"
                + TagsColumns.TAG_NAME + " TEXT NOT NULL,"
                + TagsColumns.TAG_ORDER_IN_CATEGORY + " INTEGER,"
                + TagsColumns.TAG_COLOR + " TEXT NOT NULL,"
                + TagsColumns.TAG_ABSTRACT + " TEXT NOT NULL,"
                + "UNIQUE (" + TagsColumns.TAG_ID + ") ON CONFLICT REPLACE)");
        // Rooms.
        db.execSQL("CREATE TABLE " + Tables.ROOMS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + RoomsColumns.ROOM_ID + " TEXT NOT NULL,"
                + RoomsColumns.ROOM_NAME + " TEXT,"
                + RoomsColumns.ROOM_FLOOR + " TEXT,"
                + "UNIQUE (" + RoomsColumns.ROOM_ID + ") ON CONFLICT REPLACE)");
        // Events (the top-level entity sessions belong to).
        db.execSQL("CREATE TABLE " + Tables.EVENTS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + SyncColumns.UPDATED + " INTEGER NOT NULL,"
                + EventsColumns.EVENT_ID + " TEXT NOT NULL,"
                + EventsColumns.EVENT_LEVEL + " TEXT,"
                + EventsColumns.EVENT_TITLE + " TEXT,"
                + EventsColumns.EVENT_ABSTRACT + " TEXT,"
                + EventsColumns.EVENT_REQUIREMENTS + " TEXT,"
                + EventsColumns.EVENT_KEYWORDS + " TEXT,"
                + EventsColumns.EVENT_HASHTAG + " TEXT,"
                + EventsColumns.EVENT_URL + " TEXT,"
                + EventsColumns.EVENT_YEAR + " TEXT NOT NULL,"
                + EventsColumns.EVENT_DAYS + " TEXT NOT NULL,"
                + EventsColumns.EVENT_YOUTUBE_URL + " TEXT,"
                + EventsColumns.EVENT_MODERATOR_URL + " TEXT,"
                + EventsColumns.EVENT_PDF_URL + " TEXT,"
                + EventsColumns.EVENT_NOTES_URL + " TEXT,"
                + EventsColumns.EVENT_CAL_EVENT_ID + " INTEGER,"
                + EventsColumns.EVENT_LIVESTREAM_URL + " TEXT,"
                + EventsColumns.EVENT_TAGS + " TEXT,"
                + EventsColumns.SESSION_TAGS + " TEXT,"
                + EventsColumns.EVENT_GROUPING_ORDER + " INTEGER,"
                + EventsColumns.EVENT_SPEAKER_NAMES + " TEXT,"
                + EventsColumns.EVENT_IMPORT_HASHCODE + " TEXT NOT NULL DEFAULT '',"
                + EventsColumns.EVENT_MAIN_TAG + " TEXT,"
                + EventsColumns.EVENT_COLOR + " INTEGER,"
                + EventsColumns.EVENT_CAPTIONS_URL + " TEXT,"
                + EventsColumns.EVENT_PHOTO_URL + " TEXT,"
                + EventsColumns.EVENT_RELATED_CONTENT + " TEXT,"
                + "UNIQUE (" + EventsColumns.EVENT_ID + ") ON CONFLICT REPLACE)");
        // Sessions, with room and event foreign-key clauses.
        db.execSQL("CREATE TABLE " + Tables.SESSIONS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + SyncColumns.UPDATED + " INTEGER NOT NULL,"
                + SessionsColumns.SESSION_ID + " TEXT NOT NULL,"
                + Sessions.ROOM_ID + " TEXT " + References.ROOM_ID + ","
                + Sessions.EVENT_ID + " TEXT " + References.EVENT_ID + ","
                + SessionsColumns.SESSION_START + " INTEGER NOT NULL,"
                + SessionsColumns.SESSION_END + " INTEGER NOT NULL,"
                + SessionsColumns.SESSION_LEVEL + " TEXT,"
                + SessionsColumns.SESSION_TITLE + " TEXT,"
                + SessionsColumns.SESSION_ABSTRACT + " TEXT,"
                + SessionsColumns.SESSION_REQUIREMENTS + " TEXT,"
                + SessionsColumns.SESSION_KEYWORDS + " TEXT,"
                + SessionsColumns.SESSION_HASHTAG + " TEXT,"
                + SessionsColumns.SESSION_URL + " TEXT,"
                + SessionsColumns.SESSION_YOUTUBE_URL + " TEXT,"
                + SessionsColumns.SESSION_MODERATOR_URL + " TEXT,"
                + SessionsColumns.SESSION_PDF_URL + " TEXT,"
                + SessionsColumns.SESSION_NOTES_URL + " TEXT,"
                + SessionsColumns.SESSION_CAL_EVENT_ID + " INTEGER,"
                + SessionsColumns.SESSION_LIVESTREAM_URL + " TEXT,"
                + SessionsColumns.SESSION_TAGS + " TEXT,"
                + SessionsColumns.SESSION_GROUPING_ORDER + " INTEGER,"
                + SessionsColumns.SESSION_SPEAKER_NAMES + " TEXT,"
                + SessionsColumns.SESSION_IMPORT_HASHCODE + " TEXT NOT NULL DEFAULT '',"
                + SessionsColumns.SESSION_MAIN_TAG + " TEXT,"
                + SessionsColumns.SESSION_COLOR + " INTEGER,"
                + SessionsColumns.SESSION_CAPTIONS_URL + " TEXT,"
                + SessionsColumns.SESSION_PHOTO_URL + " TEXT,"
                + SessionsColumns.SESSION_RELATED_CONTENT + " TEXT,"
                + "UNIQUE (" + SessionsColumns.SESSION_ID + ") ON CONFLICT REPLACE)");
        // Speakers.
        db.execSQL("CREATE TABLE " + Tables.SPEAKERS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + SyncColumns.UPDATED + " INTEGER NOT NULL,"
                + SpeakersColumns.SPEAKER_ID + " TEXT NOT NULL,"
                + SpeakersColumns.SPEAKER_NAME + " TEXT,"
                + SpeakersColumns.SPEAKER_IMAGE_URL + " TEXT,"
                + SpeakersColumns.SPEAKER_COMPANY + " TEXT,"
                + SpeakersColumns.SPEAKER_ABSTRACT + " TEXT,"
                + SpeakersColumns.SPEAKER_URL + " TEXT,"
                + SpeakersColumns.SPEAKER_IMPORT_HASHCODE + " TEXT NOT NULL DEFAULT '',"
                + "UNIQUE (" + SpeakersColumns.SPEAKER_ID + ") ON CONFLICT REPLACE)");
        // Per-account "my schedule" entries, unique per (session, account).
        db.execSQL("CREATE TABLE " + Tables.MY_SCHEDULE + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + MySchedule.SESSION_ID + " TEXT NOT NULL " + References.SESSION_ID + ","
                + MySchedule.MY_SCHEDULE_ACCOUNT_NAME + " TEXT NOT NULL ,"
                + MySchedule.MY_SCHEDULE_DIRTY_FLAG + " INTEGER NOT NULL DEFAULT 1,"
                + MySchedule.MY_SCHEDULE_IN_SCHEDULE + " INTEGER NOT NULL DEFAULT 1,"
                + "UNIQUE (" + MySchedule.SESSION_ID + ","
                + MySchedule.MY_SCHEDULE_ACCOUNT_NAME + ") ON CONFLICT REPLACE)");
        // Link table: events <-> videos.
        db.execSQL("CREATE TABLE " + Tables.EVENTS_VIDEOS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + EventsVideos.EVENT_ID + " TEXT NOT NULL " + References.EVENT_ID + ","
                + EventsVideos.VIDEO_ID + " TEXT NOT NULL " + References.VIDEO_ID + ","
                + "UNIQUE (" + EventsVideos.EVENT_ID + ","
                + EventsVideos.VIDEO_ID + ") ON CONFLICT REPLACE)");
        // Link table: sessions <-> speakers.
        db.execSQL("CREATE TABLE " + Tables.SESSIONS_SPEAKERS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + SessionsSpeakers.SESSION_ID + " TEXT NOT NULL " + References.SESSION_ID + ","
                + SessionsSpeakers.SPEAKER_ID + " TEXT NOT NULL " + References.SPEAKER_ID + ","
                + "UNIQUE (" + SessionsSpeakers.SESSION_ID + ","
                + SessionsSpeakers.SPEAKER_ID + ") ON CONFLICT REPLACE)");
        // Link table: sessions <-> tags.
        db.execSQL("CREATE TABLE " + Tables.SESSIONS_TAGS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + SessionsTags.SESSION_ID + " TEXT NOT NULL " + References.SESSION_ID + ","
                + SessionsTags.TAG_ID + " TEXT NOT NULL " + References.TAG_ID + ","
                + "UNIQUE (" + SessionsTags.SESSION_ID + ","
                + SessionsTags.TAG_ID + ") ON CONFLICT REPLACE)");
        // Announcements feed.
        db.execSQL("CREATE TABLE " + Tables.ANNOUNCEMENTS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + SyncColumns.UPDATED + " INTEGER NOT NULL,"
                + AnnouncementsColumns.ANNOUNCEMENT_ID + " TEXT,"
                + AnnouncementsColumns.ANNOUNCEMENT_TITLE + " TEXT NOT NULL,"
                + AnnouncementsColumns.ANNOUNCEMENT_ACTIVITY_JSON + " BLOB,"
                + AnnouncementsColumns.ANNOUNCEMENT_URL + " TEXT,"
                + AnnouncementsColumns.ANNOUNCEMENT_DATE + " INTEGER NOT NULL)");
        // Map overlay tiles, one per floor.
        db.execSQL("CREATE TABLE " + Tables.MAPTILES + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + MapTileColumns.TILE_FLOOR+ " INTEGER NOT NULL,"
                + MapTileColumns.TILE_FILE+ " TEXT NOT NULL,"
                + MapTileColumns.TILE_URL+ " TEXT NOT NULL,"
                + "UNIQUE (" + MapTileColumns.TILE_FLOOR+ ") ON CONFLICT REPLACE)");
        // Session feedback answers.
        db.execSQL("CREATE TABLE " + Tables.FEEDBACK + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + SyncColumns.UPDATED + " INTEGER NOT NULL,"
                + Sessions.SESSION_ID + " TEXT " + References.SESSION_ID + ","
                + FeedbackColumns.SESSION_RATING + " INTEGER NOT NULL,"
                + FeedbackColumns.ANSWER_RELEVANCE + " INTEGER NOT NULL,"
                + FeedbackColumns.ANSWER_CONTENT + " INTEGER NOT NULL,"
                + FeedbackColumns.ANSWER_SPEAKER + " INTEGER NOT NULL,"
                + FeedbackColumns.COMMENTS + " TEXT,"
                + FeedbackColumns.SYNCED + " INTEGER NOT NULL DEFAULT 0)");
        // Cascade-delete feedback when its session is deleted.
        db.execSQL("CREATE TRIGGER " + Triggers.SESSIONS_FEEDBACK_DELETE + " AFTER DELETE ON "
                + Tables.SESSIONS + " BEGIN DELETE FROM " + Tables.FEEDBACK + " "
                + " WHERE " + Qualified.FEEDBACK_SESSION_ID + "=old." + Sessions.SESSION_ID
                + ";" + " END;");
        // Map markers.
        db.execSQL("CREATE TABLE " + Tables.MAPMARKERS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + MapMarkerColumns.MARKER_ID+ " TEXT NOT NULL,"
                + MapMarkerColumns.MARKER_TYPE+ " TEXT NOT NULL,"
                + MapMarkerColumns.MARKER_LATITUDE+ " DOUBLE NOT NULL,"
                + MapMarkerColumns.MARKER_LONGITUDE+ " DOUBLE NOT NULL,"
                + MapMarkerColumns.MARKER_LABEL+ " TEXT,"
                + MapMarkerColumns.MARKER_FLOOR+ " INTEGER NOT NULL,"
                + "UNIQUE (" + MapMarkerColumns.MARKER_ID + ") ON CONFLICT REPLACE)");
        // Social hashtags.
        db.execSQL("CREATE TABLE " + Tables.HASHTAGS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + HashtagColumns.HASHTAG_NAME + " TEXT NOT NULL,"
                + HashtagColumns.HASHTAG_DESCRIPTION + " TEXT NOT NULL,"
                + HashtagColumns.HASHTAG_COLOR + " INTEGER NOT NULL,"
                + HashtagColumns.HASHTAG_ORDER + " INTEGER NOT NULL,"
                + "UNIQUE (" + HashtagColumns.HASHTAG_NAME + ") ON CONFLICT REPLACE)");
        // Video library.
        db.execSQL("CREATE TABLE " + Tables.VIDEOS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + VideoColumns.VIDEO_ID + " TEXT NOT NULL,"
                + VideoColumns.VIDEO_YEAR + " INTEGER NOT NULL,"
                + VideoColumns.VIDEO_TITLE + " TEXT,"
                + VideoColumns.VIDEO_DESC + " TEXT,"
                + VideoColumns.VIDEO_VID + " TEXT,"
                + VideoColumns.VIDEO_TOPIC + " TEXT,"
                + VideoColumns.VIDEO_SPEAKERS + " TEXT,"
                + VideoColumns.VIDEO_THUMBNAIL_URL + " TEXT,"
                + VideoColumns.VIDEO_IMPORT_HASHCODE + " TEXT NOT NULL,"
                + "UNIQUE (" + VideoColumns.VIDEO_ID + ") ON CONFLICT REPLACE)");
        // Full-text search index. Update using updateSessionSearchIndex method.
        // Use the porter tokenizer for simple stemming, so that "frustration" matches "frustrated."
        // NOTE(review): fts3 column declarations are not regular DDL; the
        // AUTOINCREMENT/UNIQUE constraints embedded here are not enforced by
        // the fts3 module -- confirm this statement behaves as intended.
        db.execSQL("CREATE VIRTUAL TABLE " + Tables.SESSIONS_SEARCH + " USING fts3("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + SessionsSearchColumns.BODY + " TEXT NOT NULL,"
                + SessionsSearchColumns.SESSION_ID
                + " TEXT NOT NULL " + References.SESSION_ID + ","
                + "UNIQUE (" + SessionsSearchColumns.SESSION_ID + ") ON CONFLICT REPLACE,"
                + "tokenize=porter)");
        // Search suggestions
        db.execSQL("CREATE TABLE " + Tables.SEARCH_SUGGEST + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + SearchManager.SUGGEST_COLUMN_TEXT_1 + " TEXT NOT NULL)");
        // Session deletion triggers
        db.execSQL("CREATE TRIGGER " + Triggers.SESSIONS_TAGS_DELETE + " AFTER DELETE ON "
                + Tables.SESSIONS + " BEGIN DELETE FROM " + Tables.SESSIONS_TAGS + " "
                + " WHERE " + Qualified.SESSIONS_TAGS_SESSION_ID + "=old." + Sessions.SESSION_ID
                + ";" + " END;");
        db.execSQL("CREATE TRIGGER " + Triggers.SESSIONS_SPEAKERS_DELETE + " AFTER DELETE ON "
                + Tables.SESSIONS + " BEGIN DELETE FROM " + Tables.SESSIONS_SPEAKERS + " "
                + " WHERE " + Qualified.SESSIONS_SPEAKERS_SESSION_ID + "=old." + Sessions.SESSION_ID
                + ";" + " END;");
        db.execSQL("CREATE TRIGGER " + Triggers.SESSIONS_MY_SCHEDULE_DELETE + " AFTER DELETE ON "
                + Tables.SESSIONS + " BEGIN DELETE FROM " + Tables.MY_SCHEDULE + " "
                + " WHERE " + Tables.MY_SCHEDULE + "." + MySchedule.SESSION_ID +
                "=old." + Sessions.SESSION_ID
                + ";" + " END;");
        // Also create the tables added after 2014 release A.
        upgradeAtoC(db);
    }
    /**
     * Creates the tables introduced between 2014 release A and release C
     * (experts, people-I've-met, partners). Called from onCreate() for fresh
     * installs and from onUpgrade() for the A-to-C upgrade path.
     */
    private void upgradeAtoC(SQLiteDatabase db) {
        // Experts directory.
        db.execSQL("CREATE TABLE " + Tables.EXPERTS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT, "
                + SyncColumns.UPDATED + " INTEGER NOT NULL, "
                + ExpertsColumns.EXPERT_ID + " TEXT NOT NULL, "
                + ExpertsColumns.EXPERT_NAME + " TEXT, "
                + ExpertsColumns.EXPERT_IMAGE_URL + " TEXT, "
                + ExpertsColumns.EXPERT_TITLE + " TEXT, "
                + ExpertsColumns.EXPERT_ABSTRACT + " TEXT, "
                + ExpertsColumns.EXPERT_URL + " TEXT, "
                + ExpertsColumns.EXPERT_COUNTRY + " TEXT, "
                + ExpertsColumns.EXPERT_CITY + " TEXT, "
                + ExpertsColumns.EXPERT_ATTENDING + " BOOLEAN, "
                + ExpertsColumns.EXPERT_IMPORT_HASHCODE + " TEXT NOT NULL DEFAULT '', "
                + "UNIQUE (" + ExpertsColumns.EXPERT_ID + ") ON CONFLICT REPLACE)");
        // People the user has met (badge scans / contacts).
        db.execSQL("CREATE TABLE " + Tables.PEOPLE_IVE_MET + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT, "
                + ScheduleContract.PeopleIveMetColumns.PERSON_ID + " TEXT NOT NULL, "
                + ScheduleContract.PeopleIveMetColumns.PERSON_TIMESTAMP + " INTEGER NOT NULL, "
                + ScheduleContract.PeopleIveMetColumns.PERSON_NAME + " TEXT, "
                + ScheduleContract.PeopleIveMetColumns.PERSON_IMAGE_URL + " TEXT, "
                + ScheduleContract.PeopleIveMetColumns.PERSON_NOTE + " TEXT, "
                + "UNIQUE (" + ScheduleContract.PeopleIveMetColumns.PERSON_ID + ") ON CONFLICT REPLACE)");
        // Partner organizations.
        db.execSQL("CREATE TABLE " + Tables.PARTNERS + " ("
                + BaseColumns._ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + PartnersColumns.PARTNER_ID + " TEXT NOT NULL,"
                + PartnersColumns.PARTNER_NAME + " TEXT NOT NULL,"
                + PartnersColumns.PARTNER_DESC + " TEXT NOT NULL,"
                + PartnersColumns.PARTNER_WEBSITE_URL + " TEXT NOT NULL,"
                + PartnersColumns.PARTNER_LOGO_URL + " TEXT NOT NULL,"
                + "UNIQUE (" + PartnersColumns.PARTNER_ID + ") ON CONFLICT REPLACE)");
    }
    /**
     * Updates the session search index. This should be done sparingly, as the queries are rather
     * complex.
     *
     * <p>Rebuilds the whole index from scratch: the FTS table is cleared, then
     * repopulated with one row per session whose body concatenates the title,
     * the abstract, and the names of the session's speakers.
     */
    static void updateSessionSearchIndex(SQLiteDatabase db) {
        // Wipe the existing index before rebuilding it.
        db.execSQL("DELETE FROM " + Tables.SESSIONS_SEARCH);
        db.execSQL("INSERT INTO " + Qualified.SESSIONS_SEARCH
                + " SELECT s." + Sessions.SESSION_ID + ",("
                // Full text body
                + Sessions.SESSION_TITLE + "||'; '||"
                + Sessions.SESSION_ABSTRACT + "||'; '||"
                + "IFNULL(GROUP_CONCAT(t." + Speakers.SPEAKER_NAME + ",' '),'')||'; '||"
                + "'')"
                + " FROM " + Tables.SESSIONS + " s "
                + " LEFT OUTER JOIN"
                // Subquery resulting in session_id, speaker_id, speaker_name
                + "(SELECT " + Sessions.SESSION_ID + "," + Qualified.SPEAKERS_SPEAKER_ID
                + "," + Speakers.SPEAKER_NAME
                + " FROM " + Tables.SESSIONS_SPEAKERS
                + " INNER JOIN " + Tables.SPEAKERS
                + " ON " + Qualified.SESSIONS_SPEAKERS_SPEAKER_ID + "="
                + Qualified.SPEAKERS_SPEAKER_ID
                + ") t"
                // Grand finale
                + " ON s." + Sessions.SESSION_ID + "=t." + Sessions.SESSION_ID
                + " GROUP BY s." + Sessions.SESSION_ID);
    }
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
    LOGD(TAG, "onUpgrade() from " + oldVersion + " to " + newVersion);

    // A sync already in flight would race with the schema change, so stop it first.
    Account account = AccountUtils.getActiveAccount(mContext);
    if (account != null) {
        LOGI(TAG, "Cancelling any pending syncs for account");
        ContentResolver.cancelSync(account, ScheduleContract.CONTENT_AUTHORITY);
    }

    // Tracks the schema version reached while applying incremental upgrades.
    int version = oldVersion;

    // Whether locally cached conference data must be discarded after the upgrade.
    // Stays true unless a future upgrade step proves the change was trivial.
    boolean dataInvalidated = true;

    // Incremental step: 2014 release A -> 2014 release C.
    if (version == VER_2014_RELEASE_A) {
        LOGD(TAG, "Upgrading database from 2014 release A to 2014 release C.");
        upgradeAtoC(db);
        version = VER_2014_RELEASE_C;
    }

    LOGD(TAG, "After upgrade logic, at version " + version);

    // No incremental path reached the current version: wipe everything and rebuild.
    if (version != CUR_DATABASE_VERSION) {
        LOGW(TAG, "Upgrade unsuccessful -- destroying old data during upgrade");

        String[] triggers = {
                Triggers.SESSIONS_TAGS_DELETE,
                Triggers.SESSIONS_SPEAKERS_DELETE,
                Triggers.SESSIONS_FEEDBACK_DELETE,
                Triggers.SESSIONS_MY_SCHEDULE_DELETE,
                Triggers.DeprecatedTriggers.SESSIONS_TRACKS_DELETE,
        };
        for (String trigger : triggers) {
            db.execSQL("DROP TRIGGER IF EXISTS " + trigger);
        }

        String[] tables = {
                Tables.BLOCKS, Tables.ROOMS, Tables.TAGS, Tables.SESSIONS,
                Tables.SPEAKERS, Tables.MY_SCHEDULE, Tables.SESSIONS_SPEAKERS,
                Tables.SESSIONS_TAGS, Tables.ANNOUNCEMENTS, Tables.FEEDBACK,
                Tables.SESSIONS_SEARCH, Tables.SEARCH_SUGGEST, Tables.MAPMARKERS,
                Tables.MAPTILES, Tables.EXPERTS, Tables.HASHTAGS,
                Tables.PEOPLE_IVE_MET, Tables.VIDEOS, Tables.PARTNERS,
                Tables.DeprecatedTables.TRACKS,
                Tables.DeprecatedTables.SESSIONS_TRACKS,
                Tables.DeprecatedTables.SANDBOX,
        };
        for (String table : tables) {
            db.execSQL("DROP TABLE IF EXISTS " + table);
        }

        onCreate(db);
        version = CUR_DATABASE_VERSION;
    }

    if (dataInvalidated) {
        LOGD(TAG, "Data invalidated; resetting our data timestamp.");
        ConferenceDataHandler.resetManifestsDataTimestamp(mContext);
        if (account != null) {
            LOGI(TAG, "DB upgrade complete. Requesting resync.");
            SyncHelper.requestManualSync(account);
        }
    }
}
/**
 * Deletes the on-disk database file entirely; the next open will rebuild the
 * schema from scratch via onCreate().
 *
 * @param context any context whose application package owns the database file
 */
public static void deleteDatabase(Context context) {
    context.deleteDatabase(DATABASE_NAME);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.kafka.consumer.support;
import java.time.Duration;
import java.util.Collections;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.stream.StreamSupport;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.component.kafka.KafkaAsyncManualCommit;
import org.apache.camel.component.kafka.KafkaConfiguration;
import org.apache.camel.component.kafka.KafkaConstants;
import org.apache.camel.component.kafka.KafkaManualCommit;
import org.apache.camel.component.kafka.KafkaManualCommitFactory;
import org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer;
import org.apache.camel.spi.ExceptionHandler;
import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.camel.spi.StateRepository;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.header.Header;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Turns a single Kafka {@link ConsumerRecord} into a Camel {@link Exchange},
 * runs it through the configured {@link Processor}, and handles the offset
 * bookkeeping (auto/manual/forced commits) around each processed record.
 */
public class KafkaRecordProcessor {

    /** Sentinel offset meaning "no record has been processed yet for this partition". */
    public static final long START_OFFSET = -1;

    private static final Logger LOG = LoggerFactory.getLogger(KafkaRecordProcessor.class);

    private final boolean autoCommitEnabled;
    private final KafkaConfiguration configuration;
    private final Processor processor;
    private final Consumer<?, ?> consumer;
    private final KafkaManualCommitFactory manualCommitFactory;
    private final String threadId;
    private final ConcurrentLinkedQueue<KafkaAsyncManualCommit> asyncCommits;

    /**
     * Immutable outcome of processing one record: whether the poll loop should
     * stop (breakOnFirstError was hit) and the last successfully processed offset.
     */
    public static final class ProcessResult {

        private static final ProcessResult UNPROCESSED_RESULT = new ProcessResult(false, START_OFFSET);

        // final: a result never changes after construction (fields were mutable before,
        // which contradicted the shared UNPROCESSED_RESULT singleton)
        private final boolean breakOnErrorHit;
        private final long partitionLastOffset;

        private ProcessResult(boolean breakOnErrorHit, long partitionLastOffset) {
            this.breakOnErrorHit = breakOnErrorHit;
            this.partitionLastOffset = partitionLastOffset;
        }

        public boolean isBreakOnErrorHit() {
            return breakOnErrorHit;
        }

        public long getPartitionLastOffset() {
            return partitionLastOffset;
        }

        /** Returns the shared "nothing processed yet" result (offset {@link #START_OFFSET}). */
        public static ProcessResult newUnprocessed() {
            return UNPROCESSED_RESULT;
        }
    }

    public KafkaRecordProcessor(boolean autoCommitEnabled, KafkaConfiguration configuration,
                                Processor processor, Consumer<?, ?> consumer,
                                KafkaManualCommitFactory manualCommitFactory,
                                String threadId, ConcurrentLinkedQueue<KafkaAsyncManualCommit> asyncCommits) {
        this.autoCommitEnabled = autoCommitEnabled;
        this.configuration = configuration;
        this.processor = processor;
        this.consumer = consumer;
        this.manualCommitFactory = manualCommitFactory;
        this.threadId = threadId;
        this.asyncCommits = asyncCommits;
    }

    /**
     * Copies record metadata (partition, topic, offset, headers, timestamp, key)
     * and the record value onto the exchange message.
     */
    private void setupExchangeMessage(Message message, ConsumerRecord<?, ?> record) {
        message.setHeader(KafkaConstants.PARTITION, record.partition());
        message.setHeader(KafkaConstants.TOPIC, record.topic());
        message.setHeader(KafkaConstants.OFFSET, record.offset());
        message.setHeader(KafkaConstants.HEADERS, record.headers());
        message.setHeader(KafkaConstants.TIMESTAMP, record.timestamp());
        message.setHeader(Exchange.MESSAGE_TIMESTAMP, record.timestamp());
        if (record.key() != null) {
            message.setHeader(KafkaConstants.KEY, record.key());
        }
        message.setBody(record.value());
    }

    /** True when the header passes the filter strategy, i.e. it should be propagated. */
    private boolean shouldBeFiltered(Header header, Exchange exchange, HeaderFilterStrategy headerFilterStrategy) {
        return !headerFilterStrategy.applyFilterToExternalHeaders(header.key(), header.value(), exchange);
    }

    /** Deserializes and copies the Kafka record headers that pass the filter strategy onto the exchange. */
    private void propagateHeaders(ConsumerRecord<Object, Object> record, Exchange exchange) {
        HeaderFilterStrategy headerFilterStrategy = configuration.getHeaderFilterStrategy();
        KafkaHeaderDeserializer headerDeserializer = configuration.getHeaderDeserializer();
        StreamSupport.stream(record.headers().spliterator(), false)
                .filter(header -> shouldBeFiltered(header, exchange, headerFilterStrategy))
                .forEach(header -> exchange.getIn().setHeader(header.key(),
                        headerDeserializer.deserialize(header.key(), header.value())));
    }

    /**
     * Populates the exchange from the record and invokes the processor.
     *
     * @param lastResult the result of the previous record; its offset is reused when
     *                   processing fails so the consumer can seek back to it
     * @return a result carrying the record's offset on success, or the previous
     *         offset plus the break-on-error decision on failure
     */
    public ProcessResult processExchange(
            Exchange exchange, TopicPartition partition, boolean partitionHasNext,
            boolean recordHasNext, ConsumerRecord<Object, Object> record, ProcessResult lastResult,
            ExceptionHandler exceptionHandler) {

        Message message = exchange.getMessage();
        setupExchangeMessage(message, record);
        propagateHeaders(record, exchange);

        // if not auto commit then we have additional information on the exchange
        if (!autoCommitEnabled) {
            message.setHeader(KafkaConstants.LAST_RECORD_BEFORE_COMMIT, !recordHasNext);
            message.setHeader(KafkaConstants.LAST_POLL_RECORD, !recordHasNext && !partitionHasNext);
        }

        if (configuration.isAllowManualCommit()) {
            StateRepository<String, String> offsetRepository = configuration.getOffsetRepository();
            // allow Camel users to access the Kafka consumer API to be able to do for example manual commits
            KafkaManualCommit manual = manualCommitFactory.newInstance(exchange, consumer, partition.topic(), threadId,
                    offsetRepository, partition, record.offset(), configuration.getCommitTimeoutMs(), asyncCommits);
            message.setHeader(KafkaConstants.MANUAL_COMMIT, manual);
            message.setHeader(KafkaConstants.LAST_POLL_RECORD, !recordHasNext && !partitionHasNext);
        }

        try {
            processor.process(exchange);
        } catch (Exception e) {
            exchange.setException(e);
            // keep the previous offset so a seek-back resumes at the failed record
            boolean breakOnErrorExit = processException(exchange, partition, lastResult.getPartitionLastOffset(),
                    exceptionHandler);
            return new ProcessResult(breakOnErrorExit, lastResult.getPartitionLastOffset());
        }

        return new ProcessResult(false, record.offset());
    }

    /**
     * Decides what to do after a processing failure: with breakOnFirstError the
     * last good offset is force-committed and polling stops; otherwise the
     * exception handler logs it and processing continues.
     *
     * @return true when the caller should break out of the poll loop
     */
    private boolean processException(
            Exchange exchange, TopicPartition partition, long partitionLastOffset,
            ExceptionHandler exceptionHandler) {

        // processing failed due to an unhandled exception, what should we do
        if (configuration.isBreakOnFirstError()) {
            // we are failing and we should break out
            if (LOG.isWarnEnabled()) {
                LOG.warn("Error during processing {} from topic: {}", exchange, partition.topic(), exchange.getException());
                LOG.warn("Will seek consumer to offset {} and start polling again.", partitionLastOffset);
            }
            // force commit, so we resume on next poll where we failed
            commitOffset(partition, partitionLastOffset, false, true);
            // continue to next partition
            return true;
        } else {
            // will handle/log the exception and then continue to next
            exceptionHandler.handleException("Error during processing", exchange, exchange.getException());
        }
        return false;
    }

    /** Instance-level convenience delegating to the static commit logic with this processor's state. */
    public void commitOffset(
            TopicPartition partition, long partitionLastOffset, boolean stopping, boolean forceCommit) {
        commitOffset(configuration, consumer, partition, partitionLastOffset, stopping, forceCommit, threadId);
    }

    /**
     * Commits the given offset according to the configuration: to the offset
     * repository when one is set, per autoCommitOnStop when stopping, or as a
     * forced synchronous commit. A {@link #START_OFFSET} sentinel is a no-op.
     */
    public static void commitOffset(
            KafkaConfiguration configuration, Consumer<?, ?> consumer, TopicPartition partition, long partitionLastOffset,
            boolean stopping, boolean forceCommit, String threadId) {

        if (partitionLastOffset == START_OFFSET) {
            return;
        }

        StateRepository<String, String> offsetRepository = configuration.getOffsetRepository();
        if (!configuration.isAllowManualCommit() && offsetRepository != null) {
            saveStateToOffsetRepository(partition, partitionLastOffset, threadId, offsetRepository);
        } else if (stopping) {
            // if we are stopping then react according to the configured option
            if ("async".equals(configuration.getAutoCommitOnStop())) {
                commitAsync(consumer, partition, partitionLastOffset, threadId);
            } else if ("sync".equals(configuration.getAutoCommitOnStop())) {
                commitSync(configuration, consumer, partition, partitionLastOffset, threadId);
            } else if ("none".equals(configuration.getAutoCommitOnStop())) {
                noCommit(partition, threadId);
            }
        } else if (forceCommit) {
            forceSyncCommit(configuration, consumer, partition, partitionLastOffset, threadId);
        }
    }

    /** Synchronous commit of offset+1 (the next record to consume) with the configured timeout. */
    private static void commitOffset(
            KafkaConfiguration configuration, Consumer<?, ?> consumer, TopicPartition partition,
            long partitionLastOffset) {
        long timeout = configuration.getCommitTimeoutMs();
        consumer.commitSync(
                Collections.singletonMap(partition, new OffsetAndMetadata(partitionLastOffset + 1)),
                Duration.ofMillis(timeout));
    }

    private static void forceSyncCommit(
            KafkaConfiguration configuration, Consumer<?, ?> consumer, TopicPartition partition, long partitionLastOffset,
            String threadId) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Forcing commitSync {} [topic: {} partition: {} offset: {}]", threadId, partition.topic(),
                    partition.partition(), partitionLastOffset);
        }
        commitOffset(configuration, consumer, partition, partitionLastOffset);
    }

    /** autoCommitOnStop=none: deliberately commit nothing, just log the decision. */
    private static void noCommit(TopicPartition partition, String threadId) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Auto commit on stop {} from topic {} is disabled (none)", threadId, partition.topic());
        }
    }

    private static void commitSync(
            KafkaConfiguration configuration, Consumer<?, ?> consumer, TopicPartition partition, long partitionLastOffset,
            String threadId) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Auto commitSync on stop {} from topic {}", threadId, partition.topic());
        }
        commitOffset(configuration, consumer, partition, partitionLastOffset);
    }

    private static void commitAsync(
            Consumer<?, ?> consumer, TopicPartition partition, long partitionLastOffset, String threadId) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Auto commitAsync on stop {} from topic {}", threadId, partition.topic());
        }
        consumer.commitAsync(
                Collections.singletonMap(partition, new OffsetAndMetadata(partitionLastOffset + 1)), null);
    }

    /** Persists the last processed offset into the user-provided offset repository. */
    private static void saveStateToOffsetRepository(
            TopicPartition partition, long partitionLastOffset, String threadId,
            StateRepository<String, String> offsetRepository) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Saving offset repository state {} [topic: {} partition: {} offset: {}]", threadId, partition.topic(),
                    partition.partition(),
                    partitionLastOffset);
        }
        offsetRepository.setState(serializeOffsetKey(partition), serializeOffsetValue(partitionLastOffset));
    }

    /** Repository key format: {@code topic/partition}. */
    public static String serializeOffsetKey(TopicPartition topicPartition) {
        return topicPartition.topic() + '/' + topicPartition.partition();
    }

    public static String serializeOffsetValue(long offset) {
        return String.valueOf(offset);
    }

    public static long deserializeOffsetValue(String offset) {
        return Long.parseLong(offset);
    }
}
| |
package watson.user.service;
import watson.user.model.HPEmployee;
import javax.naming.Context;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;
import javax.naming.ldap.InitialLdapContext;
import javax.naming.ldap.LdapContext;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.List;
public class LDAPServiceImpl implements LDAPService {
private String ldapURL;
private String ldapAccount;
private String ldapPassword;
private String initialContextFactory;
private String trustStore;
private LdapContext getNewLdapContext(){
LdapContext ldapContext = null;
System.setProperty("javax.net.ssl.trustStore", this.trustStore);
//System.setProperty("javax.net.ssl.trustStorePassword", "123456");
Hashtable<String, String> env = new Hashtable<String, String>();
env.put(Context.INITIAL_CONTEXT_FACTORY, this.initialContextFactory);
env.put(Context.PROVIDER_URL, this.ldapURL);
env.put(Context.SECURITY_PRINCIPAL, this.ldapAccount);
env.put(Context.SECURITY_CREDENTIALS, this.ldapPassword);
env.put(Context.SECURITY_AUTHENTICATION, "simple");
env.put(Context.SECURITY_PROTOCOL, "ssl");
try {
ldapContext = new InitialLdapContext(env, null);
}
catch (Exception e) {
e.printStackTrace();
}
return ldapContext;
}
@Override
public boolean authenticateUser(String email, String password) {
boolean flag = false;
String userPrincipal = "uid=" + email.trim() + ",ou=People,o=hp.com";
System.setProperty("javax.net.ssl.trustStore", this.trustStore);
//System.setProperty("javax.net.ssl.trustStorePassword", "123456");
Hashtable<String, String> env = new Hashtable<String, String>();
env.put(Context.INITIAL_CONTEXT_FACTORY, this.initialContextFactory);
env.put(Context.PROVIDER_URL, this.ldapURL);
env.put(Context.SECURITY_PRINCIPAL, userPrincipal);
env.put(Context.SECURITY_CREDENTIALS, password);
env.put(Context.SECURITY_AUTHENTICATION, "simple");
env.put(Context.SECURITY_PROTOCOL, "ssl");
try {
new InitialLdapContext(env, null);
flag = true;
} catch (Exception e) {
e.printStackTrace();
}
return flag;
}
@Override
public HPEmployee getEmployeeDataByEmail(String email) {
HPEmployee employee = null;
LdapContext ldapContext = this.getNewLdapContext();
try {
String searchAttr = "mail=" + email.trim();
employee = this.getEmployeeData(searchAttr, ldapContext);
if (employee != null) {
String managerEmployeeId = employee.getManagerEmployeeId();
String searchAttrMgr = "employeeNumber=" + managerEmployeeId;
HPEmployee manager = getEmployeeData(searchAttrMgr, ldapContext);
if (manager != null) {
employee.setManagerDomainUserName(manager.getDomainUserName());
employee.setManagerName(manager.getName());
employee.setManagerEmail(manager.getEmail());
}
}
} catch (Exception e) {
e.printStackTrace();
}
return employee;
}
@Override
public HPEmployee getEmployeeDataByDomainUserName(String domainUserName) {
HPEmployee employee = null;
LdapContext ldapContext = this.getNewLdapContext();
try {
String searchAttr = "ntuserdomainid=" + domainUserName.trim().replace("\\", ":");
employee = this.getEmployeeData(searchAttr, ldapContext);
if (employee != null) {
String managerEmployeeId = employee.getManagerEmployeeId();
String searchAttrMgr = "employeeNumber=" + managerEmployeeId;
HPEmployee manager = getEmployeeData(searchAttrMgr, ldapContext);
if (manager != null) {
employee.setManagerDomainUserName(manager.getDomainUserName());
employee.setManagerName(manager.getName());
employee.setManagerEmail(manager.getEmail());
}
}
} catch (Exception e) {
e.printStackTrace();
}
return employee;
}
@Override
public List<HPEmployee> getEmployeeData(List<String> domainUserNames) {
List<HPEmployee> employees = new ArrayList<HPEmployee>();
for (String domainUserName : domainUserNames) {
HPEmployee employee = this.getEmployeeDataByDomainUserName(domainUserName);
employees.add(employee);
}
return employees;
}
private HPEmployee getEmployeeData(String searchAttr, LdapContext ldapContext) throws Exception{
String searchBase = "ou=People, o=hp.com";
String[] retAttrs = {"ntuserdomainid", "employeeNumber", "cn", "givenName", "sn", "hpStatus", "mail", "hpJobFunction", "hpJobFamily", "c", "ou", "managerEmployeeNumber"};
SearchControls searchControls = new SearchControls();
searchControls.setSearchScope(SearchControls.ONELEVEL_SCOPE);
searchControls.setReturningAttributes(retAttrs);
HPEmployee employee = null;
try {
NamingEnumeration results = ldapContext.search(searchBase, searchAttr, searchControls);
while (results != null && results.hasMore()) {
SearchResult searchResult = (SearchResult) results.next();
Attributes attrs = searchResult.getAttributes();
employee = getHPEmployee(attrs);
String hpStatus = employee.getHpStatus();
//Multiple entries for the person may exist in ED. If so, return the first entry
//with hpStatus of 'active' if one exists, else return the last entry
if ((hpStatus != null) && hpStatus.equalsIgnoreCase("Active"))
return employee;
}
} catch (NamingException ne) {
ne.printStackTrace();
}
return employee;
}
private HPEmployee getHPEmployee(Attributes attrs) throws NamingException {
Attribute attr;
attr = attrs.get("ntuserdomainid");
String ntuserdomainid = (attr == null) ? null : attr.get().toString();
attr = attrs.get("employeeNumber");
String employeeId = (attr == null) ? null : attr.get().toString();
attr = attrs.get("cn");
String name = (attr == null) ? null : attr.get().toString();
attr = attrs.get("givenName");
String firstName = (attr == null) ? null : attr.get().toString();
attr = attrs.get("sn");
String lastName = (attrs == null) ? null :attr.get().toString();
attr = attrs.get("mail");
String email = (attr == null) ? null : attr.get().toString();
attr = attrs.get("hpJobFunction");
String jobFunction = (attr == null) ? null : attr.get().toString();
attr = attrs.get("hpJobFamily");
String jobFamily = (attr == null) ? null : attr.get().toString();
attr = attrs.get("c");
String country = (attr == null) ? null : attr.get().toString();
attr = attrs.get("ou");
String orgUnit = (attr == null) ? null : attr.get().toString();
attr = attrs.get("hpStatus");
String hpStatus = (attr == null) ? null : attr.get().toString();
attr = attrs.get("managerEmployeeNumber");
String mgrId = (attr == null) ? null : attr.get().toString();
return new HPEmployee(ntuserdomainid, employeeId, name, firstName, lastName, email, jobFunction + " - " + jobFamily, country, orgUnit, hpStatus, mgrId);
}
public void setLdapURL(String ldapURL) {
this.ldapURL = ldapURL;
}
public void setLdapAccount(String ldapAccount) {
this.ldapAccount = ldapAccount;
}
public void setLdapPassword(String ldapPassword) {
this.ldapPassword = ldapPassword;
}
public void setInitialContextFactory(String initialContextFactory) {
this.initialContextFactory = initialContextFactory;
}
public void setTrustStore(String trustStore) {
this.trustStore = trustStore;
}
}
| |
/**
* Copyright (C) Zhang,Yuexiang (xfeep)
*
*/
package nginx.clojure;
import static nginx.clojure.MiniConstants.DEFAULT_ENCODING;
import static nginx.clojure.MiniConstants.NGX_OK;
import static nginx.clojure.NginxClojureRT.log;
import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
import nginx.clojure.java.NginxJavaResponse;
import nginx.clojure.net.NginxClojureAsynSocket;
import sun.nio.ch.DirectBuffer;
public class NginxHttpServerChannel implements Closeable {
protected NginxRequest request;
protected boolean ignoreFilter;
protected volatile boolean closed;
protected Object context;
protected long asyncTimeout;
protected final Object closeLock = new Object[0];
// Fired when the underlying nginx request is released; marks the channel closed
// so later operations fail fast instead of touching a freed native request.
private static final ChannelListener<NginxHttpServerChannel> closeListener = new ChannelCloseAdapter<NginxHttpServerChannel>() {
    @Override
    public void onClose(NginxHttpServerChannel sc) {
        synchronized (sc.closeLock) {
            if (!sc.closed) {
                // cache uri for logging usage otherwise we can not get uri from a released request
                sc.request.uri();
                sc.closed = true;
            }
        }
    }
};
/**
 * Creates a hijacked server channel bound to the given request.
 *
 * @param request      the nginx request this channel reads from / writes to
 * @param ignoreFilter when true, output bypasses nginx body filters
 */
public NginxHttpServerChannel(NginxRequest request, boolean ignoreFilter) {
    this.request = request;
    this.ignoreFilter = ignoreFilter;
    // close this channel automatically when the request is released
    request.addListener(this, closeListener);
}
/**
 * Registers a listener on the underlying request's lifecycle events.
 *
 * @param data     user data handed back to the listener callbacks
 * @param listener the listener to register
 */
public <T> void addListener(T data, ChannelListener<T> listener) {
    this.request.addListener(data, listener);
}
/**
 * Turns on the native event handler for this channel.
 *
 * @param read        listen for read events
 * @param write       listen for write events
 * @param nokeepalive disable keep-alive on the underlying connection
 * @throws IOException if the channel is already closed
 */
public void turnOnEventHandler(boolean read, boolean write, boolean nokeepalive) throws IOException {
    checkValid();
    final int flag = (read ? MiniConstants.NGX_HTTP_CLOJURE_EVENT_HANDLER_FLAG_READ : 0)
            | (write ? MiniConstants.NGX_HTTP_CLOJURE_EVENT_HANDLER_FLAG_WRITE : 0)
            | (nokeepalive ? MiniConstants.NGX_HTTP_CLOJURE_EVENT_HANDLER_FLAG_NOKEEPALIVE : 0);
    if (Thread.currentThread() == NginxClojureRT.NGINX_MAIN_THREAD) {
        NginxClojureRT.ngx_http_hijack_turn_on_event_handler(request.nativeRequest(), flag);
    } else {
        // the native call must run on the nginx main thread; hand it off as a poll task
        NginxClojureRT.postPollTaskEvent(
                () -> NginxClojureRT.ngx_http_hijack_turn_on_event_handler(request.nativeRequest(), flag));
    }
}
/**
 * Low-level send of a byte-array chunk through the native hijack API.
 * A null message transmits only the control flags (e.g. flush/last).
 *
 * @return the native result code
 */
protected int send(byte[] message, long off, int len, int flag) {
    final long rc;
    if (message == null) {
        // flag-only send: pure flush or channel close
        rc = NginxClojureRT.ngx_http_hijack_send(request.nativeRequest(), null, 0, 0, flag);
    } else {
        rc = NginxClojureRT.ngx_http_hijack_send(request.nativeRequest(), message,
                MiniConstants.BYTE_ARRAY_OFFSET + off, len, flag);
    }
    return (int) rc;
}
/**
 * Low-level send of a buffer's remaining bytes through the native hijack API.
 * On success the buffer is drained (position moved to limit).
 *
 * @return the native result code
 */
protected int send(ByteBuffer message, int flag) {
    if (message == null) {
        return (int) NginxClojureRT.ngx_http_hijack_send(request.nativeRequest(), null, 0, 0, flag);
    }
    final long rc;
    if (message.isDirect()) {
        // direct buffer: pass the native address, no array copy
        rc = NginxClojureRT.ngx_http_hijack_send(request.nativeRequest(), null,
                ((DirectBuffer) message).address() + message.position(), message.remaining(), flag);
    } else {
        rc = NginxClojureRT.ngx_http_hijack_send(request.nativeRequest(), message.array(),
                MiniConstants.BYTE_ARRAY_OFFSET + message.arrayOffset() + message.position(), message.remaining(), flag);
    }
    if ((int) rc == MiniConstants.NGX_OK) {
        message.position(message.limit());
    }
    return (int) rc;
}
/**
 * Ensures the channel is still open.
 *
 * @throws IOException when {@code closed} is set (last chunk sent or request released)
 */
private final void checkValid() throws IOException {
    if (closed) {
        throw new IOException("Op on a closed NginxHttpServerChannel with request :" + request);
    }
}
/**
 * If message is null when flush is true it will do flush, when last is true it will close channel.
 *
 * @throws IOException if the channel is already closed
 */
public void send(byte[] message, int off, int len, boolean flush, boolean last) throws IOException {
    checkValid();
    if (last) {
        // no further operations are accepted once the last chunk is queued
        closed = true;
    }
    final int flag = computeFlag(flush, last);
    if (Thread.currentThread() == NginxClojureRT.NGINX_MAIN_THREAD) {
        send(message, off, len, flag);
    } else {
        // copy the chunk so the caller may reuse its array after the hand-off
        byte[] copy = (message == null) ? null : Arrays.copyOfRange(message, off, off + len);
        NginxClojureRT.postHijackSendEvent(this, copy, 0, len, flag);
    }
}
/**
 * Maps the (flush, last) pair plus this channel's ignore-filter setting
 * to the native buffer flag bits.
 */
public int computeFlag(boolean flush, boolean last) {
    int flag = flush ? MiniConstants.NGX_CLOJURE_BUF_FLUSH_FLAG : 0;
    if (last) {
        flag |= MiniConstants.NGX_CLOJURE_BUF_LAST_FLAG;
    }
    if (ignoreFilter) {
        flag |= MiniConstants.NGX_CLOJURE_BUF_IGNORE_FILTER_FLAG;
    }
    return flag;
}
/**
 * Flushes buffered response data without closing the channel.
 *
 * @throws IOException if the channel is already closed
 */
public void flush() throws IOException {
    checkValid();
    final int flag = computeFlag(true, false);
    if (Thread.currentThread() == NginxClojureRT.NGINX_MAIN_THREAD) {
        send(null, 0, 0, flag);
    } else {
        // a null payload with the flush flag performs a pure flush on the main thread
        NginxClojureRT.postHijackSendEvent(this, null, 0, 0, flag);
    }
}
/**
 * Sends a text message encoded with the default encoding, optionally flushing
 * and/or closing the channel. A null message with flush=true is a pure flush.
 *
 * @throws IOException if the channel is already closed
 */
public void send(String message, boolean flush, boolean last) throws IOException {
    checkValid();
    if (last) {
        // no further operations are accepted once the last chunk is queued
        closed = true;
    }
    if (log.isDebugEnabled()) {
        log.debug("#%s: send message : '%s', len=%s, flush=%s, last=%s, lns=%s", request.nativeRequest(),
                HackUtils.truncateToDotAppendString(message, 10), message == null ? "<NULL>" : message.length(), flush, last,
                request.listeners() == null ? 0 : request.listeners().size());
    }
    byte[] bs = message == null ? null : message.getBytes(DEFAULT_ENCODING);
    // NGX_CLOJURE_BUF_APP_MSGTXT marks the payload as an application text message
    int flag = computeFlag(flush, last) | MiniConstants.NGX_CLOJURE_BUF_APP_MSGTXT;
    if (Thread.currentThread() != NginxClojureRT.NGINX_MAIN_THREAD) {
        // the native send must happen on the nginx main thread
        NginxClojureRT.postHijackSendEvent(this, bs, 0, bs == null ? 0 : bs.length, flag);
    } else {
        send(bs, 0, bs == null ? 0 : bs.length, flag);
    }
}
/**
 * Sends the buffer's remaining bytes, optionally flushing and/or closing the
 * channel. A null message with flush=true is a pure flush.
 *
 * @throws IOException if the channel is already closed
 */
public void send(ByteBuffer message, boolean flush, boolean last) throws IOException {
    checkValid();
    if (last) {
        // no further operations are accepted once the last chunk is queued
        closed = true;
    }
    if (log.isDebugEnabled()) {
        log.debug("#%s: send message : '%s', flush=%s, last=%s, lns=%s", request.nativeRequest(), message, flush, last, request.listeners() == null ? 0 : request.listeners().size());
    }
    int flag = computeFlag(flush, last);
    if (Thread.currentThread() != NginxClojureRT.NGINX_MAIN_THREAD) {
        if (message != null) {
            // copy the buffer so the caller may reuse it before the main thread sends
            ByteBuffer cm = ByteBuffer.allocate(message.remaining());
            cm.put(message);
            cm.flip();
            NginxClojureRT.postHijackSendEvent(this, cm, 0, cm.remaining(), flag);
        } else {
            NginxClojureRT.postHijackSendEvent(this, null, 0, 0, flag);
        }
    } else {
        send(message, flag);
    }
}
/**
 * Reads request body bytes into the buffer, advancing its position by the
 * number of bytes read.
 *
 * @return bytes read, 0 when no data is available yet (native EAGAIN),
 *         -1 on end of stream
 * @throws IOException when the channel is closed or a native error occurs
 */
public long read(ByteBuffer buf) throws IOException {
    long rc;
    if (Thread.currentThread() != NginxClojureRT.NGINX_MAIN_THREAD) {
        // off the main thread the close flag can change concurrently, so the
        // check-and-read must happen under the close lock
        synchronized (closeLock) {
            rc = readInternal(buf);
        }
    } else {
        rc = readInternal(buf);
    }
    if (NginxClojureRT.log.isDebugEnabled()) {
        NginxClojureRT.log.debug("NginxHttpServerChannel read rc=%d", rc);
    }
    if (rc == NginxClojureAsynSocket.NGX_HTTP_CLOJURE_SOCKET_ERR_AGAIN) {
        return 0;
    } else if (rc == 0) {
        return -1;
    } else if (rc < 0) {
        throw new IOException(NginxClojureAsynSocket.errorCodeToString(rc));
    }
    buf.position(buf.position() + (int) rc);
    return rc;
}

/**
 * Closed-state check plus the raw native read into the buffer; the caller is
 * responsible for locking and for decoding the native result code.
 * (Extracted to remove the duplicated branch bodies of the original.)
 */
private long readInternal(ByteBuffer buf) throws IOException {
    if (closed) {
        throw new IOException("Op on a closed NginxHttpServerChannel with request :" + request);
    }
    if (buf.isDirect()) {
        return NginxClojureRT.ngx_http_hijack_read(request.nativeRequest(), null,
                ((DirectBuffer) buf).address() + buf.position(), buf.remaining());
    }
    return NginxClojureRT.ngx_http_hijack_read(request.nativeRequest(), buf.array(),
            MiniConstants.BYTE_ARRAY_OFFSET + buf.arrayOffset() + buf.position(), buf.remaining());
}
/**
 * Reads request body bytes into the array starting at {@code off}.
 *
 * @return bytes read, 0 when no data is available yet (native EAGAIN),
 *         -1 on end of stream
 * @throws IOException when the channel is closed or a native error occurs
 */
public long read(byte[] buf, long off, long size) throws IOException {
    long rc;
    if (Thread.currentThread() != NginxClojureRT.NGINX_MAIN_THREAD) {
        // off the main thread the close flag can change concurrently, so the
        // check-and-read must happen under the close lock
        synchronized (closeLock) {
            rc = readInternal(buf, off, size);
        }
    } else {
        rc = readInternal(buf, off, size);
    }
    if (NginxClojureRT.log.isDebugEnabled()) {
        NginxClojureRT.log.debug("NginxHttpServerChannel read rc=%d", rc);
    }
    if (rc == NginxClojureAsynSocket.NGX_HTTP_CLOJURE_SOCKET_ERR_AGAIN) {
        return 0;
    } else if (rc == 0) {
        return -1;
    } else if (rc < 0) {
        throw new IOException(NginxClojureAsynSocket.errorCodeToString(rc));
    }
    return rc;
}

/**
 * Closed-state check plus the raw native read into the array; the caller is
 * responsible for locking and for decoding the native result code.
 * (Extracted to remove the duplicated branch bodies of the original.)
 */
private long readInternal(byte[] buf, long off, long size) throws IOException {
    if (closed) {
        throw new IOException("Op on a closed NginxHttpServerChannel with request :" + request);
    }
    return NginxClojureRT.ngx_http_hijack_read(request.nativeRequest(), buf,
            MiniConstants.BYTE_ARRAY_OFFSET + off, size);
}
/**
 * Raw native write of an array slice; must be called on the nginx main thread.
 *
 * @return the native result code (bytes written, or a negative error code)
 */
protected long unsafeWrite(byte[] buf, long off, long size) {
    return NginxClojureRT.ngx_http_hijack_write(request.nativeRequest(), buf, MiniConstants.BYTE_ARRAY_OFFSET + off, size);
}
/**
 * Raw native write of the buffer's remaining bytes; must be called on the
 * nginx main thread. Advances the buffer position by the bytes written.
 *
 * @return the native result code (bytes written, or a negative error code)
 */
protected long unsafeWrite(ByteBuffer buf) {
    long rc;
    if (buf.isDirect()) {
        // direct buffer: pass the native address, no array copy
        rc = NginxClojureRT.ngx_http_hijack_write(request.nativeRequest(), null,
                ((DirectBuffer) buf).address() + buf.position(), buf.remaining());
    } else {
        rc = NginxClojureRT.ngx_http_hijack_write(request.nativeRequest(), buf.array(),
                MiniConstants.BYTE_ARRAY_OFFSET + buf.arrayOffset() + buf.position(), buf.remaining());
    }
    if (rc > 0) {
        buf.position(buf.position() + (int) rc);
    }
    return rc;
}
/**
 * Writes an array slice, routing to the nginx main thread when necessary.
 *
 * @return bytes written, 0 on native EAGAIN, -1 on end of stream
 * @throws IOException when the channel is closed or a native error occurs
 */
public long write(byte[] buf, long off, int size) throws IOException {
    checkValid();
    final long rc = (Thread.currentThread() == NginxClojureRT.NGINX_MAIN_THREAD)
            ? unsafeWrite(buf, off, size)
            : NginxClojureRT.postHijackWriteEvent(this, buf, off, size);
    if (NginxClojureRT.log.isDebugEnabled()) {
        NginxClojureRT.log.debug("NginxHttpServerChannel write rc=%d", rc);
    }
    if (rc == NginxClojureAsynSocket.NGX_HTTP_CLOJURE_SOCKET_ERR_AGAIN) {
        return 0;
    }
    if (rc == 0) {
        return -1;
    }
    if (rc < 0) {
        throw new IOException(NginxClojureAsynSocket.errorCodeToString(rc));
    }
    return (int) rc;
}
/**
 * Writes the buffer's remaining bytes, routing to the nginx main thread when
 * necessary.
 *
 * @return bytes written, 0 on native EAGAIN, -1 on end of stream
 * @throws IOException when the channel is closed or a native error occurs
 */
public long write(ByteBuffer buf) throws IOException {
    checkValid();
    final long rc = (Thread.currentThread() == NginxClojureRT.NGINX_MAIN_THREAD)
            ? unsafeWrite(buf)
            : NginxClojureRT.postHijackWriteEvent(this, buf, 0, buf.remaining());
    if (NginxClojureRT.log.isDebugEnabled()) {
        NginxClojureRT.log.debug("NginxHttpServerChannel write rc=%d", rc);
    }
    if (rc == NginxClojureAsynSocket.NGX_HTTP_CLOJURE_SOCKET_ERR_AGAIN) {
        return 0;
    }
    if (rc == 0) {
        return -1;
    }
    if (rc < 0) {
        throw new IOException(NginxClojureAsynSocket.errorCodeToString(rc));
    }
    return rc;
}
/** Sends the already-prepared response headers; must run on the nginx main thread. */
protected void sendHeader(int flag) {
    NginxClojureRT.ngx_http_hijack_send_header(request.nativeRequest(), flag);
}
/**
 * Sends raw pre-formatted header bytes; must run on the nginx main thread.
 *
 * @return the native result code; negative means the header bytes were rejected
 */
protected int sendHeader(byte[] message, long off, int len, int flag) {
    int rc = (int)NginxClojureRT.ngx_http_hijack_send_header(request.nativeRequest(), message, MiniConstants.BYTE_ARRAY_OFFSET + off, len, flag);
    if (rc < 0) {
        NginxClojureRT.log.error("bad header from server : %s", new String(message));
    }
    return rc;
}
/**
 * Sends raw pre-formatted header bytes from a buffer; must run on the nginx
 * main thread. On success the buffer is drained (position moved to limit).
 *
 * @return the native result code; negative means the header bytes were rejected
 */
protected int sendHeader(ByteBuffer message, int flag) {
    int rc = 0;
    if (message.isDirect()) {
        // direct buffer: pass the native address, no array copy
        rc = (int) NginxClojureRT.ngx_http_hijack_send_header(request.nativeRequest(), null,
                ((DirectBuffer) message).address() + message.position(), message.remaining(), flag);
    } else {
        rc = (int) NginxClojureRT.ngx_http_hijack_send_header(request.nativeRequest(), message.array(),
                MiniConstants.BYTE_ARRAY_OFFSET + message.arrayOffset()+message.position(), message.remaining(), flag);
    }
    if (rc == MiniConstants.NGX_OK) {
        message.position(message.limit());
    } else if (rc < 0) {
        NginxClojureRT.log.error("bad header from server : %s", HackUtils.decode(message, DEFAULT_ENCODING, NginxClojureRT.pickCharBuffer()));
    }
    return rc;
}
/**
 * Prepares and sends the response status line and headers, optionally flushing
 * and/or closing the channel.
 *
 * @throws IOException if the channel is already closed
 */
public <K, V> void sendHeader(long status, Collection<Map.Entry<K, V>> headers, boolean flush, boolean last) throws IOException {
    checkValid();
    if (last) {
        closed = true;
    }
    final int flag = computeFlag(flush, last);
    request.handler().prepareHeaders(request, status, headers);
    if (Thread.currentThread() == NginxClojureRT.NGINX_MAIN_THREAD) {
        sendHeader(flag);
    } else {
        // the native header send must happen on the nginx main thread
        NginxClojureRT.postHijackSendHeaderEvent(this, flag);
    }
}
/**
 * Sends pre-serialized header bytes {@code buf[pos..pos+len)}. When
 * {@code last} is true the channel is marked closed first. Worker threads
 * hand the actual send off to the nginx event loop; the main thread sends
 * directly.
 */
public void sendHeader(byte[] buf, int pos, int len, boolean flush, boolean last) throws IOException {
    checkValid();
    if (last) {
        closed = true;
    }
    final int flag = computeFlag(flush, last);
    if (Thread.currentThread() == NginxClojureRT.NGINX_MAIN_THREAD) {
        sendHeader(buf, pos, len, flag);
    } else {
        NginxClojureRT.postHijackSendHeaderEvent(this, buf, pos, len, flag);
    }
}
/**
 * Core response delivery, executed on the nginx main thread: sends the
 * header and body chain for {@code resp}, then drives the request/phase
 * state machine (finalize or continue-current-phase). Always returns
 * NGX_OK to the event loop; the real outcome is propagated through the
 * native finalize/continue calls.
 *
 * @param resp  response to deliver (must belong to a live request)
 * @param chain native chain pointer; a negative value encodes an HTTP error
 *              status as {@code -chain}, 0 means the chain could not be built
 */
protected long sendResponseHelp(NginxResponse resp, long chain) {
    NginxRequest req = resp.request();
    // Never touch a request whose native side has already been released.
    if (req.isReleased()) {
        if (resp.type() > 0) {
            log.error("#%d: request is release! and we also meet an unhandled exception! %s", req.nativeRequest(), resp.fetchBody());
        }else {
            log.error("#%d: request is release! ", req.nativeRequest());
        }
        return MiniConstants.NGX_HTTP_INTERNAL_SERVER_ERROR;
    }
    req.applyDelayed();
    long rc = NGX_OK;
    long r = req.nativeRequest();
    // A fake "phase done" response carries no body; it only resumes nginx
    // phase processing (or continues the filter chain for header filters).
    if (resp.type() == NginxResponse.TYPE_FAKE_PHASE_DONE) {
        if (req.phase() == MiniConstants.NGX_HTTP_HEADER_FILTER_PHASE) {
            rc = NginxClojureRT.ngx_http_filter_continue_next(r, -1, 0);
            NginxClojureRT.ngx_http_finalize_request(r, rc);
            return NGX_OK;
        }
// if (req.isHijacked()) {
// //decrease r->count
// NginxClojureRT.ngx_http_finalize_request(r, rc);
// }
        NginxClojureRT.ngx_http_clojure_mem_continue_current_phase(r, MiniConstants.NGX_DECLINED);
        return NGX_OK;
    }
    int phase = req.phase();
    long nr = req.nativeRequest();
    if (chain < 0) {
        // Negative chain encodes an HTTP error status (-chain).
        req.handler().prepareHeaders(req, -(int)chain, resp.fetchHeaders());
        rc = -chain;
    }else if (chain == 0) {
        rc = MiniConstants.NGX_HTTP_INTERNAL_SERVER_ERROR;
    } else {
        int status = resp.fetchStatus(MiniConstants.NGX_HTTP_OK);
        if (phase == MiniConstants.NGX_HTTP_HEADER_FILTER_PHASE) {
            NginxClojureRT.ngx_http_clear_header_and_reset_ctx_phase(nr, ~phase);
        }
        req.handler().prepareHeaders(req, status, resp.fetchHeaders());
        rc = NginxClojureRT.ngx_http_hijack_send_header(r, computeFlag(false, false));
        if (rc != MiniConstants.NGX_ERROR && rc <= NGX_OK) {
            //close will be done by handleReturnCodeFromHandler, so we do not need pass close flag
            rc = NginxClojureRT.ngx_http_hijack_send_chain(r, chain, computeFlag(true, false));
            if (rc == NGX_OK && phase != -1) {
                NginxClojureRT.ngx_http_ignore_next_response(nr);
            }
            if (phase != -1) {
                // Access/rewrite phases feed the translated return code back
                // into rc; other phases only report it.
                if (phase == MiniConstants.NGX_HTTP_ACCESS_PHASE || phase == MiniConstants.NGX_HTTP_REWRITE_PHASE ) {
                    rc = NginxClojureRT.handleReturnCodeFromHandler(nr, phase, rc, status);
                }else {
                    NginxClojureRT.handleReturnCodeFromHandler(nr, phase, rc, status);
                }
            }
        }
    }
    // Content handlers (phase == -1) and header filters finalize the request;
    // other phases hand control back to nginx unless NGX_DONE was returned.
    if (phase == -1 || phase == MiniConstants.NGX_HTTP_HEADER_FILTER_PHASE) {
        NginxClojureRT.ngx_http_finalize_request(r, rc);
    }else if (rc != MiniConstants.NGX_DONE){
        NginxClojureRT.ngx_http_clojure_mem_continue_current_phase(r, rc);
    }
    return NGX_OK;
}
/**
 * Converts an arbitrary handler-level response object into a
 * {@code NginxResponse} and delivers it: directly on the nginx main
 * thread, or via a posted event from worker threads.
 */
public void sendResponse(Object resp) throws IOException {
    checkValid();
    final NginxResponse response = request.handler().toNginxResponse(request, resp);
    final long chain = request.handler().buildOutputChain(response);
    if (Thread.currentThread() == NginxClojureRT.NGINX_MAIN_THREAD) {
        sendResponseHelp(response, chain);
    } else {
        NginxClojureRT.postHijackSendResponseEvent(this, response, chain);
    }
}
/**
 * Sends one body part. When {@code last} is true the channel is marked
 * closed first. The body is wrapped in a minimal response object for the
 * cross-thread event path.
 */
public void sendBody(final Object body, boolean last) throws IOException {
    checkValid();
    if (last) {
        closed = true;
    }
    // Minimal response wrapper: fixed 200 status, no headers, just the body.
    NginxResponse tmpResp = new NginxSimpleResponse(request) {
        @Override
        public Object fetchBody() {
            return body;
        }
        @Override
        public <K, V> Collection<Entry<K, V>> fetchHeaders() {
            return Collections.emptyList();
        }
        @Override
        public int fetchStatus(int defaultStatus) {
            return 200;
        }
    };
    long chain = ((NginxSimpleHandler)request.handler()).buildResponseItemBuf(request.nativeRequest(), body, 0);
    if (Thread.currentThread() != NginxClojureRT.NGINX_MAIN_THREAD) {
        NginxClojureRT.postHijackSendResponseEvent(this, tmpResp, chain);
    }else {
        // NOTE(review): on the main-thread path tmpResp is unused and the
        // chain is sent directly with flush=false — confirm the asymmetry
        // with the event path is intended.
        NginxClojureRT.ngx_http_hijack_send_chain(request.nativeRequest(), chain, computeFlag(false, last));
    }
}
/**
 * Finishes the request with a bare status code (no body written through
 * this channel).
 */
public void sendResponse(int status) throws IOException {
    checkValid();
    if (Thread.currentThread() != NginxClojureRT.NGINX_MAIN_THREAD) {
        // Off the event loop: wrap the status into a minimal response array
        // ({status, headers, body}) and post it to the main thread.
        NginxResponse response = new NginxJavaResponse(request, new Object[]{status, null, null});
        NginxClojureRT.postHijackSendResponseEvent(this, response, request.handler().buildOutputChain(response));
    }else {
        // NOTE(review): closed is only set on the main-thread path; the
        // posted event presumably closes the channel itself — confirm.
        closed = true;
        NginxClojureRT.ngx_http_finalize_request(request.nativeRequest(), status);
    }
}
/**
 * Closes the channel by sending an empty, close-flagged send event.
 * Idempotent: once {@code closed} is set, subsequent calls return
 * immediately.
 */
public void close() throws IOException {
    int flag = computeFlag(false, true);
    if (Thread.currentThread() != NginxClojureRT.NGINX_MAIN_THREAD) {
        // Worker threads can race each other, so the closed check-and-set
        // is guarded by closeLock.
        synchronized (closeLock) {
            if (closed) {
                return;
            }
            closed = true;
        }
        NginxClojureRT.postHijackSendEvent(this, null, 0,
                0, flag);
    }else {
        // On the nginx main thread no lock is needed — the event loop is
        // single-threaded.
        if (closed) {
            return;
        }
        closed = true;
        send(null, 0, 0, flag);
    }
}
/** Marks the channel as closed without sending any close event. */
public void tagClose() {
    closed = true;
}
/** @return whether response filters are bypassed for this channel */
public boolean isIgnoreFilter() {
    return ignoreFilter;
}
/** Sets whether response filters should be bypassed for this channel. */
public void setIgnoreFilter(boolean ignoreFilter) {
    this.ignoreFilter = ignoreFilter;
}
/** @return the request this channel belongs to */
public NginxRequest request() {
    return request;
}
/** @return true once the channel has been closed or tagged closed */
public boolean isClosed() {
    return closed;
}
/** @return the user-attached context object, or null if none was set */
public Object getContext() {
    return context;
}
/** Attaches an arbitrary user context object to this channel. */
public void setContext(Object context) {
    this.context = context;
}
/** @return the async timeout value last set via {@link #setAsyncTimeout(long)} */
public long getAsyncTimeout() {
    return asyncTimeout;
}
/**
 * Records the async timeout and applies it to the native request — directly
 * when already on the nginx main thread, otherwise via a posted poll task.
 */
public void setAsyncTimeout(final long asyncTimeout) throws IOException {
    checkValid();
    this.asyncTimeout = asyncTimeout;
    if (Thread.currentThread() == NginxClojureRT.NGINX_MAIN_THREAD) {
        NginxClojureRT.ngx_http_hijack_set_async_timeout(request.nativeRequest(), asyncTimeout);
    } else {
        NginxClojureRT.postPollTaskEvent(
                () -> NginxClojureRT.ngx_http_hijack_set_async_timeout(request.nativeRequest(), asyncTimeout));
    }
}
/**
 * Attempts to upgrade the underlying request to a WebSocket connection.
 * The native upgrade call must run on the nginx main thread, so worker
 * threads post it as a poll task and block until it completes.
 *
 * @param sendErrorForNonWebSocket when true, the native layer is asked to
 *        send an error response if the request is not a WebSocket handshake
 * @return true when the native upgrade call succeeds (returns 0)
 * @throws RuntimeException if the cross-thread wait is interrupted or the
 *         posted task fails; the original cause is preserved
 */
public boolean webSocketUpgrade(final boolean sendErrorForNonWebSocket) {
    if (Thread.currentThread() != NginxClojureRT.NGINX_MAIN_THREAD) {
        FutureTask<Boolean> task = new FutureTask<>(() -> NginxClojureRT.ngx_http_clojure_websocket_upgrade(request.nativeRequest(), sendErrorForNonWebSocket ? 1 : 0) == 0);
        NginxClojureRT.postPollTaskEvent(task);
        try {
            return task.get();
        } catch (InterruptedException e) {
            // Restore the interrupt flag so pools/callers can observe it.
            Thread.currentThread().interrupt();
            throw new RuntimeException("webSocketUpgrade Interrupted", e);
        } catch (ExecutionException e) {
            throw new RuntimeException("webSocketUpgrade Execution error", e.getCause());
        }
    }else {
        return NginxClojureRT.ngx_http_clojure_websocket_upgrade(request.nativeRequest(), sendErrorForNonWebSocket ? 1 : 0) == 0;
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.elasticsearch.common.geo;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
/**
* Utilities for encoding and decoding geohashes. Based on
* http://en.wikipedia.org/wiki/Geohash.
*/
// LUCENE MONITOR: monitor against spatial package
// replaced with native DECODE_MAP
public class GeoHashUtils {

    // Geohash base-32 alphabet: digits plus lowercase letters, omitting the
    // easily confused characters 'a', 'i', 'l' and 'o'.
    private static final char[] BASE_32 = {'0', '1', '2', '3', '4', '5', '6',
            '7', '8', '9', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n',
            'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};

    // Default number of geohash characters used by encode(double, double).
    public static final int PRECISION = 12;

    // Masks for the 5 bits of one geohash character, most significant bit first.
    private static final int[] BITS = {16, 8, 4, 2, 1};

    // Pure utility class; not instantiable.
    private GeoHashUtils() {
    }

    /**
     * Encodes the given latitude and longitude into a geohash using the
     * default precision of {@link #PRECISION} characters.
     */
    public static String encode(double latitude, double longitude) {
        return encode(latitude, longitude, PRECISION);
    }

    /**
     * Encodes the given latitude and longitude into a geohash
     *
     * @param latitude Latitude to encode
     * @param longitude Longitude to encode
     * @param precision number of geohash characters to produce
     * @return Geohash encoding of the longitude and latitude
     */
    public static String encode(double latitude, double longitude, int precision) {
        // Binary bisection of the lat/lon intervals: each emitted bit halves
        // one interval, alternating longitude (even bits) and latitude (odd
        // bits); every 5 bits form one base-32 character.
        // double[] latInterval = {-90.0, 90.0};
        // double[] lngInterval = {-180.0, 180.0};
        double latInterval0 = -90.0;
        double latInterval1 = 90.0;
        double lngInterval0 = -180.0;
        double lngInterval1 = 180.0;
        final StringBuilder geohash = new StringBuilder();
        boolean isEven = true;
        int bit = 0;
        int ch = 0;
        while (geohash.length() < precision) {
            double mid = 0.0;
            if (isEven) {
                // mid = (lngInterval[0] + lngInterval[1]) / 2D;
                mid = (lngInterval0 + lngInterval1) / 2D;
                if (longitude > mid) {
                    ch |= BITS[bit];
                    // lngInterval[0] = mid;
                    lngInterval0 = mid;
                } else {
                    // lngInterval[1] = mid;
                    lngInterval1 = mid;
                }
            } else {
                // mid = (latInterval[0] + latInterval[1]) / 2D;
                mid = (latInterval0 + latInterval1) / 2D;
                if (latitude > mid) {
                    ch |= BITS[bit];
                    // latInterval[0] = mid;
                    latInterval0 = mid;
                } else {
                    // latInterval[1] = mid;
                    latInterval1 = mid;
                }
            }
            isEven = !isEven;
            if (bit < 4) {
                bit++;
            } else {
                // 5 bits accumulated: emit one base-32 character.
                geohash.append(BASE_32[ch]);
                bit = 0;
                ch = 0;
            }
        }
        return geohash.toString();
    }

    /**
     * Encodes a cell's grid coordinates (x, y) into its base-32 character by
     * interleaving the low three bits of x with the low two bits of y.
     */
    private static final char encode(int x, int y) {
        return BASE_32[((x & 1) + ((y & 1) * 2) + ((x & 2) * 2) + ((y & 2) * 4) + ((x & 4) * 4)) % 32];
    }

    /**
     * Calculate all neighbors of a given geohash cell.
     *
     * @param geohash Geohash of the defined cell
     * @return geohashes of all neighbor cells
     */
    public static Collection<? extends CharSequence> neighbors(String geohash) {
        return addNeighbors(geohash, geohash.length(), new ArrayList<CharSequence>(8));
    }

    /**
     * Create an {@link Iterable} which allows to iterate over the cells that
     * contain a given geohash
     *
     * @param geohash Geohash of a cell
     *
     * @return {@link Iterable} of path
     */
    public static Iterable<String> path(final String geohash) {
        return new Iterable<String>() {
            @Override
            public Iterator<String> iterator() {
                return new GeohashPathIterator(geohash);
            }
        };
    }

    /**
     * Calculate the geohash of a neighbor of a geohash
     *
     * @param geohash the geohash of a cell
     * @param level level of the geohash
     * @param dx delta of the first grid coordinate (must be -1, 0 or +1)
     * @param dy delta of the second grid coordinate (must be -1, 0 or +1)
     * @return geohash of the defined cell, or null when the neighbor lies
     *         beyond the north/south edge of the grid
     */
    private final static String neighbor(String geohash, int level, int dx, int dy) {
        int cell = decode(geohash.charAt(level - 1));
        // Decoding the Geohash bit pattern to determine grid coordinates
        int x0 = cell & 1; // first bit of x
        int y0 = cell & 2; // first bit of y
        int x1 = cell & 4; // second bit of x
        int y1 = cell & 8; // second bit of y
        int x2 = cell & 16; // third bit of x
        // combine the bitpattern to grid coordinates.
        // note that the semantics of x and y are swapping
        // on each level
        int x = x0 + (x1 / 2) + (x2 / 4);
        int y = (y0 / 2) + (y1 / 4);
        if (level == 1) {
            // Root cells at north (namely "bcfguvyz") or at
            // south (namely "0145hjnp") do not have neighbors
            // in north/south direction
            if ((dy < 0 && y == 0) || (dy > 0 && y == 3)) {
                return null;
            } else {
                return Character.toString(encode(x + dx, y + dy));
            }
        } else {
            // define grid coordinates for next level
            final int nx = ((level % 2) == 1) ? (x + dx) : (x + dy);
            final int ny = ((level % 2) == 1) ? (y + dy) : (y + dx);
            // define grid limits for current level
            final int xLimit = ((level % 2) == 0) ? 7 : 3;
            final int yLimit = ((level % 2) == 0) ? 3 : 7;
            // if the defined neighbor has the same parent as the current cell
            // encode the cell directly. Otherwise find the cell next to this
            // cell recursively. Since encoding wraps around within a cell
            // it can be encoded here.
            if (nx >= 0 && nx <= xLimit && ny >= 0 && ny <= yLimit) {
                return geohash.substring(0, level - 1) + encode(nx, ny);
            } else {
                String neighbor = neighbor(geohash, level - 1, dx, dy);
                if(neighbor != null) {
                    return neighbor + encode(nx, ny);
                } else {
                    return null;
                }
            }
        }
    }

    /**
     * Add all geohashes of the cells next to a given geohash to a list.
     *
     * @param geohash Geohash of a specified cell
     * @param neighbors list to add the neighbors to
     * @return the given list
     */
    public static final <E extends Collection<? super String>> E addNeighbors(String geohash, E neighbors) {
        return addNeighbors(geohash, geohash.length(), neighbors);
    }

    /**
     * Add all geohashes of the cells next to a given geohash to a list.
     *
     * <p>NOTE(review): {@code neighbor(...)} can return null for cells at the
     * north/south edge of the grid, and such nulls are added to the
     * collection unchanged — confirm that callers tolerate null elements.</p>
     *
     * @param geohash Geohash of a specified cell
     * @param length level of the given geohash
     * @param neighbors list to add the neighbors to
     * @return the given list
     */
    public static final <E extends Collection<? super String>> E addNeighbors(String geohash, int length, E neighbors) {
        String south = neighbor(geohash, length, 0, -1);
        String north = neighbor(geohash, length, 0, +1);
        if (north != null) {
            neighbors.add(neighbor(north, length, -1, 0));
            neighbors.add(north);
            neighbors.add(neighbor(north, length, +1, 0));
        }
        neighbors.add(neighbor(geohash, length, -1, 0));
        neighbors.add(neighbor(geohash, length, +1, 0));
        if (south != null) {
            neighbors.add(neighbor(south, length, -1, 0));
            neighbors.add(south);
            neighbors.add(neighbor(south, length, +1, 0));
        }
        return neighbors;
    }

    /**
     * Maps a single geohash character to its 5-bit cell value (the inverse of
     * {@link #BASE_32}).
     *
     * @throws ElasticsearchIllegalArgumentException if the character is not a
     *         valid geohash character
     */
    private static final int decode(char geo) {
        switch (geo) {
            case '0':
                return 0;
            case '1':
                return 1;
            case '2':
                return 2;
            case '3':
                return 3;
            case '4':
                return 4;
            case '5':
                return 5;
            case '6':
                return 6;
            case '7':
                return 7;
            case '8':
                return 8;
            case '9':
                return 9;
            case 'b':
                return 10;
            case 'c':
                return 11;
            case 'd':
                return 12;
            case 'e':
                return 13;
            case 'f':
                return 14;
            case 'g':
                return 15;
            case 'h':
                return 16;
            case 'j':
                return 17;
            case 'k':
                return 18;
            case 'm':
                return 19;
            case 'n':
                return 20;
            case 'p':
                return 21;
            case 'q':
                return 22;
            case 'r':
                return 23;
            case 's':
                return 24;
            case 't':
                return 25;
            case 'u':
                return 26;
            case 'v':
                return 27;
            case 'w':
                return 28;
            case 'x':
                return 29;
            case 'y':
                return 30;
            case 'z':
                return 31;
            default:
                throw new ElasticsearchIllegalArgumentException("the character '" + geo + "' is not a valid geohash character");
        }
    }

    /**
     * Decodes the given geohash
     *
     * @param geohash Geohash to decode
     * @return {@link GeoPoint} at the center of cell, given by the geohash
     */
    public static GeoPoint decode(String geohash) {
        return decode(geohash, new GeoPoint());
    }

    /**
     * Decodes the given geohash into a latitude and longitude
     *
     * @param geohash Geohash to decode
     * @return the given {@link GeoPoint} reset to the center of
     *         cell, given by the geohash
     */
    public static GeoPoint decode(String geohash, GeoPoint ret) {
        double[] interval = decodeCell(geohash);
        // interval = {latMin, latMax, lonMin, lonMax}; take the cell center.
        return ret.reset((interval[0] + interval[1]) / 2D, (interval[2] + interval[3]) / 2D);
    }

    /**
     * Decodes the given geohash into a geohash cell defined by the points northWest and southEast
     *
     * @param geohash Geohash to decode
     * @param northWest the point north/west of the cell
     * @param southEast the point south/east of the cell
     */
    public static void decodeCell(String geohash, GeoPoint northWest, GeoPoint southEast) {
        double[] interval = decodeCell(geohash);
        northWest.reset(interval[1], interval[2]);
        southEast.reset(interval[0], interval[3]);
    }

    /**
     * Decodes a geohash into its cell bounds.
     *
     * @return {latMin, latMax, lonMin, lonMax} of the cell
     */
    private static double[] decodeCell(String geohash) {
        double[] interval = {-90.0, 90.0, -180.0, 180.0};
        boolean isEven = true;
        for (int i = 0; i < geohash.length(); i++) {
            final int cd = decode(geohash.charAt(i));
            // Replay the encoding bisection: even bits narrow the longitude
            // interval, odd bits narrow the latitude interval.
            for (int mask : BITS) {
                if (isEven) {
                    if ((cd & mask) != 0) {
                        interval[2] = (interval[2] + interval[3]) / 2D;
                    } else {
                        interval[3] = (interval[2] + interval[3]) / 2D;
                    }
                } else {
                    if ((cd & mask) != 0) {
                        interval[0] = (interval[0] + interval[1]) / 2D;
                    } else {
                        interval[1] = (interval[0] + interval[1]) / 2D;
                    }
                }
                isEven = !isEven;
            }
        }
        return interval;
    }

    //========== long-based encodings for geohashes ========================================

    /**
     * Encodes latitude and longitude information into a single long with variable precision.
     * Up to 12 levels of precision are supported which should offer sub-metre resolution.
     *
     * @param latitude latitude to encode
     * @param longitude longitude to encode
     * @param precision The required precision between 1 and 12
     * @return A single long where 4 bits are used for holding the precision and the remaining
     *         60 bits are reserved for 5 bit cell identifiers giving up to 12 layers.
     */
    public static long encodeAsLong(double latitude, double longitude, int precision) {
        if((precision>12)||(precision<1))
        {
            throw new ElasticsearchIllegalArgumentException("Illegal precision length of "+precision+
                    ". Long-based geohashes only support precisions between 1 and 12");
        }
        // Same bisection as the String encoder, but the 5-bit cells are packed
        // into a long instead of appended as base-32 characters.
        double latInterval0 = -90.0;
        double latInterval1 = 90.0;
        double lngInterval0 = -180.0;
        double lngInterval1 = 180.0;
        long geohash = 0l;
        boolean isEven = true;
        int bit = 0;
        int ch = 0;
        int geohashLength=0;
        while (geohashLength < precision) {
            double mid = 0.0;
            if (isEven) {
                mid = (lngInterval0 + lngInterval1) / 2D;
                if (longitude > mid) {
                    ch |= BITS[bit];
                    lngInterval0 = mid;
                } else {
                    lngInterval1 = mid;
                }
            } else {
                mid = (latInterval0 + latInterval1) / 2D;
                if (latitude > mid) {
                    ch |= BITS[bit];
                    latInterval0 = mid;
                } else {
                    latInterval1 = mid;
                }
            }
            isEven = !isEven;
            if (bit < 4) {
                bit++;
            } else {
                // Shift the accumulated 5-bit cell into the long, one level
                // at a time.
                geohashLength++;
                geohash|=ch;
                if(geohashLength<precision){
                    geohash<<=5;
                }
                bit = 0;
                ch = 0;
            }
        }
        // The lowest 4 bits store the precision itself.
        geohash<<=4;
        geohash|=precision;
        return geohash;
    }

    /**
     * Formats a geohash held as a long as a more conventional
     * String-based geohash
     * @param geohashAsLong a geohash encoded as a long
     * @return A traditional base32-based String representation of a geohash
     */
    public static String toString(long geohashAsLong)
    {
        // Lowest 4 bits carry the precision; each following 5-bit group is
        // one base-32 character, least significant group last.
        int precision = (int) (geohashAsLong&15);
        char[] chars = new char[precision];
        geohashAsLong >>= 4;
        for (int i = precision - 1; i >= 0 ; i--) {
            chars[i] = BASE_32[(int) (geohashAsLong & 31)];
            geohashAsLong >>= 5;
        }
        return new String(chars);
    }

    /** Decodes a long-format geohash to the {@link GeoPoint} at its cell center. */
    public static GeoPoint decode(long geohash) {
        GeoPoint point = new GeoPoint();
        decode(geohash, point);
        return point;
    }

    /**
     * Decodes the given long-format geohash into a latitude and longitude
     *
     * @param geohash long format Geohash to decode
     * @param ret The Geopoint into which the latitude and longitude will be stored
     */
    public static void decode(long geohash, GeoPoint ret) {
        double[] interval = decodeCell(geohash);
        ret.reset((interval[0] + interval[1]) / 2D, (interval[2] + interval[3]) / 2D);
    }

    /**
     * Decodes a long-format geohash into its cell bounds.
     *
     * @return {latMin, latMax, lonMin, lonMax} of the cell
     */
    private static double[] decodeCell(long geohash) {
        double[] interval = {-90.0, 90.0, -180.0, 180.0};
        boolean isEven = true;
        // Unpack: lowest 4 bits are the precision, then 5 bits per level.
        int precision= (int) (geohash&15);
        geohash>>=4;
        int[]cds=new int[precision];
        for (int i = precision-1; i >=0 ; i--) {
            cds[i] = (int) (geohash&31);
            geohash>>=5;
        }
        for (int i = 0; i <cds.length ; i++) {
            final int cd = cds[i];
            for (int mask : BITS) {
                if (isEven) {
                    if ((cd & mask) != 0) {
                        interval[2] = (interval[2] + interval[3]) / 2D;
                    } else {
                        interval[3] = (interval[2] + interval[3]) / 2D;
                    }
                } else {
                    if ((cd & mask) != 0) {
                        interval[0] = (interval[0] + interval[1]) / 2D;
                    } else {
                        interval[1] = (interval[0] + interval[1]) / 2D;
                    }
                }
                isEven = !isEven;
            }
        }
        return interval;
    }
}
| |
/*
* ServeStream: A HTTP stream browser/player for Android
* Copyright 2012 William Seemann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sourceforge.servestream.bean;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
//import net.sourceforge.servestream.dbutils.StreamDatabase;
import android.content.ContentValues;
import android.net.Uri;
/**
 * Value object describing a single stream URI, backed by a database row.
 * Equality is based on the database id when present, otherwise on the
 * identifying URI fields.
 */
public class UriBean {
    public static final String BEAN_NAME = "uri";
    /* Database fields */
    // id is the database row id; -1 means the bean has not been persisted yet.
    private long id = -1;
    private String nickname = null;
    private String username = null;
    private String password = null;
    private String hostname = null;
    // -2 is the sentinel for "no port set" (see getUri()/getScrubbedUri()).
    private int port = -2;
    private String path = null;
    private String query = null;
    private String reference = null;
    private String protocol = null;
    private long lastConnect = -1;
    private String contentType = null;
    public UriBean() {
    }
    /** @return the bean type name ("uri") */
    public String getBeanName() {
        return BEAN_NAME;
    }
    // Plain accessors for the persisted fields.
    public void setId(long id) {
        this.id = id;
    }
    public long getId() {
        return id;
    }
    public void setNickname(String nickname) {
        this.nickname = nickname;
    }
    public String getNickname() {
        return nickname;
    }
    public void setUsername(String username) {
        this.username = username;
    }
    public String getUsername() {
        return username;
    }
    public void setPassword(String password) {
        this.password = password;
    }
    public String getPassword() {
        return password;
    }
    public void setHostname(String hostname) {
        this.hostname = hostname;
    }
    public String getHostname() {
        return hostname;
    }
    public void setPort(int port) {
        this.port = port;
    }
    public int getPort() {
        return port;
    }
    public void setPath(String path) {
        this.path = path;
    }
    public String getPath() {
        return path;
    }
    public void setQuery(String query) {
        this.query = query;
    }
    public String getQuery() {
        return query;
    }
    public void setReference(String reference) {
        this.reference = reference;
    }
    public String getReference() {
        return reference;
    }
    public void setProtocol(String protocol) {
        this.protocol = protocol;
    }
    public String getProtocol() {
        return protocol;
    }
    public void setLastConnect(long lastConnect) {
        this.lastConnect = lastConnect;
    }
    public long getLastConnect() {
        return lastConnect;
    }
    public void setContentType(String contentType) {
        this.contentType = contentType;
    }
    public String getContentType() {
        return contentType;
    }
    /**
     * Builds a human-readable "user@host[:port]" description.
     *
     * <p>NOTE(review): the port is hidden only when it equals 22, which looks
     * inherited from an SSH host bean — confirm this is intended for stream
     * URIs.</p>
     */
    public String getDescription() {
        String description = String.format("%s@%s", username, hostname);
        if (port != 22)
            description += String.format(":%d", port);
        return description;
    }
    /**
     * Returns the ContentValues for persisting this bean.
     *
     * <p>NOTE(review): all field mappings are currently commented out, so this
     * returns an empty ContentValues — confirm whether persistence via this
     * method is still expected to work.</p>
     */
    public ContentValues getValues() {
        ContentValues values = new ContentValues();
        //
        // values.put(StreamDatabase.FIELD_STREAM_NICKNAME, nickname);
        // values.put(StreamDatabase.FIELD_STREAM_PROTOCOL, protocol);
        // values.put(StreamDatabase.FIELD_STREAM_USERNAME, username);
        // values.put(StreamDatabase.FIELD_STREAM_PASSWORD, password);
        // values.put(StreamDatabase.FIELD_STREAM_HOSTNAME, hostname);
        // values.put(StreamDatabase.FIELD_STREAM_PORT, port);
        // values.put(StreamDatabase.FIELD_STREAM_PATH, path);
        // values.put(StreamDatabase.FIELD_STREAM_QUERY, query);
        // values.put(StreamDatabase.FIELD_STREAM_REFERENCE, reference);
        // values.put(StreamDatabase.FIELD_STREAM_LASTCONNECT, lastConnect);
        return values;
    }
    /**
     * Two beans are equal when both carry a database id and the ids match;
     * otherwise every identifying field (nickname, protocol, username,
     * password, hostname, port, path, query, reference) must be equal.
     */
    @Override
    public boolean equals(Object o) {
        if (o == null || !(o instanceof UriBean))
            return false;
        UriBean host = (UriBean)o;
        if (id != -1 && host.getId() != -1)
            return host.getId() == id;
        if (nickname == null) {
            if (host.getNickname() != null)
                return false;
        } else if (!nickname.equals(host.getNickname()))
            return false;
        if (protocol == null) {
            if (host.getProtocol() != null)
                return false;
        } else if (!protocol.equals(host.getProtocol()))
            return false;
        if (username == null) {
            if (host.getUsername() != null)
                return false;
        } else if (!username.equals(host.getUsername()))
            return false;
        if (password == null) {
            if (host.getPassword() != null)
                return false;
        } else if (!password.equals(host.getPassword()))
            return false;
        if (hostname == null) {
            if (host.getHostname() != null)
                return false;
        } else if (!hostname.equals(host.getHostname()))
            return false;
        if (port != host.getPort())
            return false;
        if (path == null) {
            if (host.getPath() != null)
                return false;
        } else if (!path.equals(host.getPath()))
            return false;
        if (query == null) {
            if (host.getQuery() != null)
                return false;
        } else if (!query.equals(host.getQuery()))
            return false;
        if (reference == null) {
            if (host.getReference() != null)
                return false;
        } else if (!reference.equals(host.getReference()))
            return false;
        return true;
    }
    /**
     * Consistent with {@link #equals(Object)}: persisted beans hash by their
     * database id, otherwise by the identifying fields.
     */
    @Override
    public int hashCode() {
        int hash = 7;
        if (id != -1)
            return (int)id;
        hash = 31 * hash + (null == nickname ? 0 : nickname.hashCode());
        hash = 31 * hash + (null == protocol ? 0 : protocol.hashCode());
        hash = 31 * hash + (null == username ? 0 : username.hashCode());
        hash = 31 * hash + (null == password ? 0 : password.hashCode());
        hash = 31 * hash + (null == hostname ? 0 : hostname.hashCode());
        hash = 31 * hash + port;
        hash = 31 * hash + (null == path ? 0 : path.hashCode());
        hash = 31 * hash + (null == query ? 0 : query.hashCode());
        hash = 31 * hash + (null == reference ? 0 : reference.hashCode());
        return hash;
    }
    /**
     * @return URI identifying this HostBean
     */
    public Uri getUri() {
        StringBuilder sb = new StringBuilder();
        sb.append(protocol)
                .append("://");
        // NOTE(review): the username is percent-encoded but the password is
        // not — confirm passwords with reserved characters round-trip.
        if (username != null && password != null) {
            sb.append(Uri.encode(username))
                    .append(":")
                    .append(password)
                    .append('@');
        }
        // NOTE(review): ':' is appended after the hostname even when no port
        // is set (port == -2), yielding e.g. "http://host:/path" — confirm
        // this is intended.
        if (hostname != null) {
            sb.append(hostname)
                    .append(':');
        }
        if (port != -2) {
            sb.append(port);
        }
        if (path != null) {
            sb.append(path);
        }
        if (query != null) {
            sb.append("?")
                    .append(query);
        }
        if (reference != null) {
            sb.append("#")
                    .append(reference);
        }
        return Uri.parse(sb.toString());
    }
    /**
     * @return URI identifying this HostBean, without user credentials
     */
    public Uri getScrubbedUri() {
        StringBuilder sb = new StringBuilder();
        sb.append(protocol)
                .append("://");
        if (hostname != null) {
            sb.append(hostname)
                    .append(':');
        }
        if (port != -2) {
            sb.append(port);
        }
        if (path != null) {
            sb.append(path);
        }
        if (query != null) {
            sb.append("?")
                    .append(query);
        }
        if (reference != null) {
            sb.append("#")
                    .append(reference);
        }
        return Uri.parse(sb.toString());
    }
    /**
     * @return URL identifying this HostBean, without user credentials
     */
    public URL getScrubbedURL() {
        URI encodedUri = null;
        Uri uri = getScrubbedUri();
        try {
            encodedUri = new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), uri.getPath(), uri.getQuery(), uri.getFragment());
        } catch (URISyntaxException e) {
            // NOTE(review): the exception is swallowed, leaving encodedUri
            // null, so the toURL() call below throws NullPointerException —
            // confirm whether returning null is the intended behavior.
        }
        URL url = null;
        try {
            url = encodedUri.toURL();
        } catch (MalformedURLException e) {
            e.printStackTrace();
        }
        return url;
    }
    /**
     * @return URL identifying this HostBean, including credentials
     */
    public URL getURL() {
        URI encodedUri = null;
        Uri uri = getUri();
        try {
            encodedUri = new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), uri.getPath(), uri.getQuery(), uri.getFragment());
        } catch (URISyntaxException e) {
            // NOTE(review): same swallowed-exception/NPE hazard as in
            // getScrubbedURL() above.
        }
        URL url = null;
        try {
            url = encodedUri.toURL();
        } catch (MalformedURLException e) {
            e.printStackTrace();
        }
        return url;
    }
}
| |
/*
* Copyright IBM Corp. 2012
*/
package org.rstl;
public class StatementFactory {
int identifier;
public StatementFactory() {
identifier = 0;
}
public int getStatementId() {
int ret = identifier;
identifier ++;
return ret;
}
private Statement create(StatementType type, int line, String... args) {
switch (type) {
case extendsstatement:
case layoutstatement:
String[] params = args[0].split("\\^", -2);
String ident = params[0];
int startIndex = 0;
int stopIndex = 0;
if (params.length > 1) {
try {
startIndex = Integer.parseInt(params[1]);
stopIndex = Integer.parseInt(params[2]);
} catch (NumberFormatException nfe) {
nfe.printStackTrace();
}
}
return new GenericStatementImpl(type, ident, line, startIndex, stopIndex);
case includestatement:
case includeoncestatement:
case rgroupendstatement:
return new GenericStatementImpl(type, args[0], line);
case forstatement:
return new ForLoopImpl(args[0], args[1], line, this);
case blockstatement:
return new BlockImpl(args[0],line, this);
default:
if (args.length < 1) {
throw new IllegalArgumentException(
"Need atleast one specifier for a custom statement");
}
return new GenericStatementImpl(type, args[0], line);
}
}
public Statement createExtends(String encodedIdentText, int line) {
return create(StatementType.extendsstatement, line, encodedIdentText);
}
public Statement createLayout(String encodedIdentText, int line) {
return create(StatementType.layoutstatement, line, encodedIdentText);
}
public BlockImpl createBlock(String name) {
return createBlock(name, 0);
}
public BlockImpl createBlock(String name, int line) {
return new BlockImpl(name, line, this);
}
public Chunk createChunk(String value) {
return createChunk(value, 0);
}
public Chunk createChunk(String value, int line) {
return new Chunk("" + getStatementId(), TemplateUtil.literalize(value), value, line);
}
public VariableImpl createVariable(String name) {
return createVariable(name, 0);
}
public VariableImpl createVariable(String name, int line) {
return new VariableImpl(name, line);
}
public GenericStatementImpl createSuperBlockStatement(String name) {
return createSuperBlockStatement(name, 0);
}
public GenericStatementImpl createSuperBlockStatement(String name, int line) {
return (GenericStatementImpl) create(StatementType.superblockstatement, line, name);
}
public GenericStatementImpl createSuperRGroupStatement(String name) {
return createSuperRGroupStatement(name, 0);
}
public GenericStatementImpl createSuperRGroupStatement(String name, int line) {
return (GenericStatementImpl) create(StatementType.superrgroupstatement, line, name);
}
public GenericStatementImpl createRGroupTerminator(String name) {
return createRGroupTerminator(name, 0);
}
public GenericStatementImpl createRGroupTerminator(String name, int line) {
return (GenericStatementImpl) create(StatementType.rgroupendstatement, line, name);
}
public GenericStatementImpl createPreconditon(String name) {
return createPrecondition(name, 0);
}
public GenericStatementImpl createPrecondition(String name, int line) {
return (GenericStatementImpl) create(StatementType.preconditionstatement, line, name);
}
public GenericStatementImpl createInclude(String name) {
return createInclude(name, 0);
}
public GenericStatementImpl createInclude(String name, int line) {
return (GenericStatementImpl) create(StatementType.includestatement, line, name);
}
public GenericStatementImpl createIncludeOnce(String name) {
return createIncludeOnce(name, 0);
}
public GenericStatementImpl createIncludeOnce(String name, int line) {
return (GenericStatementImpl) create(StatementType.includeoncestatement, line, name);
}
public Statement createCustom(String name, int line) {
return create(StatementType.customstatement, line, name);
}
/**
* Create a for loop with the specified string
* @param forloop '^' delimited arguments, the first mandatory argument is a key, followed by an optional value
* the third argument is a collection, followed by an optional argument "reversed" to mean iterate through the
* collection in reverse order.
* @return a ForLoop statement
*/
public ForLoopImpl createForLoop(String forloop) {
return createForLoop(forloop, 0);
}
/**
* Create a for loop with the specified string
* @param forloop '^' delimited arguments, the first mandatory argument is a key, followed by an optional value
* the third argument is a collection, followed by an optional argument "reversed" to mean iterate through the
* collection in reverse order.
* @param line the line number where the forloop was declared
* @return a ForLoop statement
*/
public ForLoopImpl createForLoop(String forloop, int line) {
String[] args = forloop.split("\\^", -2);
String key = args[0];
String value = (args[1].isEmpty()) ? null : args[1];
String collection = args[2];
String rev = args[3];
boolean reversed = (null != rev && "reversed".equalsIgnoreCase(rev)) ? true
: false;
return new ForLoopImpl(key, value, collection, reversed, line, this);
}
public ResourceGroup createResourceGroup(String argument) {
return createResourceGroup(argument, 0);
}
public ResourceGroup createResourceGroup(String argument, int line) {
String[] args = argument.split("\\^", -2);
String ident = args[0];
int startIndex = 0;
if (args.length > 1) {
try {
startIndex = Integer.parseInt(args[1]);
} catch (NumberFormatException nfe) {
nfe.printStackTrace();
}
}
return new ResourceGroup(ident, line, startIndex, this);
}
public ResourceImpl createResourceJson(String argument) {
return createResourceJson(argument, 0);
}
/**
 * Create a JSON resource from a '^' delimited argument string.
 *
 * @param argument either a bare resource name, or a multi-field form
 * (assumes at least three fields in that case — TODO confirm against callers)
 * @param line the line number where the resource was declared
 * @return a json Resource statement
 */
public ResourceImpl createResourceJson(String argument, int line) {
    String[] parts = argument.split("\\^", -2);
    // A single field means only the resource identifier was supplied.
    return (parts.length == 1)
            ? new ResourceImpl(parts[0], "json", line)
            : new ResourceImpl(parts[0], parts[1], parts[2], null, "json", line);
}
/**
 * Create an XHTML resource from the given argument string, using 0 as the
 * declaration line number.
 *
 * @param argument '^' delimited argument string
 * @return an xhtml Resource statement
 */
public ResourceImpl createResourceXhtml(String argument) {
    final int unknownLine = 0;
    return createResourceXhtml(argument, unknownLine);
}
/**
 * Create an XHTML resource from a '^' delimited argument string.
 *
 * @param argument either a bare resource name, or a multi-field form
 * (assumes at least four fields in that case — TODO confirm against callers)
 * @param line the line number where the resource was declared
 * @return an xhtml Resource statement
 */
public ResourceImpl createResourceXhtml(String argument, int line) {
    String[] parts = argument.split("\\^", -2);
    // A single field means only the resource identifier was supplied.
    if (parts.length != 1) {
        return new ResourceImpl(parts[0], parts[1], parts[2], parts[3], "xhtml", line);
    }
    return new ResourceImpl(parts[0], "xhtml", line);
}
/**
 * Create a conditional statement for the given expression.
 *
 * @param expr the conditional expression
 * @param line the line number where the conditional was declared
 * @return a Conditional statement
 */
public ConditionalImpl createConditional(String expr, int line) {
    final ConditionalImpl conditional = new ConditionalImpl(expr, line, this);
    return conditional;
}
}
| |
/*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.ui;
import com.carrotsearch.hppc.IntIndexedContainer;
import com.graphhopper.GraphHopper;
import com.graphhopper.GraphHopperConfig;
import com.graphhopper.coll.GHBitSet;
import com.graphhopper.coll.GHTBitSet;
import com.graphhopper.config.CHProfile;
import com.graphhopper.config.LMProfile;
import com.graphhopper.config.Profile;
import com.graphhopper.reader.osm.GraphHopperOSM;
import com.graphhopper.routing.*;
import com.graphhopper.routing.ev.BooleanEncodedValue;
import com.graphhopper.routing.ev.DecimalEncodedValue;
import com.graphhopper.routing.lm.PrepareLandmarks;
import com.graphhopper.routing.querygraph.QueryGraph;
import com.graphhopper.routing.querygraph.QueryRoutingCHGraph;
import com.graphhopper.routing.util.AllEdgesIterator;
import com.graphhopper.routing.util.EdgeFilter;
import com.graphhopper.routing.util.FlagEncoder;
import com.graphhopper.routing.weighting.Weighting;
import com.graphhopper.storage.CHConfig;
import com.graphhopper.storage.Graph;
import com.graphhopper.storage.NodeAccess;
import com.graphhopper.storage.RoutingCHGraph;
import com.graphhopper.storage.index.LocationIndexTree;
import com.graphhopper.storage.index.Snap;
import com.graphhopper.util.FetchMode;
import com.graphhopper.util.PMap;
import com.graphhopper.util.Parameters.Algorithms;
import com.graphhopper.util.PointList;
import com.graphhopper.util.StopWatch;
import com.graphhopper.util.shapes.BBox;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseWheelEvent;
import java.awt.event.MouseWheelListener;
import java.util.Arrays;
import java.util.Random;
/**
* A rough graphical user interface for visualizing the OSM graph. Mainly for debugging algorithms
* and spatial data structures. See e.g. this blog post:
* https://graphhopper.com/blog/2016/01/19/alternative-roads-to-rome/
* <p>
* Use the web module for a better/faster/userfriendly/... alternative!
* <p>
*
* @author Peter Karich
*/
public class MiniGraphUI {
    private final Logger logger = LoggerFactory.getLogger(getClass());
    // the routing graph being visualized and its coordinate accessor
    private final Graph graph;
    private final NodeAccess na;
    // layer painting the currently calculated route on top of the roads
    private final MapLayer pathLayer;
    private final FlagEncoder encoder;
    private final DecimalEncodedValue avSpeedEnc;
    private final BooleanEncodedValue accessEnc;
    // true: route with contraction hierarchies; false: landmark-based A* (see createAlgo)
    private final boolean useCH;
    // for moving
    int currentPosX;
    int currentPosY;
    private Path path;
    private LocationIndexTree index;
    // "lat,lon" under the mouse cursor, rendered by the info panel
    private String latLon = "";
    private GraphicsWrapper mg;
    private JPanel infoPanel;
    private LayeredPanel mainPanel;
    // layer painting all graph edges, colored by average speed
    private MapLayer roadsLayer;
    // while dragging, paint only a random subset of edges to stay responsive
    private boolean fastPaint = false;
    // snapped start/end points of the last routing request
    private Snap fromRes;
    private Snap toRes;

    /**
     * Imports (or loads) the graph configured through the command line
     * arguments and opens the UI.
     */
    public static void main(String[] strs) {
        PMap args = PMap.read(strs);
        // defaults allow quick local experimentation without any arguments
        args.putObject("datareader.file", args.getString("datareader.file", "core/files/monaco.osm.gz"));
        args.putObject("graph.location", args.getString("graph.location", "tools/target/mini-graph-ui-gh"));
        args.putObject("graph.flag_encoders", args.getString("graph.flag_encoders", "car"));
        GraphHopperConfig ghConfig = new GraphHopperConfig(args);
        ghConfig.setProfiles(Arrays.asList(
                new Profile("profile")
                        .setVehicle("car")
                        .setWeighting("fastest")
        ));
        ghConfig.setCHProfiles(Arrays.asList(
                new CHProfile("profile")
        ));
        ghConfig.setLMProfiles(Arrays.asList(
                new LMProfile("profile")
        ));
        GraphHopper hopper = new GraphHopperOSM().init(ghConfig).importOrLoad();
        boolean debug = args.getBool("minigraphui.debug", false);
        boolean useCH = args.getBool("minigraphui.useCH", false);
        new MiniGraphUI(hopper, debug, useCH).visualize();
    }

    /**
     * Builds the UI layers (info panel, roads layer, path layer) on top of the
     * given hopper's graph. Routing endpoints are set later via mouse clicks.
     *
     * @param hopper an imported or loaded GraphHopper instance
     * @param debug  if true, Swing double buffering is disabled to watch drawing live
     * @param useCH  if true, routing uses the prepared CH graph
     */
    public MiniGraphUI(GraphHopper hopper, boolean debug, boolean useCH) {
        this.graph = hopper.getGraphHopperStorage();
        this.na = graph.getNodeAccess();
        // uses the first configured flag encoder only
        encoder = hopper.getEncodingManager().fetchEdgeEncoders().get(0);
        avSpeedEnc = encoder.getAverageSpeedEnc();
        accessEnc = encoder.getAccessEnc();
        this.useCH = useCH;
        logger.info("locations:" + graph.getNodes() + ", debug:" + debug);
        mg = new GraphicsWrapper(graph);
        // prepare node quadtree to 'enter' the graph. create a 313*313 grid => <3km
        // this.index = new DebugLocation2IDQuadtree(roadGraph, mg);
        this.index = (LocationIndexTree) hopper.getLocationIndex();
        // top strip showing cursor lat/lon, current scale and visible bounds
        infoPanel = new JPanel() {
            @Override
            protected void paintComponent(Graphics g) {
                g.setColor(Color.WHITE);
                Rectangle b = infoPanel.getBounds();
                g.fillRect(0, 0, b.width, b.height);
                g.setColor(Color.BLUE);
                g.drawString(latLon, 40, 20);
                g.drawString("scale:" + mg.getScaleX(), 40, 40);
                int w = mainPanel.getBounds().width;
                int h = mainPanel.getBounds().height;
                g.drawString(mg.setBounds(0, w, 0, h).toLessPrecisionString(), 40, 60);
            }
        };
        mainPanel = new LayeredPanel();
        // TODO make it correct with bitset-skipping too
        final GHBitSet bitset = new GHTBitSet(graph.getNodes());
        mainPanel.addLayer(roadsLayer = new DefaultMapLayer() {
            final Random rand = new Random();

            @Override
            public void paintComponent(final Graphics2D g2) {
                clearGraphics(g2);
                int locs = graph.getNodes(); // note: currently unused in this method
                Rectangle d = getBounds();
                BBox b = mg.setBounds(0, d.width, 0, d.height);
                if (fastPaint) {
                    // fixed seed so the same random edge subset is skipped each repaint
                    rand.setSeed(0);
                    bitset.clear();
                }
                // g2.setColor(Color.BLUE);
                // double fromLat = 42.56819, fromLon = 1.603231;
                // mg.plotText(g2, fromLat, fromLon, "from");
                // Snap from = index.findClosest(fromLat, fromLon, EdgeFilter.ALL_EDGES);
                // double toLat = 42.571034, toLon = 1.520662;
                // mg.plotText(g2, toLat, toLon, "to");
                // Snap to = index.findClosest(toLat, toLon, EdgeFilter.ALL_EDGES);
                //
                // g2.setColor(Color.RED.brighter().brighter());
                // path = prepare.createAlgo().calcPath(from, to);
                // System.out.println("now: " + path.toFlagEncodersAsString());
                // plotPath(path, g2, 1);
                g2.setColor(Color.black);
                Color[] speedColors = generateColors(15);
                AllEdgesIterator edge = graph.getAllEdges();
                while (edge.next()) {
                    // in fast-paint mode draw roughly 1 of 15 edges
                    if (fastPaint && rand.nextInt(30) > 1)
                        continue;
                    int nodeIndex = edge.getBaseNode();
                    double lat = na.getLatitude(nodeIndex);
                    double lon = na.getLongitude(nodeIndex);
                    int nodeId = edge.getAdjNode();
                    double lat2 = na.getLatitude(nodeId);
                    double lon2 = na.getLongitude(nodeId);
                    // mg.plotText(g2, lat, lon, "" + nodeIndex);
                    // skip edges with both endpoints outside the visible bounds
                    if (!b.contains(lat, lon) && !b.contains(lat2, lon2))
                        continue;
                    int sum = nodeIndex + nodeId;
                    if (fastPaint) {
                        if (bitset.contains(sum))
                            continue;
                        bitset.add(sum);
                    }
                    // mg.plotText(g2, lat * 0.9 + lat2 * 0.1, lon * 0.9 + lon2 * 0.1, iter.getName());
                    //mg.plotText(g2, lat * 0.9 + lat2 * 0.1, lon * 0.9 + lon2 * 0.1, "s:" + (int) encoder.getSpeed(iter.getFlags()));
                    // bucket the edge's average speed into a color
                    double speed = edge.get(avSpeedEnc);
                    Color color;
                    if (speed >= 120) {
                        // red
                        color = speedColors[12];
                    } else if (speed >= 100) {
                        color = speedColors[10];
                    } else if (speed >= 80) {
                        color = speedColors[8];
                    } else if (speed >= 60) {
                        color = speedColors[6];
                    } else if (speed >= 50) {
                        color = speedColors[5];
                    } else if (speed >= 40) {
                        color = speedColors[4];
                    } else if (speed >= 30) {
                        color = Color.GRAY;
                    } else {
                        color = Color.LIGHT_GRAY;
                    }
                    g2.setColor(color);
                    boolean fwd = edge.get(accessEnc);
                    boolean bwd = edge.getReverse(accessEnc);
                    float width = speed > 90 ? 1f : 0.8f;
                    PointList pl = edge.fetchWayGeometry(FetchMode.ALL);
                    for (int i = 1; i < pl.size(); i++) {
                        // one-way edges get an arrow, two-way edges a plain line
                        if (fwd && !bwd) {
                            mg.plotDirectedEdge(g2, pl.getLatitude(i - 1), pl.getLongitude(i - 1), pl.getLatitude(i), pl.getLongitude(i), width);
                        } else {
                            mg.plotEdge(g2, pl.getLatitude(i - 1), pl.getLongitude(i - 1), pl.getLatitude(i), pl.getLongitude(i), width);
                        }
                    }
                }
                // overlay the location index tiles as gray rectangles
                index.query(graph.getBounds(), new LocationIndexTree.Visitor() {
                    @Override
                    public boolean isTileInfo() {
                        return true;
                    }

                    @Override
                    public void onTile(BBox bbox, int depth) {
                        int width = Math.max(1, Math.min(4, 4 - depth));
                        g2.setColor(Color.GRAY);
                        mg.plotEdge(g2, bbox.minLat, bbox.minLon, bbox.minLat, bbox.maxLon, width);
                        mg.plotEdge(g2, bbox.minLat, bbox.maxLon, bbox.maxLat, bbox.maxLon, width);
                        mg.plotEdge(g2, bbox.maxLat, bbox.maxLon, bbox.maxLat, bbox.minLon, width);
                        mg.plotEdge(g2, bbox.maxLat, bbox.minLon, bbox.minLat, bbox.minLon, width);
                    }

                    @Override
                    public void onNode(int node) {
                        // mg.plotNode(g2, node, Color.BLUE);
                    }
                });
                // speed legend along the top edge of the panel
                g2.setColor(Color.WHITE);
                g2.fillRect(0, 0, 1000, 20);
                for (int i = 4; i < speedColors.length; i++) {
                    g2.setColor(speedColors[i]);
                    g2.drawString("" + (i * 10), i * 30 - 100, 10);
                }
                g2.setColor(Color.BLACK);
            }
        });
        mainPanel.addLayer(pathLayer = new DefaultMapLayer() {
            @Override
            public void paintComponent(final Graphics2D g2) {
                // nothing to draw until both endpoints were clicked
                if (fromRes == null || toRes == null)
                    return;
                makeTransparent(g2);
                QueryGraph qGraph = QueryGraph.create(graph, fromRes, toRes);
                RoutingAlgorithm algo = createAlgo(hopper);
                // DebugAlgo implementations paint their search space while running
                if (algo instanceof DebugAlgo) {
                    ((DebugAlgo) algo).setGraphics2D(g2);
                }
                StopWatch sw = new StopWatch().start();
                logger.info("start searching with " + algo + " from:" + fromRes + " to:" + toRes);
                // GHPoint qp = fromRes.getQueryPoint();
                // TIntHashSet set = index.findNetworkEntries(qp.lat, qp.lon, 1);
                // TIntIterator nodeIter = set.iterator();
                // DistanceCalc distCalc = new DistancePlaneProjection();
                // System.out.println("set:" + set.size());
                // while (nodeIter.hasNext())
                // {
                // int nodeId = nodeIter.next();
                // double lat = graph.getNodeAccess().getLat(nodeId);
                // double lon = graph.getNodeAccess().getLon(nodeId);
                // int dist = (int) Math.round(distCalc.calcDist(qp.lat, qp.lon, lat, lon));
                // mg.plotText(g2, lat, lon, nodeId + ": " + dist);
                // mg.plotNode(g2, nodeId, Color.red);
                // }
                Color red = Color.red.brighter();
                g2.setColor(red);
                mg.plotNode(g2, qGraph.getNodeAccess(), fromRes.getClosestNode(), red, 10, "");
                mg.plotNode(g2, qGraph.getNodeAccess(), toRes.getClosestNode(), red, 10, "");
                g2.setColor(Color.blue.brighter().brighter());
                path = algo.calcPath(fromRes.getClosestNode(), toRes.getClosestNode());
                sw.stop();
                // if directed edges
                if (!path.isFound()) {
                    logger.warn("path not found! direction not valid?");
                    return;
                }
                logger.info("found path in " + sw.getSeconds() + "s with nodes:"
                        + path.calcNodes().size() + ", millis: " + path.getTime()
                        + ", visited nodes:" + algo.getVisitedNodes());
                g2.setColor(red);
                plotPath(path, g2, 4);
            }
        });
        if (debug) {
            // disable double buffering for debugging drawing - nice! when do we need DebugGraphics then?
            RepaintManager repaintManager = RepaintManager.currentManager(mainPanel);
            repaintManager.setDoubleBufferingEnabled(false);
            mainPanel.setBuffering(false);
        }
    }

    /**
     * Creates the routing algorithm for the current from/to snaps: a
     * CH-based Dijkstra when {@code useCH} is set, otherwise a landmark
     * (A*) based algorithm wrapped in a debug-painting variant when possible.
     */
    private RoutingAlgorithm createAlgo(GraphHopper hopper) {
        Profile profile = hopper.getProfiles().iterator().next();
        if (useCH) {
            CHConfig chConfig = hopper.getCHPreparationHandler().getNodeBasedCHConfigs().get(0);
            Weighting weighting = chConfig.getWeighting();
            RoutingCHGraph chGraph = hopper.getGraphHopperStorage().getRoutingCHGraph(chConfig.getName());
            logger.info("CH algo, weighting: " + weighting);
            QueryGraph qGraph = QueryGraph.create(hopper.getGraphHopperStorage(), fromRes, toRes);
            QueryRoutingCHGraph queryRoutingCHGraph = new QueryRoutingCHGraph(chGraph, qGraph);
            return new CHDebugAlgo(queryRoutingCHGraph, mg);
        } else {
            Weighting weighting = hopper.createWeighting(profile, new PMap());
            final PrepareLandmarks preparation = hopper.getLMPreparationHandler().getPreparation(profile.getName());
            // wrap the prepared algorithm in a Debug* variant so the search
            // space can be painted; unknown algorithm types pass through unchanged
            RoutingAlgorithmFactory algoFactory = (g, opts) -> {
                RoutingAlgorithm algo = preparation.getRoutingAlgorithmFactory().createAlgo(g, opts);
                if (algo instanceof AStarBidirection) {
                    return new DebugAStarBi(g, opts.getWeighting(), opts.getTraversalMode(), mg).
                            setApproximation(((AStarBidirection) algo).getApproximation());
                } else if (algo instanceof AStar) {
                    return new DebugAStar(g, opts.getWeighting(), opts.getTraversalMode(), mg);
                } else if (algo instanceof DijkstraBidirectionRef) {
                    return new DebugDijkstraBidirection(g, opts.getWeighting(), opts.getTraversalMode(), mg);
                } else if (algo instanceof Dijkstra) {
                    return new DebugDijkstraSimple(g, opts.getWeighting(), opts.getTraversalMode(), mg);
                }
                return algo;
            };
            AlgorithmOptions algoOpts = new AlgorithmOptions(Algorithms.ASTAR_BI, weighting);
            logger.info("algoOpts:" + algoOpts + ", weighting: " + weighting);
            QueryGraph qGraph = QueryGraph.create(graph, fromRes, toRes);
            return algoFactory.createAlgo(qGraph, algoOpts);
        }
    }

    /**
     * CH Dijkstra that plots every settled node in yellow while searching.
     */
    private static class CHDebugAlgo extends DijkstraBidirectionCH implements DebugAlgo {
        private final GraphicsWrapper mg;
        private Graphics2D g2;

        public CHDebugAlgo(RoutingCHGraph graph, GraphicsWrapper mg) {
            super(graph);
            this.mg = mg;
        }

        @Override
        public void setGraphics2D(Graphics2D g2) {
            this.g2 = g2;
        }

        @Override
        public void updateBestPath(double edgeWeight, SPTEntry entry, int origEdgeId, int traversalId, boolean reverse) {
            // g2 is only set when this algo runs inside the path layer's paint
            if (g2 != null)
                mg.plotNode(g2, traversalId, Color.YELLOW, 6);
            super.updateBestPath(edgeWeight, entry, origEdgeId, traversalId, reverse);
        }
    }

    /**
     * Generates n colors evenly spread over the HSB hue circle.
     */
    public Color[] generateColors(int n) {
        Color[] cols = new Color[n];
        for (int i = 0; i < n; i++) {
            cols[i] = Color.getHSBColor((float) i / (float) n, 0.85f, 1.0f);
        }
        return cols;
    }

    // for debugging
    private Path calcPath(RoutingAlgorithm algo) {
        // int from = index.findID(50.042, 10.19);
        // int to = index.findID(50.049, 10.23);
        //
        //// System.out.println("path " + from + "->" + to);
        // return algo.calcPath(from, to);
        // System.out.println(GraphUtility.getNodeInfo(graph, 60139, DefaultEdgeFilter.allEdges(new CarFlagEncoder()).direction(false, true)));
        // System.out.println(((GraphStorage) graph).debug(202947, 10));
        // GraphUtility.printInfo(graph, 106511, 10);
        // hard-coded node ids for ad-hoc debugging only
        return algo.calcPath(162810, 35120);
    }

    /** Draws the node's id as text at the node's coordinates. */
    void plotNodeName(Graphics2D g2, int node) {
        double lat = na.getLatitude(node);
        double lon = na.getLongitude(node);
        mg.plotText(g2, lat, lon, "" + node);
    }

    /**
     * Draws the given path as a polyline of width w and returns it unchanged.
     */
    private Path plotPath(Path tmpPath, Graphics2D g2, int w) {
        if (!tmpPath.isFound()) {
            logger.info("nothing found " + w);
            return tmpPath;
        }
        double prevLat = Double.NaN;
        double prevLon = Double.NaN;
        // flip to true to label every node along the path
        boolean plotNodes = false;
        IntIndexedContainer nodes = tmpPath.calcNodes();
        if (plotNodes) {
            for (int i = 0; i < nodes.size(); i++) {
                plotNodeName(g2, nodes.get(i));
            }
        }
        PointList list = tmpPath.calcPoints();
        for (int i = 0; i < list.getSize(); i++) {
            double lat = list.getLatitude(i);
            double lon = list.getLongitude(i);
            // first point is plotted alone, the rest as connecting segments
            if (!Double.isNaN(prevLat)) {
                mg.plotEdge(g2, prevLat, prevLon, lat, lon, w);
            } else {
                mg.plot(g2, lat, lon, w);
            }
            prevLat = lat;
            prevLon = lon;
        }
        logger.info("dist:" + tmpPath.getDistance() + ", path points(" + list.getSize() + ")");
        return tmpPath;
    }

    /**
     * Builds and shows the frame on the event dispatch thread; blocks until
     * the UI is set up. Mouse interaction: first click sets the route start,
     * second click sets the destination and triggers routing; dragging pans,
     * the wheel zooms.
     */
    public void visualize() {
        try {
            SwingUtilities.invokeAndWait(new Runnable() {
                @Override
                public void run() {
                    int frameHeight = 800;
                    int frameWidth = 1200;
                    JFrame frame = new JFrame("GraphHopper UI - Small&Ugly ;)");
                    frame.setLayout(new BorderLayout());
                    frame.add(mainPanel, BorderLayout.CENTER);
                    frame.add(infoPanel, BorderLayout.NORTH);
                    infoPanel.setPreferredSize(new Dimension(300, 100));
                    // scale
                    mainPanel.addMouseWheelListener(new MouseWheelListener() {
                        @Override
                        public void mouseWheelMoved(MouseWheelEvent e) {
                            mg.scale(e.getX(), e.getY(), e.getWheelRotation() < 0);
                            repaintRoads();
                        }
                    });
                    // listener to investigate findID behavior
                    // MouseAdapter ml = new MouseAdapter() {
                    //
                    // @Override public void mouseClicked(MouseEvent e) {
                    // findIDLat = mg.getLat(e.getY());
                    // findIDLon = mg.getLon(e.getX());
                    // findIdLayer.repaint();
                    // mainPanel.repaint();
                    // }
                    //
                    // @Override public void mouseMoved(MouseEvent e) {
                    // updateLatLon(e);
                    // }
                    //
                    // @Override public void mousePressed(MouseEvent e) {
                    // updateLatLon(e);
                    // }
                    // };
                    MouseAdapter ml = new MouseAdapter() {
                        // for routing:
                        double fromLat, fromLon;
                        // toggles between "next click is start" and "next click is destination"
                        boolean fromDone = false;
                        boolean dragging = false;

                        @Override
                        public void mouseClicked(MouseEvent e) {
                            if (!fromDone) {
                                fromLat = mg.getLat(e.getY());
                                fromLon = mg.getLon(e.getX());
                            } else {
                                double toLat = mg.getLat(e.getY());
                                double toLon = mg.getLon(e.getX());
                                StopWatch sw = new StopWatch().start();
                                logger.info("start searching from " + fromLat + "," + fromLon
                                        + " to " + toLat + "," + toLon);
                                // get from and to node id
                                fromRes = index.findClosest(fromLat, fromLon, EdgeFilter.ALL_EDGES);
                                toRes = index.findClosest(toLat, toLon, EdgeFilter.ALL_EDGES);
                                logger.info("found ids " + fromRes + " -> " + toRes + " in " + sw.stop().getSeconds() + "s");
                                repaintPaths();
                            }
                            fromDone = !fromDone;
                        }

                        @Override
                        public void mouseDragged(MouseEvent e) {
                            dragging = true;
                            fastPaint = true;
                            update(e);
                            updateLatLon(e);
                        }

                        @Override
                        public void mouseReleased(MouseEvent e) {
                            if (dragging) {
                                // update only if mouse release comes from dragging! (at the moment equal to fastPaint)
                                dragging = false;
                                fastPaint = false;
                                update(e);
                            }
                        }

                        public void update(MouseEvent e) {
                            mg.setNewOffset(e.getX() - currentPosX, e.getY() - currentPosY);
                            repaintRoads();
                        }

                        @Override
                        public void mouseMoved(MouseEvent e) {
                            updateLatLon(e);
                        }

                        @Override
                        public void mousePressed(MouseEvent e) {
                            updateLatLon(e);
                        }
                    };
                    mainPanel.addMouseListener(ml);
                    mainPanel.addMouseMotionListener(ml);
                    // just for fun
                    // mainPanel.getInputMap().put(KeyStroke.getKeyStroke("DELETE"), "removedNodes");
                    // mainPanel.getActionMap().put("removedNodes", new AbstractAction() {
                    // @Override public void actionPerformed(ActionEvent e) {
                    // int counter = 0;
                    // for (CoordTrig<Long> coord : quadTreeNodes) {
                    // int ret = quadTree.remove(coord.lat, coord.lon);
                    // if (ret < 1) {
                    //// logger.info("cannot remove " + coord + " " + ret);
                    //// ret = quadTree.remove(coord.getLatitude(), coord.getLongitude());
                    // } else
                    // counter += ret;
                    // }
                    // logger.info("Removed " + counter + " of " + quadTreeNodes.size() + " nodes");
                    // }
                    // });
                    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
                    frame.setSize(frameWidth + 10, frameHeight + 30);
                    frame.setVisible(true);
                }
            });
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    /** Records the cursor position and refreshes the lat/lon readout. */
    void updateLatLon(MouseEvent e) {
        latLon = mg.getLat(e.getY()) + "," + mg.getLon(e.getX());
        infoPanel.repaint();
        currentPosX = e.getX();
        currentPosY = e.getY();
    }

    /** Repaints the route layer only (roads stay cached). */
    void repaintPaths() {
        pathLayer.repaint();
        mainPanel.repaint();
    }

    /** Repaints everything; called after pan/zoom changes. */
    void repaintRoads() {
        // avoid threading as there should be no updated to scale or offset while painting
        // (would to lead to artifacts)
        StopWatch sw = new StopWatch().start();
        pathLayer.repaint();
        roadsLayer.repaint();
        mainPanel.repaint();
        logger.info("roads painting took " + sw.stop().getSeconds() + " sec");
    }
}
| |
/*
* Copyright (c) 2015, Absolute Performance, Inc. http://www.absolute-performance.com
* Copyright (c) 2016, Jack J. Woehr jwoehr@softwoehr.com
* SoftWoehr LLC PO Box 51, Golden CO 80402-0051 http://www.softwoehr.com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package ublu.command;
import ublu.util.ArgArray;
import ublu.util.Generics.SubsystemArrayList;
import ublu.util.Tuple;
import com.ibm.as400.access.AS400Exception;
import com.ibm.as400.access.AS400SecurityException;
import com.ibm.as400.access.ErrorCompletingRequestException;
import com.ibm.as400.access.ObjectAlreadyExistsException;
import com.ibm.as400.access.ObjectDoesNotExistException;
import com.ibm.as400.access.RequestNotSupportedException;
import com.ibm.as400.access.Subsystem;
import com.ibm.as400.jtopenstubs.javabeans.PropertyVetoException;
import java.io.IOException;
import java.sql.SQLException;
import java.util.logging.Level;
/**
* Manage subsystems
*
* @author jwoehr
*/
public class CmdSubSystem extends Command {
// Instance initializer: registers this command's name and its usage/help string.
{
    setNameAndDescription("subsys",
            "/3? [-as400 ~@as400] [--,-subsys ~@subsys] [-to datasink] [-subsyspath ~@{subsysIFSpath}] [-authoritystring ~@{authoritystring}] [-timelimit ~@{intval}] [-assignprivate ~@{sequencenumber} ~@{size} ~@{activityLevel} | -assignshared ~@{sequencenumber} ~@{poolname} | -change [description ~@{text} | displayfile ~@{path} | languagelibrary ~@{lib}} | maxactivejobs ~@${int}] | -create | -delete | -end | -endall | -new,-instance | -list | -query [description | activejobs | displayfilepath | languagelibrary | library | maxactivejobs | monitorjob | name | objectdescription | path | pool | pools ~@{sequencenumber} | status | system] | -refresh | -remove ~@{sequencenumber} | -start ] system userid password : manipulate subsystems");
}
/**
 * The operations this command can perform, selected by the dash-commands
 * parsed in {@code cmdSubsys}.
 */
enum OPS {
    ASSIGNPRIVATE, ASSIGNSHARED, CREATE, CHANGE, DELETE, END, ENDALL, EXISTS, INSTANCE, LIST, QUERY, REFRESH, REMOVE, START
}
/**
* Command to manage subsystems
*
* @param argArray passed-in arg array
* @return rest of arg array
*/
public ArgArray cmdSubsys(ArgArray argArray) {
Subsystem subsystem = null;
String subsystemIFSPath = null;
String authorityString = null;
String changeString = null;
String changeValue = null;
String queryString = null;
String poolName = null;
Integer poolSize = null;
Integer activityLevel = null;
Integer timeLimit = null;
Integer sequenceNumber = null;
Tuple subsystemTuple = null;
OPS op = OPS.INSTANCE;
while (argArray.hasDashCommand()) {
String dashCommand = argArray.parseDashCommand();
switch (dashCommand) {
case "-as400":
setAs400fromTupleOrPop(argArray);
break;
case "-to":
setDataDestfromArgArray(argArray);
break;
case "--":
case "-subsys":
subsystemTuple = argArray.nextTupleOrPop();
break;
case "-authoritystring":
authorityString = argArray.nextMaybeQuotationTuplePopString().trim();
break;
case "-assignprivate":
op = OPS.ASSIGNPRIVATE;
sequenceNumber = argArray.nextIntMaybeQuotationTuplePopString();
poolSize = argArray.nextIntMaybeQuotationTuplePopString();
activityLevel = argArray.nextIntMaybeQuotationTuplePopString();
break;
case "-assignshared":
op = OPS.ASSIGNSHARED;
sequenceNumber = argArray.nextIntMaybeQuotationTuplePopString();
poolName = argArray.nextMaybeQuotationTuplePopString().trim();
break;
case "-change":
op = OPS.CHANGE;
changeString = argArray.next().toLowerCase().trim();
changeValue = argArray.nextMaybeQuotationTuplePopString().trim();
break;
case "-create":
op = OPS.CREATE;
break;
case "-delete":
op = OPS.DELETE;
break;
case "-end":
op = OPS.END;
break;
case "-endall":
op = OPS.ENDALL;
break;
case "-exists":
op = OPS.EXISTS;
break;
case "-new":
case "-instance":
op = OPS.INSTANCE;
break;
case "-list":
op = OPS.LIST;
break;
case "-query":
op = OPS.QUERY;
queryString = argArray.nextMaybeQuotationTuplePopString();
if (queryString.trim().toLowerCase().equals("ports")) {
sequenceNumber = argArray.nextIntMaybeQuotationTuplePopString();
}
break;
case "-refresh":
op = OPS.REFRESH;
break;
case "-remove":
op = OPS.REMOVE;
sequenceNumber = argArray.nextIntMaybeQuotationTuplePopString();
break;
case "-start":
op = OPS.START;
break;
case "-timelimit":
timeLimit = argArray.nextIntMaybeQuotationTuplePopString();
break;
case "-subsyspath":
subsystemIFSPath = argArray.nextMaybeQuotationTuplePopString().trim();
break;
default:
unknownDashCommand(dashCommand);
}
}
if (havingUnknownDashCommand()) {
setCommandResult(COMMANDRESULT.FAILURE);
} else {
if (subsystemTuple != null) {
Object o = subsystemTuple.getValue();
if (o instanceof Subsystem) {
subsystem = Subsystem.class.cast(o);
} else {
getLogger().log(Level.SEVERE, "Tuple does represent a Subsystem in {0}", new Object[]{getNameAndDescription()});
setCommandResult(COMMANDRESULT.FAILURE);
}
} else if (getAs400() == null) {
if (argArray.size() < 3) {
logArgArrayTooShortError(argArray);
setCommandResult(COMMANDRESULT.FAILURE);
} else {
try {
setAs400FromArgs(argArray);
} catch (PropertyVetoException ex) {
getLogger().log(Level.SEVERE, "Can't set AS400 from arguments", ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
}
}
switch (op) {
case INSTANCE:
if (subsystem == null) {
subsystem = getSubsystem(subsystemIFSPath);
}
if (subsystem != null) {
try {
put(subsystem);
} catch (SQLException | IOException | AS400SecurityException | ErrorCompletingRequestException | InterruptedException | ObjectDoesNotExistException | RequestNotSupportedException ex) {
getLogger().log(Level.SEVERE, "Error putting subsystem in " + getNameAndDescription(), ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
} else {
getLogger().log(Level.SEVERE, "No subsystem instance in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
break;
case LIST:
if (getAs400() != null) {
try {
put(new SubsystemArrayList(Subsystem.listAllSubsystems(getAs400())));
} catch (AS400SecurityException | ErrorCompletingRequestException | InterruptedException | IOException | ObjectDoesNotExistException | SQLException | RequestNotSupportedException ex) {
getLogger().log(Level.SEVERE, "Error putting subsystem list in " + getNameAndDescription(), ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
} else {
getLogger().log(Level.SEVERE, "No AS400 instance for listing subsystems in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
break;
case ASSIGNPRIVATE:
if (subsystem == null) {
subsystem = getSubsystem(subsystemIFSPath);
}
try {
if (subsystem != null) {
if (sequenceNumber != null) {
if (poolSize != null) {
if (activityLevel != null) {
subsystem.assignPool(sequenceNumber, poolSize, activityLevel);
} else {
getLogger().log(Level.SEVERE, "No activity level to assign private pool in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
} else {
getLogger().log(Level.SEVERE, "No pool size to assign private pool in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
} else {
getLogger().log(Level.SEVERE, "No sequence number to assign private pool in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
} else {
getLogger().log(Level.SEVERE, "No subsystem instance for changing subsystem in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
} catch (AS400SecurityException | ErrorCompletingRequestException | IOException | InterruptedException | ObjectDoesNotExistException ex) {
getLogger().log(Level.SEVERE, "Error assigning private pool to subsystem " + subsystem + inNameAndDescription(), ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
break;
case ASSIGNSHARED:
if (subsystem == null) {
subsystem = getSubsystem(subsystemIFSPath);
}
try {
if (subsystem != null) {
if (sequenceNumber != null) {
if (poolName != null) {
subsystem.assignPool(sequenceNumber, poolName);
} else {
getLogger().log(Level.SEVERE, "No pool name to assign shared pool in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
} else {
getLogger().log(Level.SEVERE, "No sequence number to assign shared pool in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
} else {
getLogger().log(Level.SEVERE, "No subsystem instance for changing subsystem in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
} catch (AS400SecurityException | ErrorCompletingRequestException | IOException | InterruptedException | ObjectDoesNotExistException ex) {
getLogger().log(Level.SEVERE, "Error assigning shared pool to subsystem " + subsystem + inNameAndDescription(), ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
break;
case CHANGE:
if (subsystem == null) {
subsystem = getSubsystem(subsystemIFSPath);
}
try {
if (subsystem != null) {
if (changeString != null) {
switch (changeString) {
case "description":
subsystem.changeDescriptionText(changeValue);
break;
case "displayfile":
subsystem.changeDisplayFilePath(changeValue);
break;
case "languagelibrary":
subsystem.changeLanguageLibrary(changeValue);
break;
case "maxactivejobs":
subsystem.changeMaximumActiveJobs(Integer.parseInt(changeValue));
}
} else {
getLogger().log(Level.SEVERE, "No change string to change attribute in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
} else {
getLogger().log(Level.SEVERE, "No subsystem instance for changing subsystem in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
} catch (AS400SecurityException | ErrorCompletingRequestException | IOException | InterruptedException | ObjectDoesNotExistException ex) {
getLogger().log(Level.SEVERE, "Error creating subsystem " + subsystem + inNameAndDescription(), ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
break;
case CREATE:
if (subsystem == null) {
subsystem = getSubsystem(subsystemIFSPath);
}
if (subsystem != null) {
try {
if (authorityString != null) {
subsystem.create(authorityString);
} else {
subsystem.create();
}
} catch (AS400SecurityException | ErrorCompletingRequestException | IOException | InterruptedException | ObjectAlreadyExistsException | ObjectDoesNotExistException ex) {
getLogger().log(Level.SEVERE, "Error creating subsystem " + subsystem + inNameAndDescription(), ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
} else {
getLogger().log(Level.SEVERE, "No subsystem instance for creating subsystem in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
break;
case DELETE: {
if (subsystem == null) {
subsystem = getSubsystem(subsystemIFSPath);
}
if (subsystem != null) {
try {
subsystem.delete();
} catch (AS400SecurityException | ErrorCompletingRequestException | InterruptedException | IOException ex) {
getLogger().log(Level.SEVERE, "Error deleting subsystem " + subsystem + inNameAndDescription(), ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
} else {
getLogger().log(Level.SEVERE, "No subsystem instance for deleting subsystem in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
}
break;
case END:
if (subsystem == null) {
subsystem = getSubsystem(subsystemIFSPath);
}
if (subsystem != null) {
try {
if (timeLimit == null) {
subsystem.endImmediately();
} else {
subsystem.end(timeLimit);
}
} catch (AS400SecurityException | ErrorCompletingRequestException | IOException | InterruptedException | ObjectDoesNotExistException ex) {
getLogger().log(Level.SEVERE, "Error ending subsystem in " + getNameAndDescription(), ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
} else {
getLogger().log(Level.SEVERE, "No subsystem instance for deleting subsystem in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
break;
case ENDALL:
if (getAs400() != null) {
try {
if (timeLimit != null) {
Subsystem.endAllSubsystems(getAs400(), timeLimit);
} else {
Subsystem.endAllSubsystemsImmediately(getAs400());
}
} catch (AS400SecurityException | ErrorCompletingRequestException | InterruptedException | IOException ex) {
getLogger().log(Level.SEVERE, "Error ending all subsystems in " + getNameAndDescription(), ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
} else {
getLogger().log(Level.SEVERE, "No AS400 instance for listing subsystems in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
break;
case EXISTS:
if (subsystem == null) {
subsystem = getSubsystem(subsystemIFSPath);
}
if (subsystem != null) {
try {
put(subsystem.exists());
} catch (SQLException | IOException | AS400SecurityException | ErrorCompletingRequestException | InterruptedException | ObjectDoesNotExistException | RequestNotSupportedException ex) {
getLogger().log(Level.SEVERE, "Error putting subsystem existence in " + getNameAndDescription(), ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
}
break;
case QUERY: {
if (subsystem == null) {
subsystem = getSubsystem(subsystemIFSPath);
}
if (subsystem != null) {
if (queryString == null) {
getLogger().log(Level.SEVERE, "Empty query string in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
} else {
try {
put(querySubSystem(subsystem, queryString, sequenceNumber));
} catch (SQLException | IOException | AS400SecurityException | ErrorCompletingRequestException | InterruptedException | ObjectDoesNotExistException | RequestNotSupportedException ex) {
getLogger().log(Level.SEVERE, "Error getting or putting subsystem monitor job in " + getNameAndDescription(), ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
}
} else {
getLogger().log(Level.SEVERE, "No subsystem instance for testing existence of subsystem in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
}
break;
case REMOVE:
if (subsystem == null) {
subsystem = getSubsystem(subsystemIFSPath);
}
if (subsystem != null) {
if (sequenceNumber == null) {
getLogger().log(Level.SEVERE, "No sequence number removing pool in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
} else {
try {
subsystem.removePool(sequenceNumber);
} catch (IOException | AS400SecurityException | ErrorCompletingRequestException | InterruptedException | ObjectDoesNotExistException ex) {
getLogger().log(Level.SEVERE, "Error removing pool in " + getNameAndDescription(), ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
}
} else {
getLogger().log(Level.SEVERE, "No subsystem instance for removing subsystem in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
break;
case REFRESH: {
if (subsystem == null) {
subsystem = getSubsystem(subsystemIFSPath);
}
if (subsystem != null) {
try {
subsystem.refresh();
} catch (AS400SecurityException | ErrorCompletingRequestException | InterruptedException | IOException | ObjectDoesNotExistException ex) {
getLogger().log(Level.SEVERE, "Error refreshing subsystem attributes in " + getNameAndDescription(), ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
} else {
getLogger().log(Level.SEVERE, "No subsystem instance for refreshing subsystem in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
}
break;
case START:
if (subsystem == null) {
subsystem = getSubsystem(subsystemIFSPath);
}
if (subsystem != null) {
try {
subsystem.start();
} catch (AS400SecurityException | ErrorCompletingRequestException | IOException | InterruptedException | ObjectDoesNotExistException ex) {
getLogger().log(Level.SEVERE, "Error starting subsystem in " + getNameAndDescription(), ex);
setCommandResult(COMMANDRESULT.FAILURE);
}
} else {
getLogger().log(Level.SEVERE, "No subsystem instance for starting subsystem in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
break;
}
}
return argArray;
}
private Subsystem getSubsystem(String subsystemIFSPath) {
Subsystem subsystem = null;
if (getAs400() != null) {
if (subsystemIFSPath == null) {
getLogger().log(Level.SEVERE, "No subsystem name nor subsystem tuple in {0}", new Object[]{getNameAndDescription()});
setCommandResult(COMMANDRESULT.FAILURE);
} else {
subsystem = new Subsystem(getAs400(), subsystemIFSPath);
}
}
return subsystem;
}
private Object querySubSystem(Subsystem subsys, String queryString, Integer sequenceNumber) throws AS400SecurityException, ErrorCompletingRequestException, AS400Exception, InterruptedException, IOException, ObjectDoesNotExistException {
Object result = null;
switch (queryString.toLowerCase()) {
case "description":
result = subsys.getDescriptionText();
break;
case "activejobs":
result = subsys.getCurrentActiveJobs();
break;
case "displayfilepath":
result = subsys.getDisplayFilePath();
break;
case "languagelibrary":
result = subsys.getLanguageLibrary();
break;
case "library":
result = subsys.getLibrary();
break;
case "maxactivejobs":
result = subsys.getMaximumActiveJobs();
break;
case "monitorjob":
result = subsys.getMonitorJob();
break;
case "name":
result = subsys.getName();
break;
case "objectdescription":
result = subsys.getObjectDescription();
break;
case "path":
result = subsys.getPath();
break;
case "pool":
result = subsys.getPool(sequenceNumber);
break;
case "pools":
result = subsys.getPools();
break;
case "status":
result = subsys.getStatus();
break;
case "system":
result = subsys.getSystem();
break;
default:
getLogger().log(Level.SEVERE, "Unknown query string in {0}", getNameAndDescription());
setCommandResult(COMMANDRESULT.FAILURE);
}
return result;
}
    /**
     * Command entry point: resets per-invocation state, then dispatches
     * the argument array to the subsystem command handler.
     *
     * @param args the argument array for this invocation
     * @return the remainder of the argument array after processing
     */
    @Override
    public ArgArray cmd(ArgArray args) {
        reinit();
        return cmdSubsys(args);
    }
    /**
     * Report the outcome of the most recent command invocation.
     *
     * @return the command result recorded by the last invocation
     */
    @Override
    public COMMANDRESULT getResult() {
        return getCommandResult();
    }
}
| |
package net.sf.jabref.model.groups;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Consumer;
/**
* Represents a node in a tree.
* <p>
* Usually, tree nodes have a value property which allows access to the value stored in the node.
* In contrast to this approach, the TreeNode<T> class is designed to be used as a base class which provides the
* tree traversing functionality via inheritance.
* <p>
* Example usage:
* private class BasicTreeNode extends TreeNode<BasicTreeNode> {
* public BasicTreeNode() {
* super(BasicTreeNode.class);
* }
* }
* <p>
* This class started out as a copy of javax.swing.tree.DefaultMutableTreeNode.
*
* @param <T> the type of the class
*/
// We use some explicit casts of the form "(T) this". The constructor ensures that this cast is valid.
@SuppressWarnings("unchecked") public abstract class TreeNode<T extends TreeNode<T>> {
/**
* This node's parent, or null if this node has no parent
*/
private T parent;
/**
* Array of children, may be empty if this node has no children (but never null)
*/
private final List<T> children;
/**
* Constructs a tree node without parent and no children.
*
* @param derivingClass class deriving from TreeNode<T>. It should always be "T.class".
* We need this parameter since it is hard to get this information by other means.
*/
public TreeNode(Class<T> derivingClass) {
parent = null;
children = new ArrayList<>();
if (!derivingClass.isInstance(this)) {
throw new UnsupportedOperationException("The class extending TreeNode<T> has to derive from T");
}
}
/**
* Get the path from the root node to this node.
* <p>
* The elements in the returned list represent the child index of each node in the path, starting at the root.
* If this node is the root node, the returned list has zero elements.
*
* @return a list of numbers which represent an indexed path from the root node to this node
*/
public List<Integer> getIndexedPathFromRoot() {
if (parent == null) {
return new ArrayList<>();
}
List<Integer> path = parent.getIndexedPathFromRoot();
path.add(getPositionInParent());
return path;
}
/**
* Get the descendant of this node as indicated by the indexedPath.
* <p>
* If the path could not be traversed completely (i.e. one of the child indices did not exist),
* an empty Optional will be returned.
*
* @param indexedPath sequence of child indices that describe a path from this node to one of its descendants.
* Be aware that if indexedPath was obtained by getIndexedPathFromRoot(), this node should
* usually be the root node.
* @return descendant found by evaluating indexedPath
*/
public Optional<T> getDescendant(List<Integer> indexedPath) {
T cursor = (T) this;
for (int index : indexedPath) {
Optional<T> child = cursor.getChildAt(index);
if (child.isPresent()) {
cursor = child.get();
} else {
return Optional.empty();
}
}
return Optional.of(cursor);
}
/**
* Get the child index of this node in its parent.
* <p>
* If this node is a root, then an UnsupportedOperationException is thrown.
* Use the isRoot method to check for this case.
*
* @return the child index of this node in its parent
*/
public int getPositionInParent() {
return getParent().orElseThrow(() -> new UnsupportedOperationException("Roots have no position in parent"))
.getIndexOfChild((T) this).get();
}
/**
* Gets the index of the specified child in this node's child list.
* <p>
* If the specified node is not a child of this node, returns an empty Optional.
* This method performs a linear search and is O(n) where n is the number of children.
*
* @param childNode the node to search for among this node's children
* @return an integer giving the index of the node in this node's child list
* or an empty Optional if the specified node is a not a child of this node
* @throws NullPointerException if childNode is null
*/
public Optional<Integer> getIndexOfChild(T childNode) {
Objects.requireNonNull(childNode);
int index = children.indexOf(childNode);
if (index == -1) {
return Optional.empty();
} else {
return Optional.of(index);
}
}
/**
* Gets the number of levels above this node, i.e. the distance from the root to this node.
* <p>
* If this node is the root, returns 0.
*
* @return an int giving the number of levels above this node
*/
public int getLevel() {
if (parent == null) {
return 0;
}
return parent.getLevel() + 1;
}
/**
* Returns the number of children of this node.
*
* @return an int giving the number of children of this node
*/
public int getNumberOfChildren() {
return children.size();
}
/**
* Removes this node from its parent and makes it a child of the specified node
* by adding it to the end of children list.
* In this way the whole subtree based at this node is moved to the given node.
*
* @param target the new parent
* @throws NullPointerException if target is null
* @throws ArrayIndexOutOfBoundsException if targetIndex is out of bounds
* @throws UnsupportedOperationException if target is an descendant of this node
*/
public void moveTo(T target) {
Objects.requireNonNull(target);
Optional<T> oldParent = getParent();
if (oldParent.isPresent() && (oldParent.get() == target)) {
this.moveTo(target, target.getNumberOfChildren() - 1);
} else {
this.moveTo(target, target.getNumberOfChildren());
}
}
/**
* Returns the path from the root, to get to this node. The last element in the path is this node.
*
* @return a list of nodes giving the path, where the first element in the path is the root
* and the last element is this node.
*/
public List<T> getPathFromRoot() {
if (parent == null) {
List<T> pathToMe = new ArrayList<>();
pathToMe.add((T) this);
return pathToMe;
}
List<T> path = parent.getPathFromRoot();
path.add((T) this);
return path;
}
/**
* Returns the next sibling of this node in the parent's children list.
* Returns an empty Optional if this node has no parent or if it is the parent's last child.
* <p>
* This method performs a linear search that is O(n) where n is the number of children.
* To traverse the entire children collection, use the parent's getChildren() instead.
*
* @return the sibling of this node that immediately follows this node
* @see #getChildren
*/
public Optional<T> getNextSibling() {
return getRelativeSibling(+1);
}
/**
* Returns the previous sibling of this node in the parent's children list.
* Returns an empty Optional if this node has no parent or is the parent's first child.
* <p>
* This method performs a linear search that is O(n) where n is the number of children.
*
* @return the sibling of this node that immediately precedes this node
* @see #getChildren
*/
public Optional<T> getPreviousSibling() {
return getRelativeSibling(-1);
}
/**
* Returns the sibling which is shiftIndex away from this node.
*/
private Optional<T> getRelativeSibling(int shiftIndex) {
if (parent == null) {
return Optional.empty();
} else {
int indexInParent = getPositionInParent();
int indexTarget = indexInParent + shiftIndex;
if (parent.childIndexExists(indexTarget)) {
return parent.getChildAt(indexTarget);
} else {
return Optional.empty();
}
}
}
/**
* Returns this node's parent or an empty Optional if this node has no parent.
*
* @return this node's parent T, or an empty Optional if this node has no parent
*/
public Optional<T> getParent() {
return Optional.ofNullable(parent);
}
/**
* Sets the parent node of this node.
* <p>
* This method does not add this node to the children collection of the new parent nor does it remove this node
* from the old parent. You should probably call moveTo or remove to change the tree.
*
* @param parent the new parent
*/
protected void setParent(T parent) {
this.parent = parent;
}
/**
* Returns the child at the specified index in this node's children collection.
*
* @param index an index into this node's children collection
* @return the node in this node's children collection at the specified index,
* or an empty Optional if the index does not point to a child
*/
public Optional<T> getChildAt(int index) {
return childIndexExists(index) ? Optional.of(children.get(index)) : Optional.empty();
}
/**
* Returns whether the specified index is a valid index for a child.
*
* @param index the index to be tested
* @return returns true when index is at least 0 and less then the count of children
*/
protected boolean childIndexExists(int index) {
return (index >= 0) && (index < children.size());
}
/**
* Returns true if this node is the root of the tree.
* The root is the only node in the tree with an empty parent; every tree has exactly one root.
*
* @return true if this node is the root of its tree
*/
public boolean isRoot() {
return parent == null;
}
/**
* Returns true if this node is an ancestor of the given node.
* <p>
* A node is considered an ancestor of itself.
*
* @param anotherNode node to test
* @return true if anotherNode is a descendant of this node
* @throws NullPointerException if anotherNode is null
* @see #isNodeDescendant
*/
public boolean isAncestorOf(T anotherNode) {
Objects.requireNonNull(anotherNode);
if (anotherNode == this) {
return true;
} else {
for (T child : children) {
if (child.isAncestorOf(anotherNode)) {
return true;
}
}
return false;
}
}
/**
* Returns the root of the tree that contains this node. The root is the ancestor with an empty parent.
* Thus a node without a parent is considered its own root.
*
* @return the root of the tree that contains this node
*/
public T getRoot() {
if (parent == null) {
return (T) this;
} else {
return parent.getRoot();
}
}
/**
* Returns true if this node has no children.
*
* @return true if this node has no children
*/
public boolean isLeaf() {
return (getNumberOfChildren() == 0);
}
/**
* Removes the subtree rooted at this node from the tree, giving this node an empty parent.
* Does nothing if this node is the root of it tree.
*/
public void removeFromParent() {
if (parent != null) {
parent.removeChild((T) this);
}
}
/**
* Removes all of this node's children, setting their parents to empty.
* If this node has no children, this method does nothing.
*/
public void removeAllChildren() {
while (getNumberOfChildren() > 0) {
removeChild(0);
}
}
/**
* Returns this node's first child if it exists (otherwise returns an empty Optional).
*
* @return the first child of this node
*/
public Optional<T> getFirstChild() {
return getChildAt(0);
}
/**
* Returns this node's last child if it exists (otherwise returns an empty Optional).
*
* @return the last child of this node
*/
public Optional<T> getLastChild() {
return getChildAt(children.size() - 1);
}
/**
* Returns true if anotherNode is a descendant of this node
* -- if it is this node, one of this node's children, or a descendant of one of this node's children.
* Note that a node is considered a descendant of itself.
* <p>
* If anotherNode is null, an exception is thrown.
*
* @param anotherNode node to test as descendant of this node
* @return true if this node is an ancestor of anotherNode
* @see #isAncestorOf
*/
public boolean isNodeDescendant(T anotherNode) {
Objects.requireNonNull(anotherNode);
return this.isAncestorOf(anotherNode);
}
/**
* Gets a forward-order list of this node's children.
* <p>
* The returned list is unmodifiable - use the add and remove methods to modify the nodes children.
* However, changing the nodes children (for example by calling moveTo) is reflected in a change of
* the list returned by getChildren. In other words, getChildren provides a read-only view on the children but
* not a copy.
*
* @return a list of this node's children
*/
public List<T> getChildren() {
return Collections.unmodifiableList(children);
}
/**
* Removes the given child from this node's child list, giving it an empty parent.
*
* @param child a child of this node to remove
*/
public void removeChild(T child) {
Objects.requireNonNull(child);
children.remove(child);
child.setParent(null);
notifyAboutDescendantChange((T)this);
}
/**
* Removes the child at the specified index from this node's children and sets that node's parent to empty.
* <p>
* Does nothing if the index does not point to a child.
*
* @param childIndex the index in this node's child array of the child to remove
*/
public void removeChild(int childIndex) {
Optional<T> child = getChildAt(childIndex);
if (child.isPresent()) {
children.remove(childIndex);
child.get().setParent(null);
}
notifyAboutDescendantChange((T)this);
}
/**
* Adds the node at the end the children collection. Also sets the parent of the given node to this node.
* The given node is not allowed to already be in a tree (i.e. it has to have no parent).
*
* @param child the node to add
* @return the child node
*/
public T addChild(T child) {
return addChild(child, children.size());
}
/**
* Adds the node at the given position in the children collection. Also sets the parent of the given node to this node.
* The given node is not allowed to already be in a tree (i.e. it has to have no parent).
*
* @param child the node to add
* @param index the position where the node should be added
* @return the child node
* @throws IndexOutOfBoundsException if the index is out of range
*/
public T addChild(T child, int index) {
Objects.requireNonNull(child);
if (child.getParent().isPresent()) {
throw new UnsupportedOperationException("Cannot add a node which already has a parent, use moveTo instead");
}
child.setParent((T) this);
children.add(index, child);
notifyAboutDescendantChange((T)this);
return child;
}
/**
* Removes all children from this node and makes them a child of the specified node
* by adding it to the specified position in the children list.
*
* @param target the new parent
* @param targetIndex the position where the children should be inserted
* @throws NullPointerException if target is null
* @throws ArrayIndexOutOfBoundsException if targetIndex is out of bounds
* @throws UnsupportedOperationException if target is an descendant of one of the children of this node
*/
public void moveAllChildrenTo(T target, int targetIndex) {
while (getNumberOfChildren() > 0) {
getLastChild().get().moveTo(target, targetIndex);
}
}
/**
* Sorts the list of children according to the order induced by the specified {@link Comparator}.
* <p>
* All children must be mutually comparable using the specified comparator
* (that is, {@code c.compare(e1, e2)} must not throw a {@code ClassCastException}
* for any children {@code e1} and {@code e2} in the list).
*
* @param comparator the comparator used to compare the child nodes
* @param recursive if true the whole subtree is sorted
* @throws NullPointerException if the comparator is null
*/
public void sortChildren(Comparator<? super T> comparator, boolean recursive) {
Objects.requireNonNull(comparator);
if (this.isLeaf()) {
return; // nothing to sort
}
int j = getNumberOfChildren() - 1;
int lastModified;
while (j > 0) {
lastModified = j + 1;
j = -1;
for (int i = 1; i < lastModified; ++i) {
T child1 = getChildAt(i - 1).get();
T child2 = getChildAt(i).get();
if (comparator.compare(child1, child2) > 0) {
child1.moveTo((T) this, i);
j = i;
}
}
}
if (recursive) {
for (T child : getChildren()) {
child.sortChildren(comparator, true);
}
}
}
/**
* Removes this node from its parent and makes it a child of the specified node
* by adding it to the specified position in the children list.
* In this way the whole subtree based at this node is moved to the given node.
*
* @param target the new parent
* @param targetIndex the position where the children should be inserted
* @throws NullPointerException if target is null
* @throws ArrayIndexOutOfBoundsException if targetIndex is out of bounds
* @throws UnsupportedOperationException if target is an descendant of this node
*/
public void moveTo(T target, int targetIndex) {
Objects.requireNonNull(target);
// Check that the target node is not an ancestor of this node, because this would create loops in the tree
if (this.isAncestorOf(target)) {
throw new UnsupportedOperationException("the target cannot be a descendant of this node");
}
// Remove from previous parent
Optional<T> oldParent = getParent();
if (oldParent.isPresent()) {
oldParent.get().removeChild((T) this);
}
// Add as child
target.addChild((T) this, targetIndex);
}
/**
* Creates a deep copy of this node and all of its children.
*
* @return a deep copy of the subtree
*/
public T copySubtree() {
T copy = copyNode();
for (T child : getChildren()) {
child.copySubtree().moveTo(copy);
}
return copy;
}
/**
* Creates a copy of this node, completely separated from the tree (i.e. no children and no parent)
*
* @return a deep copy of this node
*/
public abstract T copyNode();
/**
* The function which is invoked when something changed in the subtree.
*/
private Consumer<T> onDescendantChanged = t -> {
/* Do nothing */ };
/**
* Adds the given function to the list of subscribers which are notified when something changes in the subtree.
*
* The following events are supported (the text in parentheses specifies which node is passed as the source):
* - addChild (new parent)
* - removeChild (old parent)
* - move (old parent and new parent)
* @param subscriber function to be invoked upon a change
*/
public void subscribeToDescendantChanged(Consumer<T> subscriber) {
onDescendantChanged = onDescendantChanged.andThen(subscriber);
}
/**
* Helper method which notifies all subscribers about a change in the subtree and bubbles the event to all parents.
* @param source the node which changed
*/
protected void notifyAboutDescendantChange(T source) {
onDescendantChanged.accept(source);
if(! isRoot()) {
parent.notifyAboutDescendantChange(source);
}
}
}
| |
/*
* Copyright (c) 2014, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.oryx.lambda.serving;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.annotation.WebListener;
import java.io.IOException;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.Objects;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterators;
import com.typesafe.config.Config;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import kafka.serializer.Decoder;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.cloudera.oryx.api.KeyMessage;
import com.cloudera.oryx.api.KeyMessageImpl;
import com.cloudera.oryx.api.TopicProducer;
import com.cloudera.oryx.api.serving.ScalaServingModelManager;
import com.cloudera.oryx.api.serving.ServingModelManager;
import com.cloudera.oryx.common.lang.ClassUtils;
import com.cloudera.oryx.common.lang.LoggingRunnable;
import com.cloudera.oryx.common.settings.ConfigUtils;
import com.cloudera.oryx.kafka.util.KafkaUtils;
/**
* {@link ServletContextListener} that initializes a {@link ServingModelManager} at web
* app startup time in the Serving Layer.
*
* @param <K> type of key written to input topic
* @param <M> type of value written to input topic
* @param <U> type of update/model read from update topic
*/
@WebListener
public final class ModelManagerListener<K,M,U> implements ServletContextListener {
  private static final Logger log = LoggerFactory.getLogger(ModelManagerListener.class);
  public static final String MANAGER_KEY = ModelManagerListener.class.getName() + ".ModelManager";
  public static final String INPUT_PRODUCER_KEY =
      ModelManagerListener.class.getName() + ".InputProducer";
  private Config config;
  private String updateTopic;
  private int maxMessageSize;
  private String updateTopicLockMaster;
  private boolean readOnly;
  private String inputTopic;
  private String inputTopicLockMaster;
  private String inputTopicBroker;
  private String modelManagerClassName;
  private Class<? extends Decoder<U>> updateDecoderClass;
  private ConsumerConnector consumer;
  private ServingModelManager<U> modelManager;
  private TopicProducer<K,M> inputProducer;
  /**
   * Reads the serialized app config out of the servlet context and caches the
   * topic / class-name settings used by the lifecycle callbacks.
   *
   * @param context the web app's servlet context, carrying the serialized config
   * @throws NullPointerException if the serialized config init parameter is absent
   */
  @SuppressWarnings("unchecked")
  public void init(ServletContext context) {
    String serializedConfig = context.getInitParameter(ConfigUtils.class.getName() + ".serialized");
    Objects.requireNonNull(serializedConfig);
    this.config = ConfigUtils.deserialize(serializedConfig);
    this.updateTopic = config.getString("oryx.update-topic.message.topic");
    this.maxMessageSize = config.getInt("oryx.update-topic.message.max-size");
    this.updateTopicLockMaster = config.getString("oryx.update-topic.lock.master");
    this.readOnly = config.getBoolean("oryx.serving.api.read-only");
    // Input-topic settings are only needed when the API may write to the input topic.
    if (!readOnly) {
      this.inputTopic = config.getString("oryx.input-topic.message.topic");
      this.inputTopicLockMaster = config.getString("oryx.input-topic.lock.master");
      this.inputTopicBroker = config.getString("oryx.input-topic.broker");
    }
    this.modelManagerClassName = config.getString("oryx.serving.model-manager-class");
    this.updateDecoderClass = (Class<? extends Decoder<U>>) ClassUtils.loadClass(
        config.getString("oryx.update-topic.message.decoder-class"), Decoder.class);
    Preconditions.checkArgument(maxMessageSize > 0);
  }
  /**
   * Wires up the serving layer at web app startup: verifies the Kafka topics,
   * creates the input producer (unless read-only), starts a background thread
   * consuming the update topic, and publishes the model manager into app scope.
   */
  @Override
  public void contextInitialized(ServletContextEvent sce) {
    log.info("ModelManagerListener initializing");
    ServletContext context = sce.getServletContext();
    init(context);
    if (!readOnly) {
      Preconditions.checkArgument(KafkaUtils.topicExists(inputTopicLockMaster, inputTopic),
                                  "Topic %s does not exist; did you create it?", inputTopic);
      Preconditions.checkArgument(KafkaUtils.topicExists(updateTopicLockMaster, updateTopic),
                                  "Topic %s does not exist; did you create it?", updateTopic);
      inputProducer = new TopicProducerImpl<>(inputTopicBroker, inputTopic);
      context.setAttribute(INPUT_PRODUCER_KEY, inputProducer);
    }
    consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(
        ConfigUtils.keyValueToProperties(
            "group.id", "OryxGroup-ServingLayer-" + System.currentTimeMillis(),
            "zookeeper.connect", updateTopicLockMaster,
            "fetch.message.max.bytes", maxMessageSize,
            // Do start from the beginning of the update queue
            "auto.offset.reset", "smallest"
        )));
    KafkaStream<String,U> stream =
        consumer.createMessageStreams(Collections.singletonMap(updateTopic, 1),
                                      new StringDecoder(null),
                                      loadDecoderInstance())
            .get(updateTopic).get(0);
    // Adapt Kafka's MessageAndMetadata to the API's KeyMessage on the fly.
    final Iterator<KeyMessage<String,U>> transformed = Iterators.transform(stream.iterator(),
        new Function<MessageAndMetadata<String,U>, KeyMessage<String,U>>() {
          @Override
          public KeyMessage<String,U> apply(MessageAndMetadata<String,U> input) {
            return new KeyMessageImpl<>(input.key(), input.message());
          }
        });
    modelManager = loadManagerInstance();
    new Thread(new LoggingRunnable() {
      @Override
      public void doRun() throws IOException {
        // Can we do better than a default Hadoop config? Nothing else provides it here
        modelManager.consume(transformed, new Configuration());
      }
    }, "OryxServingLayerUpdateConsumerThread").start();
    // Set the Model Manager in the Application scope
    context.setAttribute(MANAGER_KEY, modelManager);
  }
  /**
   * Tears everything down in reverse order at web app shutdown: clears app-scope
   * attributes, closes the model manager and input producer, and commits offsets
   * before shutting the Kafka consumer down.
   */
  @Override
  public void contextDestroyed(ServletContextEvent sce) {
    log.info("ModelManagerListener destroying");
    // Slightly paranoid; remove objects from app scope manually
    ServletContext context = sce.getServletContext();
    for (Enumeration<String> names = context.getAttributeNames(); names.hasMoreElements();) {
      context.removeAttribute(names.nextElement());
    }
    if (modelManager != null) {
      log.info("Shutting down model manager");
      modelManager.close();
      modelManager = null;
    }
    if (inputProducer != null) {
      log.info("Shutting down input producer");
      inputProducer.close();
      inputProducer = null;
    }
    if (consumer != null) {
      log.info("Shutting down consumer");
      consumer.commitOffsets();
      consumer.shutdown();
      consumer = null;
    }
    // Hacky, but prevents Tomcat from complaining that ZK's cleanup thread 'leaked' since
    // it has a short sleep at its end
    try {
      Thread.sleep(1000);
    } catch (InterruptedException ie) {
      // Restore the interrupt status so the container can still observe the interruption.
      Thread.currentThread().interrupt();
    }
  }
  /**
   * Instantiates the configured model manager, preferring a Config-arg constructor
   * and falling back to a no-arg one; Scala implementations are wrapped in an adapter.
   *
   * @return the serving model manager to drive this layer
   * @throws IllegalArgumentException if the configured class implements neither API
   */
  @SuppressWarnings("unchecked")
  private ServingModelManager<U> loadManagerInstance() {
    Class<?> managerClass = ClassUtils.loadClass(modelManagerClassName);
    if (ServingModelManager.class.isAssignableFrom(managerClass)) {
      try {
        return ClassUtils.loadInstanceOf(
            modelManagerClassName,
            ServingModelManager.class,
            new Class<?>[] { Config.class },
            new Object[] { config });
      } catch (IllegalArgumentException iae) {
        return ClassUtils.loadInstanceOf(modelManagerClassName, ServingModelManager.class);
      }
    } else if (ScalaServingModelManager.class.isAssignableFrom(managerClass)) {
      try {
        return new ScalaServingModelManagerAdapter<>(ClassUtils.loadInstanceOf(
            modelManagerClassName,
            ScalaServingModelManager.class,
            new Class<?>[] { Config.class },
            new Object[] { config }));
      } catch (IllegalArgumentException iae) {
        return new ScalaServingModelManagerAdapter<>(ClassUtils.loadInstanceOf(
            modelManagerClassName, ScalaServingModelManager.class));
      }
    } else {
      throw new IllegalArgumentException("Bad manager class: " + managerClass);
    }
  }
  /**
   * Instantiates the configured update-topic decoder.
   *
   * @return a decoder for messages on the update topic
   */
  private Decoder<U> loadDecoderInstance() {
    try {
      return ClassUtils.loadInstanceOf(updateDecoderClass);
    } catch (IllegalArgumentException iae) {
      // special case the Kafka decoder, which wants an optional nullable parameter unfortunately
      return ClassUtils.loadInstanceOf(updateDecoderClass.getName(),
                                       updateDecoderClass,
                                       new Class<?>[] { VerifiableProperties.class },
                                       new Object[] { null });
    }
  }
}
| |
package me.everything.android.ui.overscroll;
import android.view.MotionEvent;
import android.view.View;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import me.everything.android.ui.overscroll.adapters.IOverScrollDecoratorAdapter;
import static me.everything.android.ui.overscroll.IOverScrollState.*;
import static me.everything.android.ui.overscroll.VerticalOverScrollBounceEffectDecorator.DEFAULT_DECELERATE_FACTOR;
import static me.everything.android.ui.overscroll.VerticalOverScrollBounceEffectDecorator.DEFAULT_TOUCH_DRAG_MOVE_RATIO_FWD;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@link VerticalOverScrollBounceEffectDecorator}: attach/detach behavior and
 * the touch-driven over-scroll state machine, driven by mocked views and motion events.
 *
 * <p>Mocked move events report equal X and Y deltas; the vertical decorator only consumes the
 * Y axis, so all expectations below are expressed in terms of Y.</p>
 *
 * @author amitd
 */
@RunWith(RobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class VerticalOverScrollBounceEffectDecoratorTest {

    View mView;
    IOverScrollDecoratorAdapter mViewAdapter;
    IOverScrollStateListener mStateListener;
    IOverScrollUpdateListener mUpdateListener;

    @Before
    public void setUp() throws Exception {
        mView = mock(View.class);
        mViewAdapter = mock(IOverScrollDecoratorAdapter.class);
        when(mViewAdapter.getView()).thenReturn(mView);
        mStateListener = mock(IOverScrollStateListener.class);
        mUpdateListener = mock(IOverScrollUpdateListener.class);
    }

    @Test
    public void detach_decoratorIsAttached_detachFromView() throws Exception {
        // Arrange
        // Fixed: this vertical test previously instantiated HorizontalOverScrollBounceEffectDecorator
        // (a copy-paste from the horizontal test class), so it never exercised the vertical decorator.
        VerticalOverScrollBounceEffectDecorator uut = new VerticalOverScrollBounceEffectDecorator(mViewAdapter);

        // Act
        uut.detach();

        // Assert
        verify(mView).setOnTouchListener(eq((View.OnTouchListener) null));
        verify(mView).setOverScrollMode(View.OVER_SCROLL_ALWAYS);
    }

    @Test
    public void detach_overScrollInEffect_detachFromView() throws Exception {
        // Arrange
        when(mViewAdapter.isInAbsoluteStart()).thenReturn(true);
        when(mViewAdapter.isInAbsoluteEnd()).thenReturn(false);
        VerticalOverScrollBounceEffectDecorator uut = getUUT();
        uut.onTouch(mView, createShortDownwardsMoveEvent());

        // Act
        uut.detach();

        // Assert
        verify(mView).setOnTouchListener(eq((View.OnTouchListener) null));
        verify(mView).setOverScrollMode(View.OVER_SCROLL_ALWAYS);
    }

    /*
     * Move-action event
     */

    @Test
    public void onTouchMoveAction_notInViewEnds_ignoreTouchEvent() throws Exception {
        // Arrange
        MotionEvent event = createShortDownwardsMoveEvent();
        when(mViewAdapter.isInAbsoluteStart()).thenReturn(false);
        when(mViewAdapter.isInAbsoluteEnd()).thenReturn(false);
        VerticalOverScrollBounceEffectDecorator uut = getUUT();

        // Act
        boolean ret = uut.onTouch(mView, event);

        // Assert
        verify(mView, never()).setTranslationX(anyFloat());
        verify(mView, never()).setTranslationY(anyFloat());
        assertFalse(ret);
        assertEquals(STATE_IDLE, uut.getCurrentState());
        verify(mStateListener, never()).onOverScrollStateChange(eq(uut), anyInt(), anyInt());
        verify(mUpdateListener, never()).onOverScrollUpdate(eq(uut), anyInt(), anyFloat());
    }

    @Test
    public void onTouchMoveAction_dragDownInUpperEnd_overscrollDownwards() throws Exception {
        // Arrange
        MotionEvent event = createShortDownwardsMoveEvent();
        when(mViewAdapter.isInAbsoluteStart()).thenReturn(true);
        when(mViewAdapter.isInAbsoluteEnd()).thenReturn(false);
        VerticalOverScrollBounceEffectDecorator uut = getUUT();

        // Act
        boolean ret = uut.onTouch(mView, event);

        // Assert
        float expectedTransY = (event.getY() - event.getHistoricalY(0)) / DEFAULT_TOUCH_DRAG_MOVE_RATIO_FWD;
        verify(mView).setTranslationY(expectedTransY);
        verify(mView, never()).setTranslationX(anyFloat());
        assertTrue(ret);
        assertEquals(STATE_DRAG_START_SIDE, uut.getCurrentState());
        verify(mStateListener).onOverScrollStateChange(eq(uut), eq(STATE_IDLE), eq(STATE_DRAG_START_SIDE));
        verify(mUpdateListener).onOverScrollUpdate(eq(uut), eq(STATE_DRAG_START_SIDE), eq(expectedTransY));
    }

    @Test
    public void onTouchMoveAction_dragUpInBottomEnd_overscrollUpwards() throws Exception {
        // Arrange
        MotionEvent event = createShortUpwardsMoveEvent();
        when(mViewAdapter.isInAbsoluteStart()).thenReturn(false);
        when(mViewAdapter.isInAbsoluteEnd()).thenReturn(true);
        VerticalOverScrollBounceEffectDecorator uut = getUUT();

        // Act
        boolean ret = uut.onTouch(mView, event);

        // Assert
        float expectedTransY = (event.getY() - event.getHistoricalY(0)) / DEFAULT_TOUCH_DRAG_MOVE_RATIO_FWD;
        verify(mView).setTranslationY(expectedTransY);
        verify(mView, never()).setTranslationX(anyFloat());
        assertTrue(ret);
        assertEquals(STATE_DRAG_END_SIDE, uut.getCurrentState());
        verify(mStateListener).onOverScrollStateChange(eq(uut), eq(STATE_IDLE), eq(STATE_DRAG_END_SIDE));
        verify(mUpdateListener).onOverScrollUpdate(eq(uut), eq(STATE_DRAG_END_SIDE), eq(expectedTransY));
    }

    @Test
    public void onTouchMoveAction_dragUpInUpperEnd_ignoreTouchEvent() throws Exception {
        // Arrange
        MotionEvent event = createShortUpwardsMoveEvent();
        when(mViewAdapter.isInAbsoluteStart()).thenReturn(true);
        when(mViewAdapter.isInAbsoluteEnd()).thenReturn(false);
        VerticalOverScrollBounceEffectDecorator uut = getUUT();

        // Act
        boolean ret = uut.onTouch(mView, event);

        // Assert
        verify(mView, never()).setTranslationX(anyFloat());
        verify(mView, never()).setTranslationY(anyFloat());
        assertFalse(ret);
        assertEquals(STATE_IDLE, uut.getCurrentState());
        verify(mStateListener, never()).onOverScrollStateChange(eq(uut), anyInt(), anyInt());
        verify(mUpdateListener, never()).onOverScrollUpdate(eq(uut), anyInt(), anyFloat());
    }

    @Test
    public void onTouchMoveAction_dragDownInBottomEnd_ignoreTouchEvent() throws Exception {
        // Arrange
        MotionEvent event = createShortDownwardsMoveEvent();
        when(mViewAdapter.isInAbsoluteStart()).thenReturn(false);
        when(mViewAdapter.isInAbsoluteEnd()).thenReturn(true);
        VerticalOverScrollBounceEffectDecorator uut = getUUT();

        // Act
        boolean ret = uut.onTouch(mView, event);

        // Assert
        verify(mView, never()).setTranslationX(anyFloat());
        verify(mView, never()).setTranslationY(anyFloat());
        assertFalse(ret);
        assertEquals(STATE_IDLE, uut.getCurrentState());
        verify(mStateListener, never()).onOverScrollStateChange(eq(uut), anyInt(), anyInt());
        verify(mUpdateListener, never()).onOverScrollUpdate(eq(uut), anyInt(), anyFloat());
    }

    @Test
    public void onTouchMoveAction_2ndDownDragInUpperEnd_overscrollDownwardsFurther() throws Exception {
        // Arrange

        // Bring UUT to a downwards-overscroll state
        MotionEvent event1 = createShortDownwardsMoveEvent();
        when(mViewAdapter.isInAbsoluteStart()).thenReturn(true);
        when(mViewAdapter.isInAbsoluteEnd()).thenReturn(false);
        VerticalOverScrollBounceEffectDecorator uut = getUUT();
        uut.onTouch(mView, event1);
        reset(mView);

        // Create 2nd downwards-drag event
        MotionEvent event2 = createLongDownwardsMoveEvent();

        // Act
        final boolean ret = uut.onTouch(mView, event2);

        // Assert
        final float expectedTransY1 = (event1.getY() - event1.getHistoricalY(0)) / DEFAULT_TOUCH_DRAG_MOVE_RATIO_FWD;
        final float expectedTransY2 = (event2.getY() - event2.getHistoricalY(0)) / DEFAULT_TOUCH_DRAG_MOVE_RATIO_FWD;
        verify(mView).setTranslationY(expectedTransY2);
        verify(mView, never()).setTranslationX(anyFloat());
        assertTrue(ret);
        assertEquals(STATE_DRAG_START_SIDE, uut.getCurrentState());

        // State-change listener called only once?
        verify(mStateListener).onOverScrollStateChange(eq(uut), eq(STATE_IDLE), eq(STATE_DRAG_START_SIDE));
        verify(mStateListener).onOverScrollStateChange(eq(uut), anyInt(), anyInt());

        // Update-listener called exactly twice?
        verify(mUpdateListener).onOverScrollUpdate(eq(uut), eq(STATE_DRAG_START_SIDE), eq(expectedTransY1));
        verify(mUpdateListener).onOverScrollUpdate(eq(uut), eq(STATE_DRAG_START_SIDE), eq(expectedTransY2));
        verify(mUpdateListener, times(2)).onOverScrollUpdate(eq(uut), anyInt(), anyFloat());
    }

    @Test
    public void onTouchMoveAction_2ndUpDragInBottomEnd_overscrollUpwardsFurther() throws Exception {
        // Arrange

        // Bring UUT to an upwards-overscroll state
        MotionEvent event1 = createShortUpwardsMoveEvent();
        when(mViewAdapter.isInAbsoluteStart()).thenReturn(false);
        when(mViewAdapter.isInAbsoluteEnd()).thenReturn(true);
        VerticalOverScrollBounceEffectDecorator uut = getUUT();
        uut.onTouch(mView, event1);
        reset(mView);

        // Create 2nd upward-drag event
        MotionEvent event2 = createLongUpwardsMoveEvent();

        // Act
        final boolean ret = uut.onTouch(mView, event2);

        // Assert
        final float expectedTransY1 = (event1.getY() - event1.getHistoricalY(0)) / DEFAULT_TOUCH_DRAG_MOVE_RATIO_FWD;
        final float expectedTransY2 = (event2.getY() - event2.getHistoricalY(0)) / DEFAULT_TOUCH_DRAG_MOVE_RATIO_FWD;
        verify(mView).setTranslationY(expectedTransY2);
        verify(mView, never()).setTranslationX(anyFloat());
        assertTrue(ret);
        assertEquals(STATE_DRAG_END_SIDE, uut.getCurrentState());

        // State-change listener called only once?
        verify(mStateListener).onOverScrollStateChange(eq(uut), eq(STATE_IDLE), eq(STATE_DRAG_END_SIDE));
        verify(mStateListener).onOverScrollStateChange(eq(uut), anyInt(), anyInt());

        // Update-listener called exactly twice?
        verify(mUpdateListener).onOverScrollUpdate(eq(uut), eq(STATE_DRAG_END_SIDE), eq(expectedTransY1));
        verify(mUpdateListener).onOverScrollUpdate(eq(uut), eq(STATE_DRAG_END_SIDE), eq(expectedTransY2));
        verify(mUpdateListener, times(2)).onOverScrollUpdate(eq(uut), anyInt(), anyFloat());
    }

    /**
     * When over-scroll has already started (downwards in this case) and suddenly the user changes
     * their mind and scrolls a bit in the other direction:
     * <br/>We expect the <b>touch to still be intercepted</b> in that case, and the <b>overscroll to remain in effect</b>.
     */
    @Test
    public void onTouchMoveAction_dragUpWhenDownOverscolled_continueOverscrollingUpwards() throws Exception {
        // Arrange

        // In down & up drag tests we use equal ratios to avoid the effect's under-scroll handling
        final float touchDragRatioFwd = 3f;
        final float touchDragRatioBck = 3f;

        // Bring UUT to a downwards-overscroll state
        when(mViewAdapter.isInAbsoluteStart()).thenReturn(true);
        when(mViewAdapter.isInAbsoluteEnd()).thenReturn(false);
        VerticalOverScrollBounceEffectDecorator uut = getUUT(touchDragRatioFwd, touchDragRatioBck);
        MotionEvent eventMoveDown = createLongDownwardsMoveEvent();
        uut.onTouch(mView, eventMoveDown);
        reset(mView);
        float startTransY = (eventMoveDown.getY() - eventMoveDown.getHistoricalY(0)) / touchDragRatioFwd;
        when(mView.getTranslationY()).thenReturn(startTransY);

        // Create the up-drag event
        MotionEvent eventMoveUpwards = createShortUpwardsMoveEvent();

        // Act
        boolean ret = uut.onTouch(mView, eventMoveUpwards);

        // Assert
        float expectedTransY = startTransY +
                (eventMoveUpwards.getY() - eventMoveUpwards.getHistoricalY(0)) / touchDragRatioBck;
        verify(mView).setTranslationY(expectedTransY);
        verify(mView, never()).setTranslationX(anyFloat());
        assertTrue(ret);
        assertEquals(STATE_DRAG_START_SIDE, uut.getCurrentState());

        // State-change listener called only once?
        verify(mStateListener).onOverScrollStateChange(eq(uut), eq(STATE_IDLE), eq(STATE_DRAG_START_SIDE));
        verify(mStateListener).onOverScrollStateChange(eq(uut), anyInt(), anyInt());

        // Update-listener called exactly twice?
        verify(mUpdateListener).onOverScrollUpdate(eq(uut), eq(STATE_DRAG_START_SIDE), eq(startTransY));
        verify(mUpdateListener).onOverScrollUpdate(eq(uut), eq(STATE_DRAG_START_SIDE), eq(expectedTransY));
        verify(mUpdateListener, times(2)).onOverScrollUpdate(eq(uut), anyInt(), anyFloat());
    }

    /**
     * When over-scroll has already started (upwards in this case) and suddenly the user changes
     * their mind and scrolls a bit in the other direction:
     * <br/>We expect the <b>touch to still be intercepted</b> in that case, and the <b>overscroll to remain in effect</b>.
     */
    @Test
    public void onTouchMoveAction_dragDownWhenUpOverscolled_continueOverscrollingDownwards() throws Exception {
        // Arrange

        // In up & down drag tests we use equal ratios to avoid the effect's under-scroll handling
        final float touchDragRatioFwd = 3f;
        final float touchDragRatioBck = 3f;

        // Bring UUT to an upwards-overscroll state
        when(mViewAdapter.isInAbsoluteStart()).thenReturn(false);
        when(mViewAdapter.isInAbsoluteEnd()).thenReturn(true);
        VerticalOverScrollBounceEffectDecorator uut = getUUT(touchDragRatioFwd, touchDragRatioBck);
        MotionEvent eventMoveUp = createLongUpwardsMoveEvent();
        uut.onTouch(mView, eventMoveUp);
        reset(mView);
        float startTransY = (eventMoveUp.getY() - eventMoveUp.getHistoricalY(0)) / touchDragRatioFwd;
        when(mView.getTranslationY()).thenReturn(startTransY);

        // Create the down-drag event
        MotionEvent eventMoveDown = createShortDownwardsMoveEvent();

        // Act
        boolean ret = uut.onTouch(mView, eventMoveDown);

        // Assert
        float expectedTransY = startTransY + (eventMoveDown.getY() - eventMoveDown.getHistoricalY(0)) / touchDragRatioBck;
        verify(mView).setTranslationY(expectedTransY);
        verify(mView, never()).setTranslationX(anyFloat());
        assertTrue(ret);
        assertEquals(STATE_DRAG_END_SIDE, uut.getCurrentState());

        // State-change listener called only once?
        verify(mStateListener).onOverScrollStateChange(eq(uut), eq(STATE_IDLE), eq(STATE_DRAG_END_SIDE));
        verify(mStateListener).onOverScrollStateChange(eq(uut), anyInt(), anyInt());

        // Update-listener called exactly twice?
        verify(mUpdateListener).onOverScrollUpdate(eq(uut), eq(STATE_DRAG_END_SIDE), eq(startTransY));
        verify(mUpdateListener).onOverScrollUpdate(eq(uut), eq(STATE_DRAG_END_SIDE), eq(expectedTransY));
        verify(mUpdateListener, times(2)).onOverScrollUpdate(eq(uut), anyInt(), anyFloat());
    }

    @Test
    public void onTouchMoveAction_undragWhenDownOverscrolled_endOverscrolling() throws Exception {
        // Arrange

        // In down & up drag tests we use equal ratios to avoid the effect's under-scroll handling
        final float touchDragRatioFwd = 3f;
        final float touchDragRatioBck = 3f;

        // Bring UUT to a downwards-overscroll state
        when(mViewAdapter.isInAbsoluteStart()).thenReturn(true);
        when(mViewAdapter.isInAbsoluteEnd()).thenReturn(false);
        VerticalOverScrollBounceEffectDecorator uut = getUUT(touchDragRatioFwd, touchDragRatioBck);
        MotionEvent eventMoveDown = createLongDownwardsMoveEvent();
        uut.onTouch(mView, eventMoveDown);
        reset(mView);
        // Fixed: the start offset and stub previously used the X axis (getX/getTranslationX),
        // copy-pasted from the horizontal test; the vertical decorator consumes the Y axis.
        float startTransY = (eventMoveDown.getY() - eventMoveDown.getHistoricalY(0)) / touchDragRatioFwd;
        when(mView.getTranslationY()).thenReturn(startTransY);

        // Create the (negative) upwards-drag event
        MotionEvent eventMoveUp = createLongUpwardsMoveEvent();

        // Act
        boolean ret = uut.onTouch(mView, eventMoveUp);

        // Assert
        verify(mView, never()).setTranslationX(anyFloat());
        verify(mView).setTranslationY(0);
        assertTrue(ret);
        assertEquals(STATE_IDLE, uut.getCurrentState());

        // State-change listener invoked to say drag-on and drag-off (idle).
        verify(mStateListener).onOverScrollStateChange(eq(uut), eq(STATE_IDLE), eq(STATE_DRAG_START_SIDE));
        verify(mStateListener).onOverScrollStateChange(eq(uut), eq(STATE_DRAG_START_SIDE), eq(STATE_IDLE));
        verify(mStateListener, times(2)).onOverScrollStateChange(eq(uut), anyInt(), anyInt());

        // Update-listener called exactly twice?
        verify(mUpdateListener).onOverScrollUpdate(eq(uut), eq(STATE_DRAG_START_SIDE), eq(startTransY));
        verify(mUpdateListener).onOverScrollUpdate(eq(uut), eq(STATE_DRAG_START_SIDE), eq(0f));
        verify(mUpdateListener, times(2)).onOverScrollUpdate(eq(uut), anyInt(), anyFloat());
    }

    @Test
    public void onTouchMoveAction_undragWhenUpOverscrolled_endOverscrolling() throws Exception {
        // Arrange

        // In down & up drag tests we use equal ratios to avoid the effect's under-scroll handling
        final float touchDragRatioFwd = 3f;
        final float touchDragRatioBck = 3f;

        // Bring UUT to an upwards-overscroll state
        when(mViewAdapter.isInAbsoluteStart()).thenReturn(false);
        when(mViewAdapter.isInAbsoluteEnd()).thenReturn(true);
        VerticalOverScrollBounceEffectDecorator uut = getUUT(touchDragRatioFwd, touchDragRatioBck);
        MotionEvent eventMoveUp = createLongUpwardsMoveEvent();
        uut.onTouch(mView, eventMoveUp);
        reset(mView);
        // Fixed: Y axis, not X (see the downwards-undrag test above for details).
        float startTransY = (eventMoveUp.getY() - eventMoveUp.getHistoricalY(0)) / touchDragRatioFwd;
        when(mView.getTranslationY()).thenReturn(startTransY);

        // Create the (negative) downwards-drag event
        MotionEvent eventMoveDown = createLongDownwardsMoveEvent();

        // Act
        boolean ret = uut.onTouch(mView, eventMoveDown);

        // Assert
        verify(mView, never()).setTranslationX(anyFloat());
        verify(mView).setTranslationY(0);
        assertTrue(ret);
        assertEquals(STATE_IDLE, uut.getCurrentState());

        // State-change listener invoked to say drag-on and drag-off (idle).
        verify(mStateListener).onOverScrollStateChange(eq(uut), eq(STATE_IDLE), eq(STATE_DRAG_END_SIDE));
        verify(mStateListener).onOverScrollStateChange(eq(uut), eq(STATE_DRAG_END_SIDE), eq(STATE_IDLE));
        verify(mStateListener, times(2)).onOverScrollStateChange(eq(uut), anyInt(), anyInt());

        // Update-listener called exactly twice?
        verify(mUpdateListener).onOverScrollUpdate(eq(uut), eq(STATE_DRAG_END_SIDE), eq(startTransY));
        verify(mUpdateListener).onOverScrollUpdate(eq(uut), eq(STATE_DRAG_END_SIDE), eq(0f));
        verify(mUpdateListener, times(2)).onOverScrollUpdate(eq(uut), anyInt(), anyFloat());
    }

    /*
     * Up action event
     */

    @Test
    public void onTouchUpAction_eventWhenNotOverscrolled_ignoreTouchEvent() throws Exception {
        // Arrange
        MotionEvent event = createDefaultUpActionEvent();
        when(mViewAdapter.isInAbsoluteStart()).thenReturn(true);
        when(mViewAdapter.isInAbsoluteEnd()).thenReturn(true);
        VerticalOverScrollBounceEffectDecorator uut = getUUT();

        // Act
        boolean ret = uut.onTouch(mView, event);

        // Assert
        verify(mView, never()).setTranslationX(anyFloat());
        verify(mView, never()).setTranslationY(anyFloat());
        assertFalse(ret);
        assertEquals(STATE_IDLE, uut.getCurrentState());
        verify(mStateListener, never()).onOverScrollStateChange(eq(uut), anyInt(), anyInt());
        verify(mUpdateListener, never()).onOverScrollUpdate(eq(uut), anyInt(), anyFloat());
    }

    // Move event with a small positive (downwards) Y delta of +20 (and an X delta of +10).
    protected MotionEvent createShortDownwardsMoveEvent() {
        MotionEvent event = mock(MotionEvent.class);
        when(event.getAction()).thenReturn(MotionEvent.ACTION_MOVE);
        when(event.getX()).thenReturn(200f);
        when(event.getY()).thenReturn(100f);
        when(event.getX(0)).thenReturn(200f);
        when(event.getY(0)).thenReturn(100f);
        when(event.getHistorySize()).thenReturn(1);
        when(event.getHistoricalX(eq(0))).thenReturn(190f);
        when(event.getHistoricalY(eq(0))).thenReturn(80f);
        when(event.getHistoricalX(eq(0), eq(0))).thenReturn(190f);
        when(event.getHistoricalY(eq(0), eq(0))).thenReturn(80f);
        return event;
    }

    // Move event with a large positive (downwards) Y delta of +50.
    protected MotionEvent createLongDownwardsMoveEvent() {
        MotionEvent event = mock(MotionEvent.class);
        when(event.getAction()).thenReturn(MotionEvent.ACTION_MOVE);
        when(event.getX()).thenReturn(250f);
        when(event.getY()).thenReturn(150f);
        when(event.getX(0)).thenReturn(250f);
        when(event.getY(0)).thenReturn(150f);
        when(event.getHistorySize()).thenReturn(1);
        when(event.getHistoricalX(eq(0))).thenReturn(200f);
        when(event.getHistoricalY(eq(0))).thenReturn(100f);
        when(event.getHistoricalX(eq(0), eq(0))).thenReturn(200f);
        when(event.getHistoricalY(eq(0), eq(0))).thenReturn(100f);
        return event;
    }

    // Move event with a small negative (upwards) Y delta of -20.
    protected MotionEvent createShortUpwardsMoveEvent() {
        MotionEvent event = mock(MotionEvent.class);
        when(event.getAction()).thenReturn(MotionEvent.ACTION_MOVE);
        when(event.getX()).thenReturn(200f);
        when(event.getY()).thenReturn(100f);
        when(event.getX(0)).thenReturn(200f);
        when(event.getY(0)).thenReturn(100f);
        when(event.getHistorySize()).thenReturn(1);
        when(event.getHistoricalX(eq(0))).thenReturn(220f);
        when(event.getHistoricalY(eq(0))).thenReturn(120f);
        when(event.getHistoricalX(eq(0), eq(0))).thenReturn(220f);
        when(event.getHistoricalY(eq(0), eq(0))).thenReturn(120f);
        return event;
    }

    // Move event with a large negative (upwards) Y delta of -50.
    protected MotionEvent createLongUpwardsMoveEvent() {
        MotionEvent event = mock(MotionEvent.class);
        when(event.getAction()).thenReturn(MotionEvent.ACTION_MOVE);
        when(event.getX()).thenReturn(200f);
        when(event.getY()).thenReturn(100f);
        when(event.getX(0)).thenReturn(200f);
        when(event.getY(0)).thenReturn(100f);
        when(event.getHistorySize()).thenReturn(1);
        when(event.getHistoricalX(eq(0))).thenReturn(250f);
        when(event.getHistoricalY(eq(0))).thenReturn(150f);
        when(event.getHistoricalX(eq(0), eq(0))).thenReturn(250f);
        when(event.getHistoricalY(eq(0), eq(0))).thenReturn(150f);
        return event;
    }

    // Bare finger-up event, used to drive the release/bounce-back path.
    protected MotionEvent createDefaultUpActionEvent() {
        MotionEvent event = mock(MotionEvent.class);
        when(event.getAction()).thenReturn(MotionEvent.ACTION_UP);
        return event;
    }

    // Unit under test with default drag ratios and both listeners registered.
    protected VerticalOverScrollBounceEffectDecorator getUUT() {
        VerticalOverScrollBounceEffectDecorator uut = new VerticalOverScrollBounceEffectDecorator(mViewAdapter);
        uut.setOverScrollStateListener(mStateListener);
        uut.setOverScrollUpdateListener(mUpdateListener);
        return uut;
    }

    // Unit under test with explicit forward/backward drag ratios and both listeners registered.
    protected VerticalOverScrollBounceEffectDecorator getUUT(float touchDragRatioFwd, float touchDragRatioBck) {
        VerticalOverScrollBounceEffectDecorator uut = new VerticalOverScrollBounceEffectDecorator(mViewAdapter, touchDragRatioFwd, touchDragRatioBck, DEFAULT_DECELERATE_FACTOR);
        uut.setOverScrollStateListener(mStateListener);
        uut.setOverScrollUpdateListener(mUpdateListener);
        return uut;
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.util;
import com.intellij.lexer.JavaLexer;
import com.intellij.lexer.Lexer;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.fileTypes.LanguageFileType;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.JavaTokenType;
import com.intellij.psi.impl.source.tree.ElementType;
import com.intellij.util.StringBuilderSpinAllocator;
import com.intellij.util.text.CharArrayCharSequence;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class JavaUtil {
  // Static utility holder -- not instantiable.
  private JavaUtil() { }

  /**
   * Recursively scans {@code dir} and suggests likely source-root directories for files of the
   * given {@code fileType}, each paired with the package prefix that should be assigned to it.
   *
   * @param dir      directory to scan; also the base for package-prefix calculation
   * @param fileType the source file type to look for
   * @return list of (root directory, package prefix) pairs; empty when nothing was found
   */
  public static List<Pair<File,String>> suggestRoots(File dir, LanguageFileType fileType) {
    ArrayList<Pair<File,String>> foundDirectories = new ArrayList<Pair<File, String>>();
    try{
      suggestRootsImpl(dir, dir, foundDirectories, fileType);
    }
    catch(PathFoundException ignore){
      // Expected control-flow exception: it merely signals that the top-level scan found a root
      // at or above 'dir'; the results collected so far are complete and valid.
    }
    return foundDirectories;
  }

  // Control-flow exception used to unwind suggestRootsImpl's recursion once a source root has
  // been located; carries the root so ancestor frames can decide whether to keep unwinding.
  private static class PathFoundException extends Exception {
    // The directory identified as a source root.
    public File myDirectory;
    public PathFoundException(File directory) {
      myDirectory = directory;
    }
  }

  /**
   * Depth-first search for source files under {@code dir}. The first file of the requested type
   * determines a root (via its package statement); the root -- or {@code base} plus a package
   * prefix, when the root lies above {@code base} -- is recorded, and a
   * {@link PathFoundException} unwinds the scan of the covered subtree.
   */
  private static void suggestRootsImpl(File base,
                                       File dir,
                                       ArrayList<? super Pair<File, String>> foundDirectories,
                                       LanguageFileType fileType) throws PathFoundException {
    if (!dir.isDirectory()) {
      return;
    }
    FileTypeManager typeManager = FileTypeManager.getInstance();
    // Skip directories the IDE is configured to ignore (e.g. VCS metadata).
    if (typeManager.isFileIgnored(dir.getName())) {
      return;
    }
    final ProgressIndicator progressIndicator = ProgressManager.getInstance().getProgressIndicator();
    if (progressIndicator != null) {
      if (progressIndicator.isCanceled()) {
        return;
      }
      progressIndicator.setText2(dir.getPath());
    }
    File[] list = dir.listFiles();
    if (list == null || list.length == 0) {
      return;
    }
    // First pass: plain files only. The first file of the requested type decides the root for
    // this entire subtree.
    for (File child : list) {
      if (child.isFile()) {
        FileType type = typeManager.getFileTypeByFileName(child.getName());
        if (fileType == type) {
          if (progressIndicator != null && progressIndicator.isCanceled()) {
            return;
          }
          Pair<File, String> root = suggestRootForJavaFile(child);
          if (root != null) {
            String packagePrefix = getPackagePrefix(base, root);
            if (packagePrefix == null) {
              // Root is not an ancestor of 'base' -- report the root itself.
              foundDirectories.add(root);
            }
            else {
              // Root lies at/above 'base' -- report 'base' with the derived package prefix.
              foundDirectories.add(Pair.create(base, packagePrefix));
            }
            // Unwind: everything under this root is covered by the entry just added.
            throw new PathFoundException(root.getFirst());
          }
          else {
            // A file of the right type yielded no root (unreadable / no package statement) --
            // give up on this directory.
            return;
          }
        }
      }
    }
    // Second pass: recurse into subdirectories.
    for (File child : list) {
      if (child.isDirectory()) {
        try {
          suggestRootsImpl(base, child, foundDirectories, fileType);
        }
        catch (PathFoundException found) {
          if (!found.myDirectory.equals(child)) {
            // The discovered root is above 'child', so it covers this directory too --
            // keep unwinding.
            throw found;
          }
          // Root was exactly 'child'; its siblings may still contain further roots.
        }
      }
    }
  }

  /**
   * Walks up from {@code base} looking for {@code root}'s directory. If found, returns the
   * package prefix corresponding to {@code base} under that root (the root's own prefix joined
   * with the directory names collected on the way up); returns {@code null} when {@code root}
   * is not an ancestor of {@code base}.
   */
  @Nullable
  private static String getPackagePrefix(File base, Pair<File,String> root) {
    String result = "";
    for (File parent = base; parent != null; parent = parent.getParentFile()) {
      if (parent.equals(root.getFirst())) {
        return root.getSecond() + (root.getSecond().length() > 0 && result.length() > 0 ? "." : "") + result;
      }
      result = parent.getName() + (result.length() > 0 ? "." : "") + result;
    }
    return null;
  }

  /**
   * Derives a (root directory, package prefix) pair for one source file by matching the trailing
   * segments of its package statement against its parent directory names, bottom-up. For
   * {@code .../src/com/foo/Bar.java} declaring {@code package com.foo;} this yields
   * ({@code .../src}, ""). Returns {@code null} when the file cannot be read or declares no
   * package.
   */
  @Nullable
  private static Pair<File,String> suggestRootForJavaFile(File javaFile) {
    if (!javaFile.isFile()) return null;
    final CharSequence chars;
    try {
      chars = new CharArrayCharSequence(FileUtil.loadFileText(javaFile));
    }
    catch(IOException e){
      // Unreadable file -- no root can be derived from it.
      return null;
    }
    String packageName = getPackageStatement(chars);
    if (packageName != null) {
      File root = javaFile.getParentFile();
      int index = packageName.length();
      // Match package segments (right-to-left) against directory names (bottom-to-top).
      while (index > 0) {
        int index1 = packageName.lastIndexOf('.', index - 1);
        String token = packageName.substring(index1 + 1, index);
        String dirName = root.getName();
        // Honor the file system's case sensitivity when comparing names.
        final boolean equalsToToken = SystemInfo.isFileSystemCaseSensitive ? dirName.equals(token) : dirName.equalsIgnoreCase(token);
        if (!equalsToToken) {
          // Directory name diverges from the package segment: treat the current directory as
          // the root, with the still-unmatched package head as its prefix.
          return Pair.create(root, packageName.substring(0, index));
        }
        String parent = root.getParent();
        if (parent == null) {
          // Ran past the file-system root before exhausting the package -- no sensible root.
          return null;
        }
        root = new File(parent);
        index = index1;
      }
      // Every package segment matched a directory -- 'root' is the source root, no prefix.
      return Pair.create(root, "");
    }
    return null;
  }

  /**
   * Extracts the qualified name from a leading {@code package} statement in {@code text} using
   * a Java lexer, or returns {@code null} when no well-formed package statement is present.
   */
  @Nullable
  public static String getPackageStatement(CharSequence text){
    Lexer lexer = new JavaLexer(LanguageLevel.JDK_1_3);
    lexer.start(text);
    skipWhiteSpaceAndComments(lexer);
    if (lexer.getTokenType() != JavaTokenType.PACKAGE_KEYWORD) return null;
    lexer.advance();
    skipWhiteSpaceAndComments(lexer);
    final StringBuilder buffer = StringBuilderSpinAllocator.alloc();
    try {
      // Accumulate IDENTIFIER (DOT IDENTIFIER)* into the buffer.
      while(true){
        if (lexer.getTokenType() != JavaTokenType.IDENTIFIER) break;
        buffer.append(text, lexer.getTokenStart(), lexer.getTokenEnd());
        lexer.advance();
        skipWhiteSpaceAndComments(lexer);
        if (lexer.getTokenType() != JavaTokenType.DOT) break;
        buffer.append('.');
        lexer.advance();
        skipWhiteSpaceAndComments(lexer);
      }
      String packageName = buffer.toString();
      // Reject empty or trailing-dot results (malformed package statements).
      if (packageName.length() == 0 || StringUtil.endsWithChar(packageName, '.')) return null;
      return packageName;
    }
    finally {
      StringBuilderSpinAllocator.dispose(buffer);
    }
  }

  // Advances the lexer past any whitespace and comment tokens.
  public static void skipWhiteSpaceAndComments(Lexer lexer){
    while(ElementType.JAVA_COMMENT_OR_WHITESPACE_BIT_SET.contains(lexer.getTokenType())) {
      lexer.advance();
    }
  }
}
| |
package com.datasift.client;
import io.higgs.http.client.HttpRequestBuilder;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
/*
*/
public class DataSiftConfig {
    // Format template for "missing mandatory setting" messages; callers fill in the field name.
    private static final String illConfigured = "(%s) is null, this is an ill-configured object and all " +
            "API requests using it will fail";
    // True when at least one of SSLv3/TLSv1/TLSv1.2 is available in this JVM (set in the constructor).
    protected boolean compatibleSSLProtocolsFound;
    // SSL/TLS protocol names supported by this JVM, in the order they were probed.
    protected List<String> sslProtocols = new ArrayList<>();
    // DataSift credentials; set via auth(...) or the two-arg constructor.
    protected String username, apiKey;
    // When true (the default) all API calls go over HTTPS.
    protected boolean sslEnabled = true;
    // Default hosts for the REST API, the streaming (WebSocket) API and the ingestion API.
    protected String host = "api.datasift.com";
    protected String wsHost = "websocket.datasift.com";
    protected String ingestionHost = "in.datasift.com";
    /*
     * This instance should be used as a base for configurations.
     * All new requests should use {@link io.higgs.http.client.HttpRequestBuilder#copy()}.
     * Although a single instance would work, this keeps things simple and ensures no stale data is passed in
     * subsequent requests.
     * It is also very cheap in terms of Netty's thread usage because Higgs re-uses the same event loop group,
     * so it'll never be creating a whole new load of resources for each instance.
     */
    protected HttpRequestBuilder http = HttpRequestBuilder.instance();
    // API version segment used in request URLs, e.g. "v1.6".
    protected String versionPrefix = "v1.6";
    // Charset name used when URL-encoding request parameters.
    protected String urlEncodingFormat = "ISO-8859-1";
    // Port used only when manualPort is true; otherwise port() derives 443/80 from sslEnabled.
    protected int port = 80;
    protected boolean manualPort;
    private boolean autoReconnect = true;
    // Connection timeout in milliseconds.
    protected int connectTimeout = 10000;
public DataSiftConfig() {
http.userAgent("DataSift/" + versionPrefix + " Java/" + getClientVersion());
if (HttpRequestBuilder.isSupportedSSLProtocol("SSLv3")) {
sslProtocols.add("SSLv3");
}
if (HttpRequestBuilder.isSupportedSSLProtocol("TLSv1")) {
sslProtocols.add("TLSv1");
}
if (HttpRequestBuilder.isSupportedSSLProtocol("TLSv1.2")) {
sslProtocols.add("TLSv1.2");
}
compatibleSSLProtocolsFound = sslProtocols.size() > 0;
}
    /**
     * Initialize a new config instance with the given username and API key.
     *
     * @param username the DataSift username
     * @param apiKey   the DataSift API key
     */
    public DataSiftConfig(String username, String apiKey) {
        this();
        auth(username, apiKey);
    }
/*
* Provide the user credentials that should be used for authentication
*
* @param username the DataSift username
* @param apiKey the DataSift API key
* @return this object for further configuration
*/
public DataSiftConfig auth(String username, String apiKey) {
if (username == null || apiKey == null || username.isEmpty() || apiKey.isEmpty()) {
throw new IllegalArgumentException(String.format("A valid username and API key are required. Username = " +
"%s, API key = %s", username, apiKey));
}
this.username = username;
this.apiKey = apiKey;
return this;
}
    /**
     * Sets a host and port for a proxy that all requests should be sent to.
     *
     * @param host proxy host
     * @param port proxy port
     * @return this
     */
    public DataSiftConfig proxy(String host, int port) {
        http.proxy(host, port);
        return this;
    }
    /**
     * Sets an authenticating proxy that all requests should be sent through.
     *
     * @param host     proxy host
     * @param port     proxy port
     * @param username the username the proxy requires
     * @param password the password the proxy requires
     * @return this
     */
    public DataSiftConfig proxy(String host, int port, String username, String password) {
        http.proxy(host, port, username, password);
        return this;
    }
/*
* @return The HTTP protocol prefix to use.
* If SSL is enabled this will be "https://" if not it'll be "http://"
*/
public String protocol() {
return sslEnabled ? "https://" : "http://";
}
    /**
     * Sets the host name to which all API calls with this configuration will be made.
     *
     * @param host the API host name
     * @return this
     */
    public DataSiftConfig host(String host) {
        this.host = host;
        return this;
    }
    /**
     * @return the host name to which all API calls with this configuration will be made
     */
    public String host() {
        return host;
    }
    /**
     * Sets the host name used for the streaming (WebSocket) API.
     *
     * @param host the streaming host name
     * @return this
     */
    public DataSiftConfig wsHost(String host) {
        this.wsHost = host;
        return this;
    }
    /**
     * @return the host name used to stream
     */
    public String wsHost() {
        return wsHost;
    }
    /**
     * Sets the host name to which all ingestion API calls will be made.
     *
     * @param host the ingestion host name
     * @return this
     */
    public DataSiftConfig ingestionHost(String host) {
        this.ingestionHost = host;
        return this;
    }
    /**
     * @return the host name to which all ingestion API calls with this configuration will be made
     */
    public String ingestionHost() {
        return ingestionHost;
    }
/*
* @return The port on which connections should be made
*/
public int port() {
return manualPort ? port : sslEnabled ? 443 : 80;
}
    /**
     * Forces a specific port for all connections, overriding the SSL-based default.
     *
     * @param p the port to use
     */
    public void port(int p) {
        manualPort = true;
        port = p;
    }
/*
* @return A base URL to the DataSift API. e.g. https://api.datasift.com/v1/
*/
public URI baseURL() {
StringBuilder b = new StringBuilder()
.append(protocol())
.append(host())
.append(":")
.append(port())
.append("/")
.append(versionPrefix())
.append("/");
try {
return new URI(b.toString());
} catch (URISyntaxException e) {
throw new IllegalArgumentException("Unable to construct a base URL for the API", e);
}
}
/*
* @return A base URL to the DataSift Ingestion API. e.g. https://in.datasift.com/
*/
public URI baseIngestionURL() {
StringBuilder b = new StringBuilder()
.append(protocol())
.append(ingestionHost())
.append(":")
.append(port())
.append("/");
try {
return new URI(b.toString());
} catch (URISyntaxException e) {
throw new IllegalArgumentException("Unable to construct a base URL for the ingestion API", e);
}
}
    /**
     * Generate a new URI object given an endpoint relative to the base URL of this configuration.
     * For example, if the base URL is https://api.datasift.com/v1/ and the endpoint parameter is "validate",
     * this will return the URI https://api.datasift.com/v1/validate.
     * Technically any path acceptable by {@link URI#resolve(String)} is acceptable.
     *
     * @param endpoint the endpoint to return a URI for
     * @return a new URI with the resolved URL that is safe to manipulate
     */
    public URI newAPIEndpointURI(String endpoint) {
        return baseURL().resolve(endpoint);
    }
/*
* Generate a new URI object given an endpoint relative to the base url of this configuration.
* For example, if the base URL is https://api.datasift.com/v1/ and the endpoint parameters is validate
* this will return the URI https://api.datasift.com/v1/validate.
* Technically any path acceptable by {@link URI#resolve(URI)} is acceptable
*
* @param endpoint the endpoint to return a URI for.
* @return a new URI with the resolved URL that is safe to manipulate
*/
public URI newAPIEndpointURI(URI endpoint) {
return baseURL().resolve(endpoint);
}
/*
* Generate a new URI object given an endpoint relative to the base ingestion url of this configuration.
* For example, if the base URL is https://in.datasift.com/ and the endpoint is the source ID,
* this would return a URI similar to https://in.datasift.com/9b101935be2044bb9cfdffbc953924e8.
* Technically any path acceptable by {@link URI#resolve(String)} is acceptable
*
* @param endpoint the endpoint to return a URI for.
* @return a new URI with the resolved URL that is safe to manipulate
*/
public URI newIngestionAPIEndpointURI(String endpoint) {
return baseIngestionURL().resolve(endpoint);
}
/*
* Generate a new URI object given an endpoint relative to the base ingestion url of this configuration.
* For example, if the base URL is https://in.datasift.com/ and the endpoint is the source ID,
* this would return a URI similar to https://in.datasift.com/9b101935be2044bb9cfdffbc953924e8.
* Technically any path acceptable by {@link URI#resolve(String)} is acceptable
*
* @param endpoint the endpoint to return a URI for.
* @return a new URI with the resolved URL that is safe to manipulate
*/
public URI newIngestionAPIEndpointURI(URI endpoint) {
return baseIngestionURL().resolve(endpoint);
}
/**
 * @return The API version prefix to use, e.g. v1
 */
public String versionPrefix() {
    return versionPrefix;
}

/**
 * Force the client to use a version other than the default.
 *
 * @param prefix the prefix to use, this should be along the lines of v1.6 i.e. vMajor.Minor
 * @return this config, for method chaining
 */
public DataSiftConfig versionPrefix(String prefix) {
    versionPrefix = prefix;
    return this;
}
/**
 * Returns the configured DataSift username.
 *
 * @return the username
 * @throws IllegalStateException if no username has been configured
 */
public String getUsername() {
    if (username != null) {
        return username;
    }
    throw new IllegalStateException(String.format(illConfigured, "Username"));
}
/**
 * Returns the configured DataSift API key.
 *
 * @return the API key
 * @throws IllegalStateException if no API key has been configured
 */
public String getApiKey() {
    // BUG FIX: this previously checked "username == null", so a missing API
    // key silently returned null instead of failing fast like getUsername().
    if (apiKey == null) {
        throw new IllegalStateException(String.format(illConfigured, "API key"));
    }
    return apiKey;
}
/**
 * @return true if API and streaming connections use SSL
 */
public boolean isSslEnabled() {
    return sslEnabled;
}

/**
 * Enables or disables SSL. If no port was set manually, the default port is
 * switched to 443 (SSL) or 80 (plain) to match.
 *
 * @param sslEnabled whether to use SSL
 */
public void setSslEnabled(boolean sslEnabled) {
    this.sslEnabled = sslEnabled;
    if (!manualPort) {
        port = this.sslEnabled ? 443 : 80;
    }
}

/**
 * @return the credentials in the "username:apiKey" form used for the auth header
 */
public String authAsHeader() {
    return username + ":" + apiKey;
}

/**
 * @return the HTTP request builder used to issue API calls
 */
public HttpRequestBuilder http() {
    return http;
}
/**
 * @return The format that should be used to URL encode parameters
 */
public String urlEncodingFormat() {
    return urlEncodingFormat;
}

/**
 * Sets the format that should be used to encode URL parameters when the option arises
 * e.g. ISO-8859-1 or UTF-8
 *
 * @param format the format
 * @return this
 */
public DataSiftConfig urlEncodingFormat(String format) {
    urlEncodingFormat = format;
    return this;
}

/**
 * @return true if streams are automatically reconnected when a websocket connection closes
 */
public boolean isAutoReconnect() {
    return autoReconnect;
}

/**
 * Sets whether streams are automatically reconnected when a websocket connection is closed
 *
 * @param autoReconnect true or false, defaults to true
 */
public void setAutoReconnect(boolean autoReconnect) {
    this.autoReconnect = autoReconnect;
}
/**
 * Gets an array of compatible SSL protocols found on this JVM
 *
 * @return an array of SSL protocols or NULL if none are available
 */
public String[] sslProtocols() {
    // returns a defensive copy of the list so callers cannot mutate the config's state
    return compatibleSSLProtocolsFound() ? sslProtocols.toArray(new String[sslProtocols.size()]) : null;
}

/**
 * @return true if at least one compatible secure protocol is available
 */
public boolean compatibleSSLProtocolsFound() {
    return compatibleSSLProtocolsFound;
}
/**
 * @return the connection timeout (presumably milliseconds — confirm against HttpRequestBuilder usage)
 */
public int connectTimeout() {
    return connectTimeout;
}

/**
 * Sets the connection timeout.
 *
 * @param connectTimeout the timeout value to use when establishing connections
 */
public void connectTimeout(int connectTimeout) {
    this.connectTimeout = connectTimeout;
}
/**
 * Reads the client library version from the bundled /version.prop resource.
 *
 * @return the value of the "version" property, or "3.x" if the resource is
 *         missing or unreadable (the property itself may be absent, in which
 *         case null is returned — unchanged from previous behavior)
 */
public String getClientVersion() {
    String path = "/version.prop";
    InputStream stream = getClass().getResourceAsStream(path);
    if (stream == null) {
        return "3.x";
    }
    Properties props = new Properties();
    try {
        props.load(stream);
        return (String) props.get("version");
    } catch (IOException e) {
        return "3.x";
    } finally {
        // BUG FIX: the stream was previously only closed on the success path,
        // leaking it whenever Properties.load threw an IOException.
        try {
            stream.close();
        } catch (IOException ignored) {
            // best-effort close of a classpath resource; nothing useful to do
        }
    }
}
}
| |
package org.nd4j.linalg.jcublas;
import org.nd4j.linalg.api.complex.IComplexDouble;
import org.nd4j.linalg.api.complex.IComplexFloat;
import org.nd4j.linalg.api.complex.IComplexNumber;
import org.nd4j.linalg.api.complex.IComplexNDArray;
import org.nd4j.linalg.api.ndarray.INDArray;
/**
 * Blas wrapper for JCUDA
 *
 * <p>Thin delegation layer: every implemented operation forwards to the static
 * helpers in {@code SimpleJCublas}. Methods of the BlasWrapper contract that
 * have no CUDA implementation either throw {@link UnsupportedOperationException},
 * return {@code null}, or return {@code 0} — see the NOTE(review) comments below;
 * the inconsistency looks unintentional but is preserved here.
 *
 * @author mjk
 * @author Adam Gibson
 */
public class JCublasWrapper implements org.nd4j.linalg.factory.BlasWrapper {
    /** Swaps the contents of x and y; returns y. */
    @Override
    public INDArray swap(INDArray x, INDArray y) {
        SimpleJCublas.swap(x,y);
        return y;
    }
    @Override
    public INDArray scal(double alpha, INDArray x) {
        return SimpleJCublas.scal(alpha,x);
    }
    // NOTE(review): unlike the double overload this returns x itself rather than
    // SimpleJCublas' return value — confirm the two are the same array.
    @Override
    public INDArray scal(float alpha, INDArray x) {
        SimpleJCublas.scal(alpha,x);
        return x;
    }
    @Override
    public IComplexNDArray scal(IComplexFloat alpha, IComplexNDArray x) {
        return SimpleJCublas.scal(alpha, x);
    }
    @Override
    public IComplexNDArray scal(IComplexDouble alpha, IComplexNDArray x) {
        return SimpleJCublas.scal(alpha, x);
    }
    /** Copies x into y; returns y. */
    @Override
    public INDArray copy(INDArray x, INDArray y) {
        SimpleJCublas.copy(x,y);
        return y;
    }
    @Override
    public IComplexNDArray copy(IComplexNDArray x, IComplexNDArray y) {
        SimpleJCublas.copy(x,y);
        return y;
    }
    /** dy = da * dx + dy; returns dy. */
    @Override
    public INDArray axpy(double da, INDArray dx, INDArray dy) {
        SimpleJCublas.axpy(da,dx,dy);
        return dy;
    }
    @Override
    public INDArray axpy(float da, INDArray dx, INDArray dy) {
        SimpleJCublas.axpy(da,dx,dy);
        return dy;
    }
    // Dispatches on the runtime precision of the complex scalar.
    @Override
    public IComplexNDArray axpy(IComplexNumber da, IComplexNDArray dx, IComplexNDArray dy) {
        if(da instanceof IComplexDouble) {
            SimpleJCublas.axpy((IComplexDouble) da,dx,dy);
        }
        else
            SimpleJCublas.axpy((IComplexFloat) da,dx,dy);
        return dy;
    }
    // NOTE(review): no @Override here, unlike the siblings — presumably dot(INDArray,
    // INDArray) is not part of the BlasWrapper interface; verify.
    public double dot(INDArray x, INDArray y) {
        return SimpleJCublas.dot(x,y);
    }
    //@Override
    public double dotd(INDArray x, INDArray y) {
        return SimpleJCublas.dot(x,y);
    }
    @Override
    public IComplexNumber dotc(IComplexNDArray x, IComplexNDArray y) {
        // NOTE(review): dotc (conjugated dot) delegates to plain dot — confirm
        // SimpleJCublas.dot conjugates, otherwise dotc and dotu coincide wrongly.
        return SimpleJCublas.dot(x,y);
    }
    @Override
    public IComplexNumber dotu(IComplexNDArray x, IComplexNDArray y) {
        return SimpleJCublas.dotu(x, y);
    }
    /** Euclidean (2-)norm of x. */
    @Override
    public double nrm2(INDArray x) {
        return SimpleJCublas.nrm2(x);
    }
    @Override
    public double nrm2(IComplexNDArray x) {
        return SimpleJCublas.nrm2(x);
    }
    /** Sum of absolute values of x. */
    @Override
    public double asum(INDArray x) {
        return SimpleJCublas.asum(x);
    }
    @Override
    public double asum(IComplexNDArray x) {
        return SimpleJCublas.asum(x);
    }
    /** Index of the element with the largest absolute value. */
    @Override
    public int iamax(INDArray x) {
        return SimpleJCublas.iamax(x);
    }
    @Override
    public int iamax(IComplexNDArray x) {
        return SimpleJCublas.iamax(x);
    }
    /** y = alpha * A * x + beta * y; returns y. */
    @Override
    public INDArray gemv(double alpha, INDArray a, INDArray x, double beta, INDArray y) {
        SimpleJCublas.gemv(a,x,y,alpha,beta);
        return y;
    }
    @Override
    public INDArray gemv(float alpha, INDArray a, INDArray x, float beta, INDArray y) {
        return SimpleJCublas.gemv(a,x,y, alpha, beta);
    }
    /** Rank-1 update: A = alpha * x * y^T + A. */
    @Override
    public INDArray ger(double alpha, INDArray x, INDArray y, INDArray a) {
        return SimpleJCublas.ger(x,y,a,alpha);
    }
    @Override
    public INDArray ger(float alpha, INDArray x, INDArray y, INDArray a) {
        return SimpleJCublas.ger(x,y,a,alpha);
    }
    // NOTE(review): the complex gemv overloads silently return null while other
    // unimplemented methods throw UnsupportedOperationException — callers get an
    // NPE later instead of a clear error; consider unifying.
    @Override
    public IComplexNDArray gemv(IComplexDouble alpha, IComplexNDArray a, IComplexNDArray x, IComplexDouble beta, IComplexNDArray y) {
        return null;
    }
    @Override
    public IComplexNDArray gemv(IComplexFloat alpha, IComplexNDArray a, IComplexNDArray x, IComplexFloat beta, IComplexNDArray y) {
        return null;
    }
    @Override
    public IComplexNDArray geru(IComplexDouble alpha, IComplexNDArray x, IComplexNDArray y, IComplexNDArray a) {
        throw new UnsupportedOperationException();
    }
    // NOTE(review): float geru/gerc are implemented (widening alpha to double)
    // while the double overloads throw — looks inverted; verify intent.
    @Override
    public IComplexNDArray geru(IComplexFloat alpha, IComplexNDArray x, IComplexNDArray y, IComplexNDArray a) {
        return SimpleJCublas.geru(x, y, a, alpha.asDouble());
    }
    @Override
    public IComplexNDArray gerc(IComplexFloat alpha, IComplexNDArray x, IComplexNDArray y, IComplexNDArray a) {
        return SimpleJCublas.gerc(x, y, a, alpha.asDouble());
    }
    @Override
    public IComplexNDArray gerc(IComplexDouble alpha, IComplexNDArray x, IComplexNDArray y, IComplexNDArray a) {
        throw new UnsupportedOperationException();
    }
    /** C = alpha * A * B + beta * C; returns c. */
    @Override
    public INDArray gemm(double alpha, INDArray a, INDArray b, double beta, INDArray c) {
        return SimpleJCublas.gemm(a,b,c,alpha,beta);
    }
    @Override
    public INDArray gemm(float alpha, INDArray a, INDArray b, float beta, INDArray c) {
        return SimpleJCublas.gemm(a,b,c,alpha,beta);
    }
    // Dispatches on the runtime precision of beta; alpha is converted to match.
    @Override
    public IComplexNDArray gemm(IComplexNumber alpha, IComplexNDArray a, IComplexNDArray b, IComplexNumber beta, IComplexNDArray c) {
        if(beta instanceof IComplexDouble)
            SimpleJCublas.gemm(a,b,alpha.asDouble(),c,beta.asDouble());
        else
            SimpleJCublas.gemm(a,b,alpha.asFloat(),c,beta.asFloat());
        return c;
    }
    // ---- LAPACK-style routines below are not implemented for JCUDA ----
    @Override
    public INDArray gesv(INDArray a, int[] ipiv, INDArray b) {
        throw new UnsupportedOperationException();
    }
    @Override
    public void checkInfo(String name, int info) {
        throw new UnsupportedOperationException();
    }
    @Override
    public INDArray sysv(char uplo, INDArray a, int[] ipiv, INDArray b) {
        throw new UnsupportedOperationException();
    }
    // NOTE(review): syev/syevd return 0 (a "success" info code) without doing any
    // work, unlike the sibling methods that throw — likely a stub left in place.
    @Override
    public int syev(char jobz, char uplo, INDArray a, INDArray w) {
        return 0;
    }
    @Override
    public int syevx(char jobz, char range, char uplo, INDArray a, float vl, float vu, int il, int iu, float abstol, INDArray w, INDArray z) {
        throw new UnsupportedOperationException();
    }
    //@Override
    public int syevx(char jobz, char range, char uplo, INDArray a, double vl, double vu, int il, int iu, double abstol, INDArray w, INDArray z) {
        throw new UnsupportedOperationException();
    }
    @Override
    public int syevd(char jobz, char uplo, INDArray A, INDArray w) {
        return 0;
    }
    @Override
    public int syevr(char jobz, char range, char uplo, INDArray a, float vl, float vu, int il, int iu, float abstol, INDArray w, INDArray z, int[] isuppz) {
        throw new UnsupportedOperationException();
    }
    //@Override
    public int syevr(char jobz, char range, char uplo, INDArray a, double vl, double vu, int il, int iu, double abstol, INDArray w, INDArray z, int[] isuppz) {
        throw new UnsupportedOperationException();
    }
    @Override
    public void posv(char uplo, INDArray A, INDArray B) {
        throw new UnsupportedOperationException();
    }
    @Override
    public int geev(char jobvl, char jobvr, INDArray A, INDArray WR, INDArray WI, INDArray VL, INDArray VR) {
        throw new UnsupportedOperationException();
    }
    @Override
    public int sygvd(int itype, char jobz, char uplo, INDArray A, INDArray B, INDArray W) {
        throw new UnsupportedOperationException();
    }
    @Override
    public void gelsd(INDArray A, INDArray B) {
        throw new UnsupportedOperationException();
    }
    @Override
    public void geqrf(INDArray A, INDArray tau) {
        throw new UnsupportedOperationException();
    }
    @Override
    public void ormqr(char side, char trans, INDArray A, INDArray tau, INDArray C) {
        throw new UnsupportedOperationException();
    }
    @Override
    public void dcopy(int n, float[] dx, int dxIdx, int incx, float[] dy, int dyIdx, int incy) {
        throw new UnsupportedOperationException();
    }
    // NOTE(review): the double overload delegates to the generic axpy while the
    // float overload uses the dedicated saxpy — presumably equivalent; confirm.
    @Override
    public void saxpy(double alpha, INDArray x, INDArray y) {
        SimpleJCublas.axpy(alpha,x,y);
    }
    /**
     * Abstraction over saxpy
     *
     * @param alpha the alpha to scale by
     * @param x the ndarray to use
     * @param y the ndarray to use
     */
    @Override
    public void saxpy(float alpha, INDArray x, INDArray y) {
        SimpleJCublas.saxpy(alpha,x,y);
    }
    /*
    missing functions
    gesv
    sysv
    syev
    syevx
    syevd
    syevr
    posv
    geev
    sygvd
    gelsd
    geqrf
    ormqr
    */
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.falcon.entity;
import org.apache.falcon.FalconException;
import org.apache.falcon.entity.parser.EntityParserFactory;
import org.apache.falcon.entity.parser.FeedEntityParser;
import org.apache.falcon.entity.store.ConfigurationStore;
import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.Frequency;
import org.apache.falcon.entity.v0.SchemaHelper;
import org.apache.falcon.entity.v0.cluster.Cluster;
import org.apache.falcon.entity.v0.cluster.Properties;
import org.apache.falcon.entity.v0.cluster.Property;
import org.apache.falcon.entity.v0.feed.Argument;
import org.apache.falcon.entity.v0.feed.Arguments;
import org.apache.falcon.entity.v0.feed.ClusterType;
import org.apache.falcon.entity.v0.feed.Clusters;
import org.apache.falcon.entity.v0.feed.Extract;
import org.apache.falcon.entity.v0.feed.ExtractMethod;
import org.apache.falcon.entity.v0.feed.Feed;
import org.apache.falcon.entity.v0.feed.FieldIncludeExclude;
import org.apache.falcon.entity.v0.feed.FieldsType;
import org.apache.falcon.entity.v0.feed.Import;
import org.apache.falcon.entity.v0.feed.Lifecycle;
import org.apache.falcon.entity.v0.feed.Location;
import org.apache.falcon.entity.v0.feed.LocationType;
import org.apache.falcon.entity.v0.feed.Locations;
import org.apache.falcon.entity.v0.feed.MergeType;
import org.apache.falcon.entity.v0.feed.RetentionStage;
import org.apache.falcon.entity.v0.feed.Datasource;
import org.apache.falcon.entity.v0.feed.Validity;
import org.apache.falcon.entity.v0.process.Input;
import org.apache.falcon.entity.v0.process.Inputs;
import org.apache.falcon.entity.v0.process.Output;
import org.apache.falcon.entity.v0.process.Outputs;
import org.apache.falcon.entity.v0.process.Process;
import org.apache.falcon.resource.SchedulableEntityInstance;
import org.apache.falcon.service.LifecyclePolicyMap;
import org.apache.falcon.util.DateUtil;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TimeZone;
/**
* Test for feed helper methods.
*/
public class FeedHelperTest extends AbstractTestBase {
// Timezone used by getDate(...) helpers throughout this test class.
private static final TimeZone UTC = TimeZone.getTimeZone("UTC");
// Fresh config store handle, re-acquired before every test (see setUp).
private ConfigurationStore store;

/** One-time setup: initialize the config store and lifecycle policy registry. */
@BeforeClass
public void init() throws Exception {
    initConfigStore();
    LifecyclePolicyMap.get().init();
}

/** Per-test setup: wipe previously published entities and grab a clean store. */
@BeforeMethod
public void setUp() throws Exception {
    cleanupStore();
    store = getStore();
}
/**
 * normalizePartitionExpression should strip slashes/whitespace, join both parts
 * with '/', and tolerate nulls on either side.
 */
@Test
public void testPartitionExpression() {
    // {first part, second part, expected normalized expression}
    String[][] cases = {
        {" /a// ", " /b// ", "a/b"},
        {null, " /b// ", "b"},
        {null, null, ""},
    };
    for (String[] c : cases) {
        Assert.assertEquals(FeedHelper.normalizePartitionExpression(c[0], c[1]), c[2]);
    }
}
/** Asking for a producer instance before the feed's validity start must fail. */
@Test(expectedExceptions = IllegalArgumentException.class)
public void testInstanceBeforeStart() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "minutes(5)", "2011-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Process process = prepareProcess(cluster, "minutes(10)", "2012-02-28 10:37 UTC", "2012-02-28 10:47 UTC");
    Outputs outputs = new Outputs();
    Output outFeed = new Output();
    outFeed.setName("outputFeed");
    outFeed.setFeed(feed.getName());
    outFeed.setInstance("now(0,0)");
    outputs.getOutputs().add(outFeed);
    process.setOutputs(outputs);
    store.publish(EntityType.PROCESS, process);
    // 2011-02-27 is one day before the feed validity start above
    FeedHelper.getProducerInstance(feed, getDate("2011-02-27 10:00 UTC"), cluster);
}

/** The feed validity end is exclusive: an instance exactly at the end must fail. */
@Test(expectedExceptions = IllegalArgumentException.class)
public void testInstanceEqualsEnd() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "minutes(5)", "2011-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Process process = prepareProcess(cluster, "minutes(10)", "2012-02-28 10:37 UTC", "2012-02-28 10:47 UTC");
    Outputs outputs = new Outputs();
    Output outFeed = new Output();
    outFeed.setName("outputFeed");
    outFeed.setFeed(feed.getName());
    outFeed.setInstance("now(0,0)");
    outputs.getOutputs().add(outFeed);
    process.setOutputs(outputs);
    store.publish(EntityType.PROCESS, process);
    FeedHelper.getProducerInstance(feed, getDate("2016-02-28 10:00 UTC"), cluster);
}

/** An instance time not aligned to the feed's 5-minute frequency must fail. */
@Test(expectedExceptions = IllegalArgumentException.class)
public void testInstanceOutOfSync() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "minutes(5)", "2011-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Process process = prepareProcess(cluster, "minutes(10)", "2012-02-28 10:37 UTC", "2012-02-28 10:47 UTC");
    Outputs outputs = new Outputs();
    Output outFeed = new Output();
    outFeed.setName("outputFeed");
    outFeed.setFeed(feed.getName());
    outFeed.setInstance("now(0,0)");
    outputs.getOutputs().add(outFeed);
    process.setOutputs(outputs);
    store.publish(EntityType.PROCESS, process);
    // 09:04 does not fall on a minutes(5) boundary relative to 10:00
    FeedHelper.getProducerInstance(feed, getDate("2016-02-28 09:04 UTC"), cluster);
}
/** A valid feed instance with no matching producer process run yields null. */
@Test
public void testInvalidProducerInstance() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "minutes(5)", "2011-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Process process = prepareProcess(cluster, "minutes(10)", "2012-02-28 10:37 UTC", "2012-02-28 10:47 UTC");
    Outputs outputs = new Outputs();
    Output outFeed = new Output();
    outFeed.setName("outputFeed");
    outFeed.setFeed(feed.getName());
    outFeed.setInstance("now(0,0)");
    outputs.getOutputs().add(outFeed);
    process.setOutputs(outputs);
    store.publish(EntityType.PROCESS, process);
    Assert.assertNull(FeedHelper.getProducerInstance(feed, getDate("2012-02-28 10:40 UTC"), cluster));
}

/** A feed instance outside the producer process's validity window yields null. */
@Test
public void testGetProducerOutOfValidity() throws FalconException, ParseException {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "minutes(5)", "2011-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Process process = prepareProcess(cluster, "minutes(10)", "2012-02-28 10:37 UTC", "2012-02-28 10:47 UTC");
    Outputs outputs = new Outputs();
    Output outFeed = new Output();
    outFeed.setName("outputFeed");
    outFeed.setFeed(feed.getName());
    outFeed.setInstance("now(0,0)");
    outputs.getOutputs().add(outFeed);
    process.setOutputs(outputs);
    store.publish(EntityType.PROCESS, process);
    // the process is still reported as the feed's producer...
    Assert.assertEquals(FeedHelper.getProducerProcess(feed).getName(), process.getName());
    // ...but no concrete instance exists for this feed instance time
    SchedulableEntityInstance result = FeedHelper.getProducerInstance(feed, getDate("2012-02-28 10:45 UTC"),
        cluster);
    Assert.assertNull(result);
}
/** A feed instance outside the consumer process's validity has no consumers. */
@Test
public void testGetConsumersOutOfValidity() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "minutes(5)", "2011-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Process process = prepareProcess(cluster, "minutes(10)", "2012-02-28 10:37 UTC", "2012-02-28 10:47 UTC");
    Inputs inputs = new Inputs();
    Input inFeed = new Input();
    inFeed.setName("inputFeed");
    inFeed.setFeed(feed.getName());
    inFeed.setStart("now(0, -20)");
    inFeed.setEnd("now(0, 0)");
    inputs.getInputs().add(inFeed);
    process.setInputs(inputs);
    store.publish(EntityType.PROCESS, process);
    Set<SchedulableEntityInstance> result = FeedHelper.getConsumerInstances(feed, getDate("2016-02-28 09:00 UTC"),
        cluster);
    Assert.assertTrue(result.isEmpty());
}

/** Validity start and next-instance computation for a minutes(5) feed. */
@Test
public void testGetFeedValidityStartAndNextInstance() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "minutes(5)", "2011-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Date date = FeedHelper.getFeedValidityStart(feed, cluster.getName());
    Assert.assertEquals(DateUtil.getDateFormatFromTime(date.getTime()), "2011-02-28T10:00Z");
    Date nextDate = FeedHelper.getNextFeedInstanceDate(date, feed);
    Assert.assertEquals(DateUtil.getDateFormatFromTime(nextDate.getTime()), "2011-02-28T10:05Z");
}
/** Only the first process run whose input window covers the feed instance is returned. */
@Test
public void testGetConsumersFirstInstance() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "minutes(5)", "2011-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Process process = prepareProcess(cluster, "minutes(10)", "2012-02-28 10:37 UTC", "2012-02-28 10:47 UTC");
    Inputs inputs = new Inputs();
    Input inFeed = new Input();
    inFeed.setName("inputFeed");
    inFeed.setFeed(feed.getName());
    // each process run consumes the last 20 minutes of feed instances
    inFeed.setStart("now(0, -20)");
    inFeed.setEnd("now(0, 0)");
    inputs.getInputs().add(inFeed);
    process.setInputs(inputs);
    store.publish(EntityType.PROCESS, process);
    Set<SchedulableEntityInstance> result = FeedHelper.getConsumerInstances(feed, getDate("2012-02-28 10:15 UTC"),
        cluster);
    Set<SchedulableEntityInstance> expected = new HashSet<>();
    SchedulableEntityInstance consumer = new SchedulableEntityInstance(process.getName(), cluster.getName(),
        getDate("2012-02-28 10:37 UTC"), EntityType.PROCESS);
    consumer.setTags(SchedulableEntityInstance.INPUT);
    expected.add(consumer);
    Assert.assertEquals(result, expected);
}

/** With a longer process validity, multiple runs consume the same feed instance. */
@Test
public void testGetConsumersLastInstance() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "minutes(5)", "2011-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Process process = prepareProcess(cluster, "minutes(10)", "2012-02-28 10:20 UTC", "2016-02-28 10:00 UTC");
    Inputs inputs = new Inputs();
    Input inFeed = new Input();
    inFeed.setName("inputFeed");
    inFeed.setFeed(feed.getName());
    inFeed.setStart("now(0, -20)");
    inFeed.setEnd("now(0, 0)");
    inputs.getInputs().add(inFeed);
    process.setInputs(inputs);
    store.publish(EntityType.PROCESS, process);
    Set<SchedulableEntityInstance> result = FeedHelper.getConsumerInstances(feed, getDate("2012-02-28 10:15 UTC"),
        cluster);
    Set<SchedulableEntityInstance> expected = new HashSet<>();
    // both the 10:20 and 10:30 process runs have 10:15 inside their [now-20, now] window
    String[] consumers = { "2012-02-28 10:20 UTC", "2012-02-28 10:30 UTC", };
    for (String d : consumers) {
        SchedulableEntityInstance i = new SchedulableEntityInstance(process.getName(), cluster.getName(),
            getDate(d), EntityType.PROCESS);
        i.setTags(SchedulableEntityInstance.INPUT);
        expected.add(i);
    }
    Assert.assertEquals(result, expected);
}
/** Lifecycle retention policies are read from the feed definition (FEED3_XML fixture). */
@Test
public void testGetPolicies() throws Exception {
    FeedEntityParser parser = (FeedEntityParser) EntityParserFactory
        .getParser(EntityType.FEED);
    Feed feed = parser.parse(this.getClass().getResourceAsStream(FEED3_XML));
    List<String> policies = FeedHelper.getPolicies(feed, "testCluster");
    Assert.assertEquals(policies.size(), 1);
    Assert.assertEquals(policies.get(0), "AgeBasedDelete");
}

/** A feed with no producer or consumer processes yields empty/null lookups. */
@Test
public void testFeedWithNoDependencies() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "minutes(5)", "2011-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Set<SchedulableEntityInstance> result = FeedHelper.getConsumerInstances(feed, getDate("2016-02-28 09:00 UTC"),
        cluster);
    Assert.assertTrue(result.isEmpty());
    SchedulableEntityInstance res = FeedHelper.getProducerInstance(feed, getDate("2012-02-28 10:45 UTC"),
        cluster);
    Assert.assertNull(res);
}
/**
 * evaluateClusterExp must substitute ${cluster.name}, ${cluster.colo} and
 * cluster properties, and pass plain strings through untouched.
 */
@Test
public void testEvaluateExpression() throws Exception {
    Property clusterProperty = new Property();
    clusterProperty.setName("pname");
    clusterProperty.setValue("pvalue");
    Cluster cluster = new Cluster();
    cluster.setName("name");
    cluster.setColo("colo");
    cluster.setProperties(new Properties());
    cluster.getProperties().getProperties().add(clusterProperty);
    Assert.assertEquals(FeedHelper.evaluateClusterExp(cluster, "${cluster.colo}/*/US"), "colo/*/US");
    Assert.assertEquals(FeedHelper.evaluateClusterExp(cluster, "${cluster.name}/*/${cluster.pname}"),
        "name/*/pvalue");
    Assert.assertEquals(FeedHelper.evaluateClusterExp(cluster, "IN"), "IN");
}
/**
 * Data provider for {@code testGetDateFromPath}: each row is
 * {path template, concrete path, timezone, expected UTC date or null when the
 * path should not match the template}.
 */
@DataProvider(name = "fsPathsforDate")
public Object[][] createPathsForGetDate() {
    final TimeZone utc = TimeZone.getTimeZone("UTC");
    final TimeZone pacificTime = TimeZone.getTimeZone("America/Los_Angeles");
    final TimeZone ist = TimeZone.getTimeZone("IST");
    return new Object[][] {
        {"/data/${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}", "/data/2015/01/01/00/30", utc, "2015-01-01T00:30Z"},
        {"/data/${YEAR}-${MONTH}-${DAY}-${HOUR}-${MINUTE}", "/data/2015-01-01-01-00", utc, "2015-01-01T01:00Z"},
        {"/data/${YEAR}/${MONTH}/${DAY}", "/data/2015/01/01", utc, "2015-01-01T00:00Z"},
        {"/data/${YEAR}/${MONTH}/${DAY}/data", "/data/2015/01/01/data", utc, "2015-01-01T00:00Z"},
        // mismatched separators: template should not match, hence null
        {"/data/${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}", "/data/2015-01-01/00/30", utc, null},
        {"/data/${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}/data", "/data/2015-01-01/00/30", utc, null},
        {"/d/${YEAR}/${MONTH}/${DAY}/${HOUR}/data", "/d/2015/05/25/00/data/{p1}/p2", utc, "2015-05-25T00:00Z"},
        {"/data/${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}/data", "/data/2015/05/25/00/00/{p1}/p2", utc, null},
        {"/d/${YEAR}/${MONTH}/M", "/d/2015/11/M", utc, "2015-11-01T00:00Z"},
        {"/d/${YEAR}/${MONTH}/${DAY}/M", "/d/2015/11/02/M", utc, "2015-11-02T00:00Z"},
        {"/d/${YEAR}/${MONTH}/${DAY}/${HOUR}/M", "/d/2015/11/01/04/M", utc, "2015-11-01T04:00Z"},
        {"/d/${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}/M", "/d/2015/11/01/04/15/M", utc, "2015-11-01T04:15Z"},
        // non-UTC zones: expected values are the same wall-clock times converted to UTC
        {"/d/${YEAR}/${MONTH}/M", "/d/2015/11/M", pacificTime, "2015-11-01T07:00Z"},
        {"/d/${YEAR}/${MONTH}/${DAY}/M", "/d/2015/11/02/M", pacificTime, "2015-11-02T08:00Z"},
        {"/d/${YEAR}/${MONTH}/${DAY}/${HOUR}/M", "/d/2015/11/01/04/M", pacificTime, "2015-11-01T12:00Z"},
        {"/d/${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}/M", "/d/2015/11/01/04/15/M", ist, "2015-10-31T22:45Z"},
    };
}
/** Parses the date encoded in each provider path and compares its UTC rendering. */
@Test(dataProvider = "fsPathsforDate")
public void testGetDateFromPath(String template, String path, TimeZone tz, String expectedDate) throws Exception {
    Assert.assertEquals(
        SchemaHelper.formatDateUTC(FeedHelper.getDate(template, new Path(path), tz)),
        expectedDate);
}
/** getLocations returns the feed-level locations; getLocation filters by type. */
@Test
public void testGetLocations() {
    Cluster cluster = new Cluster();
    cluster.setName("name");
    Feed feed = new Feed();
    Location location1 = new Location();
    location1.setType(LocationType.META);
    Locations locations = new Locations();
    locations.getLocations().add(location1);
    Location location2 = new Location();
    location2.setType(LocationType.DATA);
    locations.getLocations().add(location2);
    // feed cluster has no locations of its own, so lookups fall back to the feed's
    org.apache.falcon.entity.v0.feed.Cluster feedCluster = new org.apache.falcon.entity.v0.feed.Cluster();
    feedCluster.setName("name");
    feed.setLocations(locations);
    Clusters clusters = new Clusters();
    feed.setClusters(clusters);
    feed.getClusters().getClusters().add(feedCluster);
    Assert.assertEquals(FeedHelper.getLocations(feedCluster, feed),
        locations.getLocations());
    Assert.assertEquals(FeedHelper.getLocation(feed, cluster, LocationType.DATA), location2);
}
/** Producer lookup when the process start (10:37) is offset from the feed boundary. */
@Test
public void testGetProducerProcessWithOffset() throws FalconException, ParseException {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "minutes(5)", "2011-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    // no producer exists until a process with this feed as output is published
    Assert.assertNull(FeedHelper.getProducerProcess(feed));
    Process process = prepareProcess(cluster, "minutes(10)", "2012-02-28 10:37 UTC", "2016-02-28 10:37 UTC");
    Outputs outputs = new Outputs();
    Output outFeed = new Output();
    outFeed.setName("outputFeed");
    outFeed.setFeed(feed.getName());
    outFeed.setInstance("now(0,0)");
    outputs.getOutputs().add(outFeed);
    process.setOutputs(outputs);
    store.publish(EntityType.PROCESS, process);
    Assert.assertEquals(FeedHelper.getProducerProcess(feed).getName(), process.getName());
    SchedulableEntityInstance result = FeedHelper.getProducerInstance(feed, getDate("2013-02-28 10:35 UTC"),
        cluster);
    SchedulableEntityInstance expected = new SchedulableEntityInstance(process.getName(), cluster.getName(),
        getDate("2013-02-28 10:37 UTC"), EntityType.PROCESS);
    expected.setTags(SchedulableEntityInstance.OUTPUT);
    Assert.assertEquals(result, expected);
}

/** Producer lookup with a now(0,0) output instance: process run maps 1:1 to feed instance. */
@Test
public void testGetProducerProcessForNow() throws FalconException, ParseException {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "days(1)", "2011-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Assert.assertNull(FeedHelper.getProducerProcess(feed));
    // create it's producer process submit it, test it's ProducerProcess
    Process process = prepareProcess(cluster, "days(1)", "2012-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Outputs outputs = new Outputs();
    Output outFeed = new Output();
    outFeed.setName("outputFeed");
    outFeed.setFeed(feed.getName());
    outFeed.setInstance("now(0,0)");
    outputs.getOutputs().add(outFeed);
    process.setOutputs(outputs);
    store.publish(EntityType.PROCESS, process);
    Assert.assertEquals(FeedHelper.getProducerProcess(feed).getName(), process.getName());
    SchedulableEntityInstance result = FeedHelper.getProducerInstance(feed, getDate("2013-02-28 10:00 UTC"),
        cluster);
    SchedulableEntityInstance expected = new SchedulableEntityInstance(process.getName(), cluster.getName(),
        getDate("2013-02-28 10:00 UTC"), EntityType.PROCESS);
    expected.setTags(SchedulableEntityInstance.OUTPUT);
    Assert.assertEquals(result, expected);
}

/** now(-4,0) output: the producing process run is one day AFTER the feed instance. */
@Test
public void testGetProducerWithNowNegativeOffset() throws FalconException, ParseException {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "days(1)", "2011-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Assert.assertNull(FeedHelper.getProducerProcess(feed));
    // create it's producer process submit it, test it's ProducerProcess
    Process process = prepareProcess(cluster, "days(1)", "2012-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Outputs outputs = new Outputs();
    Output outFeed = new Output();
    outFeed.setName("outputFeed");
    outFeed.setFeed(feed.getName());
    outFeed.setInstance("now(-4,0)");
    outputs.getOutputs().add(outFeed);
    process.setOutputs(outputs);
    store.publish(EntityType.PROCESS, process);
    Assert.assertEquals(FeedHelper.getProducerProcess(feed).getName(), process.getName());
    // feed instance 02-27 is produced by the 02-28 process run (offset -4 hours,
    // rounded to the days(1) feed boundary)
    SchedulableEntityInstance result = FeedHelper.getProducerInstance(feed, getDate("2013-02-27 10:00 UTC"),
        cluster);
    SchedulableEntityInstance expected = new SchedulableEntityInstance(process.getName(), cluster.getName(),
        getDate("2013-02-28 10:00 UTC"), EntityType.PROCESS);
    expected.setTags(SchedulableEntityInstance.OUTPUT);
    Assert.assertEquals(result, expected);
}

/** now(4,0) output: positive offset still resolves within the same feed day. */
@Test
public void testGetProducerWithNowPositiveOffset() throws FalconException, ParseException {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "days(1)", "2011-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Assert.assertNull(FeedHelper.getProducerProcess(feed));
    // create it's producer process submit it, test it's ProducerProcess
    Process process = prepareProcess(cluster, "days(1)", "2012-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Outputs outputs = new Outputs();
    Output outFeed = new Output();
    outFeed.setName("outputFeed");
    outFeed.setFeed(feed.getName());
    outFeed.setInstance("now(4,0)");
    outputs.getOutputs().add(outFeed);
    process.setOutputs(outputs);
    store.publish(EntityType.PROCESS, process);
    Assert.assertEquals(FeedHelper.getProducerProcess(feed).getName(), process.getName());
    SchedulableEntityInstance result = FeedHelper.getProducerInstance(feed, getDate("2013-02-28 10:00 UTC"),
        cluster);
    SchedulableEntityInstance expected = new SchedulableEntityInstance(process.getName(), cluster.getName(),
        getDate("2013-02-28 10:00 UTC"), EntityType.PROCESS);
    expected.setTags(SchedulableEntityInstance.OUTPUT);
    Assert.assertEquals(result, expected);
}
/**
 * Producer lookup for an output instance expressed as today(0,0): a feed
 * instance at 00:00 maps to the producer process run of the same day.
 */
@Test
public void testGetProducerProcessInstance() throws FalconException, ParseException {
    //create a feed, submit it, test that ProducerProcess is null
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "days(1)", "2011-02-28 00:00 UTC", "2016-02-28 10:00 UTC");
    // create its producer process, submit it, test its ProducerProcess
    Process process = prepareProcess(cluster, "days(1)", "2012-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Outputs outputs = new Outputs();
    Output outFeed = new Output();
    outFeed.setName("outputFeed");
    outFeed.setFeed(feed.getName());
    outFeed.setInstance("today(0,0)");
    outputs.getOutputs().add(outFeed);
    process.setOutputs(outputs);
    store.publish(EntityType.PROCESS, process);
    Assert.assertEquals(FeedHelper.getProducerProcess(feed).getName(), process.getName());
    // feed instance at 00:00 resolves to the process run scheduled at 10:00 the same day
    SchedulableEntityInstance result = FeedHelper.getProducerInstance(feed, getDate("2013-02-28 00:00 UTC"),
            cluster);
    SchedulableEntityInstance expected = new SchedulableEntityInstance(process.getName(), cluster.getName(),
            getDate("2013-02-28 10:00 UTC"), EntityType.PROCESS);
    expected.setTags(SchedulableEntityInstance.OUTPUT);
    Assert.assertEquals(result, expected);
}
/**
 * A process whose input references the feed must appear (and be the only
 * entry) in FeedHelper.getConsumerProcesses for that feed.
 */
@Test
public void testGetConsumerProcesses() throws FalconException, ParseException {
    // Publish a feed, then a process consuming it via a today(0,0)..today(0,0) window.
    Cluster srcCluster = publishCluster();
    Feed dataFeed = publishFeed(srcCluster, "days(1)", "2012-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Process consumer = prepareProcess(srcCluster, "days(1)", "2012-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Input input = new Input();
    input.setName("outputFeed");
    input.setFeed(dataFeed.getName());
    input.setStart("today(0,0)");
    input.setEnd("today(0,0)");
    Inputs processInputs = new Inputs();
    processInputs.getInputs().add(input);
    consumer.setInputs(processInputs);
    store.publish(EntityType.PROCESS, consumer);
    // Exactly the published process must be reported as consumer.
    Set<Process> consumers = FeedHelper.getConsumerProcesses(dataFeed);
    Assert.assertEquals(consumers.size(), 1);
    Assert.assertTrue(consumers.contains(consumer));
}
/**
 * An hourly feed consumed by a daily process through a now(-4,30)..now(4,30)
 * window: a feed instance inside that window maps back to exactly one
 * consumer process instance.
 */
@Test
public void testGetConsumerProcessInstances() throws Exception {
    //create a feed, submit it, test that ConsumerProcesses is blank list
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "hours(1)", "2012-02-28 00:00 UTC", "2016-02-28 00:00 UTC");
    //create a consumer Process and submit it, assert that this is returned in ConsumerProcesses
    Process process = prepareProcess(cluster, "days(1)", "2012-02-28 10:00 UTC", "2016-02-28 10:00 UTC");
    Inputs inputs = new Inputs();
    Input inFeed = new Input();
    inFeed.setName("inputFeed");
    inFeed.setFeed(feed.getName());
    inFeed.setStart("now(-4, 30)");
    inFeed.setEnd("now(4, 30)");
    inputs.getInputs().add(inFeed);
    process.setInputs(inputs);
    store.publish(EntityType.PROCESS, process);
    // the 09:00 feed instance falls only in the window of the 10:00 process run
    Set<SchedulableEntityInstance> result = FeedHelper.getConsumerInstances(feed,
            getDate("2012-02-28 09:00 UTC"), cluster);
    Assert.assertEquals(result.size(), 1);
    Set<SchedulableEntityInstance> expected = new HashSet<>();
    SchedulableEntityInstance ins = new SchedulableEntityInstance(process.getName(), cluster.getName(),
            getDate("2012-02-28 10:00 UTC"), EntityType.PROCESS);
    ins.setTags(SchedulableEntityInstance.INPUT);
    expected.add(ins);
    Assert.assertEquals(result, expected);
}
/**
 * Consumer lookup when neither entity has a unit frequency — feed at
 * minutes(5), process at minutes(10) — and the process start (09:37) is not
 * aligned with the feed's instance boundaries.
 */
@Test
public void testGetConsumerProcessInstancesWithNonUnitFrequency() throws Exception {
    //create a feed, submit it, test that ConsumerProcesses is blank list
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "minutes(5)", "2012-02-28 00:00 UTC", "2016-02-28 00:00 UTC");
    //create a consumer Process and submit it, assert that this is returned in ConsumerProcesses
    Process process = prepareProcess(cluster, "minutes(10)", "2012-02-28 09:37 UTC", "2016-02-28 10:00 UTC");
    Inputs inputs = new Inputs();
    Input inFeed = new Input();
    inFeed.setName("inputFeed");
    inFeed.setFeed(feed.getName());
    inFeed.setStart("now(0, -20)");
    inFeed.setEnd("now(0,0)");
    inputs.getInputs().add(inFeed);
    process.setInputs(inputs);
    store.publish(EntityType.PROCESS, process);
    Set<SchedulableEntityInstance> result = FeedHelper.getConsumerInstances(feed,
            getDate("2012-02-28 09:40 UTC"), cluster);
    // the 09:40 feed instance is covered by the 09:47 and 09:57 process runs
    Set<SchedulableEntityInstance> expected = new HashSet<>();
    String[] consumers = {"2012-02-28 09:47 UTC", "2012-02-28 09:57 UTC"};
    for (String d : consumers) {
        SchedulableEntityInstance i = new SchedulableEntityInstance(process.getName(), cluster.getName(),
                getDate(d), EntityType.PROCESS);
        i.setTags(SchedulableEntityInstance.INPUT);
        expected.add(i);
    }
    Assert.assertEquals(result, expected);
}
/**
 * A feed instance that predates the consumer process's validity window must
 * produce no consumer instances at all.
 */
@Test
public void testGetConsumersOutOfValidityRange() throws Exception {
    Cluster srcCluster = publishCluster();
    Feed dataFeed = publishFeed(srcCluster, "minutes(5)", "2010-02-28 00:00 UTC", "2016-02-28 00:00 UTC");
    // The consumer process only becomes valid in 2012; the queried feed instance is from 2010.
    Process consumer = prepareProcess(srcCluster, "minutes(10)", "2012-02-28 09:37 UTC", "2016-02-28 10:00 UTC");
    Input input = new Input();
    input.setName("inputFeed");
    input.setFeed(dataFeed.getName());
    input.setStart("now(0, -20)");
    input.setEnd("now(0,0)");
    Inputs processInputs = new Inputs();
    processInputs.getInputs().add(input);
    consumer.setInputs(processInputs);
    store.publish(EntityType.PROCESS, consumer);
    Set<SchedulableEntityInstance> consumerInstances = FeedHelper.getConsumerInstances(dataFeed,
            getDate("2010-02-28 09:40 UTC"), srcCluster);
    Assert.assertEquals(consumerInstances.size(), 0);
}
/**
 * Input window reaches two days back (today(-2,0)) but the process validity
 * spans only ten minutes, so only the single run inside that validity
 * (09:37) can consume the feed instance.
 */
@Test
public void testGetConsumersLargeOffsetShortValidity() throws Exception {
    //create a feed, submit it, test that ConsumerProcesses is blank list
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "minutes(5)", "2010-02-28 00:00 UTC", "2016-02-28 00:00 UTC");
    //create a consumer Process and submit it, assert that this is returned in ConsumerProcesses
    Process process = prepareProcess(cluster, "minutes(10)", "2012-02-28 09:37 UTC", "2012-02-28 09:47 UTC");
    Inputs inputs = new Inputs();
    Input inFeed = new Input();
    inFeed.setName("inputFeed");
    inFeed.setFeed(feed.getName());
    inFeed.setStart("today(-2, 0)");
    inFeed.setEnd("now(0,0)");
    inputs.getInputs().add(inFeed);
    process.setInputs(inputs);
    store.publish(EntityType.PROCESS, process);
    Set<SchedulableEntityInstance> result = FeedHelper.getConsumerInstances(feed,
            getDate("2012-02-28 09:35 UTC"), cluster);
    // only the 09:37 run lies inside the [09:37, 09:47) validity window
    Set<SchedulableEntityInstance> expected = new HashSet<>();
    SchedulableEntityInstance consumer = new SchedulableEntityInstance(process.getName(), cluster.getName(),
            getDate("2012-02-28 09:37 UTC"), EntityType.PROCESS);
    consumer.setTags(SchedulableEntityInstance.INPUT);
    expected.add(consumer);
    Assert.assertEquals(result, expected);
}
/**
 * An hourly feed consumed by an hourly process through a symmetric
 * now(-4,30)..now(4,30) window: one feed instance is consumed by the nine
 * process runs whose windows overlap it.
 */
@Test
public void testGetMultipleConsumerInstances() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "hours(1)", "2012-02-27 00:00 UTC", "2016-02-28 00:00 UTC");
    Process process = prepareProcess(cluster, "hours(1)", "2012-02-27 10:00 UTC", "2016-02-28 10:00 UTC");
    Inputs inputs = new Inputs();
    Input inFeed = new Input();
    inFeed.setName("inputFeed");
    inFeed.setFeed(feed.getName());
    inFeed.setStart("now(-4, 30)");
    inFeed.setEnd("now(4, 30)");
    inputs.getInputs().add(inFeed);
    process.setInputs(inputs);
    store.publish(EntityType.PROCESS, process);
    Set<SchedulableEntityInstance> result = FeedHelper.getConsumerInstances(feed,
            getDate("2012-02-28 09:00 UTC"), cluster);
    // process runs from 05:00 through 13:00 all cover the 09:00 feed instance
    Assert.assertEquals(result.size(), 9);
    Set<SchedulableEntityInstance> expected = new HashSet<>();
    String[] consumers = { "2012-02-28 05:00 UTC", "2012-02-28 06:00 UTC", "2012-02-28 07:00 UTC",
        "2012-02-28 08:00 UTC", "2012-02-28 09:00 UTC", "2012-02-28 10:00 UTC", "2012-02-28 11:00 UTC",
        "2012-02-28 12:00 UTC", "2012-02-28 13:00 UTC", };
    for (String d : consumers) {
        SchedulableEntityInstance i = new SchedulableEntityInstance(process.getName(), cluster.getName(),
                getDate(d), EntityType.PROCESS);
        i.setTags(SchedulableEntityInstance.INPUT);
        expected.add(i);
    }
    Assert.assertEquals(result, expected);
}
/**
 * Input window with a fixed start (today(0,0)) and a variable end (now(0,0)):
 * the midnight feed instance is consumed by every process run of that day.
 */
@Test
public void testGetConsumerWithVariableEnd() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "hours(1)", "2012-02-27 00:00 UTC", "2016-02-28 00:00 UTC");
    //create a consumer Process and submit it, assert that this is returned in ConsumerProcesses
    Process process = prepareProcess(cluster, "hours(1)", "2012-02-27 10:00 UTC", "2016-02-28 10:00 UTC");
    Inputs inputs = new Inputs();
    Input inFeed = new Input();
    inFeed.setName("inputFeed");
    inFeed.setFeed(feed.getName());
    inFeed.setStart("today(0, 0)");
    inFeed.setEnd("now(0, 0)");
    inputs.getInputs().add(inFeed);
    process.setInputs(inputs);
    store.publish(EntityType.PROCESS, process);
    Set<SchedulableEntityInstance> result = FeedHelper.getConsumerInstances(feed,
            getDate("2012-02-28 00:00 UTC"), cluster);
    // all 24 hourly runs of 2012-02-28 include the day's 00:00 feed instance
    Set<SchedulableEntityInstance> expected = new HashSet<>();
    String[] consumers = {"2012-02-28 11:00 UTC", "2012-02-28 16:00 UTC", "2012-02-28 18:00 UTC",
        "2012-02-28 20:00 UTC", "2012-02-28 13:00 UTC", "2012-02-28 03:00 UTC", "2012-02-28 04:00 UTC",
        "2012-02-28 06:00 UTC", "2012-02-28 05:00 UTC", "2012-02-28 17:00 UTC", "2012-02-28 00:00 UTC",
        "2012-02-28 23:00 UTC", "2012-02-28 21:00 UTC", "2012-02-28 15:00 UTC", "2012-02-28 22:00 UTC",
        "2012-02-28 14:00 UTC", "2012-02-28 08:00 UTC", "2012-02-28 12:00 UTC", "2012-02-28 02:00 UTC",
        "2012-02-28 01:00 UTC", "2012-02-28 19:00 UTC", "2012-02-28 10:00 UTC", "2012-02-28 09:00 UTC",
        "2012-02-28 07:00 UTC", };
    for (String d : consumers) {
        SchedulableEntityInstance i = new SchedulableEntityInstance(process.getName(), cluster.getName(),
                getDate(d), EntityType.PROCESS);
        i.setTags(SchedulableEntityInstance.INPUT);
        expected.add(i);
    }
    Assert.assertEquals(result, expected);
}
/**
 * Input window with a variable start (now(0,0)) and a fixed end
 * (today(24,0)): the queried feed instance is consumed by every run of the
 * preceding day plus the midnight run itself.
 */
@Test
public void testGetConsumerWithVariableStart() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "hours(1)", "2012-02-27 00:00 UTC", "2016-02-28 00:00 UTC");
    //create a consumer Process and submit it, assert that this is returned in ConsumerProcesses
    Process process = prepareProcess(cluster, "hours(1)", "2012-02-27 10:00 UTC", "2016-02-28 10:00 UTC");
    Inputs inputs = new Inputs();
    Input inFeed = new Input();
    inFeed.setName("inputFeed");
    inFeed.setFeed(feed.getName());
    inFeed.setStart("now(0, 0)");
    inFeed.setEnd("today(24, 0)");
    inputs.getInputs().add(inFeed);
    process.setInputs(inputs);
    store.publish(EntityType.PROCESS, process);
    Set<SchedulableEntityInstance> result = FeedHelper.getConsumerInstances(feed,
            getDate("2012-03-28 00:00 UTC"), cluster);
    // all 24 runs of 2012-03-27 plus the 2012-03-28 00:00 run consume the instance
    Set<SchedulableEntityInstance> expected = new HashSet<>();
    String[] consumers = {"2012-03-27 16:00 UTC", "2012-03-27 01:00 UTC", "2012-03-27 10:00 UTC",
        "2012-03-27 03:00 UTC", "2012-03-27 08:00 UTC", "2012-03-27 07:00 UTC", "2012-03-27 19:00 UTC",
        "2012-03-27 22:00 UTC", "2012-03-27 12:00 UTC", "2012-03-27 20:00 UTC", "2012-03-27 09:00 UTC",
        "2012-03-27 04:00 UTC", "2012-03-27 14:00 UTC", "2012-03-27 05:00 UTC", "2012-03-27 23:00 UTC",
        "2012-03-27 17:00 UTC", "2012-03-27 13:00 UTC", "2012-03-27 18:00 UTC", "2012-03-27 15:00 UTC",
        "2012-03-28 00:00 UTC", "2012-03-27 02:00 UTC", "2012-03-27 11:00 UTC", "2012-03-27 21:00 UTC",
        "2012-03-27 00:00 UTC", "2012-03-27 06:00 UTC", };
    for (String d : consumers) {
        SchedulableEntityInstance i = new SchedulableEntityInstance(process.getName(), cluster.getName(),
                getDate(d), EntityType.PROCESS);
        i.setTags(SchedulableEntityInstance.INPUT);
        expected.add(i);
    }
    Assert.assertEquals(result, expected);
}
/**
 * Input window ending in latest(0): the midnight feed instance is consumed
 * by every hourly process run of that day.
 */
@Test
public void testGetConsumerWithLatest() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "hours(1)", "2012-02-27 00:00 UTC", "2016-02-28 00:00 UTC");
    Process process = prepareProcess(cluster, "hours(1)", "2012-02-27 10:00 UTC", "2016-02-28 10:00 UTC");
    Inputs inputs = new Inputs();
    Input inFeed = new Input();
    inFeed.setName("inputFeed");
    inFeed.setFeed(feed.getName());
    inFeed.setStart("today(0, 0)");
    inFeed.setEnd("latest(0)");
    inputs.getInputs().add(inFeed);
    process.setInputs(inputs);
    store.publish(EntityType.PROCESS, process);
    Set<SchedulableEntityInstance> result = FeedHelper.getConsumerInstances(feed,
            getDate("2012-02-28 00:00 UTC"), cluster);
    // all 24 hourly runs of 2012-02-28 are expected
    Set<SchedulableEntityInstance> expected = new HashSet<>();
    String[] consumers = {"2012-02-28 23:00 UTC", "2012-02-28 04:00 UTC", "2012-02-28 10:00 UTC",
        "2012-02-28 07:00 UTC", "2012-02-28 17:00 UTC", "2012-02-28 13:00 UTC", "2012-02-28 05:00 UTC",
        "2012-02-28 22:00 UTC", "2012-02-28 03:00 UTC", "2012-02-28 21:00 UTC", "2012-02-28 11:00 UTC",
        "2012-02-28 20:00 UTC", "2012-02-28 06:00 UTC", "2012-02-28 01:00 UTC", "2012-02-28 14:00 UTC",
        "2012-02-28 00:00 UTC", "2012-02-28 18:00 UTC", "2012-02-28 12:00 UTC", "2012-02-28 16:00 UTC",
        "2012-02-28 09:00 UTC", "2012-02-28 15:00 UTC", "2012-02-28 19:00 UTC", "2012-02-28 08:00 UTC",
        "2012-02-28 02:00 UTC", };
    for (String d : consumers) {
        SchedulableEntityInstance i = new SchedulableEntityInstance(process.getName(), cluster.getName(),
                getDate(d), EntityType.PROCESS);
        i.setTags(SchedulableEntityInstance.INPUT);
        expected.add(i);
    }
    Assert.assertEquals(result, expected);
}
/**
 * isLifecycleEnabled must be false when no lifecycle is defined, and true
 * whether the lifecycle is defined globally, per-cluster, or at both levels.
 * The stages below mutate the same feed object in sequence.
 */
@Test
public void testIsLifeCycleEnabled() throws Exception {
    Feed feed = new Feed();
    // lifecycle is not defined
    Clusters clusters = new Clusters();
    org.apache.falcon.entity.v0.feed.Cluster cluster = new org.apache.falcon.entity.v0.feed.Cluster();
    cluster.setName("cluster1");
    clusters.getClusters().add(cluster);
    feed.setClusters(clusters);
    Assert.assertFalse(FeedHelper.isLifecycleEnabled(feed, cluster.getName()));
    // lifecycle is defined at global level
    Lifecycle globalLifecycle = new Lifecycle();
    RetentionStage retentionStage = new RetentionStage();
    retentionStage.setFrequency(new Frequency("hours(2)"));
    globalLifecycle.setRetentionStage(retentionStage);
    feed.setLifecycle(globalLifecycle);
    Assert.assertTrue(FeedHelper.isLifecycleEnabled(feed, cluster.getName()));
    // lifecycle is defined at both global and cluster level
    Lifecycle clusterLifecycle = new Lifecycle();
    retentionStage = new RetentionStage();
    retentionStage.setFrequency(new Frequency("hours(4)"));
    clusterLifecycle.setRetentionStage(retentionStage);
    feed.getClusters().getClusters().get(0).setLifecycle(clusterLifecycle);
    Assert.assertTrue(FeedHelper.isLifecycleEnabled(feed, cluster.getName()));
    // lifecycle is defined only at cluster level
    feed.setLifecycle(null);
    Assert.assertTrue(FeedHelper.isLifecycleEnabled(feed, cluster.getName()));
}
/**
 * Retention stage resolution: the cluster-level retention stage wins over
 * the global one when both are present; either alone is used when the other
 * is absent. The stages below mutate the same feed object in sequence, so
 * statement order matters.
 */
@Test
public void testGetRetentionStage() throws Exception {
    Feed feed = new Feed();
    feed.setFrequency(new Frequency("days(1)"));
    // retention stage frequency is not defined
    Lifecycle globalLifecycle = new Lifecycle();
    RetentionStage globalRetentionStage = new RetentionStage();
    globalLifecycle.setRetentionStage(globalRetentionStage);
    feed.setLifecycle(globalLifecycle);
    Clusters clusters = new Clusters();
    org.apache.falcon.entity.v0.feed.Cluster cluster = new org.apache.falcon.entity.v0.feed.Cluster();
    cluster.setName("cluster1");
    clusters.getClusters().add(cluster);
    feed.setClusters(clusters);
    // with no stage frequency the feed's own frequency is used
    Assert.assertEquals(FeedHelper.getLifecycleRetentionFrequency(feed, cluster.getName()),
            new Frequency("days(1)"));
    // lifecycle is defined only at global level
    globalRetentionStage.setFrequency(new Frequency("hours(2)"));
    globalLifecycle.setRetentionStage(globalRetentionStage);
    feed.setLifecycle(globalLifecycle);
    Assert.assertNotNull(FeedHelper.getRetentionStage(feed, cluster.getName()));
    Assert.assertEquals(FeedHelper.getLifecycleRetentionFrequency(feed, cluster.getName()),
            feed.getLifecycle().getRetentionStage().getFrequency());
    // lifecycle is defined at both global and cluster level
    Lifecycle clusterLifecycle = new Lifecycle();
    RetentionStage clusterRetentionStage = new RetentionStage();
    clusterRetentionStage.setFrequency(new Frequency("hours(4)"));
    clusterLifecycle.setRetentionStage(clusterRetentionStage);
    feed.getClusters().getClusters().get(0).setLifecycle(clusterLifecycle);
    Assert.assertNotNull(FeedHelper.getRetentionStage(feed, cluster.getName()));
    Assert.assertEquals(FeedHelper.getLifecycleRetentionFrequency(feed, cluster.getName()),
            cluster.getLifecycle().getRetentionStage().getFrequency());
    // lifecycle at both level - retention only at cluster level.
    feed.getLifecycle().setRetentionStage(null);
    Assert.assertNotNull(FeedHelper.getRetentionStage(feed, cluster.getName()));
    Assert.assertEquals(FeedHelper.getLifecycleRetentionFrequency(feed, cluster.getName()),
            cluster.getLifecycle().getRetentionStage().getFrequency());
    // lifecycle at both level - retention only at global level.
    feed.getLifecycle().setRetentionStage(globalRetentionStage);
    feed.getClusters().getClusters().get(0).getLifecycle().setRetentionStage(null);
    Assert.assertNotNull(FeedHelper.getRetentionStage(feed, cluster.getName()));
    Assert.assertEquals(FeedHelper.getLifecycleRetentionFrequency(feed, cluster.getName()),
            feed.getLifecycle().getRetentionStage().getFrequency());
    // lifecycle is defined only at cluster level
    feed.setLifecycle(null);
    feed.getClusters().getClusters().get(0).getLifecycle().setRetentionStage(clusterRetentionStage);
    Assert.assertNotNull(FeedHelper.getRetentionStage(feed, cluster.getName()));
    Assert.assertEquals(FeedHelper.getLifecycleRetentionFrequency(feed, cluster.getName()),
            cluster.getLifecycle().getRetentionStage().getFrequency());
}
/**
 * Default retention frequency when the retention stage has no frequency of
 * its own. Per the assertions below: feeds more frequent than six hours
 * default to hours(6); feeds at hours(7)/days(2)/days(10) keep their own
 * frequency. Stages mutate the same feed object in sequence.
 */
@Test
public void testGetRetentionFrequency() throws Exception {
    Feed feed = new Feed();
    feed.setFrequency(new Frequency("days(10)"));
    // no retention stage frequency defined - test both daily and monthly feeds
    Lifecycle globalLifecycle = new Lifecycle();
    RetentionStage globalRetentionStage = new RetentionStage();
    globalLifecycle.setRetentionStage(globalRetentionStage);
    feed.setLifecycle(globalLifecycle);
    Clusters clusters = new Clusters();
    org.apache.falcon.entity.v0.feed.Cluster cluster = new org.apache.falcon.entity.v0.feed.Cluster();
    cluster.setName("cluster1");
    clusters.getClusters().add(cluster);
    feed.setClusters(clusters);
    Assert.assertEquals(FeedHelper.getLifecycleRetentionFrequency(feed, cluster.getName()),
            new Frequency("days(10)"));
    feed.setFrequency(new Frequency("hours(1)"));
    Assert.assertEquals(FeedHelper.getLifecycleRetentionFrequency(feed, cluster.getName()),
            new Frequency("hours(6)"));
    feed.setFrequency(new Frequency("minutes(10)"));
    Assert.assertEquals(FeedHelper.getLifecycleRetentionFrequency(feed, cluster.getName()),
            new Frequency("hours(6)"));
    feed.setFrequency(new Frequency("hours(7)"));
    Assert.assertEquals(FeedHelper.getLifecycleRetentionFrequency(feed, cluster.getName()),
            new Frequency("hours(7)"));
    feed.setFrequency(new Frequency("days(2)"));
    Assert.assertEquals(FeedHelper.getLifecycleRetentionFrequency(feed, cluster.getName()),
            new Frequency("days(2)"));
    // lifecycle at both level - retention only at global level.
    feed.setFrequency(new Frequency("hours(1)"));
    globalRetentionStage.setFrequency(new Frequency("hours(2)"));
    globalLifecycle.setRetentionStage(globalRetentionStage);
    feed.setLifecycle(globalLifecycle);
    Lifecycle clusterLifecycle = new Lifecycle();
    RetentionStage clusterRetentionStage = new RetentionStage();
    clusterLifecycle.setRetentionStage(clusterRetentionStage);
    feed.getClusters().getClusters().get(0).setLifecycle(clusterLifecycle);
    // cluster stage (no frequency) shadows the global hours(2), so the default applies
    Assert.assertEquals(FeedHelper.getLifecycleRetentionFrequency(feed, cluster.getName()),
            new Frequency("hours(6)"));
    // lifecycle at both level - retention only at cluster level.
    feed.getLifecycle().getRetentionStage().setFrequency(null);
    clusterRetentionStage.setFrequency(new Frequency("hours(4)"));
    Assert.assertEquals(FeedHelper.getLifecycleRetentionFrequency(feed, cluster.getName()),
            new Frequency("hours(4)"));
}
/**
 * For a SNAPSHOT-merge import feed the initial import instance must equal
 * the start of the feed cluster's validity window.
 */
@Test
public void testFeedImportSnapshot() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = importFeedSnapshot(cluster, "hours(1)", "2012-02-07 00:00 UTC", "2020-02-25 00:00 UTC");
    org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(feed, cluster.getName());
    Date startInstResult = FeedHelper.getImportInitalInstance(feedCluster);
    Assert.assertNotNull(feed.getClusters().getClusters());
    Assert.assertNotNull(feed.getClusters().getClusters().get(0));
    Assert.assertNotNull(feed.getClusters().getClusters().get(0).getValidity());
    Assert.assertNotNull(feed.getClusters().getClusters().get(0).getValidity().getStart());
    Assert.assertNotNull(startInstResult);
    Assert.assertNotNull(feedCluster.getValidity().getStart());
    Assert.assertEquals(getDate("2012-02-07 00:00 UTC"), feedCluster.getValidity().getStart());
    Assert.assertTrue(FeedHelper.isImportEnabled(feedCluster));
    Assert.assertEquals(MergeType.SNAPSHOT, FeedHelper.getImportMergeType(feedCluster));
    Assert.assertEquals(startInstResult, feedCluster.getValidity().getStart());
}
/**
 * Verifies the import field list parsed from the feed cluster:
 * {@link #getAnImport} configures exactly two include fields ("id", "name")
 * and no exclusion.
 */
@Test
public void testFeedImportFields() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = importFeedSnapshot(cluster, "hours(1)", "2012-02-07 00:00 UTC", "2020-02-25 00:00 UTC");
    org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(feed, cluster.getName());
    // Removed an unused getImportInitalInstance() local that was never asserted on.
    List<String> fieldList = FeedHelper.getImportFieldList(feedCluster);
    // TestNG (actual, expected) order, consistent with the rest of this class.
    Assert.assertEquals(fieldList.size(), 2);
    Assert.assertFalse(FeedHelper.isFieldExcludes(feedCluster.getImport().getSource()));
}
/**
 * For an APPEND-merge import feed the initial import instance must equal the
 * start of the feed cluster's validity window.
 */
@Test
public void testFeedImportAppend() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = importFeedAppend(cluster, "hours(1)", "2012-02-07 00:00 UTC", "2020-02-25 00:00 UTC");
    org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(feed, cluster.getName());
    Date startInstResult = FeedHelper.getImportInitalInstance(feedCluster);
    // NOTE(review): importFeedAppend() currently publishes the feed WITHOUT the
    // APPEND import it builds (see that helper), so this exercises a feed with
    // no import configured — confirm intent.
    Assert.assertEquals(startInstResult, feed.getClusters().getClusters().get(0).getValidity().getStart());
}
/**
 * getClusterValidity must return the validity window configured for the
 * named cluster.
 */
@Test // was missing — without @Test, TestNG never executed this method
public void testGetFeedClusterValidity() throws Exception {
    Cluster cluster = publishCluster();
    Feed feed = publishFeed(cluster, "hours(1)", "2012-02-07 00:00 UTC", "2020-02-25 00:00 UTC");
    Validity validity = FeedHelper.getClusterValidity(feed, cluster.getName());
    Assert.assertEquals(validity.getStart(), getDate("2012-02-07 00:00 UTC"));
    Assert.assertEquals(validity.getEnd(), getDate("2020-02-25 00:00 UTC"));
}
/** Looking up validity for a cluster the feed is not published on must throw. */
@Test(expectedExceptions = FalconException.class)
public void testGetClusterValidityInvalidCluster() throws Exception {
    Cluster srcCluster = publishCluster();
    Feed publishedFeed = publishFeed(srcCluster, "hours(1)", "2012-02-07 00:00 UTC", "2020-02-25 00:00 UTC");
    FeedHelper.getClusterValidity(publishedFeed, "abracadabra");
}
/** Builds a feed {@link Validity} spanning the given start/end timestamps. */
private Validity getFeedValidity(String start, String end) throws ParseException {
    Validity window = new Validity();
    window.setStart(getDate(start));
    window.setEnd(getDate(end));
    return window;
}
/** Builds a process validity window spanning the given start/end timestamps. */
private org.apache.falcon.entity.v0.process.Validity getProcessValidity(String start, String end) throws
        ParseException {
    org.apache.falcon.entity.v0.process.Validity window = new org.apache.falcon.entity.v0.process.Validity();
    window.setStart(getDate(start));
    window.setEnd(getDate(end));
    return window;
}
/** Parses a "yyyy-MM-dd HH:mm Z" timestamp (e.g. "2012-02-28 10:00 UTC") into a Date. */
private Date getDate(String dateString) throws ParseException {
    // SimpleDateFormat is not thread-safe, so a fresh instance is created per call.
    return new SimpleDateFormat("yyyy-MM-dd HH:mm Z").parse(dateString);
}
/** Creates a cluster named "feedCluster" in colo "colo" and publishes it to the store. */
private Cluster publishCluster() throws FalconException {
    Cluster testCluster = new Cluster();
    testCluster.setName("feedCluster");
    testCluster.setColo("colo");
    store.publish(EntityType.CLUSTER, testCluster);
    return testCluster;
}
/**
 * Publishes a feed with no import configuration; delegates to the
 * five-argument overload with a null Import.
 */
private Feed publishFeed(Cluster cluster, String frequency, String start, String end)
        throws FalconException, ParseException {
    return publishFeed(cluster, frequency, start, end, null);
}
/**
 * Creates a feed named "feed" on the given cluster with the supplied
 * frequency and validity window, optionally attaching an import definition,
 * and publishes it to the configuration store.
 */
private Feed publishFeed(Cluster cluster, String frequency, String start, String end, Import imp)
        throws FalconException, ParseException {
    org.apache.falcon.entity.v0.feed.Cluster feedCluster = new org.apache.falcon.entity.v0.feed.Cluster();
    feedCluster.setType(ClusterType.SOURCE);
    feedCluster.setImport(imp);
    feedCluster.setName(cluster.getName());
    feedCluster.setValidity(getFeedValidity(start, end));

    Clusters feedClusters = new Clusters();
    feedClusters.getClusters().add(feedCluster);

    Feed feed = new Feed();
    feed.setName("feed");
    feed.setFrequency(new Frequency(frequency));
    feed.setTimezone(UTC);
    feed.setClusters(feedClusters);
    store.publish(EntityType.FEED, feed);
    return feed;
}
/**
 * Builds (but does not publish) a process named "process" scheduled on the
 * given cluster with the supplied frequency and validity window.
 */
private Process prepareProcess(Cluster cluster, String frequency, String start, String end) throws ParseException {
    org.apache.falcon.entity.v0.process.Cluster processCluster = new org.apache.falcon.entity.v0.process.Cluster();
    processCluster.setName(cluster.getName());
    processCluster.setValidity(getProcessValidity(start, end));

    org.apache.falcon.entity.v0.process.Clusters processClusters =
            new org.apache.falcon.entity.v0.process.Clusters();
    processClusters.getClusters().add(processCluster);

    Process process = new Process();
    process.setName("process");
    process.setTimezone(UTC);
    process.setClusters(processClusters);
    process.setFrequency(new Frequency(frequency));
    return process;
}
/** Publishes a feed configured with a SNAPSHOT-merge import definition. */
private Feed importFeedSnapshot(Cluster cluster, String frequency, String start, String end)
        throws FalconException, ParseException {
    return publishFeed(cluster, frequency, start, end, getAnImport(MergeType.SNAPSHOT));
}
/**
 * Publishes a feed configured with an APPEND-merge import definition.
 */
private Feed importFeedAppend(Cluster cluster, String frequency, String start, String end)
        throws FalconException, ParseException {
    Import imp = getAnImport(MergeType.APPEND);
    // Bug fix: the import definition was built but never passed on — the
    // four-argument publishFeed overload publishes with a null Import, leaving
    // the APPEND configuration unused.
    return publishFeed(cluster, frequency, start, end, imp);
}
/**
 * Builds an Import definition against a "test-db"/"test-table" datasource:
 * FULL extract with the given merge policy, two include fields (id, name),
 * and two sqoop-style arguments (--split_by, --num-mappers).
 */
private Import getAnImport(MergeType mergeType) {
    Extract extract = new Extract();
    extract.setType(ExtractMethod.FULL);
    extract.setMergepolicy(mergeType);

    FieldIncludeExclude includes = new FieldIncludeExclude();
    includes.getFields().add("id");
    includes.getFields().add("name");
    FieldsType fields = new FieldsType();
    fields.setIncludes(includes);

    Datasource source = new Datasource();
    source.setName("test-db");
    source.setTableName("test-table");
    source.setExtract(extract);
    source.setFields(fields);

    Arguments args = new Arguments();
    Argument splitBy = new Argument();
    splitBy.setName("--split_by");
    splitBy.setValue("id");
    args.getArguments().add(splitBy);
    Argument numMappers = new Argument();
    numMappers.setName("--num-mappers");
    numMappers.setValue("2");
    args.getArguments().add(numMappers);

    Import imp = new Import();
    imp.setSource(source);
    imp.setArguments(args);
    return imp;
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.rest.service.api.runtime;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.ObjectOutputStream;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.StringEntity;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.test.Deployment;
import org.flowable.rest.service.BaseSpringRestTestCase;
import org.flowable.rest.service.HttpMultipartHelper;
import org.flowable.rest.service.api.RestUrls;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
* Test for all REST-operations related to Process instance variables.
*
* @author Frederik Heremans
*/
public class ProcessInstanceVariablesCollectionResourceTest extends BaseSpringRestTestCase {
/**
 * Test getting all process variables. GET runtime/process-instances/{processInstanceId}/variables
 */
@Deployment(resources = { "org/flowable/rest/service/api/runtime/ProcessInstanceVariablesCollectionResourceTest.testProcess.bpmn20.xml" })
public void testGetProcessVariables() throws Exception {
    Calendar cal = Calendar.getInstance();
    // Start process with one variable of each supported type
    Map<String, Object> processVariables = new HashMap<>();
    processVariables.put("stringProcVar", "This is a ProcVariable");
    processVariables.put("intProcVar", 123);
    processVariables.put("longProcVar", 1234L);
    processVariables.put("shortProcVar", (short) 123);
    processVariables.put("doubleProcVar", 99.99);
    processVariables.put("booleanProcVar", Boolean.TRUE);
    processVariables.put("dateProcVar", cal.getTime());
    processVariables.put("byteArrayProcVar", "Some raw bytes".getBytes());
    processVariables.put("overlappingVariable", "process-value");
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", processVariables);
    // Request all variables (no scope parameter provided), which includes both
    // global and local variables
    CloseableHttpResponse response = executeRequest(new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_COLLECTION, processInstance.getId())),
            HttpStatus.SC_OK);
    JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent());
    closeResponse(response);
    // All nine variables set above must come back in the array response
    assertNotNull(responseNode);
    assertTrue(responseNode.isArray());
    assertEquals(9, responseNode.size());
}
/**
 * Test creating a single process variable. POST runtime/process-instance/{processInstanceId}/variables
 */
@Deployment(resources = { "org/flowable/rest/service/api/runtime/ProcessInstanceVariablesCollectionResourceTest.testProcess.bpmn20.xml" })
public void testCreateSingleProcessInstanceVariable() throws Exception {
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");

    // Request body: a one-element array describing the variable to create.
    ArrayNode requestBody = objectMapper.createArrayNode();
    ObjectNode variable = requestBody.addObject();
    variable.put("name", "myVariable");
    variable.put("value", "simple string value");
    variable.put("type", "string");

    // Create a new local variable
    HttpPost post = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_COLLECTION, processInstance.getId()));
    post.setEntity(new StringEntity(requestBody.toString()));
    CloseableHttpResponse httpResponse = executeRequest(post, HttpStatus.SC_CREATED);
    JsonNode createdVariable = objectMapper.readTree(httpResponse.getEntity().getContent()).get(0);
    closeResponse(httpResponse);

    // The REST response must echo the created variable.
    assertNotNull(createdVariable);
    assertEquals("myVariable", createdVariable.get("name").asText());
    assertEquals("simple string value", createdVariable.get("value").asText());
    assertEquals("local", createdVariable.get("scope").asText());
    assertEquals("string", createdVariable.get("type").asText());
    assertNull(createdVariable.get("valueUrl"));

    // And the variable must exist locally on the process instance in the engine.
    assertTrue(runtimeService.hasVariableLocal(processInstance.getId(), "myVariable"));
    assertEquals("simple string value", runtimeService.getVariableLocal(processInstance.getId(), "myVariable"));
}
/**
 * Test creating a single process variable using a binary stream. POST runtime/process-instances/{processInstanceId}/variables
 */
@Deployment(resources = { "org/flowable/rest/service/api/runtime/ProcessInstanceVariablesCollectionResourceTest.testProcess.bpmn20.xml" })
public void testCreateSingleBinaryProcessVariable() throws Exception {
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
    InputStream binaryContent = new ByteArrayInputStream("This is binary content".getBytes());
    // Add name, type and scope as multipart form fields alongside the binary body
    Map<String, String> additionalFields = new HashMap<>();
    additionalFields.put("name", "binaryVariable");
    additionalFields.put("type", "binary");
    HttpPost httpPost = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_COLLECTION, processInstance.getId()));
    httpPost.setEntity(HttpMultipartHelper.getMultiPartEntity("value", "application/octet-stream", binaryContent, additionalFields));
    CloseableHttpResponse response = executeBinaryRequest(httpPost, HttpStatus.SC_CREATED);
    JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent());
    closeResponse(response);
    // Binary variables carry no inline value; instead a valueUrl pointing to the data endpoint
    assertNotNull(responseNode);
    assertEquals("binaryVariable", responseNode.get("name").asText());
    assertTrue(responseNode.get("value").isNull());
    assertEquals("local", responseNode.get("scope").asText());
    assertEquals("binary", responseNode.get("type").asText());
    assertFalse(responseNode.get("valueUrl").isNull());
    assertTrue(responseNode.get("valueUrl").asText().endsWith(RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_DATA, processInstance.getId(), "binaryVariable")));
    // Check actual value of variable in engine
    Object variableValue = runtimeService.getVariableLocal(processInstance.getId(), "binaryVariable");
    assertNotNull(variableValue);
    assertTrue(variableValue instanceof byte[]);
    assertEquals("This is binary content", new String((byte[]) variableValue));
}
/**
 * Test creating a single process variable using a binary stream containing a serializable. POST runtime/process-instances/{processInstanceId}/variables
 */
@Deployment(resources = { "org/flowable/rest/service/api/runtime/ProcessInstanceVariablesCollectionResourceTest.testProcess.bpmn20.xml" })
public void testCreateSingleSerializableProcessVariable() throws Exception {
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
    TestSerializableVariable serializable = new TestSerializableVariable();
    serializable.setSomeField("some value");

    // Serialize object to a byte stream; try-with-resources guarantees the
    // ObjectOutputStream is flushed and closed even if writeObject throws
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    try (ObjectOutputStream output = new ObjectOutputStream(buffer)) {
        output.writeObject(serializable);
    }
    InputStream binaryContent = new ByteArrayInputStream(buffer.toByteArray());

    // Add name, type and scope
    Map<String, String> additionalFields = new HashMap<>();
    additionalFields.put("name", "serializableVariable");
    additionalFields.put("type", "serializable");

    // Upload the serialized payload using multipart-data
    HttpPost httpPost = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_COLLECTION, processInstance.getId()));
    httpPost.setEntity(HttpMultipartHelper.getMultiPartEntity("value", "application/x-java-serialized-object", binaryContent, additionalFields));
    CloseableHttpResponse response = executeBinaryRequest(httpPost, HttpStatus.SC_CREATED);
    JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent());
    closeResponse(response);

    // Serializable variables are not inlined in the response; only a valueUrl is exposed
    assertNotNull(responseNode);
    assertEquals("serializableVariable", responseNode.get("name").asText());
    assertTrue(responseNode.get("value").isNull());
    assertEquals("local", responseNode.get("scope").asText());
    assertEquals("serializable", responseNode.get("type").asText());
    assertFalse(responseNode.get("valueUrl").isNull());
    assertTrue(responseNode.get("valueUrl").asText().endsWith(RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_DATA, processInstance.getId(), "serializableVariable")));

    // Check actual value of variable in engine
    Object variableValue = runtimeService.getVariableLocal(processInstance.getId(), "serializableVariable");
    assertNotNull(variableValue);
    assertTrue(variableValue instanceof TestSerializableVariable);
    assertEquals("some value", ((TestSerializableVariable) variableValue).getSomeField());
}
/**
 * Test creating a single process variable, testing edge case exceptions. POST runtime/process-instances/{processInstanceId}/variables
 */
@Deployment(resources = { "org/flowable/rest/service/api/runtime/ProcessInstanceVariablesCollectionResourceTest.testProcess.bpmn20.xml" })
public void testCreateSingleProcessVariableEdgeCases() throws Exception {
// Test adding variable to unexisting execution -> expect 404 Not Found
ArrayNode requestNode = objectMapper.createArrayNode();
ObjectNode variableNode = requestNode.addObject();
variableNode.put("name", "existingVariable");
variableNode.put("value", "simple string value");
variableNode.put("type", "string");
HttpPost httpPost = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_COLLECTION, "unexisting"));
httpPost.setEntity(new StringEntity(requestNode.toString()));
closeResponse(executeRequest(httpPost, HttpStatus.SC_NOT_FOUND));
// Test trying to create already existing variable -> expect 409 Conflict
// (the same requestNode is re-posted against a real instance that already has the variable)
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
runtimeService.setVariable(processInstance.getId(), "existingVariable", "I already exist");
httpPost = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_COLLECTION, processInstance.getId()));
httpPost.setEntity(new StringEntity(requestNode.toString()));
closeResponse(executeRequest(httpPost, HttpStatus.SC_CONFLICT));
// Test creating nameless variable -> expect 400 Bad Request
// (removeAll() strips all fields of the node in place; only "value" is re-added)
variableNode.removeAll();
variableNode.put("value", "simple string value");
httpPost = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_COLLECTION, processInstance.getId()));
httpPost.setEntity(new StringEntity(requestNode.toString()));
closeResponse(executeRequest(httpPost, HttpStatus.SC_BAD_REQUEST));
// Test passing in empty array -> expect 400 Bad Request
requestNode.removeAll();
httpPost = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_COLLECTION, processInstance.getId()));
httpPost.setEntity(new StringEntity(requestNode.toString()));
closeResponse(executeRequest(httpPost, HttpStatus.SC_BAD_REQUEST));
// Test passing in object instead of array -> expect 400 Bad Request
httpPost = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_COLLECTION, processInstance.getId()));
httpPost.setEntity(new StringEntity(objectMapper.createObjectNode().toString()));
closeResponse(executeRequest(httpPost, HttpStatus.SC_BAD_REQUEST));
}
/**
 * Test creating a single process variable, testing default types when omitted. POST runtime/process-instances/{processInstanceId}/variables
 */
@Deployment(resources = { "org/flowable/rest/service/api/runtime/ProcessInstanceVariablesCollectionResourceTest.testProcess.bpmn20.xml" })
public void testCreateSingleProcessVariableDefaultTypes() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
// No "type" attribute is set in any request below: the REST layer must infer
// the variable type from the JSON value type alone.
// NOTE: varNode and httpPost are reused across requests; name/value are
// overwritten in place before each POST.
// String type detection
ArrayNode requestNode = objectMapper.createArrayNode();
ObjectNode varNode = requestNode.addObject();
varNode.put("name", "stringVar");
varNode.put("value", "String value");
HttpPost httpPost = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_COLLECTION, processInstance.getId()));
httpPost.setEntity(new StringEntity(requestNode.toString()));
closeResponse(executeRequest(httpPost, HttpStatus.SC_CREATED));
assertEquals("String value", runtimeService.getVariable(processInstance.getId(), "stringVar"));
// Integer type detection
varNode.put("name", "integerVar");
varNode.put("value", 123);
httpPost.setEntity(new StringEntity(requestNode.toString()));
closeResponse(executeRequest(httpPost, HttpStatus.SC_CREATED));
assertEquals(123, runtimeService.getVariable(processInstance.getId(), "integerVar"));
// Double type detection
varNode.put("name", "doubleVar");
varNode.put("value", 123.456);
httpPost.setEntity(new StringEntity(requestNode.toString()));
closeResponse(executeRequest(httpPost, HttpStatus.SC_CREATED));
assertEquals(123.456, runtimeService.getVariable(processInstance.getId(), "doubleVar"));
// Boolean type detection
varNode.put("name", "booleanVar");
varNode.put("value", Boolean.TRUE);
httpPost.setEntity(new StringEntity(requestNode.toString()));
closeResponse(executeRequest(httpPost, HttpStatus.SC_CREATED));
assertEquals(Boolean.TRUE, runtimeService.getVariable(processInstance.getId(), "booleanVar"));
}
/**
 * Test creating multiple process variables in a single call. POST runtime/process-instance/{processInstanceId}/variables
 */
@Deployment(resources = { "org/flowable/rest/service/api/runtime/ProcessInstanceVariablesCollectionResourceTest.testProcess.bpmn20.xml" })
public void testCreateMultipleProcessVariables() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
// Build one request array holding a variable of each supported simple type
ArrayNode requestNode = objectMapper.createArrayNode();
// String variable
ObjectNode stringVarNode = requestNode.addObject();
stringVarNode.put("name", "stringVariable");
stringVarNode.put("value", "simple string value");
stringVarNode.put("type", "string");
// Integer
ObjectNode integerVarNode = requestNode.addObject();
integerVarNode.put("name", "integerVariable");
integerVarNode.put("value", 1234);
integerVarNode.put("type", "integer");
// Short (JSON has no short type; the explicit "type" attribute drives the narrowing)
ObjectNode shortVarNode = requestNode.addObject();
shortVarNode.put("name", "shortVariable");
shortVarNode.put("value", 123);
shortVarNode.put("type", "short");
// Long
ObjectNode longVarNode = requestNode.addObject();
longVarNode.put("name", "longVariable");
longVarNode.put("value", 4567890L);
longVarNode.put("type", "long");
// Double
ObjectNode doubleVarNode = requestNode.addObject();
doubleVarNode.put("name", "doubleVariable");
doubleVarNode.put("value", 123.456);
doubleVarNode.put("type", "double");
// Boolean
ObjectNode booleanVarNode = requestNode.addObject();
booleanVarNode.put("name", "booleanVariable");
booleanVarNode.put("value", Boolean.TRUE);
booleanVarNode.put("type", "boolean");
// Date (transferred as an ISO date string, parsed back server-side)
Calendar varCal = Calendar.getInstance();
String isoString = getISODateString(varCal.getTime());
ObjectNode dateVarNode = requestNode.addObject();
dateVarNode.put("name", "dateVariable");
dateVarNode.put("value", isoString);
dateVarNode.put("type", "date");
// Create local variables with a single request
HttpPost httpPost = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_COLLECTION, processInstance.getId()));
httpPost.setEntity(new StringEntity(requestNode.toString()));
CloseableHttpResponse response = executeRequest(httpPost, HttpStatus.SC_CREATED);
JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent());
closeResponse(response);
// The response must echo one entry per created variable
assertNotNull(responseNode);
assertTrue(responseNode.isArray());
assertEquals(7, responseNode.size());
// Check if engine has correct variables set
Map<String, Object> variables = runtimeService.getVariablesLocal(processInstance.getId());
assertEquals(7, variables.size());
assertEquals("simple string value", variables.get("stringVariable"));
assertEquals(1234, variables.get("integerVariable"));
assertEquals((short) 123, variables.get("shortVariable"));
assertEquals(4567890L, variables.get("longVariable"));
assertEquals(123.456, variables.get("doubleVariable"));
assertEquals(Boolean.TRUE, variables.get("booleanVariable"));
// Round-trip check: the stored Date must equal the locally parsed ISO string
assertEquals(dateFormat.parse(isoString), variables.get("dateVariable"));
}
/**
 * Test creating multiple process variables in a single call, overwriting any
 * existing value. PUT runtime/process-instance/{processInstanceId}/variables
 */
@Deployment(resources = { "org/flowable/rest/service/api/runtime/ProcessInstanceVariablesCollectionResourceTest.testProcess.bpmn20.xml" })
public void testCreateMultipleProcessVariablesWithOverride() throws Exception {
    ProcessInstance instance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
    // Pre-set a variable that the PUT request is expected to overwrite
    runtimeService.setVariable(instance.getId(), "stringVariable", "initialValue");

    // Request body: two string variables, one of which already exists
    ArrayNode payload = objectMapper.createArrayNode();
    ObjectNode firstVar = payload.addObject();
    firstVar.put("name", "stringVariable");
    firstVar.put("value", "simple string value");
    firstVar.put("type", "string");
    ObjectNode secondVar = payload.addObject();
    secondVar.put("name", "stringVariable2");
    secondVar.put("value", "another string value");
    secondVar.put("type", "string");

    // PUT (unlike POST) overwrites existing variables instead of rejecting them
    HttpPut httpPut = new HttpPut(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_COLLECTION, instance.getId()));
    httpPut.setEntity(new StringEntity(payload.toString()));
    CloseableHttpResponse response = executeRequest(httpPut, HttpStatus.SC_CREATED);
    JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent());
    closeResponse(response);

    // One response entry per variable in the request
    assertNotNull(responseNode);
    assertTrue(responseNode.isArray());
    assertEquals(2, responseNode.size());

    // Engine now holds exactly the two values from the request
    Map<String, Object> variables = runtimeService.getVariablesLocal(instance.getId());
    assertEquals(2, variables.size());
    assertEquals("simple string value", variables.get("stringVariable"));
    assertEquals("another string value", variables.get("stringVariable2"));
}
/**
 * Test deleting all local process variables. DELETE runtime/process-instance/{processInstanceId}/variables
 */
@Deployment(resources = { "org/flowable/rest/service/api/runtime/ProcessInstanceVariablesCollectionResourceTest.testProcess.bpmn20.xml" })
public void testDeleteAllProcessVariables() throws Exception {
    // Start an instance that carries two variables
    Map<String, Object> initialVariables = new HashMap<>();
    initialVariables.put("var1", "This is a ProcVariable");
    initialVariables.put("var2", 123);
    ProcessInstance instance = runtimeService.startProcessInstanceByKey("oneTaskProcess", initialVariables);

    // DELETE on the collection resource wipes all local variables at once
    HttpDelete httpDelete = new HttpDelete(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE_VARIABLE_COLLECTION, instance.getId()));
    closeResponse(executeRequest(httpDelete, HttpStatus.SC_NO_CONTENT));

    // No local variables may remain on the instance
    assertEquals(0, runtimeService.getVariablesLocal(instance.getId()).size());
}
}
| |
/*
* Copyright 2018 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.plugin.redis.jedis;
import java.lang.reflect.Modifier;
import java.security.ProtectionDomain;
import com.navercorp.pinpoint.bootstrap.instrument.InstrumentClass;
import com.navercorp.pinpoint.bootstrap.instrument.InstrumentException;
import com.navercorp.pinpoint.bootstrap.instrument.InstrumentMethod;
import com.navercorp.pinpoint.bootstrap.instrument.MethodFilters;
import com.navercorp.pinpoint.bootstrap.instrument.Instrumentor;
import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformCallback;
import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformTemplate;
import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformTemplateAware;
import com.navercorp.pinpoint.bootstrap.interceptor.scope.ExecutionPolicy;
import com.navercorp.pinpoint.bootstrap.logging.PLogger;
import com.navercorp.pinpoint.bootstrap.logging.PLoggerFactory;
import com.navercorp.pinpoint.bootstrap.plugin.ProfilerPlugin;
import com.navercorp.pinpoint.bootstrap.plugin.ProfilerPluginSetupContext;
import com.navercorp.pinpoint.plugin.redis.jedis.interceptor.AttachEndPointInterceptor;
import com.navercorp.pinpoint.plugin.redis.jedis.interceptor.ProtocolSendCommandAndReadMethodInterceptor;
/**
 * Pinpoint profiler plugin for the Jedis Redis client. Registers bytecode
 * transformers that (a) inject an endpoint field into Jedis/Client instances
 * and intercept their constructors to record the Redis endpoint, and
 * (b) add scoped interceptors around command methods on Jedis, Protocol and
 * (optionally) the Pipeline classes.
 *
 * @author jaehong.kim
 */
public class JedisPlugin implements ProfilerPlugin, TransformTemplateAware {
    private final PLogger logger = PLoggerFactory.getLogger(this.getClass());
    private TransformTemplate transformTemplate;

    /**
     * Entry point called by the agent: reads the plugin configuration and, when
     * enabled, registers all class transformers.
     */
    @Override
    public void setup(ProfilerPluginSetupContext context) {
        final JedisPluginConfig config = new JedisPluginConfig(context.getConfig());
        if (!config.isEnable()) {
            if (logger.isInfoEnabled()) {
                logger.info("Disable JedisPlugin. config={}", config);
            }
            return;
        }
        logger.info("{} config:{}", this.getClass().getSimpleName(), config);

        // jedis & jedis cluster
        addJedis();
        addProtocol();
        if (config.isPipeline()) {
            // jedis pipeline
            addClient();
            addPipeline();
        }
    }

    // Jedis & BinaryJedis. The unused config parameters of the old private
    // helpers have been dropped; each transform re-reads the config from the
    // Instrumentor at transform time.
    private void addJedis() {
        addBinaryJedisExtends("redis.clients.jedis.BinaryJedis", BinaryJedisTransform.class);
        // Jedis extends BinaryJedis; the endpoint field is injected only on the base class
        addBinaryJedisExtends("redis.clients.jedis.Jedis", BinaryJedisExtendsTransform.class);
    }

    /** Transform for BinaryJedis itself: additionally injects the endpoint accessor field. */
    public static class BinaryJedisTransform extends BinaryJedisExtendsTransform {
        @Override
        public void handle(InstrumentClass target) throws InstrumentException {
            target.addField(EndPointAccessor.class);
        }
    }

    private void addBinaryJedisExtends(final String targetClassName, final Class<? extends TransformCallback> transformCallback) {
        transformTemplate.transform(targetClassName, transformCallback);
    }

    /**
     * Base transform for BinaryJedis and subclasses: hooks every known
     * constructor to capture the endpoint and wraps command methods in a
     * scoped interceptor.
     */
    public static class BinaryJedisExtendsTransform implements TransformCallback {
        public BinaryJedisExtendsTransform() {
        }

        @Override
        public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException {
            final InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer);
            handle(target);
            // Set endpoint: one registration per known constructor signature
            // host
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String");
            // host, port
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String", "int");
            // host, port, ssl
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String", "int", "boolean");
            // host, port, ssl, sslSocketFactory, sslParameters, hostnameVerifier
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String", "int", "boolean", "javax.net.ssl.SSLSocketFactory", "javax.net.ssl.SSLParameters", "javax.net.ssl.HostnameVerifier");
            // host, port, timeout
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String", "int", "int");
            // host, port, timeout, ssl
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String", "int", "int", "boolean");
            // host, port, timeout, ssl, sslSocketFactory, sslParameters, hostnameVerifier
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String", "int", "int", "boolean", "javax.net.ssl.SSLSocketFactory", "javax.net.ssl.SSLParameters", "javax.net.ssl.HostnameVerifier");
            // host, port, connectionTimeout, soTimeout
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String", "int", "int", "int");
            // host, port, connectionTimeout, soTimeout, infiniteSoTimeout
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String", "int", "int", "int", "int");
            // host, port, connectionTimeout, soTimeout, ssl
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String", "int", "int", "int", "boolean");
            // host, port, connectionTimeout, soTimeout, ssl, sslSocketFactory, sslParameters, hostnameVerifier
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String", "int", "int", "int", "boolean", "javax.net.ssl.SSLSocketFactory", "javax.net.ssl.SSLParameters", "javax.net.ssl.HostnameVerifier");
            // host, port, connectionTimeout, soTimeout, infiniteSoTimeout, ssl, sslSocketFactory, sslParameters, hostnameVerifier
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String", "int", "int", "int", "int", "boolean", "javax.net.ssl.SSLSocketFactory", "javax.net.ssl.SSLParameters", "javax.net.ssl.HostnameVerifier");
            // shardInfo
            JedisUtils.addSetEndPointInterceptor(target, "redis.clients.jedis.JedisShardInfo");
            // uri
            JedisUtils.addSetEndPointInterceptor(target, "java.net.URI");
            // uri, sslSocketFactory, sslParameters, hostnameVerifier
            JedisUtils.addSetEndPointInterceptor(target, "java.net.URI", "javax.net.ssl.SSLSocketFactory", "javax.net.ssl.SSLParameters", "javax.net.ssl.HostnameVerifier");
            // uri, timeout
            JedisUtils.addSetEndPointInterceptor(target, "java.net.URI", "int");
            // uri, timeout, sslSocketFactory, sslParameters, hostnameVerifier
            JedisUtils.addSetEndPointInterceptor(target, "java.net.URI", "int", "javax.net.ssl.SSLSocketFactory", "javax.net.ssl.SSLParameters", "javax.net.ssl.HostnameVerifier");
            // uri, connectionTimeout, soTimeout
            JedisUtils.addSetEndPointInterceptor(target, "java.net.URI", "int", "int");
            // uri, connectionTimeout, soTimeout, sslSocketFactory, sslParameters, hostnameVerifier
            JedisUtils.addSetEndPointInterceptor(target, "java.net.URI", "int", "int", "javax.net.ssl.SSLSocketFactory", "javax.net.ssl.SSLParameters", "javax.net.ssl.HostnameVerifier");
            // uri, connectionTimeout, soTimeout, infiniteSoTimeout, sslSocketFactory, sslParameters, hostnameVerifier
            JedisUtils.addSetEndPointInterceptor(target, "java.net.URI", "int", "int", "int", "javax.net.ssl.SSLSocketFactory", "javax.net.ssl.SSLParameters", "javax.net.ssl.HostnameVerifier");
            // redis.clients.jedis.JedisSocketFactory
            JedisUtils.addSetEndPointInterceptor(target, "redis.clients.jedis.JedisSocketFactory");
            // methods(commands)
            final JedisPluginConfig config = new JedisPluginConfig(instrumentor.getProfilerConfig());
            JedisUtils.addJedisMethodInterceptor(target, config, JedisConstants.REDIS_SCOPE);
            return target.toBytecode();
        }

        /** Hook for subclasses to add extra instrumentation; intentionally a no-op here. */
        protected void handle(InstrumentClass target) throws InstrumentException {
            // no-op by default
        }
    }

    // Client
    private void addClient() {
        transformTemplate.transform("redis.clients.jedis.Client", ClientTransform.class);
    }

    /** Transform for redis.clients.jedis.Client: endpoint field plus constructor hooks. */
    public static class ClientTransform implements TransformCallback {
        @Override
        public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException {
            final InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer);
            target.addField(EndPointAccessor.class);
            // Set endpoint
            // host
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String");
            // host, port
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String", "int");
            // host, port, ssl
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String", "int", "boolean");
            // host, port, ssl, sslSocketFactory, sslParameters, hostnameVerifier
            JedisUtils.addSetEndPointInterceptor(target, "java.lang.String", "int", "boolean", "javax.net.ssl.SSLSocketFactory", "javax.net.ssl.SSLParameters", "javax.net.ssl.HostnameVerifier");
            return target.toBytecode();
        }
    }

    private void addProtocol() {
        transformTemplate.transform("redis.clients.jedis.Protocol", ProtocolTransform.class);
    }

    /** Transform for redis.clients.jedis.Protocol: intercepts non-private sendCommand/read. */
    public static class ProtocolTransform implements TransformCallback {
        @Override
        public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException {
            final InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer);
            for (InstrumentMethod method : target.getDeclaredMethods(MethodFilters.chain(MethodFilters.name("sendCommand", "read"), MethodFilters.modifierNot(Modifier.PRIVATE)))) {
                method.addScopedInterceptor(ProtocolSendCommandAndReadMethodInterceptor.class, JedisConstants.REDIS_SCOPE, ExecutionPolicy.INTERNAL);
            }
            return target.toBytecode();
        }
    }

    // Pipeline
    private void addPipeline() {
        addPipelineBaseExtends("redis.clients.jedis.PipelineBase", PipelineBaseExtendsTransform.class);
        // MultiKeyPipelineBase extends PipelineBase
        addPipelineBaseExtends("redis.clients.jedis.MultiKeyPipelineBase", PipelineBaseExtendsTransform.class);
        // Pipeline extends PipelineBase
        addPipelineBaseExtends("redis.clients.jedis.Pipeline", PipelineTransform.class);
    }

    private void addPipelineBaseExtends(String targetClassName, final Class<? extends TransformCallback> transformCallback) {
        transformTemplate.transform(targetClassName, transformCallback);
    }

    /** Transform for Pipeline: endpoint field and endpoint propagation from the Client. */
    public static class PipelineTransform extends PipelineBaseExtendsTransform {
        @Override
        protected void handle(InstrumentClass target) throws InstrumentException {
            target.addField(EndPointAccessor.class);
            // Copy the endpoint from the Client whenever it is attached to the pipeline
            final InstrumentMethod setClientMethod = target.getDeclaredMethod("setClient", "redis.clients.jedis.Client");
            if (setClientMethod != null) {
                setClientMethod.addInterceptor(AttachEndPointInterceptor.class);
            }
            final InstrumentMethod constructor = target.getConstructor("redis.clients.jedis.Client");
            if (constructor != null) {
                constructor.addInterceptor(AttachEndPointInterceptor.class);
            }
        }
    }

    /** Base transform for PipelineBase and subclasses: wraps command methods. */
    public static class PipelineBaseExtendsTransform implements TransformCallback {
        @Override
        public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException {
            final InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer);
            handle(target);
            final JedisPluginConfig config = new JedisPluginConfig(instrumentor.getProfilerConfig());
            // methods(commands)
            JedisUtils.addJedisMethodInterceptor(target, config, JedisConstants.REDIS_SCOPE);
            return target.toBytecode();
        }

        /** Hook for subclasses to add extra instrumentation; intentionally a no-op here. */
        protected void handle(InstrumentClass target) throws InstrumentException {
            // no-op by default
        }
    }

    @Override
    public void setTransformTemplate(TransformTemplate transformTemplate) {
        this.transformTemplate = transformTemplate;
    }
}
| |
/**
* redpen: a text inspection tool
* Copyright (C) 2014 Recruit Technologies Co., Ltd. and contributors
* (see CONTRIBUTORS.md)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bigram.docvalidator;
import org.apache.commons.io.input.ReaderInputStream;
import org.bigram.docvalidator.distributor.FakeResultDistributor;
import org.junit.Test;
import org.bigram.docvalidator.config.Configuration;
import org.bigram.docvalidator.config.ValidationConfigurationLoader;
import org.bigram.docvalidator.config.ValidatorConfiguration;
import org.bigram.docvalidator.model.DocumentCollection;
import org.bigram.docvalidator.model.Sentence;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.matchers.JUnitMatchers.containsString;
/**
 * Tests for {@code DocumentValidator}: documents are assembled in memory via
 * the builder API, validated, and the reported {@code ValidationError}s are
 * checked for count, validator name and message content.
 */
public class DocumentValidatorTest {

    /** An empty validator configuration must report no errors at all. */
    @Test
    public void testEmptyValidator() throws DocumentValidatorException {
        DocumentCollection documents = new DocumentCollection.Builder()
            .addDocument("")
            .addSection(1, new ArrayList<Sentence>())
            .addParagraph()
            .addSentence(
                "In a land far away, there once was as a hungry programmer.",
                1)
            .addSentence(
                "He was hungry for programming and programmed all day - "
                    + " - in Java, Python, C++, etc.", 2)
            .addSentence(
                "Whe he wasn't programming, he was eating noodles.",
                3)
            .addParagraph()
            .addSentence(
                "One day while programming, he got a new idea.", 4)
            .build();

        ValidatorConfiguration validatorConfig = new ValidatorConfiguration(
            "<?xml version=\"1.0\"?>\n" +
                "<character-table></character-table>"
        );
        Configuration configuration = new Configuration(
            validatorConfig); // = ValidatorConfiguration + CharacterTable

        DocumentValidator validator = new DocumentValidator.Builder()
            .setConfiguration(configuration)
            .setResultDistributor(new FakeResultDistributor())
            .build();

        List<ValidationError> errors = validator.check(documents);
        assertEquals(0, errors.size());
    }

    /** SentenceLength (max 5) flags every sentence in a simple document. */
    @Test
    public void testSentenceValidatorWithSimpleDocument()
        throws DocumentValidatorException {
        DocumentCollection documents = new DocumentCollection.Builder()
            .addDocument("tested file")
            .addSection(0, new ArrayList<Sentence>())
            .addParagraph()
            .addSentence("it is a piece of a cake.", 0)
            .addSentence("that is also a piece of a cake.", 1)
            .build();

        DocumentValidator validator = getValidatorWithSentenceValidator();
        List<ValidationError> errors = validator.check(documents);

        // validate the errors: one per over-long sentence
        assertEquals(2, errors.size());
        for (ValidationError error : errors) {
            assertThat(error.getValidatorName(), is("SentenceLength"));
            assertThat(error.getMessage(),
                containsString("The length of the line exceeds the maximum "));
        }
    }

    /** SectionLength (max 5 chars) flags the section once. */
    @Test
    public void testSectionValidatorWithSimpleDocument()
        throws DocumentValidatorException {
        DocumentCollection documents = new DocumentCollection.Builder()
            .addDocument("tested file")
            .addSection(0, new ArrayList<Sentence>())
            .addSectionHeader("foobar")
            .addParagraph()
            .addSentence("it is a piece of a cake.", 0)
            .addSentence("that is also a piece of a cake.", 1)
            .build();

        DocumentValidator validator = getValidatorWithSectionValidator();
        List<ValidationError> errors = validator.check(documents);

        // validate the errors
        assertEquals(1, errors.size());
        for (ValidationError error : errors) {
            assertThat(error.getValidatorName(), is("SectionLength"));
            assertThat(error.getMessage(),
                containsString("The number of the character exceeds the maximum"));
        }
    }

    /** The section header counts as a sentence too: three errors expected. */
    @Test
    public void testDocumentWithHeader() throws DocumentValidatorException {
        DocumentCollection documents = new DocumentCollection.Builder()
            .addDocument("tested file")
            .addSection(0)
            .addSectionHeader("this is it.")
            .addParagraph()
            .addSentence("it is a piece of a cake.", 0)
            .addSentence("that is also a piece of a cake.", 1)
            .build();

        DocumentValidator validator = getValidatorWithSentenceValidator();
        List<ValidationError> errors = validator.check(documents);

        // validate the errors
        assertEquals(3, errors.size());
        for (ValidationError error : errors) {
            assertThat(error.getValidatorName(), is("SentenceLength"));
            assertThat(error.getMessage(),
                containsString("The length of the line exceeds the maximum "));
        }
    }

    /** List elements are validated as sentences as well: four errors expected. */
    @Test
    public void testDocumentWithList() throws DocumentValidatorException {
        DocumentCollection documents = new DocumentCollection.Builder()
            .addDocument("tested file")
            .addSection(0, new ArrayList<Sentence>())
            .addSectionHeader("this is it")
            .addParagraph()
            .addSentence("it is a piece of a cake.", 0)
            .addSentence("that is also a piece of a cake.", 1)
            .addListBlock()
            .addListElement(0, "this is a list.")
            .build();

        DocumentValidator validator = getValidatorWithSentenceValidator();
        List<ValidationError> errors = validator.check(documents);

        // validate the errors
        assertEquals(4, errors.size());
        for (ValidationError error : errors) {
            assertThat(error.getValidatorName(), is("SentenceLength"));
            assertThat(error.getMessage(),
                containsString("The length of the line exceeds the maximum "));
        }
    }

    /** A document with no content must produce no errors. */
    @Test
    public void testDocumentWithoutContent() throws DocumentValidatorException {
        DocumentCollection documents = new DocumentCollection.Builder()
            .addDocument("tested file")
            .build();

        DocumentValidator validator = getValidatorWithSentenceValidator();
        List<ValidationError> errors = validator.check(documents);

        // validate the errors
        assertEquals(0, errors.size());
    }

    /** Builds a validator configured with SentenceLength (max_length=5). */
    private DocumentValidator getValidatorWithSentenceValidator() throws
        DocumentValidatorException {
        return createValidator(
            "<?xml version=\"1.0\"?>\n" +
                "<component name=\"Validator\">" +
                " <component name=\"SentenceLength\">\n" +
                " <property name=\"max_length\" value=\"5\"/>\n" +
                " </component>" +
                "</component>");
    }

    /** Builds a validator configured with SectionLength (max_char_num=5). */
    private DocumentValidator getValidatorWithSectionValidator() throws
        DocumentValidatorException {
        return createValidator(
            "<?xml version=\"1.0\"?>\n" +
                "<component name=\"Validator\">" +
                " <component name=\"SectionLength\">\n" +
                " <property name=\"max_char_num\" value=\"5\"/>\n" +
                " </component>" +
                "</component>");
    }

    /**
     * Shared plumbing for the two helpers above: parses the validator XML,
     * wraps it in a Configuration and builds a validator with a fake
     * result distributor.
     */
    private DocumentValidator createValidator(String validatorXml) throws
        DocumentValidatorException {
        ValidatorConfiguration validatorConfig =
            ValidationConfigurationLoader.loadConfiguration(
                new ReaderInputStream(new StringReader(validatorXml))
            );
        Configuration configuration = new Configuration(validatorConfig);
        return new DocumentValidator.Builder()
            .setConfiguration(configuration)
            .setResultDistributor(new FakeResultDistributor())
            .build();
    }
}
| |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.rage;
import static com.facebook.buck.zip.ZipOutputStreams.HandleDuplicates.APPEND_TO_ZIP;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.log.CommandThreadFactory;
import com.facebook.buck.log.Logger;
import com.facebook.buck.slb.ClientSideSlb;
import com.facebook.buck.slb.HttpResponse;
import com.facebook.buck.slb.HttpService;
import com.facebook.buck.slb.LoadBalancedService;
import com.facebook.buck.slb.RetryingHttpService;
import com.facebook.buck.slb.SlbBuckConfig;
import com.facebook.buck.timing.Clock;
import com.facebook.buck.zip.CustomZipEntry;
import com.facebook.buck.zip.CustomZipOutputStream;
import com.facebook.buck.zip.ZipOutputStreams;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.io.ByteStreams;
import com.google.common.io.CharStreams;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okio.BufferedSink;
/**
 * Takes care of actually writing out the report. The report is packaged as a
 * zip archive containing the included log files, an optional source-control
 * diff, and a JSON summary. When a frontend is configured the archive is
 * uploaded over HTTP; otherwise (or on upload failure) it is written to a
 * temp file under buck-out.
 */
public class DefaultDefectReporter implements DefectReporter {
  // Log under this class rather than AbstractReport so log entries are
  // attributed to the code that actually emitted them.
  private static final Logger LOG = Logger.get(DefaultDefectReporter.class);

  private static final String REPORT_FILE_NAME = "report.json";
  private static final String DIFF_FILE_NAME = "changes.diff";
  private static final int HTTP_SUCCESS_CODE = 200;
  private static final String REQUEST_PROTOCOL_VERSION = "x-buck-protocol-version";

  private final ProjectFilesystem filesystem;
  private final ObjectMapper objectMapper;
  private final RageConfig rageConfig;
  private final BuckEventBus buckEventBus;
  private final Clock clock;

  public DefaultDefectReporter(
      ProjectFilesystem filesystem,
      ObjectMapper objectMapper,
      RageConfig rageConfig,
      BuckEventBus buckEventBus,
      Clock clock
  ) {
    this.filesystem = filesystem;
    this.objectMapper = objectMapper;
    this.rageConfig = rageConfig;
    this.buckEventBus = buckEventBus;
    this.clock = clock;
  }

  /**
   * Adds each of the given project-relative files to the archive. A hidden
   * (dot-prefixed, UNIX terms) file is stored without the leading dot so it
   * extracts as a normal, visible file.
   */
  private void addFilesToArchive(
      CustomZipOutputStream out,
      ImmutableSet<Path> paths) throws IOException {
    for (Path logFile : paths) {
      // %s placeholder so the offending path is formatted into the message
      // (the previous message had no placeholder for the extra argument).
      Preconditions.checkArgument(
          !logFile.isAbsolute(), "Expected a relative path but got: %s.", logFile);
      if (logFile.getFileName().toString().startsWith(".")) {
        out.putNextEntry(new CustomZipEntry(
            Paths.get(logFile.getFileName().toString().substring(1))));
      } else {
        out.putNextEntry(new CustomZipEntry(logFile));
      }
      try (InputStream input = filesystem.newFileInputStream(logFile)) {
        ByteStreams.copy(input, out);
      }
      out.closeEntry();
    }
  }

  /** Writes each map entry into the archive as a UTF-8 file named by its key. */
  private void addStringsAsFilesToArchive(
      CustomZipOutputStream out,
      ImmutableMap<String, String> files) throws IOException {
    for (Map.Entry<String, String> file : files.entrySet()) {
      out.putNextEntry(new CustomZipEntry(file.getKey()));
      out.write(file.getValue().getBytes(Charsets.UTF_8));
      out.closeEntry();
    }
  }

  /**
   * Attempts to upload the report when a frontend is configured; on failure
   * (or when no frontend is available) falls back to writing the zip under
   * buck-out so the data is not lost.
   */
  @Override
  public DefectSubmitResult submitReport(DefectReport defectReport) throws IOException {
    DefectSubmitResult.Builder defectSubmitResult = DefectSubmitResult.builder();
    defectSubmitResult.setRequestProtocol(rageConfig.getProtocolVersion());
    Optional<SlbBuckConfig> frontendConfig = rageConfig.getFrontendConfig();

    if (frontendConfig.isPresent()) {
      Optional<ClientSideSlb> slb =
          frontendConfig.get().tryCreatingClientSideSlb(
              clock,
              buckEventBus,
              new CommandThreadFactory("RemoteLog.HttpLoadBalancer"));
      if (slb.isPresent()) {
        try {
          return uploadReport(defectReport, defectSubmitResult, slb.get());
        } catch (IOException e) {
          // Record the failure but fall through to the local-file path below.
          LOG.debug(e, "Failed uploading report to server.");
          defectSubmitResult.setIsRequestSuccessful(false);
          defectSubmitResult.setReportSubmitErrorMessage(e.getMessage());
        }
      }
    }

    filesystem.mkdirs(filesystem.getBuckPaths().getBuckOut());
    Path defectReportPath = filesystem.createTempFile(
        filesystem.getBuckPaths().getBuckOut(),
        "defect_report",
        ".zip");
    try (OutputStream outputStream = filesystem.newFileOutputStream(defectReportPath)) {
      writeReport(defectReport, outputStream);
    }
    return defectSubmitResult
        .setIsRequestSuccessful(Optional.empty())
        .setReportSubmitLocation(defectReportPath.toString())
        .build();
  }

  /**
   * Streams the zipped report (optional diff, included log files, then the
   * JSON summary) to {@code outputStream}.
   */
  private void writeReport(
      DefectReport defectReport,
      OutputStream outputStream) throws IOException {
    try (BufferedOutputStream baseOut = new BufferedOutputStream(outputStream);
         CustomZipOutputStream out =
             ZipOutputStreams.newOutputStream(baseOut, APPEND_TO_ZIP)) {
      if (defectReport.getSourceControlInfo().isPresent() &&
          defectReport.getSourceControlInfo().get().getDiff().isPresent()) {
        addStringsAsFilesToArchive(
            out,
            ImmutableMap.of(
                DIFF_FILE_NAME,
                defectReport.getSourceControlInfo().get().getDiff().get()));
      }
      addFilesToArchive(out, defectReport.getIncludedPaths());

      out.putNextEntry(new CustomZipEntry(REPORT_FILE_NAME));
      objectMapper.writeValue(out, defectReport);
    }
  }

  /**
   * Uploads the zipped report through a retrying, load-balanced HTTP service.
   * Depending on the configured protocol, interprets the response body either
   * as a plain location string (SIMPLE) or as a JSON payload.
   *
   * @throws IOException if the upload fails or the server returns a non-200 code
   */
  private DefectSubmitResult uploadReport(
      final DefectReport defectReport,
      DefectSubmitResult.Builder defectSubmitResult,
      ClientSideSlb slb) throws IOException {
    long timeout = rageConfig.getHttpTimeout();
    OkHttpClient httpClient = new OkHttpClient.Builder()
        .connectTimeout(timeout, TimeUnit.MILLISECONDS)
        .readTimeout(timeout, TimeUnit.MILLISECONDS)
        .writeTimeout(timeout, TimeUnit.MILLISECONDS)
        .build();
    HttpService httpService = new RetryingHttpService(buckEventBus,
        new LoadBalancedService(slb, httpClient, buckEventBus),
        rageConfig.getMaxUploadRetries());

    try {
      Request.Builder requestBuilder = new Request.Builder();
      requestBuilder.addHeader(
          REQUEST_PROTOCOL_VERSION,
          rageConfig.getProtocolVersion().name().toLowerCase());
      requestBuilder.post(
          new RequestBody() {
            @Override
            public MediaType contentType() {
              return MediaType.parse("application/x-www-form-urlencoded");
            }
            @Override
            public void writeTo(BufferedSink bufferedSink) throws IOException {
              // Stream the zip directly into the request body.
              writeReport(defectReport, bufferedSink.outputStream());
            }
          });

      HttpResponse response = httpService.makeRequest(
          rageConfig.getReportUploadPath(),
          requestBuilder);
      String responseBody;
      try (InputStream inputStream = response.getBody()) {
        responseBody = CharStreams.toString(new InputStreamReader(inputStream, Charsets.UTF_8));
      }
      if (response.code() == HTTP_SUCCESS_CODE) {
        defectSubmitResult.setIsRequestSuccessful(true);
        if (rageConfig.getProtocolVersion().equals(AbstractRageConfig.RageProtocolVersion.SIMPLE)) {
          return defectSubmitResult
              .setReportSubmitMessage(responseBody)
              .setReportSubmitLocation(responseBody)
              .build();
        } else {
          // Decode Json response.
          RageJsonResponse json = objectMapper.readValue(
              responseBody.getBytes(Charsets.UTF_8),
              RageJsonResponse.class);
          return defectSubmitResult
              .setIsRequestSuccessful(json.getRequestSuccessful())
              .setReportSubmitErrorMessage(json.getErrorMessage())
              .setReportSubmitMessage(json.getMessage())
              .setReportSubmitLocation(json.getRageUrl())
              .build();
        }
      } else {
        throw new IOException(
            String.format(
                "Connection to %s returned code %d and message: %s",
                response.requestUrl(),
                response.code(),
                responseBody));
      }
    } catch (IOException e) {
      // Keep the original exception as the cause instead of flattening it to
      // just its message, so stack traces stay debuggable.
      throw new IOException(
          String.format("Failed uploading report because [%s].", e.getMessage()), e);
    } finally {
      httpService.close();
    }
  }
}
| |
package org.apache.commons.jcs.utils.discovery;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.net.DatagramPacket;
import java.net.InetAddress;
import java.net.MulticastSocket;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import org.apache.commons.jcs.engine.behavior.IShutdownObserver;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/** Receives UDP Discovery messages. */
public class UDPDiscoveryReceiver
    implements Runnable, IShutdownObserver
{
    /** The log factory */
    protected final static Log log = LogFactory.getLog( UDPDiscoveryReceiver.class );

    /** Receive buffer, sized for the largest possible UDP datagram payload. */
    private final byte[] mBuffer = new byte[65536];

    /** The socket used for communication. */
    private MulticastSocket mSocket;

    /**
     * TODO: Consider using the threadpool manager to get this thread pool. For now place a tight
     * restriction on the pool size
     */
    private static final int maxPoolSize = 2;

    /** The processor */
    private ThreadPoolExecutor pooledExecutor = null;

    /** number of messages received. For debugging and testing. */
    private int cnt = 0;

    /** Service to get cache names and handle request broadcasts */
    protected UDPDiscoveryService service = null;

    /** Address */
    private String multicastAddressString = "";

    /** The port */
    private int multicastPort = 0;

    /**
     * Is it shutdown. Volatile because it is set by the shutdown thread and
     * polled by the receiver thread looping in run().
     */
    private volatile boolean shutdown = false;

    /**
     * Constructor for the LateralUDPReceiver object.
     * <p>
     * We determine our own host using InetAddress
     * <p>
     * @param service
     * @param multicastAddressString
     * @param multicastPort
     * @exception IOException if the multicast socket cannot be created or joined
     */
    public UDPDiscoveryReceiver( UDPDiscoveryService service, String multicastAddressString, int multicastPort )
        throws IOException
    {
        this.service = service;
        this.multicastAddressString = multicastAddressString;
        this.multicastPort = multicastPort;

        // create a small thread pool to handle a barrage
        pooledExecutor = (ThreadPoolExecutor) Executors.newFixedThreadPool( maxPoolSize, new MyThreadFactory() );
        pooledExecutor.setRejectedExecutionHandler( new ThreadPoolExecutor.DiscardOldestPolicy() );

        if ( log.isInfoEnabled() )
        {
            log.info( "Constructing listener, [" + this.multicastAddressString + ":" + this.multicastPort + "]" );
        }

        // consider eating this so we can go on, or constructing the socket later
        createSocket( this.multicastAddressString, this.multicastPort );
    }

    /**
     * Creates the multicast socket for this class and joins the group.
     * <p>
     * @param multicastAddressString
     * @param multicastPort
     * @throws IOException if binding or joining fails
     */
    private void createSocket( String multicastAddressString, int multicastPort )
        throws IOException
    {
        try
        {
            mSocket = new MulticastSocket( multicastPort );
            if ( log.isInfoEnabled() )
            {
                log.info( "Joining Group: [" + InetAddress.getByName( multicastAddressString ) + "]" );
            }
            mSocket.joinGroup( InetAddress.getByName( multicastAddressString ) );
        }
        catch ( IOException e )
        {
            log.error( "Could not bind to multicast address [" + InetAddress.getByName( multicastAddressString ) + ":" + multicastPort + "]", e );
            throw e;
        }
    }

    /**
     * Highly unreliable. If it is processing one message while another comes in, the second
     * message is lost. This is for low concurrency peppering.
     * <p>
     * NOTE(review): this deserializes data received off the network with Java
     * serialization; it assumes a trusted network. Consider validating or
     * filtering the stream if that assumption ever changes.
     * <p>
     * @return the object message, or null if none could be read
     * @throws IOException
     */
    public Object waitForMessage()
        throws IOException
    {
        final DatagramPacket packet = new DatagramPacket( mBuffer, mBuffer.length );
        Object obj = null;
        try
        {
            if ( log.isDebugEnabled() )
            {
                log.debug( "Waiting for message." );
            }

            mSocket.receive( packet );

            if ( log.isDebugEnabled() )
            {
                log.debug( "Received packet from address [" + packet.getSocketAddress() + "]" );
            }

            final ByteArrayInputStream byteStream = new ByteArrayInputStream( mBuffer, 0, packet.getLength() );
            final ObjectInputStream objectStream = new ObjectInputStream( byteStream );
            obj = objectStream.readObject();
            // close releases deserialization state; the backing stream is in-memory
            objectStream.close();

            // instanceof is false for null, so no separate null check is needed
            if ( obj instanceof UDPDiscoveryMessage )
            {
                // Ensure that the address we're supposed to send to is, indeed, the address
                // of the machine on the other end of this connection. This guards against
                // instances where we don't exactly get the right local host address
                UDPDiscoveryMessage msg = (UDPDiscoveryMessage) obj;
                msg.setHost( packet.getAddress().getHostAddress() );

                if ( log.isDebugEnabled() )
                {
                    log.debug( "Read object from address [" + packet.getSocketAddress() + "], object=[" + obj + "]" );
                }
            }
        }
        catch ( Exception e )
        {
            log.error( "Error receiving multicast packet", e );
        }
        return obj;
    }

    /** Main processing method for the LateralUDPReceiver object */
    public void run()
    {
        try
        {
            while ( !shutdown )
            {
                Object obj = waitForMessage();

                // not thread safe, but just for debugging
                cnt++;

                if ( log.isDebugEnabled() )
                {
                    log.debug( getCnt() + " messages received." );
                }

                UDPDiscoveryMessage message = null;
                try
                {
                    message = (UDPDiscoveryMessage) obj;
                    // check for null
                    if ( message != null )
                    {
                        MessageHandler handler = new MessageHandler( message );
                        pooledExecutor.execute( handler );

                        if ( log.isDebugEnabled() )
                        {
                            log.debug( "Passed handler to executor." );
                        }
                    }
                    else
                    {
                        log.warn( "message is null" );
                    }
                }
                catch ( ClassCastException cce )
                {
                    log.warn( "Received unknown message type " + cce.getMessage() );
                }
            } // end while
        }
        catch ( Exception e )
        {
            log.error( "Unexpected exception in UDP receiver.", e );
            try
            {
                Thread.sleep( 100 );
                // TODO consider some failure count so we don't do this
                // forever.
            }
            catch ( Exception e2 )
            {
                log.error( "Problem sleeping", e2 );
            }
        }
    }

    /**
     * @param cnt The cnt to set.
     */
    public void setCnt( int cnt )
    {
        this.cnt = cnt;
    }

    /**
     * @return Returns the cnt.
     */
    public int getCnt()
    {
        return cnt;
    }

    /**
     * Separate thread run when a command comes into the UDPDiscoveryReceiver.
     */
    public class MessageHandler
        implements Runnable
    {
        /** The message to handle. Passed in during construction. */
        private UDPDiscoveryMessage message = null;

        /**
         * @param message
         */
        public MessageHandler( UDPDiscoveryMessage message )
        {
            this.message = message;
        }

        /**
         * Process the message unless it originated from this listener or is
         * missing required fields.
         */
        public void run()
        {
            // consider comparing ports here instead.
            if ( message.getRequesterId() == UDPDiscoveryInfo.listenerId )
            {
                if ( log.isDebugEnabled() )
                {
                    log.debug( "Ignoring message sent from self" );
                }
            }
            else
            {
                if ( log.isDebugEnabled() )
                {
                    log.debug( "Process message sent from another" );
                    log.debug( "Message = " + message );
                }

                if ( message.getHost() == null || message.getCacheNames() == null || message.getCacheNames().isEmpty() )
                {
                    if ( log.isDebugEnabled() )
                    {
                        log.debug( "Ignoring invalid message: " + message );
                    }
                }
                else
                {
                    processMessage();
                }
            }
        }

        /**
         * Process the incoming message: either answer a request broadcast,
         * remove a departed service, or add/update the discovered service.
         */
        private void processMessage()
        {
            DiscoveredService discoveredService = new DiscoveredService();
            discoveredService.setServiceAddress( message.getHost() );
            discoveredService.setCacheNames( message.getCacheNames() );
            discoveredService.setServicePort( message.getPort() );
            discoveredService.setLastHearFromTime( System.currentTimeMillis() );

            // if this is a request message, have the service handle it and return
            if ( message.getMessageType() == UDPDiscoveryMessage.REQUEST_BROADCAST )
            {
                if ( log.isDebugEnabled() )
                {
                    log.debug( "Message is a Request Broadcase, will have the service handle it." );
                }
                service.serviceRequestBroadcast();
                return;
            }
            else if ( message.getMessageType() == UDPDiscoveryMessage.REMOVE_BROADCAST )
            {
                if ( log.isDebugEnabled() )
                {
                    log.debug( "Removing service from set " + discoveredService );
                }
                service.removeDiscoveredService( discoveredService );
            }
            else
            {
                service.addOrUpdateService( discoveredService );
            }
        }
    }

    /**
     * Allows us to set the daemon status on the executor threads
     */
    protected static class MyThreadFactory
        implements ThreadFactory
    {
        /**
         * Sets the thread to daemon.
         * <p>
         * @param runner
         * @return a daemon thread
         */
        public Thread newThread( Runnable runner )
        {
            Thread t = new Thread( runner );
            String oldName = t.getName();
            t.setName( "JCS-UDPDiscoveryReceiver-" + oldName );
            t.setDaemon( true );
            t.setPriority( Thread.MIN_PRIORITY );
            return t;
        }
    }

    /** Shuts down the socket and the handler thread pool. */
    public void shutdown()
    {
        try
        {
            shutdown = true;
            mSocket.leaveGroup( InetAddress.getByName( multicastAddressString ) );
            mSocket.close();
            pooledExecutor.shutdownNow();
        }
        catch ( IOException e )
        {
            // include the exception so the failure is diagnosable
            log.error( "Problem closing socket", e );
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.geronimo.security.realm.providers;
import java.util.Map;
import java.util.Collections;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.LoginException;
import javax.security.auth.login.LoginContext;
import javax.security.auth.spi.LoginModule;
import javax.security.auth.Subject;
import javax.security.auth.callback.CallbackHandler;
import junit.framework.TestCase;
/**
 * Exercises the JAAS login-module control flags (SUFFICIENT, OPTIONAL,
 * REQUIRED, REQUISITE) against stub modules that always succeed, always
 * fail, or always throw, documenting how each flag combination determines
 * the overall login outcome.
 *
 * @version $Rev$ $Date$
 */
public class FlagsMeaningTest extends TestCase {
    private static final Map<String, Object> noOptions = Collections.emptyMap();

    // Short aliases for the verbose control-flag constants.
    private static final AppConfigurationEntry.LoginModuleControlFlag SUFFICIENT =
            AppConfigurationEntry.LoginModuleControlFlag.SUFFICIENT;
    private static final AppConfigurationEntry.LoginModuleControlFlag OPTIONAL =
            AppConfigurationEntry.LoginModuleControlFlag.OPTIONAL;
    private static final AppConfigurationEntry.LoginModuleControlFlag REQUIRED =
            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED;
    private static final AppConfigurationEntry.LoginModuleControlFlag REQUISITE =
            AppConfigurationEntry.LoginModuleControlFlag.REQUISITE;

    /** Builds a configuration entry for the given stub module and control flag. */
    private static AppConfigurationEntry entry(Class<? extends LoginModule> module,
                                               AppConfigurationEntry.LoginModuleControlFlag flag) {
        return new AppConfigurationEntry(module.getName(), flag, noOptions);
    }

    /** Creates a login context named "foo" backed by the given fixed entry list. */
    private static LoginContext context(AppConfigurationEntry... entries) throws LoginException {
        return new LoginContext("foo", null, null, new FixedConfiguration(entries));
    }

    /** Asserts that a login attempt through the given context fails. */
    private static void assertLoginFails(LoginContext lc) {
        try {
            lc.login();
            fail("login exception expected");
        } catch (LoginException expected) {
            // expected: this flag combination must make the overall login fail
        }
    }

    public void testSufficientExceptionTrue() throws LoginException {
        context(entry(ExceptionLM.class, SUFFICIENT), entry(TrueLM.class, SUFFICIENT)).login();
    }

    public void testSufficientFalseTrue() throws LoginException {
        context(entry(FalseLM.class, SUFFICIENT), entry(TrueLM.class, SUFFICIENT)).login();
    }

    public void testSufficientExceptionRequiredTrue() throws LoginException {
        context(entry(ExceptionLM.class, SUFFICIENT), entry(TrueLM.class, REQUIRED)).login();
    }

    public void testOptionalExceptionTrue() throws LoginException {
        context(entry(ExceptionLM.class, OPTIONAL), entry(TrueLM.class, OPTIONAL)).login();
    }

    public void testOptionalTrueException() throws LoginException {
        context(entry(TrueLM.class, OPTIONAL), entry(ExceptionLM.class, OPTIONAL)).login();
    }

    public void testRequiredExceptionTrue() throws LoginException {
        assertLoginFails(context(entry(ExceptionLM.class, REQUIRED), entry(TrueLM.class, REQUIRED)));
    }

    public void testRequisiteExceptionTrue() throws LoginException {
        assertLoginFails(context(entry(ExceptionLM.class, REQUISITE), entry(TrueLM.class, REQUISITE)));
    }

    public void testRequisiteTrueException() throws LoginException {
        assertLoginFails(context(entry(TrueLM.class, REQUISITE), entry(ExceptionLM.class, REQUISITE)));
    }

    public void testRequiredTrueException() throws LoginException {
        assertLoginFails(context(entry(TrueLM.class, REQUIRED), entry(ExceptionLM.class, REQUIRED)));
    }

    public void testRequiredTrueFalse() throws LoginException {
        context(entry(TrueLM.class, REQUIRED), entry(FalseLM.class, REQUIRED)).login();
    }

    public void testRequiredFalseTrue() throws LoginException {
        context(entry(FalseLM.class, REQUIRED), entry(TrueLM.class, REQUIRED)).login();
    }

    public void testRequisiteTrueFalse() throws LoginException {
        context(entry(TrueLM.class, REQUISITE), entry(FalseLM.class, REQUISITE)).login();
    }

    public void testRequisiteFalseTrue() throws LoginException {
        context(entry(FalseLM.class, REQUISITE), entry(TrueLM.class, REQUISITE)).login();
    }

    /** A configuration that always returns the same fixed list of entries. */
    private static class FixedConfiguration extends Configuration {
        private final AppConfigurationEntry[] entries;

        private FixedConfiguration(AppConfigurationEntry[] entries) {
            this.entries = entries;
        }

        public AppConfigurationEntry[] getAppConfigurationEntry(String s) {
            return entries;
        }

        public void refresh() {
        }
    }

    /** Stub module whose login() reports "not applicable" by returning false. */
    public static class FalseLM implements LoginModule {
        public void initialize(Subject subject, CallbackHandler callbackHandler, Map<String, ?> stringMap, Map<String, ?> stringMap1) {
        }

        public boolean login() throws LoginException {
            return false;
        }

        public boolean commit() throws LoginException {
            return true;
        }

        public boolean abort() throws LoginException {
            return true;
        }

        public boolean logout() throws LoginException {
            return true;
        }
    }

    /** Stub module whose login() always succeeds. */
    public static class TrueLM implements LoginModule {
        public void initialize(Subject subject, CallbackHandler callbackHandler, Map<String, ?> stringMap, Map<String, ?> stringMap1) {
        }

        public boolean login() throws LoginException {
            return true;
        }

        public boolean commit() throws LoginException {
            return true;
        }

        public boolean abort() throws LoginException {
            return true;
        }

        public boolean logout() throws LoginException {
            return true;
        }
    }

    /** Stub module whose login() always throws a LoginException. */
    public static class ExceptionLM implements LoginModule {
        public void initialize(Subject subject, CallbackHandler callbackHandler, Map<String, ?> stringMap, Map<String, ?> stringMap1) {
        }

        public boolean login() throws LoginException {
            throw new LoginException();
        }

        public boolean commit() throws LoginException {
            return false;
        }

        public boolean abort() throws LoginException {
            return false;
        }

        public boolean logout() throws LoginException {
            return false;
        }
    }
}
| |
package com.dianping.cat.report.page.cross;
import java.io.IOException;
import java.util.Date;
import javax.servlet.ServletException;
import org.unidal.lookup.annotation.Inject;
import org.unidal.lookup.util.StringUtils;
import org.unidal.web.mvc.PageHandler;
import org.unidal.web.mvc.annotation.InboundActionMeta;
import org.unidal.web.mvc.annotation.OutboundActionMeta;
import org.unidal.web.mvc.annotation.PayloadMeta;
import com.dianping.cat.consumer.cross.CrossAnalyzer;
import com.dianping.cat.consumer.cross.model.entity.CrossReport;
import com.dianping.cat.mvc.PayloadNormalizer;
import com.dianping.cat.report.ReportPage;
import com.dianping.cat.report.page.cross.display.HostInfo;
import com.dianping.cat.report.page.cross.display.MethodInfo;
import com.dianping.cat.report.page.cross.display.ProjectInfo;
import com.dianping.cat.report.page.cross.service.CrossReportService;
import com.dianping.cat.report.service.ModelRequest;
import com.dianping.cat.report.service.ModelResponse;
import com.dianping.cat.report.service.ModelService;
import com.dianping.cat.service.HostinfoService;
public class Handler implements PageHandler<Context> {
@Inject
private JspViewer m_jspViewer;
@Inject
private CrossReportService m_reportService;
@Inject
private PayloadNormalizer m_normalizePayload;
@Inject
private HostinfoService m_hostinfoService;
@Inject(type = ModelService.class, value = CrossAnalyzer.ID)
private ModelService<CrossReport> m_service;
private CrossReport getHourlyReport(Payload payload) {
String domain = payload.getDomain();
String ipAddress = payload.getIpAddress();
ModelRequest request = new ModelRequest(domain, payload.getDate()) //
.setProperty("ip", ipAddress);
if (m_service.isEligable(request)) {
ModelResponse<CrossReport> response = m_service.invoke(request);
CrossReport report = response.getModel();
return report;
} else {
throw new RuntimeException("Internal error: no eligable cross service registered for " + request + "!");
}
}
private CrossReport getSummarizeReport(Payload payload) {
String domain = payload.getDomain();
Date start = payload.getHistoryStartDate();
Date end = payload.getHistoryEndDate();
return m_reportService.queryReport(domain, start, end);
}
  /**
   * Inbound phase of the MVC cycle for the cross report page. The page is
   * read-only, so no state change is performed here; rendering is done in
   * the outbound phase.
   */
  @Override
  @PayloadMeta(Payload.class)
  @InboundActionMeta(name = CrossAnalyzer.ID)
  public void handleInbound(Context ctx) throws ServletException, IOException {
    // display only, no action here
  }
@Override
@OutboundActionMeta(name = CrossAnalyzer.ID)
public void handleOutbound(Context ctx) throws ServletException, IOException {
Model model = new Model(ctx);
Payload payload = ctx.getPayload();
normalize(model, payload);
long historyTime = (payload.getHistoryEndDate().getTime() - payload.getHistoryStartDate().getTime()) / 1000;
switch (payload.getAction()) {
case HOURLY_PROJECT:
CrossReport projectReport = getHourlyReport(payload);
ProjectInfo projectInfo = new ProjectInfo(payload.getHourDuration());
projectInfo.setClientIp(model.getIpAddress()).setCallSortBy(model.getCallSort())
.setServiceSortBy(model.getServiceSort());
projectInfo.visitCrossReport(projectReport);
model.setProjectInfo(projectInfo);
model.setReport(projectReport);
break;
case HOURLY_HOST:
CrossReport hostReport = getHourlyReport(payload);
HostInfo hostInfo = new HostInfo(payload.getHourDuration());
hostInfo.setHostinfoService(m_hostinfoService);
hostInfo.setClientIp(model.getIpAddress()).setCallSortBy(model.getCallSort())
.setServiceSortBy(model.getServiceSort());
hostInfo.setProjectName(payload.getProjectName());
hostInfo.visitCrossReport(hostReport);
model.setReport(hostReport);
model.setHostInfo(hostInfo);
break;
case HOURLY_METHOD:
CrossReport methodReport = getHourlyReport(payload);
MethodInfo methodInfo = new MethodInfo(payload.getHourDuration());
methodInfo.setHostinfoService(m_hostinfoService);
methodInfo.setClientIp(model.getIpAddress()).setCallSortBy(model.getCallSort())
.setServiceSortBy(model.getServiceSort()).setRemoteProject(payload.getProjectName());
methodInfo.setRemoteIp(payload.getRemoteIp()).setQuery(model.getQueryName());
methodInfo.visitCrossReport(methodReport);
model.setReport(methodReport);
model.setMethodInfo(methodInfo);
break;
case HISTORY_PROJECT:
CrossReport historyProjectReport = getSummarizeReport(payload);
ProjectInfo historyProjectInfo = new ProjectInfo(historyTime);
historyProjectInfo.setClientIp(model.getIpAddress()).setCallSortBy(model.getCallSort())
.setServiceSortBy(model.getServiceSort());
historyProjectInfo.visitCrossReport(historyProjectReport);
model.setProjectInfo(historyProjectInfo);
model.setReport(historyProjectReport);
break;
case HISTORY_HOST:
CrossReport historyHostReport = getSummarizeReport(payload);
HostInfo historyHostInfo = new HostInfo(historyTime);
historyHostInfo.setHostinfoService(m_hostinfoService);
historyHostInfo.setClientIp(model.getIpAddress()).setCallSortBy(model.getCallSort())
.setServiceSortBy(model.getServiceSort());
historyHostInfo.setProjectName(payload.getProjectName());
historyHostInfo.visitCrossReport(historyHostReport);
model.setReport(historyHostReport);
model.setHostInfo(historyHostInfo);
break;
case HISTORY_METHOD:
CrossReport historyMethodReport = getSummarizeReport(payload);
MethodInfo historyMethodInfo = new MethodInfo(historyTime);
historyMethodInfo.setHostinfoService(m_hostinfoService);
historyMethodInfo.setClientIp(model.getIpAddress()).setCallSortBy(model.getCallSort())
.setServiceSortBy(model.getServiceSort()).setRemoteProject(payload.getProjectName());
historyMethodInfo.setRemoteIp(payload.getRemoteIp()).setQuery(model.getQueryName());
historyMethodInfo.visitCrossReport(historyMethodReport);
model.setReport(historyMethodReport);
model.setMethodInfo(historyMethodInfo);
break;
case METHOD_QUERY:
String method = payload.getMethod();
CrossMethodVisitor info = new CrossMethodVisitor(method);
CrossReport queryReport = null;
if (isHistory(payload)) {
queryReport = getSummarizeReport(payload);
} else {
queryReport = getHourlyReport(payload);
}
info.visitCrossReport(queryReport);
model.setReport(queryReport);
model.setInfo(info.getInfo());
break;
}
m_jspViewer.view(ctx, model);
}
private boolean isHistory(Payload payload) {
String rawDate = payload.getRawDate();
return rawDate != null && rawDate.length() == 8;
}
private void normalize(Model model, Payload payload) {
model.setPage(ReportPage.CROSS);
model.setAction(payload.getAction());
m_normalizePayload.normalize(model, payload);
model.setCallSort(payload.getCallSort());
model.setServiceSort(payload.getServiceSort());
model.setQueryName(payload.getQueryName());
if (StringUtils.isEmpty(payload.getProjectName())) {
if (payload.getAction() == Action.HOURLY_HOST) {
payload.setAction("view");
}
if (payload.getAction() == Action.HISTORY_HOST) {
payload.setAction("history");
}
}
if (StringUtils.isEmpty(payload.getRemoteIp())) {
if (payload.getAction() == Action.HOURLY_METHOD) {
payload.setAction("view");
}
if (payload.getAction() == Action.HISTORY_METHOD) {
payload.setAction("history");
}
}
}
}
| |
package com.musicplayer.integrated.sanket.music;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.media.audiofx.Visualizer;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.PopupMenu;
import android.util.Log;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.SeekBar;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.bumptech.glide.request.target.SimpleTarget;
import com.bumptech.glide.request.transition.Transition;
import ak.sh.ay.musicwave.MusicWave;
import it.moondroid.coverflow.components.ui.containers.FeatureCoverFlow;
import jp.wasabeef.blurry.Blurry;
// Full-screen "now playing" screen: album art over a blurred backdrop, a seek
// bar with elapsed/total time, swipe gestures for next/previous track, an
// optional waveform visualizer and proximity-based control, plus a landscape
// cover-flow carousel over the current song set.
public class ActivityMainFullPlayer extends AppCompatActivity {
// Album art / transport / option buttons.
private ImageView fullPlayer ,fullPlayer_album, fullPlayer_shuffle , fullPlayer_loop , more;
private TextView fullPlayer_song , fullPlayer_artist , fullPlayer_currentTime , fullPlayer_maxTime;
private SeekBar seekBar;
private ModelSongs song;
public ImageView fullPlayer_play;
// Drives the seek-bar polling loop (see onResume()).
private Handler progressHandler;
// NOTE(review): a static Runnable capturing this activity's views can leak the
// activity instance across recreations - confirm this is intentional.
private static Runnable progressRunnable;
private Proximity proximity;
private MusicWave musicWave;
private Visualizer visualizer;
private SwipeDetector swipeDetector;
private RelativeLayout relativeLayout;
// Downsampling factor handed to Blurry when rendering the blurred backdrop.
private final int SAMPLING = 4;
private CloseApp closeApp;
private FeatureCoverFlow coverFlow;
private CustomBroadcast broadcast;
private SharedPreferences shared ,sh ;
private SharedPreferences.Editor editor;
// Wires up every view and listener (cover flow, overflow menu, swipe detector,
// seek bar), loads the current track's art/metadata, and registers the
// "close" and "myaction" broadcast receivers.
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setup();
// Finish this screen whenever the app broadcasts "close".
closeApp = new CloseApp();
IntentFilter intentF = new IntentFilter("close");
registerReceiver(closeApp,intentF);
getWindow().setSoftInputMode(
WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
setContentView(R.layout.activity_main_full_player);
fullPlayer = (ImageView) findViewById(R.id.mainFullPlayerContainer);
fullPlayer_play = (ImageView)findViewById(R.id.imageView_full_player_play);
fullPlayer_album = (ImageView) findViewById(R.id.imageView_full_player_album_art);
fullPlayer_song = (TextView)findViewById(R.id.textView_full_player_song);
fullPlayer_artist = (TextView)findViewById(R.id.textView_full_player_artist);
fullPlayer_currentTime = (TextView)findViewById(R.id.textView_current_time);
fullPlayer_maxTime = (TextView)findViewById(R.id.textView_total_length);
relativeLayout = (RelativeLayout)findViewById(R.id.h);
more = (ImageView)findViewById(R.id.imageView_full_player_more);
shared = getSharedPreferences("ActivitySettings",MODE_PRIVATE);
sh = getSharedPreferences("MusicData",MODE_PRIVATE);
editor = sh.edit();
// setSelected(true) enables marquee scrolling for long titles/artists.
fullPlayer_song.setSelected(true);
fullPlayer_artist.setSelected(true);
// The cover-flow carousel only exists in the landscape layout.
if(getResources().getConfiguration().orientation==Configuration.ORIENTATION_LANDSCAPE){
coverFlow = (FeatureCoverFlow)findViewById(R.id.coverflow);
coverFlow.setAdapter(new FeatureCoverflowAdapter());
coverFlow.scrollToPosition(MusicPlayback.songSet.size());
coverFlow.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
// When the visible set covers all tracks the click position is the track index;
// otherwise it has to be mapped through songSet.
if(MusicPlayback.songSet.size()==MusicPlayback.allTracks.size()){
MusicPlayback.cursor = position;
MusicPlayback.shuffleIndexPosition = MusicPlayback.trackPosition.get(MusicPlayback.cursor);
MusicPlayback.startMediaPlayback(position);
}
else{
try {
MusicPlayback.songPosition = MusicPlayback.songSet.get(position);
MusicPlayback.startMediaPlayback(MusicPlayback.songSet.get(position));
}catch (Exception e){
// Position can exceed the song set when the carousel wraps; clamp by modulo and retry.
position%=MusicPlayback.songSet.size();
MusicPlayback.songPosition = MusicPlayback.songSet.get(position);
MusicPlayback.startMediaPlayback(MusicPlayback.songSet.get(position));
}
}
}
});
}
// Overflow popup: guide and about dialogs, settings and equalizer screens.
more.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
PopupMenu p = new PopupMenu(ActivityMainFullPlayer.this , v);
p.getMenuInflater().inflate(R.menu.more_options,p.getMenu());
p.show();
p.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(MenuItem item) {
switch (item.getItemId()){
case R.id.menu_more_action_guide :
AlertDialog.Builder builder = new AlertDialog.Builder(ActivityMainFullPlayer.this);
View view = getLayoutInflater().inflate(R.layout.guide_layout,null);
TextView ok =(TextView) view.findViewById(R.id.textView_guide_ok);
builder.setView(view);
final AlertDialog dialog = builder.create();
dialog.show();
ok.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dialog.dismiss();
}
});
break;
case R.id.menu_more_action_about_dev:
AlertDialog.Builder builder1 = new AlertDialog.Builder(ActivityMainFullPlayer.this);
View view1 = getLayoutInflater().inflate(R.layout.about_dev,null);
TextView ok1 =(TextView) view1.findViewById(R.id.textView_about_ok);
builder1.setView(view1);
final AlertDialog dialog1 = builder1.create();
dialog1.show();
ok1.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dialog1.dismiss();
}
});
break;
case R.id.menu_more_action_settings : startActivity( new Intent(getApplicationContext() , ActivitySettings.class));
overridePendingTransition(R.anim.left_enter_translate , R.anim.right_enter_translate);
break;
case R.id.menu_more_action_equalizer :startActivity(new Intent(getApplicationContext(),ActivityEqualizer.class));
overridePendingTransition(R.anim.left_enter_translate , R.anim.right_enter_translate);
}
return true;
}
});
}
});
// Horizontal swipes (min 240 px) anywhere on the layout skip next/previous.
swipeDetector = new SwipeDetector(relativeLayout);
swipeDetector.setMinDistanceInPixels(240);
swipeDetector.setOnSwipeListener(new SwipeDetector.onSwipeEvent() {
@Override
public void SwipeEventDetected(View v, SwipeDetector.SwipeTypeEnum swipeType) {
if(swipeType == SwipeDetector.SwipeTypeEnum.RIGHT_TO_LEFT){
playNext();
}
if(swipeType == SwipeDetector.SwipeTypeEnum.LEFT_TO_RIGHT){
playPrev();
}
}
});
// Load the current track's album art and blur it into the backdrop; fall
// back to the bundled defaults when the track has no art.
try{
if(MusicPlayback.allTracks.get(MusicPlayback.getSongPosition()).getAlbumArt() != null){
// d = new BitmapDrawable(getResources(),ImageEnhancer.getAdjustedOpacity(ImageEnhancer.getConvertedImage(MusicPlayback.allTracks.get(MusicPlayback.getSongPosition()).getAlbumArt(), SIZE),OPACITY));
// fullPlayer.setBackground(d);
fullPlayer_album.setImageURI(Uri.parse(MusicPlayback.allTracks.get(MusicPlayback.getSongPosition()).getAlbumArt()));
Glide.with(this).asBitmap().load(MusicPlayback.allTracks.get(MusicPlayback.getSongPosition()).getAlbumArt()).into(new SimpleTarget<Bitmap>() {
@Override
public void onResourceReady(Bitmap resource, Transition<? super Bitmap> transition) {
Blurry.with(ActivityMainFullPlayer.this).radius(30).sampling(SAMPLING).from(resource).into(fullPlayer);
}
});
}
else {
Glide.with(ActivityMainFullPlayer.this).asBitmap().load(R.drawable.default_background).into(new SimpleTarget<Bitmap>() {
@Override
public void onResourceReady(Bitmap resource, Transition<? super Bitmap> transition) {
Blurry.with(ActivityMainFullPlayer.this).radius(30).sampling(SAMPLING).from(resource).into(fullPlayer);
}
});
fullPlayer_album.setImageResource(R.drawable.default_album_art);
// NOTE(review): killing the whole process when playback state is unusable is
// drastic - finishing the activity would be gentler. Confirm intent.
}}catch (Exception e){
System.exit(0);
}
fullPlayer_maxTime.setText(MusicPlayback.getTime(MusicPlayback.mediaPlayer.getDuration()));
fullPlayer_shuffle = (ImageView)findViewById(R.id.imageView_full_player_shuffle);
fullPlayer_loop = (ImageView)findViewById(R.id.imageView_full_player_loop);
if(proximity==null)proximity= new Proximity(this);
// Reflect the persisted shuffle/loop state in the toggle icons.
if(MusicPlayback.isShuffle)fullPlayer_shuffle.setImageResource(R.drawable.shuffle_on);
else fullPlayer_shuffle.setImageResource(R.drawable.shuffle_off);
if(MusicPlayback.isLoop)fullPlayer_loop.setImageResource(R.drawable.loop_current);
else fullPlayer_loop.setImageResource(R.drawable.loop_all);
seekBar = (SeekBar)findViewById(R.id.seekBar);
song = MusicPlayback.allTracks.get(MusicPlayback.songPosition);
fullPlayer_song.setText(song.getTitle());
fullPlayer_artist.setText(song.getArtist());
// "myaction" broadcasts from the playback layer trigger a UI refresh (ex()).
broadcast = new CustomBroadcast();
IntentFilter intentFilter = new IntentFilter("myaction");
registerReceiver(broadcast,intentFilter);
if(MusicPlayback.getPlayingStatus()){
fullPlayer_play.setImageResource(R.drawable.music_widget_pause);
}
else
{
fullPlayer_play.setImageResource(R.drawable.music_widget_play);
}
seekBar.setMax(MusicPlayback.mediaPlayer.getDuration());
progressHandler = new Handler();
seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
// Only echo the time label while the user is dragging; programmatic
// updates are handled by the polling runnable.
if(fromUser){
fullPlayer_currentTime.setText(MusicPlayback.getTime(progress));
}
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
// Pause the polling loop so it doesn't fight the user's drag.
progressHandler.removeCallbacks(progressRunnable);
MusicPlayback.isDisturbed = true;
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
MusicPlayback.mediaPlayer.seekTo(seekBar.getProgress());
progressHandler.post(progressRunnable);
}
});
}
// Restarts the seek-bar polling runnable (every 100 ms after an initial 1 s
// delay) and re-enables proximity control / the waveform visualizer per the
// "Pro" and "Vis" settings.
@Override
protected void onResume() {
super.onResume();
progressRunnable = new Runnable() {
@Override
public void run() {
seekBar.setProgress(MusicPlayback.mediaPlayer.getCurrentPosition());
fullPlayer_currentTime.setText(MusicPlayback.getTime(MusicPlayback.mediaPlayer.getCurrentPosition()));
progressHandler.postDelayed(progressRunnable,100);
}
};
progressHandler.postDelayed(progressRunnable,1000);
if(shared.getBoolean("Pro",true)){
proximity.start();
}
if(shared.getBoolean("Vis",true)){
musicWave = (MusicWave)findViewById(R.id.musicWave);
int speed =Visualizer.getMaxCaptureRate()/2;
// NOTE(review): Visualizer(0) taps the global output mix, which presumably
// requires the RECORD_AUDIO permission - confirm it is granted elsewhere.
visualizer = new Visualizer(0);
visualizer.setDataCaptureListener(new Visualizer.OnDataCaptureListener() {
@Override
public void onWaveFormDataCapture(Visualizer visualizer, byte[] waveform, int samplingRate) {
try {
musicWave.updateVisualizer(waveform);
}catch (Exception e){
}
}
@Override
public void onFftDataCapture(Visualizer visualizer, byte[] fft, int samplingRate) {
}
},speed,true,false);
visualizer.setEnabled(true);
}
}
// Stops proximity control and releases the visualizer while backgrounded.
// NOTE(review): the progress runnable is not removed here, so it keeps
// re-posting while the activity is paused - confirm this is intentional.
@Override
protected void onPause() {
super.onPause();
if(shared.getBoolean("Pro",true)){
proximity.stop();
}
if(shared.getBoolean("Vis",true)){
visualizer.release();
musicWave = null;
}
}
// Toggles single-track looping, updates the icon and persists the flag.
public void loop(View view){
MusicPlayback.isLoop = !MusicPlayback.isLoop;
if(MusicPlayback.isLoop)fullPlayer_loop.setImageResource(R.drawable.loop_current);
else fullPlayer_loop.setImageResource(R.drawable.loop_all);
editor.putBoolean("isLoop",MusicPlayback.isLoop).apply();
}
// Toggles shuffle, realigns the playback cursor/shuffle index with the
// currently playing song, and persists the flag.
public void shuffle(View view){
MusicPlayback.isShuffle = !MusicPlayback.isShuffle;
if(MusicPlayback.isShuffle){fullPlayer_shuffle.setImageResource(R.drawable.shuffle_on);
if(MusicPlayback.songPosition>=0){
MusicPlayback.shuffleIndexPosition = MusicPlayback.trackPosition.get(MusicPlayback.songPosition);
}
}
else {
Log.d("Cursor",""+MusicPlayback.cursor);
Log.d("Song Pos ", ""+MusicPlayback.songPosition);
fullPlayer_shuffle.setImageResource(R.drawable.shuffle_off);
for(int i = 0 ; i<MusicPlayback.songSet.size();i++){
if(MusicPlayback.allTracks.get(MusicPlayback.songPosition).getTitle().equals(MusicPlayback.allTracks.get(MusicPlayback.songSet.get(i)).getTitle())){
MusicPlayback.cursor = i; // change cursor position to the currently playing song when user switch offs the shuffle
break;
}
}
}
editor.putBoolean("isShuffle",MusicPlayback.isShuffle).apply();
}
// Returns to the main player screen (launching it if it is not already open).
@Override
public void onBackPressed() {
super.onBackPressed();
if(!MusicPlayback.isMainPlayerOpen){
startActivity(new Intent(this , ActivityMainPlayer.class));
this.overridePendingTransition(R.anim.left_exit_translate,R.anim.right_exit_translate);
}
else this.overridePendingTransition(R.anim.left_exit_translate,R.anim.right_exit_translate);
}
// onClick entry points declared in the layout XML.
public void playPrevFull(View v){
playPrev();
}
public void playNextFull(View v) {
playNext();
}
// Starts the next track (regardless of play/pause state), refreshes the
// labels/seek bounds and shows the pause icon.
private void playNext(){
if(MusicPlayback.getPlayingStatus()){
MusicPlayback.startMediaPlayback(MusicPlayback.getPlayNextMediaIndex());
// ActivityMainPlayer.updateMainUI(MusicPlayback.songPosition);
Log.d("log_next","next_true");
}
else{
Log.d("log_next","next_false");
MusicPlayback.startMediaPlayback(MusicPlayback.getPlayNextMediaIndex());
}
Log.d("song position",String.valueOf(MusicPlayback.songPosition));
updateUI();
fullPlayer_play.setImageResource(R.drawable.music_widget_pause);
}
// Restarts the current track when more than 5 s in; otherwise jumps to the
// previous one. Refreshes the UI and shows the pause icon.
private void playPrev(){
if(MusicPlayback.getPlayingStatus()){
if(seekBar.getProgress()>5000){
MusicPlayback.startMediaPlayback(MusicPlayback.songPosition);
}
else{
MusicPlayback.startMediaPlayback(MusicPlayback.getPlayPrevMediaIndex());
}
Log.d("log_next","next_true");
}
else{
Log.d("log_next","next_false");
MusicPlayback.startMediaPlayback(MusicPlayback.getPlayPrevMediaIndex());
}
Log.d("song position",String.valueOf(MusicPlayback.songPosition));
updateUI();
fullPlayer_play.setImageResource(R.drawable.music_widget_pause);
}
// Refreshes title/artist labels and seek-bar bounds for the current track,
// then delegates the art/icon refresh to ex().
private void updateUI(){
fullPlayer_song.setText(MusicPlayback.allTracks.get(MusicPlayback.songPosition).getTitle());
fullPlayer_artist.setText(MusicPlayback.allTracks.get(MusicPlayback.songPosition).getArtist());
seekBar.setMax(MusicPlayback.mediaPlayer.getDuration());
fullPlayer_maxTime.setText(MusicPlayback.getTime(MusicPlayback.mediaPlayer.getDuration()));
ex();
}
// Play/pause button handler declared in the layout XML.
public void startPauseFull(View v){
if(MusicPlayback.getPlayingStatus()){
fullPlayer_play.setImageResource(R.drawable.music_widget_play);
MusicPlayback.pauseMediaPlayback();
}
else{
MusicPlayback.resumeMediaPlayback();
fullPlayer_play.setImageResource(R.drawable.music_widget_pause);
}
}
// Finishes immediately if the track list has not been initialized.
private void setup(){
if(MusicPlayback.allTracks==null){
finish();
}
}
// Refreshes the whole UI whenever the playback layer broadcasts "myaction".
private class CustomBroadcast extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
ex();
}
}
// Full UI refresh: play/pause icon, album art + blurred backdrop, labels and
// seek-bar bounds for the current track.
private void ex(){
// NOTE(review): this first icon toggle is the inverse of the one at the end
// of this method and is immediately overwritten by it - looks like dead code.
if(MusicPlayback.getPlayingStatus()){
fullPlayer_play.setImageResource(R.drawable.music_widget_play);
}
else{
fullPlayer_play.setImageResource(R.drawable.music_widget_pause);
}
ModelSongs song = MusicPlayback.allTracks.get(MusicPlayback.songPosition);
if (song.getAlbumArt() != null) {
Glide.with(ActivityMainFullPlayer.this).asBitmap().load(MusicPlayback.allTracks.get(MusicPlayback.getSongPosition()).getAlbumArt()).into(new SimpleTarget<Bitmap>() {
@Override
public void onResourceReady(Bitmap resource, Transition<? super Bitmap> transition) {
Blurry.with(ActivityMainFullPlayer.this).radius(30).async().sampling(SAMPLING).from(resource).into(fullPlayer);
}
});
fullPlayer_album.setImageURI(Uri.parse(song.getAlbumArt()));
} else {
Glide.with(ActivityMainFullPlayer.this).asBitmap().load(R.drawable.default_background).into(new SimpleTarget<Bitmap>() {
@Override
public void onResourceReady(Bitmap resource, Transition<? super Bitmap> transition) {
Blurry.with(ActivityMainFullPlayer.this).radius(30).async().sampling(SAMPLING).from(resource).into(fullPlayer);
}
});
fullPlayer_album.setImageResource(R.drawable.default_album_art);
}
fullPlayer_song.setText(song.getTitle());
fullPlayer_artist.setText(song.getArtist());
seekBar.setMax(MusicPlayback.mediaPlayer.getDuration());
fullPlayer_maxTime.setText(MusicPlayback.getTime(MusicPlayback.mediaPlayer.getDuration()));
if (MusicPlayback.getPlayingStatus()) {
fullPlayer_play.setImageResource(R.drawable.music_widget_pause);
} else {
fullPlayer_play.setImageResource(R.drawable.music_widget_play);
}
}
// Unregisters the refresh receiver and frees the cover-flow bitmaps.
// NOTE(review): the CloseApp receiver registered in onCreate() is not
// unregistered here - confirm whether that is deliberate.
@Override
protected void onDestroy() {
super.onDestroy();
unregisterReceiver(broadcast);
try{
coverFlow.releaseAllMemoryResources();
}catch (Exception e ){
}
}
// Finishes this activity when the app-wide "close" broadcast arrives.
private class CloseApp extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
finish();
}
}
// Supplies album art for each entry of the current song set to the
// landscape cover-flow carousel.
private class FeatureCoverflowAdapter extends BaseAdapter{
@Override
public int getCount() {
return MusicPlayback.songSet.size();
}
@Override
public Object getItem(int position) {
return MusicPlayback.allTracks.get(MusicPlayback.songSet.get(position)).getAlbumArt();
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
if(convertView==null){
convertView = getLayoutInflater().inflate(R.layout.adapter_coverflow,parent,false);
}
ImageView imageView = (ImageView)convertView.findViewById(R.id.coverflow_adapter_image);
if(MusicPlayback.allTracks.get(MusicPlayback.songSet.get(position)).getAlbumArt()!=null){
Glide.with(ActivityMainFullPlayer.this)
.asBitmap()
.load(MusicPlayback.allTracks.get(MusicPlayback.songSet.get(position)).getAlbumArt())
.into(imageView);}
else {
Glide.with(ActivityMainFullPlayer.this)
.asBitmap()
.load(R.drawable.default_album_art_track)
.into(imageView);}
return convertView;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.viewer.restfulobjects.rendering.domainobjects;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.databind.node.NullNode;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.joda.time.LocalDateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.apache.isis.core.metamodel.adapter.ObjectAdapter;
import org.apache.isis.core.metamodel.adapter.mgr.AdapterManager;
import org.apache.isis.core.metamodel.facets.object.encodeable.EncodableFacet;
import org.apache.isis.core.metamodel.facets.object.parseable.TextEntryParseException;
import org.apache.isis.core.metamodel.spec.ObjectSpecId;
import org.apache.isis.core.metamodel.spec.ObjectSpecification;
import org.apache.isis.core.runtime.system.context.IsisContext;
import org.apache.isis.core.runtime.system.persistence.PersistenceSession;
import org.apache.isis.core.runtime.system.session.IsisSessionFactory;
import org.apache.isis.viewer.restfulobjects.applib.JsonRepresentation;
/**
* Similar to Isis' value encoding, but with additional support for JSON
* primitives.
*/
public final class JsonValueEncoder {
// Utility class; not instantiable.
private JsonValueEncoder(){}
/**
 * Thrown when a JSON representation was expected to hold a string
 * (encoding a value) but did not.
 */
public static class ExpectedStringRepresentingValueException extends IllegalArgumentException {
private static final long serialVersionUID = 1L;
}
/**
 * Converts between JSON representations and adapters for one family of
 * value classes, carrying the RO "format" and Isis "x-isis-format" hints
 * that are appended alongside the value.
 */
public static abstract class JsonValueConverter {
    /** The RO-spec "format" hint for this value type; may be null. */
    protected final String format;
    /** The Isis-specific format hint, published as "x-isis-format". */
    protected final String xIsisFormat;
    private final Class<?>[] classes;

    public JsonValueConverter(String format, String xIsisFormat, Class<?>... classes) {
        this.format = format;
        this.xIsisFormat = xIsisFormat;
        this.classes = classes;
    }

    /** One spec id per handled class; used to key this converter in the registry. */
    public List<ObjectSpecId> getSpecIds() {
        final List<ObjectSpecId> specIds = Lists.newArrayList();
        for (final Class<?> cls : classes) {
            specIds.add(new ObjectSpecId(cls.getName()));
        }
        return specIds;
    }

    /**
     * The value, otherwise <tt>null</tt>.
     */
    public abstract ObjectAdapter asAdapter(JsonRepresentation repr, String format);

    /** Writes the adapter's value plus format metadata into {@code repr}; returns the value written. */
    public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
        final Object value = unwrapAsObjectElseNullNode(objectAdapter);
        repr.mapPut("value", value);
        appendFormats(repr, this.format, this.xIsisFormat, suppressExtensions);
        return value;
    }

    /** The adapter's underlying pojo. */
    public Object asObject(ObjectAdapter objectAdapter, String format) {
        return objectAdapter.getObject();
    }
}
// Registry of value converters keyed by the spec id of each supported class;
// populated once by the static initializer below (insertion order preserved).
private static Map<ObjectSpecId, JsonValueConverter> converterBySpec = Maps.newLinkedHashMap();
// Registers the given converter under every spec id it reports.
private static void putConverter(JsonValueConverter jvc) {
final List<ObjectSpecId> specIds = jvc.getSpecIds();
for (ObjectSpecId specId : specIds) {
converterBySpec.put(specId, jvc);
}
}
static {
putConverter(new JsonValueConverter(null, "string", String.class){
@Override
public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
if (repr.isString()) {
return adapterFor(repr.asString());
}
return null;
}
@Override
public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
if(obj instanceof String) {
final String str = (String) obj;
repr.mapPut("value", str);
} else {
repr.mapPut("value", obj);
}
appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
return obj;
}
});
putConverter(new JsonValueConverter(null, "boolean", boolean.class, Boolean.class){
@Override
public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
if (repr.isBoolean()) {
return adapterFor(repr.asBoolean());
}
return null;
}
@Override
public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
if(obj instanceof Boolean) {
final Boolean b = (Boolean) obj;
repr.mapPut("value", b);
} else {
repr.mapPut("value", obj);
}
appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
return obj;
}
});
putConverter(new JsonValueConverter("int", "byte", byte.class, Byte.class){
@Override
public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
if (repr.isNumber()) {
return adapterFor(repr.asNumber().byteValue());
}
if (repr.isInt()) {
return adapterFor((byte)(int)repr.asInt());
}
if (repr.isLong()) {
return adapterFor((byte)(long)repr.asLong());
}
if (repr.isBigInteger()) {
return adapterFor(repr.asBigInteger().byteValue());
}
return null;
}
@Override
public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
if(obj instanceof Byte) {
final Byte b = (Byte) obj;
repr.mapPut("value", b);
} else {
repr.mapPut("value", obj);
}
appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
return obj;
}
});
putConverter(new JsonValueConverter("int", "short", short.class, Short.class){
@Override
public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
if (repr.isNumber()) {
return adapterFor(repr.asNumber().shortValue());
}
if (repr.isInt()) {
return adapterFor((short)(int)repr.asInt());
}
if (repr.isLong()) {
return adapterFor((short)(long)repr.asLong());
}
if (repr.isBigInteger()) {
return adapterFor(repr.asBigInteger().shortValue());
}
return null;
}
@Override
public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
if(obj instanceof Short) {
final Short s = (Short) obj;
repr.mapPut("value", s);
} else {
repr.mapPut("value", obj);
}
appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
return obj;
}
});
putConverter(new JsonValueConverter("int", "int", int.class, Integer.class){
@Override
public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
if (repr.isInt()) {
return adapterFor(repr.asInt());
}
if (repr.isLong()) {
return adapterFor((int)(long)repr.asLong());
}
if (repr.isBigInteger()) {
return adapterFor(repr.asBigInteger().intValue());
}
if (repr.isNumber()) {
return adapterFor(repr.asNumber().intValue());
}
return null;
}
@Override
public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
if(obj instanceof Integer) {
final Integer i = (Integer) obj;
repr.mapPut("value", i);
} else {
repr.mapPut("value", obj);
}
appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
return obj;
}
});
putConverter(new JsonValueConverter("int", "long", long.class, Long.class){
@Override
public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
if (repr.isLong()) {
return adapterFor(repr.asLong());
}
if (repr.isInt()) {
return adapterFor(repr.asLong());
}
if (repr.isBigInteger()) {
return adapterFor(repr.asBigInteger().longValue());
}
if (repr.isNumber()) {
return adapterFor(repr.asNumber().longValue());
}
return null;
}
@Override
public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
if(obj instanceof Long) {
final Long l = (Long) obj;
repr.mapPut("value", l);
} else {
repr.mapPut("value", obj);
}
appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
return obj;
}
});
putConverter(new JsonValueConverter("decimal", "float", float.class, Float.class){
@Override
public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
if (repr.isDouble()) {
return adapterFor((float)(double)repr.asDouble());
}
if (repr.isNumber()) {
return adapterFor(repr.asNumber().floatValue());
}
if (repr.isLong()) {
return adapterFor((float)repr.asLong());
}
if (repr.isInt()) {
return adapterFor((float)repr.asInt());
}
if (repr.isBigInteger()) {
return adapterFor(repr.asBigInteger().floatValue());
}
return null;
}
@Override
public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
if(obj instanceof Float) {
final Float f = (Float) obj;
repr.mapPut("value", f);
} else {
repr.mapPut("value", obj);
}
appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
return obj;
}
});
putConverter(new JsonValueConverter("decimal", "double", double.class, Double.class){
@Override
public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
if (repr.isDouble()) {
return adapterFor(repr.asDouble());
}
if (repr.isLong()) {
return adapterFor((double)repr.asLong());
}
if (repr.isInt()) {
return adapterFor((double)repr.asInt());
}
if (repr.isBigInteger()) {
return adapterFor(repr.asBigInteger().doubleValue());
}
if (repr.isBigDecimal()) {
return adapterFor(repr.asBigDecimal().doubleValue());
}
if (repr.isNumber()) {
return adapterFor(repr.asNumber().doubleValue());
}
return null;
}
@Override
public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
if(obj instanceof Double) {
final Double d = (Double) obj;
repr.mapPut("value", d);
} else {
repr.mapPut("value", obj);
}
appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
return obj;
}
});
        // char / Character: first character of a non-empty string, or an in-range int code.
        putConverter(new JsonValueConverter(null, "char", char.class, Character.class){
            @Override
            public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
                if (repr.isString()) {
                    final String str = repr.asString();
                    if(str != null && str.length()>0) {
                        // only the first character is used; any remainder is silently dropped
                        return adapterFor(str.charAt(0));
                    }
                }
                // in case a char literal was provided
                if(repr.isInt()) {
                    final Integer x = repr.asInt();
                    if(Character.MIN_VALUE <= x && x <= Character.MAX_VALUE) {
                        char c = (char) x.intValue();
                        return adapterFor(c);
                    }
                }
                // empty string, null, or out-of-range int
                return null;
            }
            @Override
            public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
                final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
                if(obj instanceof Character) {
                    final Character c = (Character) obj;
                    repr.mapPut("value", c);
                } else {
                    // null adapter unwraps to NullNode
                    repr.mapPut("value", obj);
                }
                appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
                return obj;
            }
        });
        // BigInteger: parses strings and any integral node; honours a caller-supplied format.
        putConverter(new JsonValueConverter("big-integer(18)", "javamathbiginteger", BigInteger.class){
            @Override
            public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
                if (repr.isString()) {
                    return adapterFor(new BigInteger(repr.asString()));
                }
                if (repr.isBigInteger()) {
                    return adapterFor(repr.asBigInteger(format));
                }
                if (repr.isLong()) {
                    return adapterFor(BigInteger.valueOf(repr.asLong()));
                }
                if (repr.isInt()) {
                    return adapterFor(BigInteger.valueOf(repr.asInt()));
                }
                if (repr.isNumber()) {
                    // truncates any fractional part via longValue()
                    return adapterFor(BigInteger.valueOf(repr.asNumber().longValue()));
                }
                return null;
            }
            @Override
            public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
                final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
                if(obj instanceof BigInteger) {
                    final BigInteger bi = (BigInteger) obj;
                    repr.mapPut("value", bi);
                } else {
                    repr.mapPut("value", obj);
                }
                // unlike the primitive converters, a caller-supplied format takes precedence here
                appendFormats(repr, format != null? format: this.format, xIsisFormat, suppressExtensions);
                return obj;
            }
        });
        // BigDecimal: parses strings and any numeric node; honours a caller-supplied format.
        putConverter(new JsonValueConverter("big-decimal", "javamathbigdecimal", BigDecimal.class){
            @Override
            public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
                if (repr.isString()) {
                    return adapterFor(new BigDecimal(repr.asString()));
                }
                if (repr.isBigDecimal()) {
                    return adapterFor(repr.asBigDecimal(format));
                }
                if (repr.isBigInteger()) {
                    return adapterFor(new BigDecimal(repr.asBigInteger()));
                }
                if (repr.isDouble()) {
                    // BigDecimal.valueOf goes via Double.toString, avoiding binary-double artefacts
                    return adapterFor(BigDecimal.valueOf(repr.asDouble()));
                }
                if (repr.isLong()) {
                    return adapterFor(BigDecimal.valueOf(repr.asLong()));
                }
                if (repr.isInt()) {
                    return adapterFor(BigDecimal.valueOf(repr.asInt()));
                }
                return null;
            }
            @Override
            public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
                final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
                if(obj instanceof BigDecimal) {
                    final BigDecimal bd = (BigDecimal) obj;
                    repr.mapPut("value", bd);
                } else {
                    repr.mapPut("value", obj);
                }
                // a caller-supplied format takes precedence over the converter's default
                appendFormats(repr, format != null ? format : this.format, xIsisFormat, suppressExtensions);
                return obj;
            }
        });
        // Joda LocalDate: tries several ISO/basic date patterns in order; first match wins.
        putConverter(new JsonValueConverter("date", "jodalocaldate", LocalDate.class){
            // these formatters do NOT use withZoneUTC()
            final List<DateTimeFormatter> formatters = Arrays.asList(
                    ISODateTimeFormat.date(),
                    ISODateTimeFormat.basicDate(),
                    DateTimeFormat.forPattern("yyyyMMdd"),
                    JsonRepresentation.yyyyMMdd
                    );
            @Override
            public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
                if (repr.isString()) {
                    final String dateStr = repr.asString();
                    for (DateTimeFormatter formatter : formatters) {
                        try {
                            final LocalDate parsedDate = formatter.parseLocalDate(dateStr);
                            return adapterFor(parsedDate);
                        } catch (IllegalArgumentException ex) {
                            // fall through: try the next formatter
                        }
                    }
                }
                // not a string, or no formatter matched
                return null;
            }
            @Override
            public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
                final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
                if(obj instanceof LocalDate) {
                    final LocalDate date = (LocalDate) obj;
                    // NOTE(review): toDateTimeAtStartOfDay() applies the default time zone — confirm intended
                    final String dateStr = formatters.get(0).print(date.toDateTimeAtStartOfDay());
                    repr.mapPut("value", dateStr);
                } else {
                    repr.mapPut("value", obj);
                }
                appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
                return obj;
            }
        });
        // Joda LocalDateTime: tries several ISO/basic date-time patterns (all pinned to UTC).
        putConverter(new JsonValueConverter("date-time", "jodalocaldatetime", LocalDateTime.class){
            final List<DateTimeFormatter> formatters = Arrays.asList(
                    ISODateTimeFormat.dateTimeNoMillis().withZoneUTC(),
                    ISODateTimeFormat.dateTime().withZoneUTC(),
                    ISODateTimeFormat.basicDateTimeNoMillis().withZoneUTC(),
                    ISODateTimeFormat.basicDateTime().withZoneUTC(),
                    JsonRepresentation.yyyyMMddTHHmmssZ.withZoneUTC()
                    );
            @Override
            public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
                if (repr.isString()) {
                    final String dateStr = repr.asString();
                    for (DateTimeFormatter formatter : formatters) {
                        try {
                            final LocalDateTime parsedDate = formatter.parseLocalDateTime(dateStr);
                            return adapterFor(parsedDate);
                        } catch (IllegalArgumentException ex) {
                            // fall through: try the next formatter
                        }
                    }
                }
                return null;
            }
            @Override
            public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
                final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
                if(obj instanceof LocalDateTime) {
                    final LocalDateTime date = (LocalDateTime) obj;
                    // NOTE(review): toDateTime() interprets the local time in the default zone
                    // before the UTC formatter prints it — confirm intended
                    final String dateStr = formatters.get(0).print(date.toDateTime());
                    repr.mapPut("value", dateStr);
                } else {
                    repr.mapPut("value", obj);
                }
                appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
                return obj;
            }
        });
        // Joda DateTime: tries several ISO/basic date-time patterns (all pinned to UTC).
        putConverter(new JsonValueConverter("date-time", "jodadatetime", DateTime.class){
            final List<DateTimeFormatter> formatters = Arrays.asList(
                    ISODateTimeFormat.dateTimeNoMillis().withZoneUTC(),
                    ISODateTimeFormat.dateTime().withZoneUTC(),
                    ISODateTimeFormat.basicDateTimeNoMillis().withZoneUTC(),
                    ISODateTimeFormat.basicDateTime().withZoneUTC(),
                    JsonRepresentation.yyyyMMddTHHmmssZ.withZoneUTC()
                    );
            @Override
            public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
                if (repr.isString()) {
                    final String dateStr = repr.asString();
                    for (DateTimeFormatter formatter : formatters) {
                        try {
                            final DateTime parsedDate = formatter.parseDateTime(dateStr);
                            return adapterFor(parsedDate);
                        } catch (IllegalArgumentException ex) {
                            // fall through: try the next formatter
                        }
                    }
                }
                return null;
            }
            @Override
            public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
                final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
                if(obj instanceof DateTime) {
                    final DateTime date = (DateTime) obj;
                    // toDateTime() on a DateTime is a no-op conversion
                    final String dateStr = formatters.get(0).print(date.toDateTime());
                    repr.mapPut("value", dateStr);
                } else {
                    repr.mapPut("value", obj);
                }
                appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
                return obj;
            }
        });
        // java.util.Date: parsed via Joda's UTC date-time formatters, then converted to Date.
        putConverter(new JsonValueConverter("date-time", "javautildate", java.util.Date.class){
            final List<DateTimeFormatter> formatters = Arrays.asList(
                    ISODateTimeFormat.dateTimeNoMillis().withZoneUTC(),
                    ISODateTimeFormat.dateTime().withZoneUTC(),
                    ISODateTimeFormat.basicDateTimeNoMillis().withZoneUTC(),
                    ISODateTimeFormat.basicDateTime().withZoneUTC(),
                    JsonRepresentation.yyyyMMddTHHmmssZ.withZoneUTC()
                    );
            @Override
            public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
                if (repr.isString()) {
                    final String dateStr = repr.asString();
                    for (DateTimeFormatter formatter : formatters) {
                        try {
                            final DateTime parseDateTime = formatter.parseDateTime(dateStr);
                            final java.util.Date parsedDate = parseDateTime.toDate();
                            return adapterFor(parsedDate);
                        } catch (IllegalArgumentException ex) {
                            // fall through: try the next formatter
                        }
                    }
                }
                return null;
            }
            @Override
            public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
                final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
                if(obj instanceof java.util.Date) {
                    final java.util.Date date = (java.util.Date) obj;
                    final DateTimeFormatter dateTimeFormatter = formatters.get(0);
                    final String dateStr = dateTimeFormatter.print(new DateTime(date));
                    repr.mapPut("value", dateStr);
                } else {
                    repr.mapPut("value", obj);
                }
                appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
                return obj;
            }
        });
        // java.sql.Date: parsed via Joda's UTC date formatters, millis-based construction.
        putConverter(new JsonValueConverter("date", "javasqldate", java.sql.Date.class){
            final List<DateTimeFormatter> formatters = Arrays.asList(
                    ISODateTimeFormat.date().withZoneUTC(),
                    ISODateTimeFormat.basicDate().withZoneUTC(),
                    JsonRepresentation.yyyyMMdd.withZoneUTC()
                    );
            @Override
            public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
                if (repr.isString()) {
                    final String dateStr = repr.asString();
                    for (DateTimeFormatter formatter : formatters) {
                        try {
                            final DateTime parseDateTime = formatter.parseDateTime(dateStr);
                            final java.sql.Date parsedDate = new java.sql.Date(parseDateTime.getMillis());
                            return adapterFor(parsedDate);
                        } catch (IllegalArgumentException ex) {
                            // fall through: try the next formatter
                        }
                    }
                }
                return null;
            }
            @Override
            public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
                final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
                if(obj instanceof java.sql.Date) {
                    final java.sql.Date date = (java.sql.Date) obj;
                    final String dateStr = formatters.get(0).print(new DateTime(date));
                    repr.mapPut("value", dateStr);
                } else {
                    repr.mapPut("value", obj);
                }
                appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
                return obj;
            }
        });
        // java.sql.Time: parsed via Joda's UTC time-of-day formatters, millis-based construction.
        putConverter(new JsonValueConverter("time", "javasqltime", java.sql.Time.class){
            final List<DateTimeFormatter> formatters = Arrays.asList(
                    ISODateTimeFormat.hourMinuteSecond().withZoneUTC(),
                    ISODateTimeFormat.basicTimeNoMillis().withZoneUTC(),
                    ISODateTimeFormat.basicTime().withZoneUTC(),
                    JsonRepresentation._HHmmss.withZoneUTC()
                    );
            @Override
            public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
                if (repr.isString()) {
                    final String dateStr = repr.asString();
                    for (DateTimeFormatter formatter : formatters) {
                        try {
                            final DateTime parseDateTime = formatter.parseDateTime(dateStr);
                            final java.sql.Time parsedTime = new java.sql.Time(parseDateTime.getMillis());
                            return adapterFor(parsedTime);
                        } catch (IllegalArgumentException ex) {
                            // fall through: try the next formatter
                        }
                    }
                }
                return null;
            }
            @Override
            public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
                final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
                if(obj instanceof java.sql.Time) {
                    final java.sql.Time date = (java.sql.Time) obj;
                    final String dateStr = formatters.get(0).print(new DateTime(date));
                    repr.mapPut("value", dateStr);
                } else {
                    repr.mapPut("value", obj);
                }
                appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
                return obj;
            }
        });
        // java.sql.Timestamp: epoch millis, accepted as a JSON long or a numeric string.
        putConverter(new JsonValueConverter("utc-millisec", "javasqltimestamp", java.sql.Timestamp.class){
            @Override
            public ObjectAdapter asAdapter(JsonRepresentation repr, String format) {
                if (repr.isLong()) {
                    final Long millis = repr.asLong();
                    final java.sql.Timestamp parsedTimestamp = new java.sql.Timestamp(millis);
                    return adapterFor(parsedTimestamp);
                }
                if (repr.isString()) {
                    final String dateStr = repr.asString();
                    try {
                        final Long parseMillis = Long.parseLong(dateStr);
                        final java.sql.Timestamp parsedTimestamp = new java.sql.Timestamp(parseMillis);
                        return adapterFor(parsedTimestamp);
                    } catch (IllegalArgumentException ex) {
                        // fall through: NumberFormatException (a subclass) lands here on non-numeric input
                    }
                }
                return null;
            }
            @Override
            public Object appendValueAndFormat(ObjectAdapter objectAdapter, String format, JsonRepresentation repr, boolean suppressExtensions) {
                final Object obj = unwrapAsObjectElseNullNode(objectAdapter);
                if(obj instanceof java.sql.Timestamp) {
                    final java.sql.Timestamp date = (java.sql.Timestamp) obj;
                    // rendered as raw epoch millis, matching the "utc-millisec" format
                    final long millisStr = date.getTime();
                    repr.mapPut("value", millisStr);
                } else {
                    repr.mapPut("value", obj);
                }
                appendFormats(repr, this.format, xIsisFormat, suppressExtensions);
                return obj;
            }
        });
    }
public static ObjectAdapter asAdapter(final ObjectSpecification objectSpec, final JsonRepresentation argValueRepr, final String format) {
if(argValueRepr == null) {
return null;
}
if (objectSpec == null) {
throw new IllegalArgumentException("ObjectSpecification is required");
}
if (!argValueRepr.isValue()) {
throw new IllegalArgumentException("Representation must be of a value");
}
final EncodableFacet encodableFacet = objectSpec.getFacet(EncodableFacet.class);
if (encodableFacet == null) {
String reason = "ObjectSpec expected to have an EncodableFacet";
throw new IllegalArgumentException(reason);
}
final ObjectSpecId specId = objectSpec.getSpecId();
final JsonValueConverter jvc = converterBySpec.get(specId);
if(jvc == null) {
// best effort
if (argValueRepr.isString()) {
final String argStr = argValueRepr.asString();
return encodableFacet.fromEncodedString(argStr);
}
throw new IllegalArgumentException("Unable to parse value");
}
final ObjectAdapter asAdapter = jvc.asAdapter(argValueRepr, format);
if(asAdapter != null) {
return asAdapter;
}
// last attempt
if (argValueRepr.isString()) {
final String argStr = argValueRepr.asString();
try {
return encodableFacet.fromEncodedString(argStr);
} catch(TextEntryParseException ex) {
throw new IllegalArgumentException(ex.getMessage());
}
}
throw new IllegalArgumentException("Could not parse value '" + argValueRepr.asString() + "' as a " + objectSpec.getFullIdentifier());
}
public static Object appendValueAndFormat(ObjectSpecification objectSpec, ObjectAdapter objectAdapter, JsonRepresentation repr, String format, boolean suppressExtensions) {
final JsonValueConverter jvc = converterBySpec.get(objectSpec.getSpecId());
if(jvc != null) {
return jvc.appendValueAndFormat(objectAdapter, format, repr, suppressExtensions);
} else {
final EncodableFacet encodableFacet = objectSpec.getFacet(EncodableFacet.class);
if (encodableFacet == null) {
throw new IllegalArgumentException("objectSpec expected to have EncodableFacet");
}
Object value = objectAdapter != null? encodableFacet.toEncodedString(objectAdapter): NullNode.getInstance();
repr.mapPut("value", value);
appendFormats(repr, "string", "string", suppressExtensions);
return value;
}
}
public static Object asObject(final ObjectAdapter objectAdapter, final String format) {
if (objectAdapter == null) {
throw new IllegalArgumentException("objectAdapter cannot be null");
}
final ObjectSpecification objectSpec = objectAdapter.getSpecification();
final JsonValueConverter jvc = converterBySpec.get(objectSpec.getSpecId());
if(jvc != null) {
return jvc.asObject(objectAdapter, format);
}
// else
final EncodableFacet encodableFacet = objectSpec.getFacet(EncodableFacet.class);
if (encodableFacet == null) {
throw new IllegalArgumentException("objectSpec expected to have EncodableFacet");
}
return encodableFacet.toEncodedString(objectAdapter);
}
private static void appendFormats(JsonRepresentation repr, String format, String xIsisFormat, boolean suppressExtensions) {
if(format != null) {
repr.mapPut("format", format);
}
if(!suppressExtensions && xIsisFormat != null) {
repr.mapPut("extensions.x-isis-format", xIsisFormat);
}
}
private static Object unwrapAsObjectElseNullNode(ObjectAdapter objectAdapter) {
return objectAdapter != null? objectAdapter.getObject(): NullNode.getInstance();
}
    // Wraps a pojo into an ObjectAdapter via the (possibly test-substituted) AdapterManager.
    private static ObjectAdapter adapterFor(Object value) {
        return getAdapterManager().adapterFor(value);
    }
    // Test seam: when non-null, overrides the adapter manager normally obtained
    // from the current persistence session.
    private static AdapterManager testAdapterManager;
    // for testing purposes only
    static void testSetAdapterManager(AdapterManager adapterManager) {
        JsonValueEncoder.testAdapterManager = adapterManager;
    }
public static AdapterManager getAdapterManager() {
return testAdapterManager != null? testAdapterManager: getPersistenceSession();
}
    // Looks up the persistence session of the current Isis session.
    private static PersistenceSession getPersistenceSession() {
        return getIsisSessionFactory().getCurrentSession().getPersistenceSession();
    }
    // Indirection over the global IsisContext, package-private for testability.
    static IsisSessionFactory getIsisSessionFactory() {
        return IsisContext.getSessionFactory();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import org.junit.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
public class InterpreterSettingTest {
@Test
public void testCreateInterpreters() {
InterpreterOption interpreterOption = new InterpreterOption();
interpreterOption.setPerUser(InterpreterOption.SHARED);
InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(),
"echo", true, new HashMap<String, Object>(), new HashMap<String, Object>());
InterpreterInfo interpreterInfo2 = new InterpreterInfo(DoubleEchoInterpreter.class.getName(),
"double_echo", false, new HashMap<String, Object>(),
new HashMap<String, Object>());
List<InterpreterInfo> interpreterInfos = new ArrayList<>();
interpreterInfos.add(interpreterInfo1);
interpreterInfos.add(interpreterInfo2);
InterpreterSetting interpreterSetting = new InterpreterSetting.Builder()
.setId("id")
.setName("test")
.setGroup("test")
.setInterpreterInfos(interpreterInfos)
.setOption(interpreterOption)
.create();
// create default interpreter for user1 and note1
assertEquals(EchoInterpreter.class.getName(), interpreterSetting.getDefaultInterpreter("user1", "note1").getClassName());
// create interpreter echo for user1 and note1
assertEquals(EchoInterpreter.class.getName(), interpreterSetting.getInterpreter("user1", "note1", "echo").getClassName());
assertEquals(interpreterSetting.getDefaultInterpreter("user1", "note1"), interpreterSetting.getInterpreter("user1", "note1", "echo"));
// create interpreter double_echo for user1 and note1
assertEquals(DoubleEchoInterpreter.class.getName(), interpreterSetting.getInterpreter("user1", "note1", "double_echo").getClassName());
// create non-existed interpreter
assertNull(interpreterSetting.getInterpreter("user1", "note1", "invalid_echo"));
}
@Test
public void testSharedMode() {
InterpreterOption interpreterOption = new InterpreterOption();
interpreterOption.setPerUser(InterpreterOption.SHARED);
InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(),
"echo", true, new HashMap<String, Object>(), new HashMap<String, Object>());
InterpreterInfo interpreterInfo2 = new InterpreterInfo(DoubleEchoInterpreter.class.getName(),
"double_echo", false, new HashMap<String, Object>(), new HashMap<String, Object>());
List<InterpreterInfo> interpreterInfos = new ArrayList<>();
interpreterInfos.add(interpreterInfo1);
interpreterInfos.add(interpreterInfo2);
InterpreterSetting interpreterSetting = new InterpreterSetting.Builder()
.setId("id")
.setName("test")
.setGroup("test")
.setInterpreterInfos(interpreterInfos)
.setOption(interpreterOption)
.create();
// create default interpreter for user1 and note1
Interpreter interpreter = interpreterSetting.getDefaultInterpreter("user1", "note1");
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
assertEquals("test-shared_process", interpreter.getInterpreterGroup().getId());
// create default interpreter for user2 and note1
interpreterSetting.getDefaultInterpreter("user2", "note1");
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
// create default interpreter user1 and note2
interpreterSetting.getDefaultInterpreter("user1", "note2");
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
// only 1 session is created, this session is shared across users and notes
assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
interpreterSetting.closeInterpreters("note1", "user1");
assertEquals(0, interpreterSetting.getAllInterpreterGroups().size());
}
@Test
public void testPerUserScopedMode() {
InterpreterOption interpreterOption = new InterpreterOption();
interpreterOption.setPerUser(InterpreterOption.SCOPED);
InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(),
"echo", true, new HashMap<String, Object>(), new HashMap<String, Object>());
InterpreterInfo interpreterInfo2 = new InterpreterInfo(DoubleEchoInterpreter.class.getName(),
"double_echo", false, new HashMap<String, Object>(), new HashMap<String, Object>());
List<InterpreterInfo> interpreterInfos = new ArrayList<>();
interpreterInfos.add(interpreterInfo1);
interpreterInfos.add(interpreterInfo2);
InterpreterSetting interpreterSetting = new InterpreterSetting.Builder()
.setId("id")
.setName("test")
.setGroup("test")
.setInterpreterInfos(interpreterInfos)
.setOption(interpreterOption)
.create();
// create interpreter for user1 and note1
Interpreter interpreter = interpreterSetting.getDefaultInterpreter("user1", "note1");
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
assertEquals("test-shared_process", interpreter.getInterpreterGroup().getId());
// create interpreter for user2 and note1
interpreterSetting.getDefaultInterpreter("user2", "note1");
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
assertEquals(2, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
interpreterSetting.closeInterpreters("user1", "note1");
// InterpreterGroup is still there, but one session is removed
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
interpreterSetting.closeInterpreters("user2", "note1");
assertEquals(0, interpreterSetting.getAllInterpreterGroups().size());
}
@Test
public void testPerNoteScopedMode() {
InterpreterOption interpreterOption = new InterpreterOption();
interpreterOption.setPerNote(InterpreterOption.SCOPED);
InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(),
"echo", true, new HashMap<String, Object>(), new HashMap<String, Object>());
InterpreterInfo interpreterInfo2 = new InterpreterInfo(DoubleEchoInterpreter.class.getName(),
"double_echo", false, new HashMap<String, Object>(), new HashMap<String, Object>());
List<InterpreterInfo> interpreterInfos = new ArrayList<>();
interpreterInfos.add(interpreterInfo1);
interpreterInfos.add(interpreterInfo2);
InterpreterSetting interpreterSetting = new InterpreterSetting.Builder()
.setId("id")
.setName("test")
.setGroup("test")
.setInterpreterInfos(interpreterInfos)
.setOption(interpreterOption)
.create();
// create interpreter for user1 and note1
Interpreter interpreter = interpreterSetting.getDefaultInterpreter("user1", "note1");
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
assertEquals("test-shared_process", interpreter.getInterpreterGroup().getId());
// create interpreter for user1 and note2
interpreterSetting.getDefaultInterpreter("user1", "note2");
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
assertEquals(2, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
interpreterSetting.closeInterpreters("user1", "note1");
// InterpreterGroup is still there, but one session is removed
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
interpreterSetting.closeInterpreters("user1", "note2");
assertEquals(0, interpreterSetting.getAllInterpreterGroups().size());
}
@Test
public void testPerUserIsolatedMode() {
InterpreterOption interpreterOption = new InterpreterOption();
interpreterOption.setPerUser(InterpreterOption.ISOLATED);
InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(),
"echo", true, new HashMap<String, Object>(), new HashMap<String, Object>());
InterpreterInfo interpreterInfo2 = new InterpreterInfo(DoubleEchoInterpreter.class.getName(),
"double_echo", false, new HashMap<String, Object>(), new HashMap<String, Object>());
List<InterpreterInfo> interpreterInfos = new ArrayList<>();
interpreterInfos.add(interpreterInfo1);
interpreterInfos.add(interpreterInfo2);
InterpreterSetting interpreterSetting = new InterpreterSetting.Builder()
.setId("id")
.setName("test")
.setGroup("test")
.setInterpreterInfos(interpreterInfos)
.setOption(interpreterOption)
.create();
// create interpreter for user1 and note1
Interpreter interpreter1 = interpreterSetting.getDefaultInterpreter("user1", "note1");
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
assertEquals("test-user1", interpreter1.getInterpreterGroup().getId());
// create interpreter for user2 and note1
Interpreter interpreter2 = interpreterSetting.getDefaultInterpreter("user2", "note1");
assertEquals(2, interpreterSetting.getAllInterpreterGroups().size());
assertEquals("test-user2", interpreter2.getInterpreterGroup().getId());
// Each user own one InterpreterGroup and one session per InterpreterGroup
assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(1).getSessionNum());
interpreterSetting.closeInterpreters("user1", "note1");
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
interpreterSetting.closeInterpreters("user2", "note1");
assertEquals(0, interpreterSetting.getAllInterpreterGroups().size());
}
  @Test
  public void testPerNoteIsolatedMode() {
    // Per-note ISOLATED: every note gets a dedicated InterpreterGroup (process).
    InterpreterOption interpreterOption = new InterpreterOption();
    interpreterOption.setPerNote(InterpreterOption.ISOLATED);
    InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(),
        "echo", true, new HashMap<String, Object>(), new HashMap<String, Object>());
    InterpreterInfo interpreterInfo2 = new InterpreterInfo(DoubleEchoInterpreter.class.getName(),
        "double_echo", false, new HashMap<String, Object>(), new HashMap<String, Object>());
    List<InterpreterInfo> interpreterInfos = new ArrayList<>();
    interpreterInfos.add(interpreterInfo1);
    interpreterInfos.add(interpreterInfo2);
    InterpreterSetting interpreterSetting = new InterpreterSetting.Builder()
        .setId("id")
        .setName("test")
        .setGroup("test")
        .setInterpreterInfos(interpreterInfos)
        .setOption(interpreterOption)
        .create();
    // create interpreter for user1 and note1
    Interpreter interpreter1 = interpreterSetting.getDefaultInterpreter("user1", "note1");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
    assertEquals("test-note1", interpreter1.getInterpreterGroup().getId());
    // create interpreter for user1 and note2 (comment fixed: was "user2 and note2")
    Interpreter interpreter2 = interpreterSetting.getDefaultInterpreter("user1", "note2");
    assertEquals(2, interpreterSetting.getAllInterpreterGroups().size());
    assertEquals("test-note2", interpreter2.getInterpreterGroup().getId());
    // Each note owns one InterpreterGroup and one session per InterpreterGroup
    // (comment fixed: was "Each user", but this test exercises per-note isolation)
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(1).getSessionNum());
    interpreterSetting.closeInterpreters("user1", "note1");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
    interpreterSetting.closeInterpreters("user1", "note2");
    assertEquals(0, interpreterSetting.getAllInterpreterGroups().size());
  }
@Test
public void testPerUserIsolatedPerNoteScopedMode() {
InterpreterOption interpreterOption = new InterpreterOption();
interpreterOption.setPerUser(InterpreterOption.ISOLATED);
interpreterOption.setPerNote(InterpreterOption.SCOPED);
InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(),
"echo", true, new HashMap<String, Object>(), new HashMap<String, Object>());
InterpreterInfo interpreterInfo2 = new InterpreterInfo(DoubleEchoInterpreter.class.getName(),
"double_echo", false, new HashMap<String, Object>(), new HashMap<String, Object>());
List<InterpreterInfo> interpreterInfos = new ArrayList<>();
interpreterInfos.add(interpreterInfo1);
interpreterInfos.add(interpreterInfo2);
InterpreterSetting interpreterSetting = new InterpreterSetting.Builder()
.setId("id")
.setName("test")
.setGroup("test")
.setInterpreterInfos(interpreterInfos)
.setOption(interpreterOption)
.create();
// create interpreter for user1 and note1
Interpreter interpreter1 = interpreterSetting.getDefaultInterpreter("user1", "note1");
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
assertEquals("test-user1", interpreter1.getInterpreterGroup().getId());
interpreterSetting.getDefaultInterpreter("user1", "note2");
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
assertEquals(2, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
// create interpreter for user2 and note1
Interpreter interpreter2 = interpreterSetting.getDefaultInterpreter("user2", "note1");
assertEquals(2, interpreterSetting.getAllInterpreterGroups().size());
assertEquals("test-user2", interpreter2.getInterpreterGroup().getId());
// group1 for user1 has 2 sessions, and group2 for user2 has 1 session
assertEquals(interpreterSetting.getInterpreterGroup("user1", "note1"), interpreterSetting.getInterpreterGroup("user1", "note2"));
assertEquals(2, interpreterSetting.getInterpreterGroup("user1", "note1").getSessionNum());
assertEquals(2, interpreterSetting.getInterpreterGroup("user1", "note2").getSessionNum());
assertEquals(1, interpreterSetting.getInterpreterGroup("user2", "note1").getSessionNum());
// close one session for user1
interpreterSetting.closeInterpreters("user1", "note1");
assertEquals(2, interpreterSetting.getAllInterpreterGroups().size());
assertEquals(1, interpreterSetting.getInterpreterGroup("user1", "note1").getSessionNum());
// close another session for user1
interpreterSetting.closeInterpreters("user1", "note2");
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
// close session for user2
interpreterSetting.closeInterpreters("user2", "note1");
assertEquals(0, interpreterSetting.getAllInterpreterGroups().size());
}
@Test
public void testPerUserIsolatedPerNoteIsolatedMode() {
InterpreterOption interpreterOption = new InterpreterOption();
interpreterOption.setPerUser(InterpreterOption.ISOLATED);
interpreterOption.setPerNote(InterpreterOption.ISOLATED);
InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(),
"echo", true, new HashMap<String, Object>(), new HashMap<String, Object>());
InterpreterInfo interpreterInfo2 = new InterpreterInfo(DoubleEchoInterpreter.class.getName(),
"double_echo", false, new HashMap<String, Object>(), new HashMap<String, Object>());
List<InterpreterInfo> interpreterInfos = new ArrayList<>();
interpreterInfos.add(interpreterInfo1);
interpreterInfos.add(interpreterInfo2);
InterpreterSetting interpreterSetting = new InterpreterSetting.Builder()
.setId("id")
.setName("test")
.setGroup("test")
.setInterpreterInfos(interpreterInfos)
.setOption(interpreterOption)
.create();
// create interpreter for user1 and note1
Interpreter interpreter1 = interpreterSetting.getDefaultInterpreter("user1", "note1");
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
assertEquals("test-user1-note1", interpreter1.getInterpreterGroup().getId());
// create interpreter for user1 and note2
Interpreter interpreter2 = interpreterSetting.getDefaultInterpreter("user1", "note2");
assertEquals(2, interpreterSetting.getAllInterpreterGroups().size());
assertEquals("test-user1-note2", interpreter2.getInterpreterGroup().getId());
// create interpreter for user2 and note1
Interpreter interpreter3 = interpreterSetting.getDefaultInterpreter("user2", "note1");
assertEquals("test-user2-note1", interpreter3.getInterpreterGroup().getId());
// create interpreter for user2 and note2
Interpreter interpreter4 = interpreterSetting.getDefaultInterpreter("user2", "note2");
assertEquals(4, interpreterSetting.getAllInterpreterGroups().size());
assertEquals("test-user2-note2", interpreter4.getInterpreterGroup().getId());
for (InterpreterGroup interpreterGroup : interpreterSetting.getAllInterpreterGroups()) {
// each InterpreterGroup has one session
assertEquals(1, interpreterGroup.getSessionNum());
}
// close one session for user1 and note1
interpreterSetting.closeInterpreters("user1", "note1");
assertEquals(3, interpreterSetting.getAllInterpreterGroups().size());
// close one session for user1 and note2
interpreterSetting.closeInterpreters("user1", "note2");
assertEquals(2, interpreterSetting.getAllInterpreterGroups().size());
// close one session for user2 and note1
interpreterSetting.closeInterpreters("user2", "note1");
assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
// close one session for user2 and note2
interpreterSetting.closeInterpreters("user2", "note2");
assertEquals(0, interpreterSetting.getAllInterpreterGroups().size());
}
  @Test
  public void testPerUserScopedPerNoteScopedMode() {
    // Scoped per user AND per note: all (user, note) pairs share ONE process
    // (one InterpreterGroup), but each pair gets its own session inside it.
    InterpreterOption interpreterOption = new InterpreterOption();
    interpreterOption.setPerUser(InterpreterOption.SCOPED);
    interpreterOption.setPerNote(InterpreterOption.SCOPED);
    InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(),
        "echo", true, new HashMap<String, Object>(), new HashMap<String, Object>());
    InterpreterInfo interpreterInfo2 = new InterpreterInfo(DoubleEchoInterpreter.class.getName(),
        "double_echo", false, new HashMap<String, Object>(), new HashMap<String, Object>());
    List<InterpreterInfo> interpreterInfos = new ArrayList<>();
    interpreterInfos.add(interpreterInfo1);
    interpreterInfos.add(interpreterInfo2);
    InterpreterSetting interpreterSetting = new InterpreterSetting.Builder()
        .setId("id")
        .setName("test")
        .setGroup("test")
        .setInterpreterInfos(interpreterInfos)
        .setOption(interpreterOption)
        .create();
    // create interpreter for user1 and note1; the single shared group appears
    Interpreter interpreter1 = interpreterSetting.getDefaultInterpreter("user1", "note1");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
    assertEquals("test-shared_process", interpreter1.getInterpreterGroup().getId());
    // create interpreter for user1 and note2: same group, one more session
    interpreterSetting.getDefaultInterpreter("user1", "note2");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
    assertEquals(2, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
    // create interpreter for user2 and note1: still the same shared group
    interpreterSetting.getDefaultInterpreter("user2", "note1");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
    assertEquals(3, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
    // create interpreter for user2 and note2
    interpreterSetting.getDefaultInterpreter("user2", "note2");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
    assertEquals(4, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
    // closing a (user, note) pair removes only its session from the shared group
    interpreterSetting.closeInterpreters("user1", "note1");
    assertEquals(3, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
    // close one session for user1 and note2
    interpreterSetting.closeInterpreters("user1", "note2");
    assertEquals(2, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
    // close one session for user2 and note1
    interpreterSetting.closeInterpreters("user2", "note1");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
    // closing the last session disposes the shared group itself
    interpreterSetting.closeInterpreters("user2", "note2");
    assertEquals(0, interpreterSetting.getAllInterpreterGroups().size());
  }
}
| |
/*******************************************************************************
* Copyright 2015 DANS - Data Archiving and Networked Services
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package nl.knaw.dans.dccd.rest;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringEscapeUtils;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import nl.knaw.dans.common.lang.search.SearchHit;
import nl.knaw.dans.common.lang.search.SearchResult;
import nl.knaw.dans.dccd.model.ProjectPermissionLevel;
import nl.knaw.dans.dccd.search.DccdSB;
import nl.knaw.dans.dccd.util.StringUtil;
import nl.knaw.dans.dccd.application.services.DataServiceException;
import nl.knaw.dans.dccd.application.services.DccdDataService;
import nl.knaw.dans.dccd.application.services.DccdUserService;
import nl.knaw.dans.dccd.application.services.UserServiceException;
import nl.knaw.dans.dccd.model.DccdUser;
import nl.knaw.dans.dccd.model.Project;
/**
*
* @author paulboon
*
*/
public abstract class AbstractProjectResource extends AbstractResource {
    /**
     * Query arguments (URL query parameter names accepted by the search resources).
     */
    public static final String PLAIN_TEXT_QUERY_PARAM = "q";
    public static final String CATEGORY_QUERY_PARAM = "category";
    public static final String LABNAME_QUERY_PARAM = "labname";
    public static final String OBJECT_TYPE_QUERY_PARAM = "object.type";
    public static final String OBJECT_CREATOR_QUERY_PARAM = "object.creator";
    public static final String OBJECT_TITLE = "object.title";
    public static final String OBJECT_ID = "object.id";
    public static final String ELEMENT_TAXON_QUERY_PARAM = "element.taxon";
    public static final String ELEMENT_TYPE_QUERY_PARAM = "element.type";
    public static final String ELEMENT_ID = "element.id";
    public static final String DEATH_YEAR_FROM_QUERY_PARAM = "deathYearFrom";
    public static final String DEATH_YEAR_TO_QUERY_PARAM = "deathYearTo";
    public static final String FIRST_YEAR_FROM_QUERY_PARAM = "firstYearFrom";
    public static final String FIRST_YEAR_TO_QUERY_PARAM = "firstYearTo";
    public static final String LAST_YEAR_FROM_QUERY_PARAM = "lastYearFrom";
    public static final String LAST_YEAR_TO_QUERY_PARAM = "lastYearTo";
    public static final String PITH_YEAR_FROM_QUERY_PARAM = "pithYearFrom";
    public static final String PITH_YEAR_TO_QUERY_PARAM = "pithYearTo";
    public static final String PROJECT_ORGANISATION_ID = "project.organisation.id";
    public static final String PROJECT_TITLE = "project.title";
    public static final String PROJECT_ID = "project.id";

    /**
     * Construct search result list information as XML String.
     *
     * @param searchResults
     *        The results
     * @param offset
     *        Number of results to skip
     * @param limit
     *        Number of results in this list
     * @param requestingUser
     *        User on whose behalf the request is made; passed through to
     *        {@link #appendSearchResultDataAsXml} so subclasses can filter per permission
     * @return The XML String
     */
    protected String getProjectListSearchResultAsXml(
            SearchResult<? extends DccdSB> searchResults, int offset, int limit, DccdUser requestingUser) {
        StringWriter sw = new StringWriter();
        // XML instruction
        sw.append("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>");
        sw.append("<projects" + " total=\"" + searchResults.getTotalHits()
                + "\"" + " offset=\"" + offset + "\"" + " limit=\"" + limit
                + "\"" + ">");
        for (SearchHit<? extends DccdSB> hit : searchResults.getHits()) {
            sw.append("<project>");
            appendSearchResultDataAsXml(sw, hit.getData(), requestingUser);
            sw.append("</project>");
        }
        sw.append("</projects>");
        return sw.toString();
    }

    /**
     * Append search result information as XML String.
     *
     * @param sw
     *        writer to append to
     * @param dccdSB
     *        search result
     * @param requestingUser
     *        user on whose behalf the request is made
     */
    protected abstract void appendSearchResultDataAsXml(
            StringWriter sw, DccdSB dccdSB, DccdUser requestingUser);

    // TODO strings need to be escaped for xml, maybe use a lib for constructing xml

    /**
     * Append information anyone is allowed to see.
     * The most important project data but not identical to TRiDaS!
     *
     * @param sw
     *        writer to append to
     * @param dccdSB
     *        search result
     */
    protected void appendProjectPublicDataAsXml(StringWriter sw,
            DccdSB dccdSB) {
        // Note the Fedora pid is our sid, but sometimes called pid anyway;
        // confusing I know
        sw.append(getXMLElementString("sid", dccdSB.getPid()));
        // modified timestamp: convert to UTC and format as ISO
        // NOTE(review): assumes getAdministrativeStateLastChange() is never null here — confirm
        DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
        DateTime dUtc = dccdSB.getAdministrativeStateLastChange().toDateTime(DateTimeZone.UTC);
        sw.append(getXMLElementString("stateChanged", fmt.print(dUtc)));
        // At first only the title was added, so a client can show something in a
        // user interface, but now we put in (almost) everything from the search results.
        sw.append(getXMLElementString("title", dccdSB.getTridasProjectTitle()));
        sw.append(getXMLElementString("identifier", dccdSB.getTridasProjectIdentifier()));
        // category, but not std, normal etc.
        sw.append(getXMLElementString("category", dccdSB.getTridasProjectCategory()));
        sw.append(getXMLElementString("investigator", dccdSB.getTridasProjectInvestigator()));
        // lab(s) (combined name, address, but not concatenated...)
        sw.append("<laboratories>");
        for (String lab : dccdSB.getTridasProjectLaboratoryCombined()) {
            sw.append(getXMLElementString("laboratory", lab));
        }
        sw.append("</laboratories>");
        sw.append("<types>");
        for (String type : dccdSB.getTridasProjectType()) {
            sw.append(getXMLElementString("type", type));
        }
        sw.append("</types>");
        // Note that this goes to another service and is a Performance Penalty
        sw.append(getXMLElementString("ownerOrganizationId", getOwnerOrganizationId(dccdSB)));
        // And this one goes to the data archive... a penalty...
        sw.append(getXMLElementString("language", getProjectlanguage(dccdSB)));
    }

    /**
     * Append location XML, but only when allowed.
     *
     * @param sw
     *        writer to append to
     * @param dccdSB
     *        search result
     */
    protected void appendProjectPublicLocationAsXml(StringWriter sw,
            DccdSB dccdSB) {
        // NOTE also give location if object level is open to everyone (even
        // when not logged in)!
        ProjectPermissionLevel effectiveLevel = ProjectPermissionLevel
                .valueOf(dccdSB.getPermissionDefaultLevel());
        boolean isAllowedToViewLocation = ProjectPermissionLevel.OBJECT
                .isPermittedBy(effectiveLevel);
        if (isAllowedToViewLocation) {
            appendProjectLocationAsXml(sw, dccdSB);
        }
    }

    /**
     * Maps a permission level to the (lowercase) entity-level name used in the XML output.
     */
    public static final Map<ProjectPermissionLevel, String> MAP_PERMISSION_TO_ENTITYLEVEL;
    static {
        // Built in a static initializer instead of the former anonymous
        // "double-brace" subclass (which carries a serial warning and an extra class).
        Map<ProjectPermissionLevel, String> levels = new HashMap<ProjectPermissionLevel, String>();
        levels.put(ProjectPermissionLevel.MINIMAL, "minimal");
        levels.put(ProjectPermissionLevel.PROJECT, "project");
        levels.put(ProjectPermissionLevel.OBJECT, "object");
        levels.put(ProjectPermissionLevel.ELEMENT, "element");
        levels.put(ProjectPermissionLevel.SAMPLE, "sample");
        levels.put(ProjectPermissionLevel.RADIUS, "radius");
        levels.put(ProjectPermissionLevel.SERIES, "series");
        levels.put(ProjectPermissionLevel.VALUES, "values");
        MAP_PERMISSION_TO_ENTITYLEVEL = Collections.unmodifiableMap(levels);
    }

    /**
     * Append the permission related information of the project.
     *
     * @param sw
     *        writer to append to
     * @param dccdSB
     *        search result
     */
    protected void appendProjectPermissionAsXml(StringWriter sw,
            DccdSB dccdSB) {
        // only the default level
        sw.append("<permission>");
        sw.append(getXMLElementString("defaultLevel",
                MAP_PERMISSION_TO_ENTITYLEVEL.get(ProjectPermissionLevel.valueOf(dccdSB.getPermissionDefaultLevel()))));
        sw.append("</permission>");
    }

    /**
     * Append location XML.
     *
     * @param sw
     *        writer to append to
     * @param dccdSB
     *        search result
     */
    protected void appendProjectLocationAsXml(StringWriter sw,
            DccdSB dccdSB) {
        if (dccdSB.hasLatLng()) {
            // just append it, no WGS84 or EPSG indications, it's implicit
            sw.append("<location>");
            sw.append(getXMLElementString("lat", dccdSB.getLat().toString()));
            sw.append(getXMLElementString("lng", dccdSB.getLng().toString()));
            sw.append("</location>");
        }
    }

    /**
     * Append Taxon's, but only when allowed.
     *
     * @param sw
     *        writer to append to
     * @param dccdSB
     *        search result
     */
    protected void appendProjectPublicTaxonsAsXml(StringWriter sw,
            DccdSB dccdSB) {
        ProjectPermissionLevel effectiveLevel = ProjectPermissionLevel
                .valueOf(dccdSB.getPermissionDefaultLevel());
        boolean isAllowedToViewTaxon = ProjectPermissionLevel.ELEMENT
                .isPermittedBy(effectiveLevel);
        if (isAllowedToViewTaxon) {
            appendProjectTaxonsAsXml(sw, dccdSB);
        }
    }

    /**
     * Append the (de-duplicated) element taxons.
     *
     * @param sw
     *        writer to append to
     * @param dccdSB
     *        search result
     */
    protected void appendProjectTaxonsAsXml(StringWriter sw,
            DccdSB dccdSB) {
        if (dccdSB.hasTridasElementTaxon()) {
            // avoid duplicates
            List<String> taxons = StringUtil.getUniqueStrings(dccdSB.getTridasElementTaxon());
            sw.append("<taxons>");
            for (String taxon : taxons) {
                sw.append(getXMLElementString("taxon", taxon));
            }
            sw.append("</taxons>");
        }
    }

    /**
     * Append Object and elements Types, but only when allowed.
     *
     * @param sw
     *        writer to append to
     * @param dccdSB
     *        search result
     */
    protected void appendProjectPublicTypesAsXml(StringWriter sw,
            DccdSB dccdSB) {
        ProjectPermissionLevel effectiveLevel = ProjectPermissionLevel
                .valueOf(dccdSB.getPermissionDefaultLevel());
        boolean isAllowedToViewType = ProjectPermissionLevel.ELEMENT
                .isPermittedBy(effectiveLevel);
        if (isAllowedToViewType) {
            appendProjectElementTypesAsXml(sw, dccdSB);
            appendProjectObjectTypesAsXml(sw, dccdSB);
        } else {
            // maybe only object types
            boolean isAllowedToViewObjectType = ProjectPermissionLevel.OBJECT
                    .isPermittedBy(effectiveLevel);
            if (isAllowedToViewObjectType) {
                appendProjectObjectTypesAsXml(sw, dccdSB);
            }
        }
    }

    /**
     * Append the (de-duplicated) element types.
     *
     * @param sw
     *        writer to append to
     * @param dccdSB
     *        search result
     */
    protected void appendProjectElementTypesAsXml(StringWriter sw,
            DccdSB dccdSB) {
        if (dccdSB.hasTridasElementType()) {
            // avoid duplicates
            List<String> types = StringUtil.getUniqueStrings(dccdSB.getTridasElementType());
            sw.append("<elementTypes>");
            for (String type : types) {
                sw.append(getXMLElementString("elementType", type));
            }
            sw.append("</elementTypes>");
        }
        // Note: what to do with normal and normalId ?
    }

    /**
     * Append the (de-duplicated) object types.
     *
     * @param sw
     *        writer to append to
     * @param dccdSB
     *        search result
     */
    protected void appendProjectObjectTypesAsXml(StringWriter sw,
            DccdSB dccdSB) {
        if (dccdSB.hasTridasObjectType()) {
            // avoid duplicates
            List<String> types = StringUtil.getUniqueStrings(dccdSB.getTridasObjectType());
            sw.append("<objectTypes>");
            for (String type : types) {
                sw.append(getXMLElementString("objectType", type));
            }
            sw.append("</objectTypes>");
        }
        // Note: what to do with normal and normalId ?
    }

    /**
     * Append project description XML, but only when allowed.
     *
     * @param sw
     *        writer to append to
     * @param dccdSB
     *        search result
     */
    protected void appendProjectPublicDescriptionAsXml(StringWriter sw,
            DccdSB dccdSB) {
        // NOTE also give description if project level is open to everyone (even
        // when not logged in)!
        // Also note that we don't do object descriptions...
        ProjectPermissionLevel effectiveLevel = ProjectPermissionLevel
                .valueOf(dccdSB.getPermissionDefaultLevel());
        boolean isAllowedToViewDescription = ProjectPermissionLevel.PROJECT
                .isPermittedBy(effectiveLevel);
        if (isAllowedToViewDescription) {
            appendProjectDescriptionAsXml(sw, dccdSB);
        }
    }

    /**
     * Append the project description, if any.
     *
     * @param sw
     *        writer to append to
     * @param dccdSB
     *        search result
     */
    protected void appendProjectDescriptionAsXml(StringWriter sw,
            DccdSB dccdSB) {
        if (dccdSB.hasTridasProjectDescription()) {
            sw.append(getXMLElementString("description", dccdSB.getTridasProjectDescription()));
        }
    }

    /**
     * Append time range, but only when allowed.
     *
     * @param sw
     *        writer to append to
     * @param dccdSB
     *        search result
     */
    protected void appendProjectPublicTimeRangeAsXml(StringWriter sw,
            DccdSB dccdSB) {
        ProjectPermissionLevel effectiveLevel = ProjectPermissionLevel
                .valueOf(dccdSB.getPermissionDefaultLevel());
        boolean isAllowedToViewTimeRange = ProjectPermissionLevel.SERIES
                .isPermittedBy(effectiveLevel);
        if (isAllowedToViewTimeRange) {
            appendProjectTimeRangeAsXml(sw, dccdSB);
        }
    }

    /**
     * Copy all non-null years from {@code source} into {@code target}.
     * Helper that replaces four copy-pasted loops in
     * {@link #appendProjectTimeRangeAsXml}; a {@code null} source is treated as empty.
     *
     * @param target
     *        list collecting the years
     * @param source
     *        years from a search result field, may be null or contain nulls
     */
    private static void addNonNullYears(List<Integer> target, List<Integer> source) {
        if (source != null) {
            for (Integer year : source) {
                if (year != null) {
                    target.add(year);
                }
            }
        }
    }

    /**
     * TimeRange (or Temporal Coverage): the [min, max] over all pith, first,
     * last and death years found in the measurement series. Nothing is
     * appended when no year is available.
     *
     * @param sw
     *        writer to append to
     * @param dccdSB
     *        search result
     */
    protected void appendProjectTimeRangeAsXml(StringWriter sw,
            DccdSB dccdSB) {
        // concat all the lists, but only non null elements
        List<Integer> years = new ArrayList<Integer>();
        addNonNullYears(years, dccdSB.getTridasMeasurementseriesInterpretationPithyear());
        addNonNullYears(years, dccdSB.getTridasMeasurementseriesInterpretationFirstyear());
        addNonNullYears(years, dccdSB.getTridasMeasurementseriesInterpretationLastyear());
        addNonNullYears(years, dccdSB.getTridasMeasurementseriesInterpretationDeathyear());
        if (!years.isEmpty()) {
            // we have at least one year (and it is not null)
            Integer min = Collections.min(years);
            Integer max = Collections.max(years);
            sw.append("<timeRange>");
            sw.append(getXMLElementString("firstYear", min.toString()));
            sw.append(getXMLElementString("lastYear", max.toString()));
            sw.append("</timeRange>");
        }
    }

    /**
     * Build a single XML element with the value XML-escaped.
     *
     * @param name
     *        element name; NOTE the name itself is NOT escaped
     * @param value
     *        element content, escaped via commons-lang
     * @return {@code <name>escaped-value</name>}
     */
    public static String getXMLElementString(final String name, final String value) {
        // NOTE the name is not escaped
        return "<" + name + ">" + StringEscapeUtils.escapeXml(value) + "</" + name + ">";
    }

    /**
     * Get the user information from the user service to obtain the organisation id.
     *
     * TODO have this id indexed in the dccdSB !
     *
     * @param dccdSB
     *        search result holding the owner id
     * @return the organisation id, or the empty string when the user cannot be
     *         resolved (best-effort; the XML output must not fail on this)
     */
    String getOwnerOrganizationId(DccdSB dccdSB) {
        String id = "";
        try {
            DccdUser user = DccdUserService.getService().getUserById(dccdSB.getOwnerId());
            // guard against an unknown owner id; previously this would NPE
            if (user != null) {
                id = user.getOrganization();
            }
        } catch (UserServiceException e) {
            // best-effort: log and fall back to the empty string
            e.printStackTrace();
        }
        return id;
    }

    /**
     * Fetch the project's TRiDaS language from the data archive.
     * NOTE(review): name keeps the historical lowercase 'l' for source compatibility.
     *
     * @param dccdSB
     *        search result holding the project sid
     * @return the language code, or the empty string when the project cannot be
     *         retrieved (best-effort; the XML output must not fail on this)
     */
    String getProjectlanguage(DccdSB dccdSB) {
        String lang = "";
        // Aye, get the project ....
        try {
            Project p = DccdDataService.getService().getProject(dccdSB.getPid());
            lang = p.getTridasLanguage().getLanguage();
        } catch (DataServiceException e) {
            // best-effort: log and fall back to the empty string
            e.printStackTrace();
        }
        return lang;
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.psi.types;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNamedElement;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyBuiltinCache;
import com.jetbrains.python.psi.impl.PyCallExpressionHelper;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import com.jetbrains.python.psi.resolve.RatedResolveResult;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* @author vlan
*/
public class PyTypeChecker {
  // Static utility class; private constructor prevents instantiation.
  private PyTypeChecker() {
  }
  /**
   * Convenience overload of {@link #match(PyType, PyType, TypeEvalContext, Map)}
   * without collecting generic substitutions.
   *
   * @param expected expected type
   * @param actual   type to be matched against expected
   * @param context  type evaluation context
   * @return true if {@code actual} can be used where {@code expected} is expected
   */
  public static boolean match(@Nullable PyType expected, @Nullable PyType actual, @NotNull TypeEvalContext context) {
    return match(expected, actual, context, null, true);
  }
  /**
   * Checks whether a type {@code actual} can be placed where {@code expected} is expected.
   * For example int matches object, while str doesn't match int.
   * Works for builtin types, classes, tuples etc.
   *
   * @param expected      expected type
   * @param actual        type to be matched against expected
   * @param context       type evaluation context
   * @param substitutions mutable map collecting resolved generic (TypeVar) substitutions;
   *                      pass {@code null} to skip unification of generics
   * @return true if {@code actual} matches {@code expected}
   */
  public static boolean match(@Nullable PyType expected, @Nullable PyType actual, @NotNull TypeEvalContext context,
                              @Nullable Map<PyGenericType, PyType> substitutions) {
    return match(expected, actual, context, substitutions, true);
  }
  /**
   * Core matching routine. Dispatches on the concrete runtime types of
   * {@code expected} and {@code actual}; the order of the branches below is
   * significant (e.g. unions are unpacked before class-vs-class matching).
   *
   * @param recursive whether an already-substituted generic may be matched one
   *                  level deeper (prevents infinite recursion on self-referential generics)
   */
  private static boolean match(@Nullable PyType expected, @Nullable PyType actual, @NotNull TypeEvalContext context,
                               @Nullable Map<PyGenericType, PyType> substitutions, boolean recursive) {
    // TODO: subscriptable types?, module types?, etc.
    // Generic (TypeVar) on the expected side: unify it with `actual`,
    // recording or checking the substitution.
    if (expected instanceof PyGenericType && substitutions != null) {
      final PyGenericType generic = (PyGenericType)expected;
      final PyType subst = substitutions.get(generic);
      final PyType bound = generic.getBound();
      // the actual type must respect the TypeVar's upper bound, if any
      if (!match(bound, actual, context, substitutions, recursive)) {
        return false;
      }
      else if (subst != null) {
        // already substituted: accept an exact match, or re-check one level
        // deep against the substitution (non-recursively, to avoid cycles)
        if (expected.equals(actual)) {
          return true;
        }
        else if (recursive) {
          return match(subst, actual, context, substitutions, false);
        }
        else {
          return false;
        }
      }
      else if (actual != null) {
        substitutions.put(generic, actual);
      }
      else if (bound != null) {
        substitutions.put(generic, bound);
      }
      return true;
    }
    // An unknown side matches anything.
    if (expected == null || actual == null) {
      return true;
    }
    // Everything matches `object`.
    if (expected instanceof PyClassType) {
      final PyClass c = ((PyClassType)expected).getPyClass();
      if ("object".equals(c.getName())) {
        return true;
      }
    }
    if (isUnknown(actual)) {
      return true;
    }
    // Union on the actual side: at least one member must match
    // (with a special case for fixed-size tuple unions vs an expected tuple).
    if (actual instanceof PyUnionType) {
      final PyUnionType actualUnionType = (PyUnionType)actual;
      if (expected instanceof PyTupleType) {
        final PyTupleType expectedTupleType = (PyTupleType)expected;
        final int elementCount = expectedTupleType.getElementCount();
        if (!expectedTupleType.isHomogeneous() && consistsOfSameElementNumberTuples(actualUnionType, elementCount)) {
          return substituteExpectedElementsWithUnions(expectedTupleType, elementCount, actualUnionType, context, substitutions, recursive);
        }
      }
      for (PyType m : actualUnionType.getMembers()) {
        if (match(expected, m, context, substitutions, recursive)) {
          return true;
        }
      }
      return false;
    }
    // Union on the expected side: any member may accept the actual type.
    if (expected instanceof PyUnionType) {
      for (PyType t : ((PyUnionType)expected).getMembers()) {
        if (match(t, actual, context, substitutions, recursive)) {
          return true;
        }
      }
      return false;
    }
    // Class vs class, including tuple and generic-collection refinements.
    if (expected instanceof PyClassType && actual instanceof PyClassType) {
      final PyClass superClass = ((PyClassType)expected).getPyClass();
      final PyClass subClass = ((PyClassType)actual).getPyClass();
      if (expected instanceof PyTupleType && actual instanceof PyTupleType) {
        final PyTupleType superTupleType = (PyTupleType)expected;
        final PyTupleType subTupleType = (PyTupleType)actual;
        if (!superTupleType.isHomogeneous() && !subTupleType.isHomogeneous()) {
          // fixed vs fixed: same arity and element-wise match
          if (superTupleType.getElementCount() != subTupleType.getElementCount()) {
            return false;
          }
          else {
            for (int i = 0; i < superTupleType.getElementCount(); i++) {
              if (!match(superTupleType.getElementType(i), subTupleType.getElementType(i), context, substitutions, recursive)) {
                return false;
              }
            }
            return true;
          }
        }
        else if (superTupleType.isHomogeneous() && !subTupleType.isHomogeneous()) {
          // homogeneous expected: every actual element must match its item type
          final PyType expectedElementType = superTupleType.getIteratedItemType();
          for (int i = 0; i < subTupleType.getElementCount(); i++) {
            if (!match(expectedElementType, subTupleType.getElementType(i), context)) {
              return false;
            }
          }
          return true;
        }
        else if (!superTupleType.isHomogeneous() && subTupleType.isHomogeneous()) {
          // a homogeneous tuple cannot satisfy a fixed-arity expectation
          return false;
        }
        else {
          return match(superTupleType.getIteratedItemType(), subTupleType.getIteratedItemType(), context);
        }
      }
      else if (expected instanceof PyCollectionType && actual instanceof PyTupleType) {
        if (!matchClasses(superClass, subClass, context)) {
          return false;
        }
        final PyType superElementType = ((PyCollectionType)expected).getIteratedItemType();
        final PyType subElementType = ((PyTupleType)actual).getIteratedItemType();
        if (!match(superElementType, subElementType, context, substitutions, recursive)) {
          return false;
        }
        return true;
      }
      else if (expected instanceof PyCollectionType && actual instanceof PyCollectionType) {
        if (!matchClasses(superClass, subClass, context)) {
          return false;
        }
        // TODO: Match generic parameters based on the correspondence between the generic parameters of subClass and its base classes
        final List<PyType> superElementTypes = ((PyCollectionType)expected).getElementTypes(context);
        final List<PyType> subElementTypes = ((PyCollectionType)actual).getElementTypes(context);
        for (int i = 0; i < subElementTypes.size(); i++) {
          // a missing expected element type is treated as unknown (matches anything)
          final PyType superElementType = i < superElementTypes.size() ? superElementTypes.get(i) : null;
          if (!match(superElementType, subElementTypes.get(i), context, substitutions, recursive)) {
            return false;
          }
        }
        return true;
      }
      else if (matchClasses(superClass, subClass, context)) {
        return true;
      }
      else if (((PyClassType)actual).isDefinition() && PyNames.CALLABLE.equals(expected.getName())) {
        // a class object is callable (its constructor)
        return true;
      }
      if (expected.equals(actual)) {
        return true;
      }
    }
    // A function matches an expected Callable class.
    if (actual instanceof PyFunctionTypeImpl && expected instanceof PyClassType) {
      final PyClass superClass = ((PyClassType)expected).getPyClass();
      if (PyNames.CALLABLE.equals(superClass.getName())) {
        return true;
      }
    }
    // Structural types inferred purely from usages are too weak to reject anything.
    if (actual instanceof PyStructuralType && ((PyStructuralType)actual).isInferredFromUsages()) {
      return true;
    }
    // Structural vs structural: attribute-set containment.
    if (expected instanceof PyStructuralType && actual instanceof PyStructuralType) {
      final PyStructuralType expectedStructural = (PyStructuralType)expected;
      final PyStructuralType actualStructural = (PyStructuralType)actual;
      if (expectedStructural.isInferredFromUsages()) {
        return true;
      }
      return expectedStructural.getAttributeNames().containsAll(actualStructural.getAttributeNames());
    }
    // Structural expected vs a concrete class: the class must expose all attributes
    // (or override __getattr__, which makes any attribute access legal).
    if (expected instanceof PyStructuralType && actual instanceof PyClassType) {
      final PyClassType actualClassType = (PyClassType)actual;
      if (overridesGetAttr(actualClassType.getPyClass(), context)) {
        return true;
      }
      final Set<String> actualAttributes = actualClassType.getMemberNames(true, context);
      return actualAttributes.containsAll(((PyStructuralType)expected).getAttributeNames());
    }
    if (actual instanceof PyStructuralType && expected instanceof PyClassType) {
      final Set<String> expectedAttributes = ((PyClassType)expected).getMemberNames(true, context);
      return expectedAttributes.containsAll(((PyStructuralType)actual).getAttributeNames());
    }
    // Callable vs callable: positional parameters and return type must match.
    if (actual instanceof PyCallableType && expected instanceof PyCallableType) {
      final PyCallableType expectedCallable = (PyCallableType)expected;
      final PyCallableType actualCallable = (PyCallableType)actual;
      if (expectedCallable.isCallable() && actualCallable.isCallable()) {
        final List<PyCallableParameter> expectedParameters = expectedCallable.getParameters(context);
        final List<PyCallableParameter> actualParameters = actualCallable.getParameters(context);
        if (expectedParameters != null && actualParameters != null) {
          final int size = Math.min(expectedParameters.size(), actualParameters.size());
          for (int i = 0; i < size; i++) {
            final PyCallableParameter expectedParam = expectedParameters.get(i);
            final PyCallableParameter actualParam = actualParameters.get(i);
            // TODO: Check named and star params, not only positional ones
            if (!match(expectedParam.getType(context), actualParam.getType(context), context, substitutions, recursive)) {
              return false;
            }
          }
        }
        if (!match(expectedCallable.getReturnType(context), actualCallable.getReturnType(context), context, substitutions, recursive)) {
          return false;
        }
        return true;
      }
    }
    // Fall back to the built-in numeric tower (bool < int < long < float < complex).
    return matchNumericTypes(expected, actual);
  }
private static boolean consistsOfSameElementNumberTuples(@NotNull PyUnionType unionType, int elementCount) {
for (PyType type : unionType.getMembers()) {
if (type instanceof PyTupleType) {
final PyTupleType tupleType = (PyTupleType)type;
if (!tupleType.isHomogeneous() && elementCount != tupleType.getElementCount()) {
return false;
}
}
else {
return false;
}
}
return true;
}
private static boolean substituteExpectedElementsWithUnions(@NotNull PyTupleType expected,
int elementCount,
@NotNull PyUnionType actual,
@NotNull TypeEvalContext context,
@Nullable Map<PyGenericType, PyType> substitutions,
boolean recursive) {
for (int i = 0; i < elementCount; i++) {
final int currentIndex = i;
final PyType elementType = PyUnionType.union(
StreamEx
.of(actual.getMembers())
.select(PyTupleType.class)
.map(type -> type.getElementType(currentIndex))
.toList()
);
if (!match(expected.getElementType(i), elementType, context, substitutions, recursive)) {
return false;
}
}
return true;
}
private static boolean matchNumericTypes(PyType expected, PyType actual) {
final String superName = expected.getName();
final String subName = actual.getName();
final boolean subIsBool = "bool".equals(subName);
final boolean subIsInt = "int".equals(subName);
final boolean subIsLong = "long".equals(subName);
final boolean subIsFloat = "float".equals(subName);
final boolean subIsComplex = "complex".equals(subName);
if (superName == null || subName == null ||
superName.equals(subName) ||
("int".equals(superName) && subIsBool) ||
(("long".equals(superName) || PyNames.ABC_INTEGRAL.equals(superName)) && (subIsBool || subIsInt)) ||
(("float".equals(superName) || PyNames.ABC_REAL.equals(superName)) && (subIsBool || subIsInt || subIsLong)) ||
(("complex".equals(superName) || PyNames.ABC_COMPLEX.equals(superName)) && (subIsBool || subIsInt || subIsLong || subIsFloat)) ||
(PyNames.ABC_NUMBER.equals(superName) && (subIsBool || subIsInt || subIsLong || subIsFloat || subIsComplex))) {
return true;
}
return false;
}
public static boolean isUnknown(@Nullable PyType type) {
if (type == null || type instanceof PyGenericType) {
return true;
}
if (type instanceof PyUnionType) {
final PyUnionType union = (PyUnionType)type;
for (PyType t : union.getMembers()) {
if (isUnknown(t)) {
return true;
}
}
}
return false;
}
@Nullable
public static PyType toNonWeakType(@Nullable PyType type, @NotNull TypeEvalContext context) {
if (type instanceof PyUnionType) {
final PyUnionType unionType = (PyUnionType)type;
if (unionType.isWeak()) {
return unionType.excludeNull(context);
}
}
return type;
}
public static boolean hasGenerics(@Nullable PyType type, @NotNull TypeEvalContext context) {
final Set<PyGenericType> collected = new HashSet<>();
collectGenerics(type, context, collected, new HashSet<>());
return !collected.isEmpty();
}
  /**
   * Recursively gathers every generic (TypeVar) reachable from {@code type}
   * into {@code collected}. {@code visited} guards against cycles in
   * self-referential types (a null {@code type} is also tracked there and
   * simply falls through all branches).
   */
  private static void collectGenerics(@Nullable PyType type, @NotNull TypeEvalContext context, @NotNull Set<PyGenericType> collected,
                                      @NotNull Set<PyType> visited) {
    if (visited.contains(type)) {
      return;
    }
    visited.add(type);
    if (type instanceof PyGenericType) {
      collected.add((PyGenericType)type);
    }
    else if (type instanceof PyUnionType) {
      final PyUnionType union = (PyUnionType)type;
      for (PyType t : union.getMembers()) {
        collectGenerics(t, context, collected, visited);
      }
    }
    else if (type instanceof PyTupleType) {
      final PyTupleType tuple = (PyTupleType)type;
      // a homogeneous tuple has a single repeated element type
      final int n = tuple.isHomogeneous() ? 1 : tuple.getElementCount();
      for (int i = 0; i < n; i++) {
        collectGenerics(tuple.getElementType(i), context, collected, visited);
      }
    }
    else if (type instanceof PyCollectionType) {
      final PyCollectionType collection = (PyCollectionType)type;
      for (PyType elementType : collection.getElementTypes(context)) {
        collectGenerics(elementType, context, collected, visited);
      }
    }
    else if (type instanceof PyCallableType) {
      final PyCallableType callable = (PyCallableType)type;
      // descend into parameter types and the return type
      final List<PyCallableParameter> parameters = callable.getParameters(context);
      if (parameters != null) {
        for (PyCallableParameter parameter : parameters) {
          if (parameter != null) {
            collectGenerics(parameter.getType(context), context, collected, visited);
          }
        }
      }
      collectGenerics(callable.getReturnType(context), context, collected, visited);
    }
  }
  /**
   * Replaces generic type parameters inside {@code type} with their bindings from
   * {@code substitutions}, rebuilding composite types (unions, collections, tuples, callables)
   * from their substituted components.  A type that contains no generics is returned as-is.
   * May return null when a bare type variable has no binding in the map.
   */
  @Nullable
  public static PyType substitute(@Nullable PyType type, @NotNull Map<PyGenericType, PyType> substitutions,
                                  @NotNull TypeEvalContext context) {
    if (hasGenerics(type, context)) {
      if (type instanceof PyGenericType) {
        // Bare type variable: return its binding (null when unbound).
        return substitutions.get((PyGenericType)type);
      }
      else if (type instanceof PyUnionType) {
        final PyUnionType union = (PyUnionType)type;
        final List<PyType> results = new ArrayList<>();
        for (PyType t : union.getMembers()) {
          final PyType subst = substitute(t, substitutions, context);
          results.add(subst);
        }
        return PyUnionType.union(results);
      }
      else if (type instanceof PyCollectionTypeImpl) {
        final PyCollectionTypeImpl collection = (PyCollectionTypeImpl)type;
        final List<PyType> elementTypes = collection.getElementTypes(context);
        final List<PyType> substitutes = new ArrayList<>();
        for (PyType elementType : elementTypes) {
          substitutes.add(substitute(elementType, substitutions, context));
        }
        return new PyCollectionTypeImpl(collection.getPyClass(), collection.isDefinition(), substitutes);
      }
      else if (type instanceof PyTupleType) {
        final PyTupleType tupleType = (PyTupleType)type;
        final PyClass tupleClass = tupleType.getPyClass();
        // Homogeneous tuples carry one iterated item type instead of per-index element types.
        final List<PyType> oldElementTypes = tupleType.isHomogeneous()
                                             ? Collections.singletonList(tupleType.getIteratedItemType())
                                             : tupleType.getElementTypes(context);
        final List<PyType> newElementTypes =
          ContainerUtil.map(oldElementTypes, elementType -> substitute(elementType, substitutions, context));
        return new PyTupleType(tupleClass, newElementTypes, tupleType.isHomogeneous());
      }
      else if (type instanceof PyCallableType) {
        final PyCallableType callable = (PyCallableType)type;
        List<PyCallableParameter> substParams = null;
        final List<PyCallableParameter> parameters = callable.getParameters(context);
        if (parameters != null) {
          substParams = new ArrayList<>();
          for (PyCallableParameter parameter : parameters) {
            final PyType substType = substitute(parameter.getType(context), substitutions, context);
            // Keep the original PSI-backed parameter when present; otherwise rebuild by name.
            final PyCallableParameter subst = parameter.getParameter() != null ?
                                              new PyCallableParameterImpl(parameter.getParameter()) :
                                              new PyCallableParameterImpl(parameter.getName(), substType);
            substParams.add(subst);
          }
        }
        final PyType substResult = substitute(callable.getReturnType(context), substitutions, context);
        return new PyCallableTypeImpl(substParams, substResult);
      }
    }
    return type;
  }
  /**
   * Unifies the generic type parameters of a call: seeds substitutions from the receiver
   * (see {@link #unifyReceiver}), then matches each argument type against its mapped
   * parameter type.  Container parameters (*args / **kwargs) are skipped.
   *
   * @return the substitution map, or null when some argument fails to match its parameter
   */
  @Nullable
  public static Map<PyGenericType, PyType> unifyGenericCall(@Nullable PyExpression receiver,
                                                            @NotNull Map<PyExpression, PyNamedParameter> arguments,
                                                            @NotNull TypeEvalContext context) {
    final Map<PyGenericType, PyType> substitutions = unifyReceiver(receiver, context);
    for (Map.Entry<PyExpression, PyNamedParameter> entry : arguments.entrySet()) {
      final PyNamedParameter p = entry.getValue();
      if (p.isPositionalContainer() || p.isKeywordContainer()) {
        continue;
      }
      final PyType argType = context.getType(entry.getKey());
      final PyType paramType = context.getType(p);
      if (!match(paramType, argType, context, substitutions)) {
        return null;
      }
    }
    return substitutions;
  }
  /**
   * Builds the initial substitution map for a call's receiver: every generic parameter of the
   * receiver's type is first mapped to itself, then refined by matching the return type of the
   * receiver's {@code __init__} against the receiver type.  Returns an empty map when there is
   * no receiver.
   */
  @NotNull
  public static Map<PyGenericType, PyType> unifyReceiver(@Nullable PyExpression receiver, @NotNull TypeEvalContext context) {
    final Map<PyGenericType, PyType> substitutions = new LinkedHashMap<>();
    // Collect generic params of object type
    final Set<PyGenericType> generics = new LinkedHashSet<>();
    final PyType qualifierType = receiver != null ? context.getType(receiver) : null;
    collectGenerics(qualifierType, context, generics, new HashSet<>());
    // Seed with identity bindings so every known generic has an entry.
    for (PyGenericType t : generics) {
      substitutions.put(t, t);
    }
    // Unify generics in constructor
    if (qualifierType != null) {
      final PyResolveContext resolveContext = PyResolveContext.noImplicits().withTypeEvalContext(context);
      // TODO: Resolve to __new__ as well
      final List<? extends RatedResolveResult> results = qualifierType.resolveMember(PyNames.INIT, null, AccessDirection.READ,
                                                                                    resolveContext);
      if (results != null && !results.isEmpty()) {
        final PsiElement init = results.get(0).getElement();
        if (init instanceof PyTypedElement) {
          final PyType initType = context.getType((PyTypedElement)init);
          if (initType instanceof PyCallableType) {
            final PyType initReturnType = ((PyCallableType)initType).getReturnType(context);
            if (initReturnType != null) {
              // Refine the identity bindings in place; the match result itself is not needed here.
              match(initReturnType, qualifierType, context, substitutions);
            }
          }
        }
      }
    }
    return substitutions;
  }
private static boolean matchClasses(@Nullable PyClass superClass, @Nullable PyClass subClass, @NotNull TypeEvalContext context) {
if (superClass == null ||
subClass == null ||
subClass.isSubclass(superClass, context) ||
PyABCUtil.isSubclass(subClass, superClass, context) ||
PyUtil.hasUnresolvedAncestors(subClass, context)) {
return true;
}
else {
final String superName = superClass.getName();
return superName != null && superName.equals(subClass.getName());
}
}
@NotNull
public static List<AnalyzeCallResults> analyzeCallSite(@Nullable PyCallSiteExpression callSite, @NotNull TypeEvalContext context) {
if (callSite != null) {
final List<AnalyzeCallResults> results = new ArrayList<>();
for (PyCallable callable : resolveCallee(callSite, context)) {
final PyExpression receiver = getReceiver(callSite, callable);
for (List<PyParameter> parameters : PyUtil.getOverloadedParametersSet(callable, context)) {
final Map<PyExpression, PyNamedParameter> mapping = PyCallExpressionHelper.mapArguments(callSite, callable, parameters, context);
results.add(new AnalyzeCallResults(callable, receiver, mapping));
}
}
return results;
}
return Collections.emptyList();
}
  /**
   * Resolves the callable(s) invoked by a call site.  Plain calls resolve to at most one marked
   * callee; subscriptions and binary expressions (operator calls) may resolve to several.
   * If any operator resolution yields something that is not a callable, the whole result is
   * discarded (empty list) to avoid reporting on partially-understood code.
   */
  @NotNull
  private static List<PyCallable> resolveCallee(@NotNull PyCallSiteExpression callSite, @NotNull TypeEvalContext context) {
    final PyResolveContext resolveContext = PyResolveContext.noImplicits().withTypeEvalContext(context);
    if (callSite instanceof PyCallExpression) {
      final PyCallExpression callExpr = (PyCallExpression)callSite;
      final PyCallExpression.PyMarkedCallee callee = callExpr.resolveCallee(resolveContext);
      return callee != null ? Collections.singletonList(callee.getCallable()) : Collections.emptyList();
    }
    else if (callSite instanceof PySubscriptionExpression || callSite instanceof PyBinaryExpression) {
      final List<PyCallable> results = new ArrayList<>();
      boolean resolvedToUnknownResult = false;
      for (PsiElement result : PyUtil.multiResolveTopPriority(callSite, resolveContext)) {
        if (result instanceof PyCallable) {
          results.add((PyCallable)result);
          continue;
        }
        // Not directly callable PSI: a typed element whose type is a function type still counts.
        if (result instanceof PyTypedElement) {
          final PyType resultType = context.getType((PyTypedElement)result);
          if (resultType instanceof PyFunctionType) {
            results.add(((PyFunctionType)resultType).getCallable());
            continue;
          }
        }
        resolvedToUnknownResult = true;
      }
      return resolvedToUnknownResult ? Collections.emptyList() : results;
    }
    else {
      return Collections.emptyList();
    }
  }
@Nullable
public static Boolean isCallable(@Nullable PyType type) {
if (type == null) {
return null;
}
if (type instanceof PyUnionType) {
return isUnionCallable((PyUnionType)type);
}
if (type instanceof PyCallableType) {
return ((PyCallableType)type).isCallable();
}
if (type instanceof PyStructuralType && ((PyStructuralType)type).isInferredFromUsages()) {
return true;
}
return false;
}
/**
* If at least one is callable -- it is callable.
* If at least one is unknown -- it is unknown.
* It is false otherwise.
*/
@Nullable
private static Boolean isUnionCallable(@NotNull final PyUnionType type) {
for (final PyType member : type.getMembers()) {
final Boolean callable = isCallable(member);
if (callable == null) {
return null;
}
if (callable) {
return true;
}
}
return false;
}
public static boolean overridesGetAttr(@NotNull PyClass cls, @NotNull TypeEvalContext context) {
PsiElement method = resolveClassMember(cls, PyNames.GETATTR, context);
if (method != null) {
return true;
}
method = resolveClassMember(cls, PyNames.GETATTRIBUTE, context);
if (method != null && !PyBuiltinCache.getInstance(cls).isBuiltin(method)) {
return true;
}
return false;
}
@Nullable
private static PsiElement resolveClassMember(@NotNull PyClass cls, @NotNull String name, @NotNull TypeEvalContext context) {
final PyType type = context.getType(cls);
if (type != null) {
final PyResolveContext resolveContext = PyResolveContext.noImplicits().withTypeEvalContext(context);
final List<? extends RatedResolveResult> results = type.resolveMember(name, null, AccessDirection.READ, resolveContext);
if (results != null && !results.isEmpty()) {
return results.get(0).getElement();
}
}
return null;
}
  /**
   * Infers the type of a single target inside a tuple unpacking assignment, e.g. the type of
   * {@code b} in {@code a, (b, c) = value} when {@code value} has a tuple type.  Recurses into
   * nested tuple targets whose corresponding element types are themselves tuples.
   * Returns null when arities don't line up or the target is not found.
   */
  @Nullable
  public static PyType getTargetTypeFromTupleAssignment(@NotNull PyTargetExpression target, @NotNull PyTupleExpression parentTuple,
                                                        @NotNull PyTupleType assignedTupleType) {
    final int count = assignedTupleType.getElementCount();
    final PyExpression[] elements = parentTuple.getElements();
    // Arity must match exactly, unless the tuple is homogeneous (all elements share a type).
    if (elements.length == count || assignedTupleType.isHomogeneous()) {
      final int index = ArrayUtil.indexOf(elements, target);
      if (index >= 0) {
        // NOTE(review): for a homogeneous tuple, index may exceed count; assumes
        // getElementType handles that case -- TODO confirm.
        return assignedTupleType.getElementType(index);
      }
      // Target not a direct element: search nested tuple sub-targets.
      for (int i = 0; i < count; i++) {
        PyExpression element = elements[i];
        // Unwrap parentheses, e.g. ((b, c)).
        while (element instanceof PyParenthesizedExpression) {
          element = ((PyParenthesizedExpression)element).getContainedExpression();
        }
        if (element instanceof PyTupleExpression) {
          final PyType elementType = assignedTupleType.getElementType(i);
          if (elementType instanceof PyTupleType) {
            final PyType result = getTargetTypeFromTupleAssignment(target, (PyTupleExpression)element, (PyTupleType)elementType);
            if (result != null) {
              return result;
            }
          }
        }
      }
    }
    return null;
  }
@NotNull
public static List<PyParameter> filterExplicitParameters(@NotNull List<PyParameter> parameters, @NotNull PyCallable callable,
@NotNull PyCallSiteExpression callSite,
@NotNull PyResolveContext resolveContext) {
final int implicitOffset;
if (callSite instanceof PyCallExpression) {
final PyCallExpression callExpr = (PyCallExpression)callSite;
final PyExpression callee = callExpr.getCallee();
if (callee instanceof PyReferenceExpression && callable instanceof PyFunction) {
implicitOffset = PyCallExpressionHelper.getImplicitArgumentCount((PyReferenceExpression)callee, (PyFunction)callable,
resolveContext);
}
else {
implicitOffset = 0;
}
}
else if (callSite instanceof PySubscriptionExpression || callSite instanceof PyBinaryExpression) {
implicitOffset = 1;
}
else {
implicitOffset = 0;
}
return parameters.subList(Math.min(implicitOffset, parameters.size()), parameters.size());
}
@NotNull
public static List<PyExpression> getArguments(@NotNull PyCallSiteExpression expr, @NotNull PsiElement resolved) {
if (expr instanceof PyCallExpression) {
return Arrays.asList(((PyCallExpression)expr).getArguments());
}
else if (expr instanceof PySubscriptionExpression) {
return Collections.singletonList(((PySubscriptionExpression)expr).getIndexExpression());
}
else if (expr instanceof PyBinaryExpression) {
final PyBinaryExpression binaryExpr = (PyBinaryExpression)expr;
final boolean isRight = resolved instanceof PsiNamedElement && PyNames.isRightOperatorName(((PsiNamedElement)resolved).getName());
return Collections.singletonList(isRight ? binaryExpr.getLeftExpression() : binaryExpr.getRightExpression());
}
else {
return Collections.emptyList();
}
}
@Nullable
public static PyExpression getReceiver(@NotNull PyCallSiteExpression expr, @NotNull PsiElement resolved) {
if (expr instanceof PyCallExpression) {
if (resolved instanceof PyFunction) {
final PyFunction function = (PyFunction)resolved;
if (function.getModifier() == PyFunction.Modifier.STATICMETHOD) {
return null;
}
}
final PyExpression callee = ((PyCallExpression)expr).getCallee();
return callee instanceof PyQualifiedExpression ? ((PyQualifiedExpression)callee).getQualifier() : null;
}
else if (expr instanceof PySubscriptionExpression) {
return ((PySubscriptionExpression)expr).getOperand();
}
else if (expr instanceof PyBinaryExpression) {
final PyBinaryExpression binaryExpr = (PyBinaryExpression)expr;
final boolean isRight = resolved instanceof PsiNamedElement && PyNames.isRightOperatorName(((PsiNamedElement)resolved).getName());
return isRight ? binaryExpr.getRightExpression() : binaryExpr.getLeftExpression();
}
else {
return null;
}
}
public static class AnalyzeCallResults {
@NotNull private final PyCallable myCallable;
@Nullable private final PyExpression myReceiver;
@NotNull private final Map<PyExpression, PyNamedParameter> myArguments;
public AnalyzeCallResults(@NotNull PyCallable callable, @Nullable PyExpression receiver,
@NotNull Map<PyExpression, PyNamedParameter> arguments) {
myCallable = callable;
myReceiver = receiver;
myArguments = arguments;
}
@NotNull
public PyCallable getCallable() {
return myCallable;
}
@Nullable
public PyExpression getReceiver() {
return myReceiver;
}
@NotNull
public Map<PyExpression, PyNamedParameter> getArguments() {
return myArguments;
}
}
}
| |
/*
* Copyright (c) 2013, SRI International
* All rights reserved.
* Licensed under the The BSD 3-Clause License;
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://opensource.org/licenses/BSD-3-Clause
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of the aic-util nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.sri.ai.util.cache;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import com.google.common.annotations.Beta;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheStats;
import com.google.common.collect.ForwardingMap;
import com.google.common.collect.Sets;
import com.sri.ai.util.AICUtilConfiguration;
import com.sri.ai.util.base.NullaryFunction;
/**
* A default implementation for {@link CacheMap} that, upon garbage collection,
* creates a new internal map and moves entries with reachable entries to it,
* discarding all other entries.
*
* If a garbage collection period <code>n</code> and a reachable object iterator
* maker are provided, then garbage collection happens automatically after every
* <code>n</code> {@link #put(Object, Object)} operations.
*
* @author braz
* @author oreilly
*/
@Beta
public class DefaultCacheMap<K, V> extends ForwardingMap<K, V> implements CacheMap<K, V> {
	// Configuration attributes
	private boolean weakKeys                                             = false;
	private long    maximumSize                                          = NO_MAXIMUM_SIZE;
	private NullaryFunction<Iterator<K>> reachableObjectIteratorMaker    = null;
	private int     garbageCollectionPeriod                              = NO_GARBAGE_COLLECTION;
	// Working attributes
	private Cache<K, V> storage  = null;
	// View over storage used as the ForwardingMap delegate; shares state with storage.
	private Map<K, V>   delegate = null;
	private int numberOfPutsSinceLastGarbageCollection = 0;
	// Guards against concurrent garbage-collection passes.
	private Lock garbageCollectLock = new ReentrantLock();

	/** Creates a cache with no weak keys, no size limit and no automatic garbage collection. */
	public DefaultCacheMap() {
		initStorage();
	}

	/** Creates an unbounded cache, optionally with weakly-referenced keys. */
	public DefaultCacheMap(boolean weakKeys) {
		this(weakKeys, NO_MAXIMUM_SIZE);
	}

	/** Creates a size-bounded cache with strong keys. */
	public DefaultCacheMap(long maximumSize) {
		this(false, maximumSize);
	}

	/**
	 * Creates a cache with the given key strength and maximum size
	 * (negative size means unbounded, zero disables caching).
	 */
	public DefaultCacheMap(boolean weakKeys, long maximumSize) {
		this.weakKeys    = weakKeys;
		this.maximumSize = maximumSize;
		initStorage();
	}

	/** Creates a strong-keyed cache with periodic reachability-based garbage collection. */
	public DefaultCacheMap(long maximumSize, NullaryFunction<Iterator<K>> reachableObjectIteratorMaker, int garbageCollectionPeriod) {
		this(false, maximumSize, reachableObjectIteratorMaker, garbageCollectionPeriod);
	}

	/**
	 * Full constructor: after every {@code garbageCollectionPeriod} puts, entries whose keys are
	 * not produced by the reachable-object iterator are discarded.
	 */
	public DefaultCacheMap(boolean weakKeys, long maximumSize, NullaryFunction<Iterator<K>> reachableObjectIteratorMaker, int garbageCollectionPeriod) {
		this.weakKeys                     = weakKeys;
		this.maximumSize                  = maximumSize;
		this.reachableObjectIteratorMaker = reachableObjectIteratorMaker;
		this.garbageCollectionPeriod      = garbageCollectionPeriod;
		initStorage();
	}

	//
	// START-MAP Interface
	@Override
	public void clear() {
		storage.invalidateAll();
		storage.cleanUp();
	}

	@Override
	public V get(Object key) {
		// Returns null when absent, like Map.get.
		return storage.getIfPresent(key);
	}

	@Override
	public V put(K key, V value) {
		storage.put(key, value);
		numberOfPutsSinceLastGarbageCollection++;
		checkDoGarbageCollect();
		// NOTE(review): Map.put is specified to return the PREVIOUS value associated with the
		// key (or null); this returns the NEW value instead. Callers may rely on the current
		// behavior -- confirm before changing.
		return value;
	}

	@Override
	public void putAll(Map<? extends K, ? extends V> t) {
		storage.putAll(t);
		numberOfPutsSinceLastGarbageCollection += t.size();
		checkDoGarbageCollect();
	}
	// END-MAP Interface
	//

	//
	// START-CacheMap
	@Override
	public void garbageCollect(Iterator<K> reachableObjectsIterator) {
		// To ensure we don't garbage collect repeatedly across threads.
		if (reachableObjectsIterator != null && garbageCollectLock.tryLock()) {
			try {
				// Re-check the trigger condition under the lock (also checked in
				// checkDoGarbageCollect) so a concurrent pass that just finished is not repeated.
				if (isGarbageCollection() && numberOfPutsSinceLastGarbageCollection >= getGarbageCollectionPeriod()) {
					Set<K> reachableObjects = makeTempKeySet();
					while (reachableObjectsIterator.hasNext()) {
						reachableObjects.add(reachableObjectsIterator.next());
					}
					// Dropping unreachable keys through the map view invalidates them in the cache.
					delegate.keySet().retainAll(reachableObjects);
					storage.cleanUp();
					numberOfPutsSinceLastGarbageCollection = 0;
				}
			} finally {
				garbageCollectLock.unlock();
			}
		}
	}

	@Override
	public int getGarbageCollectionPeriod() {
		return garbageCollectionPeriod;
	}

	@Override
	public NullaryFunction<Iterator<K>> getReachableObjectIteratorMaker() {
		return reachableObjectIteratorMaker;
	}

	@Override
	public void setGarbageCollectionPeriod(int period) {
		this.garbageCollectionPeriod = period;
	}

	@Override
	public void setReachableObjectIteratorMaker(NullaryFunction<Iterator<K>> iteratorMaker) {
		this.reachableObjectIteratorMaker = iteratorMaker;
	}

	@Override
	public CacheStats stats() {
		return storage.stats();
	}
	// END-CacheMap
	//

	//
	// PROTECTED METHODS
	//
	@Override
	protected Map<K,V> delegate() {
		return delegate;
	}

	//
	// PRIVATE METHODS
	//
	/** Builds the Guava cache from the configuration attributes and exposes it as a Map. */
	private void initStorage() {
		CacheBuilder<Object, Object> cb = CacheBuilder.newBuilder();
		if (weakKeys) {
			cb.weakKeys();
		}
		// Note: a maximumSize of
		// < 0 means no size restrictions
		// = 0 means no cache
		// > 0 means maximum size of cache
		if (maximumSize >= 0L) {
			cb.maximumSize(maximumSize);
		}
		if (AICUtilConfiguration.isRecordCacheStatistics()) {
			cb.recordStats();
		}
		storage  = cb.build();
		delegate = storage.asMap();
	}

	/** Triggers a GC pass when configured and the put threshold has been reached. */
	private void checkDoGarbageCollect() {
		if (isGarbageCollection() && numberOfPutsSinceLastGarbageCollection >= getGarbageCollectionPeriod()) {
			garbageCollect(reachableObjectIteratorMaker.apply());
		}
	}

	/** True when both a GC period and a reachable-object iterator maker are configured. */
	private boolean isGarbageCollection() {
		return getGarbageCollectionPeriod() != NO_GARBAGE_COLLECTION && reachableObjectIteratorMaker != null;
	}

	/** Scratch set used to hold the reachable keys during a GC pass. */
	private Set<K> makeTempKeySet() {
		Set<K> keys = Sets.newHashSet();
		return keys;
	}
}
| |
package com.tcheckit;
import android.annotation.TargetApi;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.os.Build;
import android.os.Bundle;
import android.view.Menu;
import android.view.View;
import android.widget.Button;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.TextView;
import android.widget.ToggleButton;
import com.facebook.Session;
import com.tcheckit.utils.UiUtils;
import com.tcheckit.vo.DataSession;
/**
 * Settings screen: lets the user pick a search perimeter with a seek bar, toggle the
 * perimeter/push filters, navigate to account screens, and log out.  The chosen values are
 * persisted in the "tcheckit" SharedPreferences.
 */
public class ReglagesActivity extends MenuBasActivity {
	// Seek bar selecting the search perimeter (6 discrete steps: 500 m .. 50 km).
	ExtendedSeekBar mSeekBar;
	// Label that floats above the seek bar thumb showing the selected perimeter.
	TextView fade_text;

	@TargetApi(Build.VERSION_CODES.HONEYCOMB)
	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_reglages);
		mSeekBar = (ExtendedSeekBar) findViewById(R.id.seekBar1);
		// Shift the bar and its label up; offsets are converted from dp to px.
		mSeekBar.setY(mSeekBar.getY() - UiUtils.getUiDp(40, getApplicationContext()));
		fade_text = (TextView) findViewById(R.id.textView33);
		mSeekBar.setTV(fade_text);
		fade_text.setY(fade_text.getY() - UiUtils.getUiDp(85, getApplicationContext()));
		fade_text.setX(mSeekBar.getX() + 15);
		// Restore the saved perimeter into the seek bar position and label.
		int perimetre = DataSession.getInstance().getPerimetre(getApplicationContext());
		switch (perimetre) {
		case 500:
			mSeekBar.setProgress(0);
			fade_text.setText("500 m");
			break;
		case 2000:
			mSeekBar.setProgress(1);
			fade_text.setText("2 Km");
			break;
		case 5000:
			mSeekBar.setProgress(2);
			fade_text.setText("5 Km");
			break;
		case 10000:
			mSeekBar.setProgress(3);
			fade_text.setText("10 Km");
			break;
		case 20000:
			mSeekBar.setProgress(4);
			fade_text.setText("20 Km");
			break;
		default:
			mSeekBar.setProgress(5);
			fade_text.setText("50 Km");
			break;
		}
		// fade_text.setX(mSeekBar.getX() + 15);
		// Restore the filter toggles; the button text doubles as the stored "OUI"/"NON" value.
		SharedPreferences prefs = getApplicationContext().getSharedPreferences("tcheckit", 0);
		String tmp = prefs.getString("filtrePerimetreActif", "NON");
		((ToggleButton) findViewById(R.id.toggleButton1)).setText(tmp);
		if (tmp.equals("OUI")) {
			((ToggleButton) findViewById(R.id.toggleButton1)).setChecked(true);
		}
		tmp = prefs.getString("filtrePushActif", "NON");
		((ToggleButton) findViewById(R.id.toggleButton2)).setText(tmp);
		if (tmp.equals("OUI")) {
			((ToggleButton) findViewById(R.id.toggleButton2)).setChecked(true);
		}
		// Highlight the "settings" tab in the bottom menu.
		((Button) findViewById(R.id.imageButtonReglages)).setBackgroundColor(getResources().getColor(R.color.orange));
		((Button) findViewById(R.id.imageButtonReglages)).setCompoundDrawablesWithIntrinsicBounds(null, getResources().getDrawable(R.drawable.ico_reglages_org), null, null);
		mSeekBar.setOnSeekBarChangeListener(new OnSeekBarChangeListener() {
			@Override
			public void onStopTrackingTouch(SeekBar seekBar) {
				// TODO Auto-generated method stub
			}

			@Override
			public void onStartTrackingTouch(SeekBar seekBar) {
				// TODO Auto-generated method stub
			}

			@Override
			public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
				// Move the label with the thumb, update its text and persist the perimeter.
				mSeekBar.computeScroll();
				Rect thumbRect = mSeekBar.getSeekBarThumb().getBounds();
				// Log.v("sherif", "(" + thumbRect.left + ", " + thumbRect.top + ", " +
				// thumbRect.right + ", " + thumbRect.bottom + ") + " +
				// mSeekBar.getMeasuredWidth());
				String what_to_say = String.valueOf(progress);
				// int seek_label_pos = (int) ((float) (mSeekBar.getMeasuredWidth()) *
				// ((float) progress / 10f));
				SharedPreferences prefs = getApplicationContext().getSharedPreferences("tcheckit", 0);
				SharedPreferences.Editor editor = prefs.edit();
				switch (progress) {
				case 0:
					fade_text.setX(thumbRect.left + 15);
					fade_text.setText("500 m");
					editor.putInt("perimetre", 500);
					break;
				case 1:
					fade_text.setX(thumbRect.left + 15);
					fade_text.setText("2 Km");
					editor.putInt("perimetre", 2000);
					break;
				case 2:
					fade_text.setX(thumbRect.left + 15);
					fade_text.setText("5 Km");
					editor.putInt("perimetre", 5000);
					break;
				case 3:
					fade_text.setX(thumbRect.left + 13);
					fade_text.setText("10 Km");
					editor.putInt("perimetre", 10000);
					break;
				case 4:
					fade_text.setX(thumbRect.left + 13);
					fade_text.setText("20 Km");
					editor.putInt("perimetre", 20000);
					break;
				default:
					fade_text.setX(thumbRect.left + 13);
					fade_text.setText("50 Km");
					editor.putInt("perimetre", 50000);
					break;
				}
				// NOTE(review): commit() writes synchronously on the UI thread and runs on every
				// thumb move; apply() would be asynchronous -- confirm before changing.
				editor.commit();
				mSeekBar.refreshDrawableState();
				thumbRect = mSeekBar.getSeekBarThumb().getBounds();
			}
		});
	}

	@Override
	protected void onPostCreate(Bundle savedInstanceState) {
		// TODO Auto-generated method stub
		super.onPostCreate(savedInstanceState);
		// NOTE(review): both results below are unused -- looks like leftover debugging.
		Rect thumbRect = mSeekBar.getProgressDrawable().getBounds();// SeekBarThumb().getBounds();
		mSeekBar.getWidth();
	}

	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		// Inflate the menu; this adds items to the action bar if it is present.
		getMenuInflater().inflate(R.menu.reglages, menu);
		return true;
	}

	/** Navigates back to the main screen. */
	public void goMain(View view) {
		Intent intent = new Intent(this, MainActivity.class);
		startActivity(intent);
	}

	/*@TargetApi(Build.VERSION_CODES.HONEYCOMB)
	@Override
	public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
		mSeekBar.computeScroll();
		Rect thumbRect = mSeekBar.getSeekBarThumb().getBounds();
		// Log.v("sherif", "(" + thumbRect.left + ", " + thumbRect.top + ", " +
		// thumbRect.right + ", " + thumbRect.bottom + ") + " +
		// mSeekBar.getMeasuredWidth());
		String what_to_say = String.valueOf(progress);
		// int seek_label_pos = (int) ((float) (mSeekBar.getMeasuredWidth()) *
		// ((float) progress / 10f));
		SharedPreferences prefs = getApplicationContext().getSharedPreferences("tcheckit", 0);
		SharedPreferences.Editor editor = prefs.edit();
		switch (progress) {
		case 0:
			fade_text.setX(thumbRect.left + 15);
			fade_text.setText("500 m");
			editor.putInt("perimetre", 500);
			break;
		case 1:
			fade_text.setX(thumbRect.left + 15);
			fade_text.setText("2 Km");
			editor.putInt("perimetre", 2000);
			break;
		case 2:
			fade_text.setX(thumbRect.left + 15);
			fade_text.setText("5 Km");
			editor.putInt("perimetre", 5000);
			break;
		case 3:
			fade_text.setX(thumbRect.left + 13);
			fade_text.setText("10 Km");
			editor.putInt("perimetre", 10000);
			break;
		case 4:
			fade_text.setX(thumbRect.left + 13);
			fade_text.setText("20 Km");
			editor.putInt("perimetre", 20000);
			break;
		default:
			fade_text.setX(thumbRect.left + 13);
			fade_text.setText("50 Km");
			editor.putInt("perimetre", 50000);
			break;
		}
		editor.commit();
		mSeekBar.refreshDrawableState();
		thumbRect = mSeekBar.getSeekBarThumb().getBounds();
	}

	@Override
	public void onStartTrackingTouch(SeekBar seekBar) {
		// TODO Auto-generated method stub
	}

	@Override
	public void onStopTrackingTouch(SeekBar seekBar) {
		// TODO Auto-generated method stub
	}
	*/

	/** Opens the bank-account screen. */
	public void goVotreCompteEnBanque(View view) {
		Intent intent = new Intent(this, MonCompteBanqueActivity.class);
		startActivity(intent);
	}

	/** Opens the PayPal-account screen. */
	public void goVotreComptePaypal(View view) {
		Intent intent = new Intent(this, MonComptePaypalActivity.class);
		startActivity(intent);
	}

	/** Shows a dialog explaining that password change is not allowed. */
	public void goModifierMdp(View view) {
		AlertDialog.Builder builder1 = new AlertDialog.Builder(view.getContext());
		builder1.setTitle("Attention!");
		builder1.setMessage(R.string.action_not_allowed);
		builder1.setCancelable(true);
		builder1.setPositiveButton("Ok", new DialogInterface.OnClickListener() {
			public void onClick(DialogInterface dialog, int id) {
				dialog.cancel();
			}
		});
		AlertDialog alert11 = builder1.create();
		alert11.show();
	}

	/** Logs out: closes the Facebook session, resets the local session and returns to login. */
	public void deconnexion(View view) {
		Session session = Session.getActiveSession();
		// NOTE(review): getActiveSession may return null; confirm a session always exists here.
		if (!session.isClosed()) {
			session.closeAndClearTokenInformation();
		}
		DataSession.getInstance().reset(getApplicationContext());
		Intent intent = new Intent(this, LoginActivity.class);
		startActivity(intent);
	}

	/** Persists the perimeter-filter toggle state ("OUI"/"NON" taken from the button text). */
	public void filtrePerimetre(View view) {
		SharedPreferences prefs = getApplicationContext().getSharedPreferences("tcheckit", 0);
		SharedPreferences.Editor editor = prefs.edit();
		editor.putString("filtrePerimetreActif", ((ToggleButton) findViewById(R.id.toggleButton1)).getText().toString());
		editor.commit();
	}

	/** Persists the push-filter toggle state ("OUI"/"NON" taken from the button text). */
	public void filtrePush(View view) {
		SharedPreferences prefs = getApplicationContext().getSharedPreferences("tcheckit", 0);
		SharedPreferences.Editor editor = prefs.edit();
		editor.putString("filtrePushActif", ((ToggleButton) findViewById(R.id.toggleButton2)).getText().toString());
		editor.commit();
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.raid.tools;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.UUID;
import java.util.zip.CRC32;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.raid.Codec;
import org.apache.hadoop.raid.Decoder;
import org.apache.hadoop.raid.LogUtils;
import org.apache.hadoop.raid.Decoder.DecoderInputStream;
import org.apache.hadoop.raid.LogUtils.LOGRESULTS;
import org.apache.hadoop.raid.ParallelStreamReader;
import org.apache.hadoop.raid.ParallelStreamReader.ReadResult;
import org.apache.hadoop.raid.ParityFilePair;
import org.apache.hadoop.raid.RaidUtils;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.SequenceFileRecordReader;
import org.apache.hadoop.mapred.Utils;
public class FastFileCheck {
  final static Log LOG = LogFactory.getLog(
      "org.apache.hadoop.raid.tools.FastFileCheck");
  // NOTE(review): SimpleDateFormat is not thread-safe; confirm this is only used from one thread.
  private static final SimpleDateFormat dateForm = new SimpleDateFormat("yyyy-MM-dd-HH-mm");
  private static int DEFAULT_VERIFY_LEN = 64 * 1024; // 64k
  private static int BUFFER_LEN = DEFAULT_VERIFY_LEN;
  // Job/configuration keys derived from the tool name.
  private static final String NAME = "fastfilecheck";
  private static final String JOB_DIR_LABEL = NAME + ".job.dir";
  private static final String OP_LIST_LABEL = NAME + ".op.list";
  private static final String OP_COUNT_LABEL = NAME + ".op.count";
  private static final String SOURCE_ONLY_CONF = "sourceonly";
  // Replication factor for the operation-list file.
  private static final short OP_LIST_RELICATION = 10;
  private static final int TASKS_PER_JOB = 50;
  private static long filesPerTask = 10;
  private static final int MAX_FILES_PER_TASK = 10000;
  // When true, only source blocks are verified (no parity) -- TODO confirm semantics.
  private boolean sourceOnly = false;
  private Configuration conf;
  private static final Random rand = new Random();
enum State{
GOOD_FILE,
BAD_FILE,
NOT_RAIDED,
UNREADABLE,
NOT_FOUND
}
  /** Creates a checker bound to the given Hadoop configuration. */
  public FastFileCheck(Configuration conf) {
    this.conf = conf;
  }
/**
 * Input format that carves the operation file list (a SequenceFile of
 * (path, path) records) into at most TASKS_PER_JOB splits of roughly
 * equal record counts, so each map task checks a bounded number of files.
 */
static class FileCheckInputFormat implements InputFormat<Text, Text> {
  @Override
  public InputSplit[] getSplits(JobConf job, int numSplits)
      throws IOException {
    // Always fan out to the fixed task count, ignoring the framework hint.
    numSplits = TASKS_PER_JOB;
    // Record count written by submitJobs(); -1 means the conf is broken.
    final int totalRecords = job.getInt(OP_COUNT_LABEL, -1);
    final int targetCount = totalRecords / numSplits;
    String fileList = job.get(OP_LIST_LABEL, "");
    if (totalRecords < 0 || "".equals(fileList)) {
      throw new RuntimeException("Invalid metadata.");
    }
    Path srcs = new Path(fileList);
    FileSystem fs = srcs.getFileSystem(job);
    List<FileSplit> splits = new ArrayList<FileSplit>(numSplits);
    Text key = new Text();
    Text value = new Text();
    SequenceFile.Reader in = null;
    long prev = 0L;
    int count = 0;
    // Walk the op list and cut a split every targetCount records; split
    // boundaries land on record positions reported by the reader.
    try {
      in = new SequenceFile.Reader(fs, srcs, job);
      while (in.next(key, value)) {
        long cur = in.getPosition();
        long delta = cur - prev;
        if (++count > targetCount) {
          count = 0;
          splits.add(new FileSplit(srcs, prev, delta, (String[])null));
          prev = cur;
        }
      }
    } finally {
      // BUGFIX: if the Reader constructor threw, "in" is still null and
      // the unconditional in.close() here used to raise an NPE that
      // masked the original exception.
      if (in != null) {
        in.close();
      }
    }
    // Whatever is left after the last cut becomes the final split.
    long remaining = fs.getFileStatus(srcs).getLen() - prev;
    if (0 != remaining) {
      splits.add(new FileSplit(srcs, prev, remaining, (String[])null));
    }
    return splits.toArray(new FileSplit[splits.size()]);
  }

  @Override
  public RecordReader<Text, Text> getRecordReader(InputSplit split,
      JobConf job, Reporter reporter) throws IOException {
    return new SequenceFileRecordReader<Text, Text>(job, (FileSplit) split);
  }
}
/**
 * Mapper that verifies one file per input record and emits
 * (file path, State-name) pairs. Progress and a succeeded/failed tally
 * are surfaced through the task status string.
 */
static class FileCheckMapper implements Mapper<Text, Text, Text, Text> {
  private JobConf jobConf;
  // Per-task tallies, shown in the reporter status string.
  private int failCount = 0;
  private int succeedCount = 0;
  private boolean sourceOnly = false;

  @Override
  public void configure(JobConf job) {
    this.jobConf = job;
    this.sourceOnly = job.getBoolean(SOURCE_ONLY_CONF, false);
  }

  @Override
  public void close() throws IOException {
  }

  /** Human-readable summary used as the task status line. */
  private String getCountString() {
    return "Succeeded: " + succeedCount + " Failed: " + failCount;
  }

  /**
   * Verify the file named by {@code key} and emit its resulting State.
   * Failures are counted before the exception is propagated to the
   * framework so the status line stays accurate.
   */
  @Override
  public void map(Text key, Text value, OutputCollector<Text, Text> output,
      Reporter reporter) throws IOException {
    // run a file operation
    Path p = new Path(key.toString());
    String v;
    try {
      if (sourceOnly) {
        v = processSourceFile(p, reporter, jobConf);
      } else {
        v = processFile(p, reporter, jobConf);
      }
      LOG.info("File: " + p + ", result: " + v);
      output.collect(key, new Text(v));
      reporter.progress();
      ++succeedCount;
    } catch (IOException e) {
      // BUGFIX: I/O failures previously escaped without being counted,
      // so the status tally undercounted failures.
      ++failCount;
      throw e;
    } catch (InterruptedException e) {
      ++failCount;
      LOG.warn("Interrupted when processing file: " + p);
      // BUGFIX: restore the interrupt status before translating the
      // exception, per the standard InterruptedException contract.
      Thread.currentThread().interrupt();
      throw new IOException(e);
    } finally {
      reporter.setStatus(getCountString());
    }
  }

  /**
   * Check only the source blocks of a file (no parity decode).
   * Hard-codes the "rs" codec for stripe-length purposes.
   *
   * @return a State name: NOT_FOUND, UNREADABLE, GOOD_FILE or BAD_FILE.
   */
  String processSourceFile(Path p, Progressable reporter,
      Configuration conf) throws IOException, InterruptedException {
    LOG.info("Processing Source file: " + p);
    FileSystem fs = p.getFileSystem(conf);
    if (!fs.exists(p)) {
      return State.NOT_FOUND.toString();
    }
    Codec codec = Codec.getCodec("rs");
    boolean result = false;
    try {
      result = checkFile(conf, fs, fs, p, null, codec, reporter, true);
    } catch (IOException ex) {
      LOG.warn("Encounter exception when checking file: " + p +
          ", " + ex.getMessage());
      return State.UNREADABLE.toString();
    }
    return result ? State.GOOD_FILE.toString() : State.BAD_FILE.toString();
  }

  /**
   * Check a file against its parity, trying each configured codec until
   * a parity file is found.
   *
   * @return a State name: NOT_FOUND, NOT_RAIDED, UNREADABLE, GOOD_FILE
   *         or BAD_FILE.
   */
  String processFile(Path p, Progressable reporter, Configuration conf)
      throws IOException, InterruptedException {
    LOG.info("Processing file: " + p);
    FileSystem fs = p.getFileSystem(conf);
    if (!fs.exists(p)) {
      return State.NOT_FOUND.toString();
    }
    FileStatus srcStat = null;
    try {
      srcStat = fs.getFileStatus(p);
    } catch (Exception e) {
      // Treat any stat failure (e.g. the file vanished between the
      // exists() check and here) as NOT_FOUND.
      return State.NOT_FOUND.toString();
    }
    boolean result = false;
    boolean raided = false;
    for (Codec codec : Codec.getCodecs()) {
      ParityFilePair pfPair = ParityFilePair.getParityFile(codec, srcStat, conf);
      if (pfPair != null) {
        raided = true;
        Path parityPath = pfPair.getPath();
        try {
          result = checkFile(conf, fs, fs, p, parityPath, codec, reporter
              , false);
        } catch (IOException ex) {
          LOG.warn("Encounter exception when checking the file: " + p, ex);
          LogUtils.logFileCheckMetrics(LOGRESULTS.FAILURE, codec, p,
              fs, -1, -1, ex, reporter);
          return State.UNREADABLE.toString();
        }
        // Only the first codec with a parity file is checked.
        break;
      }
    }
    if (!raided) {
      return State.NOT_RAIDED.toString();
    }
    return result ? State.GOOD_FILE.toString() : State.BAD_FILE.toString();
  }
}
/**
 * Build the JobConf for one verification job: map-only, SequenceFile
 * Text/Text output, speculative execution disabled so a file is not
 * checked twice.
 */
private JobConf createJobConf(Configuration conf) {
  JobConf job = new JobConf(conf);
  // Timestamped job name, e.g. "fastfilecheck_2015-01-31-12-00".
  job.setJobName(NAME + "_" + dateForm.format(new Date(System.currentTimeMillis())));
  job.setMapSpeculativeExecution(false);
  job.setJarByClass(FastFileCheck.class);
  job.setInputFormat(FileCheckInputFormat.class);
  job.setMapperClass(FileCheckMapper.class);
  job.setOutputFormat(SequenceFileOutputFormat.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(Text.class);
  // Map-only job: results are read straight from the map outputs.
  job.setNumReduceTasks(0);
  job.setBoolean(SOURCE_ONLY_CONF, sourceOnly);
  return job;
}
/**
 * Check a file by probing one region per stripe (the start of each
 * stripe's first block).
 *
 * @param sourceOnly when true, only the source blocks are read
 *                   (verifySourceFile); otherwise the region is
 *                   re-decoded from parity and compared (verifyFile).
 * @return true iff every probed region verified successfully.
 */
public static boolean checkFile(Configuration conf,
    FileSystem srcFs, FileSystem parityFs,
    Path srcPath, Path parityPath, Codec codec,
    Progressable reporter,
    boolean sourceOnly)
    throws IOException, InterruptedException {
  FileStatus stat = srcFs.getFileStatus(srcPath);
  final long blockSize = stat.getBlockSize();
  final long fileLen = stat.getLen();
  // Step through the file one stripe at a time; each probe starts at the
  // first block of a stripe.
  for (long block = 0; block * blockSize < fileLen;
       block += codec.stripeLength) {
    long offset = block * blockSize;
    boolean ok = sourceOnly
        ? verifySourceFile(conf, srcFs, stat, codec, offset, reporter)
        : verifyFile(conf, srcFs, parityFs, stat, parityPath, codec,
            offset, reporter);
    if (!ok) {
      return false;
    }
  }
  return true;
}
/**
 * Verify readability of a source-file stripe starting at blockOffset by
 * reading up to three sample regions (stripe start, stripe end, and a
 * random spot) across all stripe blocks in parallel.
 *
 * Returns false as soon as any parallel read reports an IOException;
 * returns true if every sampled region was readable. Note this only
 * checks readability — no CRC comparison is done here.
 */
private static boolean verifySourceFile(Configuration conf,
FileSystem srcFs, FileStatus stat, Codec codec,
long blockOffset, Progressable reporter)
throws IOException, InterruptedException {
Path srcPath = stat.getPath();
LOG.info("Verify file: " + srcPath + " at offset: " + blockOffset);
// Bytes to read per sample, capped at one block.
int limit = (int) Math.min(stat.getBlockSize(), DEFAULT_VERIFY_LEN);
if (reporter == null) {
reporter = RaidUtils.NULL_PROGRESSABLE;
}
// Sample offsets within this stripe's first block.
List<Long> errorOffsets = new ArrayList<Long>();
// first limit bytes
errorOffsets.add(blockOffset);
long left = Math.min(stat.getBlockSize(), stat.getLen() - blockOffset);
if (left > limit) {
// last limit bytes
errorOffsets.add(blockOffset + left - limit);
// random limit bytes.
errorOffsets.add(blockOffset +
rand.nextInt((int)(left - limit)));
}
long blockSize = stat.getBlockSize();
long fileLen = stat.getLen();
// streamList: all streams ever opened (closed in the outer finally).
// tmpList: the streams actually used for the current sample offset.
List<InputStream> streamList = new ArrayList<InputStream>();
List<InputStream> tmpList = new ArrayList<InputStream>();
try {
for (long errorOffset : errorOffsets) {
int k = 0;
int len = streamList.size();
tmpList.clear();
// One stream per stripe block, seeked to the same relative offset
// in each block; streams are reused across sample offsets.
for (int i = 0; i < codec.stripeLength; i++) {
if (errorOffset + blockSize * i >= fileLen) {
// Past EOF — this (partial) stripe has no more blocks.
break;
}
FSDataInputStream is = null;
if (k < len) {
// reuse the input stream
is = (FSDataInputStream) streamList.get(k);
k++;
} else {
is = srcFs.open(srcPath);
streamList.add(is);
}
is.seek(errorOffset + blockSize * i);
tmpList.add(is);
}
if (tmpList.size() == 0) {
continue;
}
InputStream[] streams = tmpList.toArray(new InputStream[] {});
ParallelStreamReader reader = null;
try {
// Read "limit" bytes from every stream in parallel
// (4 threads, 2 buffers in flight, bufSize == limit).
reader = new ParallelStreamReader(
reporter, streams,
limit, 4, 2, limit);
reader.start();
int readNum = 0;
// Drain results until limit bytes have been read, failing on the
// first per-stream IOException reported.
while (readNum < limit) {
ReadResult result = reader.getReadResult();
for (IOException ex : result.ioExceptions) {
if (ex != null) {
LOG.warn("Encounter exception when checking file: " + srcPath +
", " + ex.getMessage());
return false;
}
}
readNum += result.readBufs[0].length;
}
} finally {
if (null != reader) {
reader.shutdown();
}
reporter.progress();
}
}
} finally {
// Close every stream opened above, even on early return.
if (streamList.size()> 0) {
RaidUtils.closeStreams(streamList.toArray(new InputStream[]{}));
}
}
return true;
}
/**
 * Verify a stripe region of a raided file: read sample regions directly
 * from the source, re-decode the same regions from parity, and compare
 * CRC32 checksums. Samples the stripe start, stripe end, and one random
 * spot (when the block is larger than the verify length).
 *
 * @return true iff every sampled region's decoded bytes match the source.
 */
private static boolean verifyFile(Configuration conf,
    FileSystem srcFs, FileSystem parityFs,
    FileStatus stat, Path parityPath, Codec codec,
    long blockOffset, Progressable reporter)
    throws IOException, InterruptedException {
  Path srcPath = stat.getPath();
  LOG.info("Verify file: " + srcPath + " at offset: " + blockOffset);
  // Bytes to verify per sample, capped at one block.
  int limit = (int) Math.min(stat.getBlockSize(), DEFAULT_VERIFY_LEN);
  if (reporter == null) {
    reporter = RaidUtils.NULL_PROGRESSABLE;
  }
  // Decoder that reconstructs data from parity.
  Decoder decoder = new Decoder(conf, codec);
  if (codec.isDirRaid) {
    decoder.connectToStore(srcPath);
  }
  List<Long> errorOffsets = new ArrayList<Long>();
  // first limit bytes
  errorOffsets.add(blockOffset);
  long left = Math.min(stat.getBlockSize(), stat.getLen() - blockOffset);
  if (left > limit) {
    // last limit bytes
    errorOffsets.add(blockOffset + left - limit);
    // random limit bytes.
    errorOffsets.add(blockOffset +
        rand.nextInt((int)(left - limit)));
  }
  byte[] buffer = new byte[limit];
  FSDataInputStream is = srcFs.open(srcPath);
  try {
    for (long errorOffset : errorOffsets) {
      // BUGFIX: clamp to the file end and loop over short reads. The
      // original code ignored read()'s return value, so a short read
      // left stale bytes in the shared buffer and both CRCs silently
      // covered garbage.
      int toRead = (int) Math.min(limit, stat.getLen() - errorOffset);
      is.seek(errorOffset);
      readExactly(is, buffer, toRead);
      // CRC of the bytes as stored in the source file.
      CRC32 oldCrc = new CRC32();
      oldCrc.update(buffer, 0, toRead);
      // CRC of the same region reconstructed from parity.
      CRC32 newCrc = new CRC32();
      DecoderInputStream stream = decoder.new DecoderInputStream(
          RaidUtils.NULL_PROGRESSABLE, limit, stat.getBlockSize(), errorOffset,
          srcFs, srcPath, parityFs, parityPath, null, null, false);
      try {
        readExactly(stream, buffer, toRead);
        newCrc.update(buffer, 0, toRead);
        if (oldCrc.getValue() != newCrc.getValue()) {
          LogUtils.logFileCheckMetrics(LOGRESULTS.FAILURE, codec, srcPath,
              srcFs, errorOffset, limit, null, reporter);
          LOG.error("mismatch crc, old " + oldCrc.getValue() +
              ", new " + newCrc.getValue() + ", for file: " + srcPath
              + " at offset " + errorOffset + ", read limit " + toRead);
          return false;
        }
      } finally {
        reporter.progress();
        stream.close();
      }
    }
    return true;
  } finally {
    is.close();
  }
}

/**
 * Read exactly {@code len} bytes from {@code in} into the front of
 * {@code buf}, looping over short reads.
 *
 * @throws IOException if EOF is reached before len bytes are read.
 */
private static void readExactly(InputStream in, byte[] buf, int len)
    throws IOException {
  int off = 0;
  while (off < len) {
    int n = in.read(buf, off, len - off);
    if (n < 0) {
      throw new IOException(
          "Unexpected EOF after reading " + off + " of " + len + " bytes");
    }
    off += n;
  }
}
/**
 * Pairs a submitted RunningJob with the JobConf it was launched from, so
 * the output path and status can be looked up later.
 */
private static class JobContext {
  public RunningJob job;
  public JobConf jobConf;

  public JobContext(RunningJob job, JobConf jobConf) {
    this.jobConf = jobConf;
    this.job = job;
  }
}
/**
 * Read file names from {@code reader} and submit one map-reduce job per
 * {@code filesPerJob} names. Each job gets its own op-list SequenceFile
 * of (path, path) records, sync-marked every filesPerTask records so
 * FileCheckInputFormat can split it cleanly.
 *
 * @return the contexts of all submitted jobs.
 */
private List<JobContext> submitJobs(BufferedReader reader,
    int filesPerJob, Configuration conf)
    throws IOException {
  List<JobContext> submitted = new ArrayList<JobContext>();
  boolean done = false;
  Random rand = new Random(new Date().getTime());
  filesPerTask = (long) Math.ceil((double)filesPerJob / TASKS_PER_JOB);
  filesPerTask = Math.min(filesPerTask, MAX_FILES_PER_TASK);
  do {
    JobConf jobConf = createJobConf(conf);
    JobClient jobClient = new JobClient(jobConf);
    // Random per-job scratch dir under the MR system directory.
    String randomId = Integer.toString(rand.nextInt(Integer.MAX_VALUE), 36);
    Path jobDir = new Path(jobClient.getSystemDir(), NAME + "_" + randomId);
    jobConf.set(JOB_DIR_LABEL, jobDir.toString());
    Path log = new Path(jobDir, "_logs");
    FileOutputFormat.setOutputPath(jobConf, log);
    LOG.info("log=" + log);
    // create operation list
    FileSystem fs = jobDir.getFileSystem(jobConf);
    Path opList = new Path(jobDir, "_" + OP_LIST_LABEL);
    jobConf.set(OP_LIST_LABEL, opList.toString());
    int opCount = 0;
    int synCount = 0;
    SequenceFile.Writer opWriter = null;
    try {
      opWriter = SequenceFile.createWriter(fs, jobConf, opList, Text.class,
          Text.class, SequenceFile.CompressionType.NONE);
      String f = null;
      do {
        f = reader.readLine();
        if (f == null) {
          // no more file
          done = true;
          break;
        }
        opWriter.append(new Text(f), new Text(f));
        opCount++;
        // Drop a sync marker periodically so splits land on record
        // boundaries.
        if (++synCount > filesPerTask) {
          opWriter.sync();
          synCount = 0;
        }
      } while (opCount < filesPerJob);
    } finally {
      if (opWriter != null) {
        opWriter.close();
      }
    }
    // BUGFIX: moved out of the finally block. If createWriter threw, the
    // op list doesn't exist and setReplication here would throw too,
    // masking the original exception.
    fs.setReplication(opList, OP_LIST_RELICATION);
    jobConf.setInt(OP_COUNT_LABEL, opCount);
    RunningJob job = jobClient.submitJob(jobConf);
    submitted.add(new JobContext(job, jobConf));
  } while (!done);
  return submitted;
}
/**
 * Poll the submitted jobs once a minute until all have completed.
 * A job whose status cannot be read is killed (best effort) and dropped
 * from the running set.
 */
private void waitForJobs(List<JobContext> submitted, Configuration conf)
    throws IOException, InterruptedException {
  // BUGFIX: removed dead locals — the original built a JobConf and a
  // JobClient here and never used either (the JobClient even opened a
  // connection to the JobTracker for nothing).
  List<JobContext> running = new ArrayList<JobContext>(submitted);
  while (!running.isEmpty()) {
    Thread.sleep(60000);
    LOG.info("Checking " + running.size() + " running jobs");
    for (Iterator<JobContext> it = running.iterator(); it.hasNext();) {
      // Small pause between status RPCs to avoid hammering the tracker.
      Thread.sleep(2000);
      JobContext context = it.next();
      try {
        if (context.job.isComplete()) {
          it.remove();
          LOG.info("Job " + context.job.getID() + " complete. URL: " +
              context.job.getTrackingURL());
        } else {
          LOG.info("Job " + context.job.getID() + " still running. URL: " +
              context.job.getTrackingURL());
        }
      } catch (Exception ex) {
        LOG.error("Hit error while checking job status.", ex);
        it.remove();
        try {
          context.job.killJob();
        } catch (Exception ignored) {
          // Best-effort kill: the job may already be gone.
        }
      }
    }
  }
}
/**
 * Read every job's "part-*" output files, print each (file, State) pair
 * to stdout, and print a per-State summary count to stderr.
 */
private void printResult(List<JobContext> submitted, Configuration conf)
    throws IOException {
  Text key = new Text();
  Text value = new Text();
  Map<State, Integer> stateToCountMap = new HashMap<State, Integer>();
  for (State s : State.values()) {
    stateToCountMap.put(s, 0);
  }
  for (JobContext context : submitted) {
    // BUGFIX: the original fetched the output path twice into two
    // variables (outputpath/dir); one lookup is enough.
    Path dir = SequenceFileOutputFormat.getOutputPath(context.jobConf);
    FileSystem fs = dir.getFileSystem(context.jobConf);
    Path[] names = FileUtil.stat2Paths(fs.listStatus(dir));
    // Keep only the map-output part files.
    List<Path> resultPart = new ArrayList<Path>();
    for (Path name : names) {
      String fileName = name.toUri().getPath();
      int index = fileName.lastIndexOf('/');
      fileName = fileName.substring(index + 1);
      if (fileName.startsWith("part-")) {
        resultPart.add(name);
      }
    }
    names = resultPart.toArray(new Path[] {});
    // sort names, so that hash partitioning works
    Arrays.sort(names);
    SequenceFile.Reader[] jobOutputs = new SequenceFile.Reader[names.length];
    // BUGFIX: open the readers inside the try so that a failure opening
    // reader i doesn't leak readers 0..i-1.
    try {
      for (int i = 0; i < names.length; i++) {
        jobOutputs[i] = new SequenceFile.Reader(fs, names[i], conf);
      }
      // Tally states and echo each per-file result to stdout.
      for (SequenceFile.Reader r : jobOutputs) {
        while (r.next(key, value)) {
          State state = State.valueOf(value.toString());
          stateToCountMap.put(state, stateToCountMap.get(state) + 1);
          // print the file result to stdout.
          System.out.println(key + " " + value);
        }
      }
    } finally {
      for (SequenceFile.Reader r : jobOutputs) {
        if (r != null) {
          r.close();
        }
      }
    }
  }
  // print summary to std error.
  for (State s : State.values()) {
    String output = s + " " + stateToCountMap.get(s);
    System.err.println(output);
  }
}
/** Print command-line usage (plus generic Hadoop options) to stderr. */
private void printUsage() {
  String usage =
      "java FastFileCheck [options] [-filesPerJob N] [-sourceOnly] /path/to/inputfile\n";
  System.err.println(usage);
  ToolRunner.printGenericCommandUsage(System.err);
}
/**
 * Entry point: parse arguments starting at {@code startIndex}, submit
 * verification jobs for the file names read from the input file (or
 * stdin when no file is given), wait for them, and print the results.
 *
 * Recognized flags: -filesPerJob N, -sourceOnly; any other argument is
 * taken as the input file path.
 */
public void startFileCheck(String[] args, int startIndex, Configuration conf)
    throws IOException, InterruptedException {
  // BUGFIX: removed an unused local — the original built a JobConf here
  // and never used it (createJobConf is called again in submitJobs).
  String inputFile = null;
  int filesPerJob = Integer.MAX_VALUE;
  sourceOnly = false;
  for (int i = startIndex; i < args.length; i++) {
    String arg = args[i];
    if (arg.equalsIgnoreCase("-filesPerJob")) {
      i++;
      filesPerJob = Integer.parseInt(args[i]);
    } else if (arg.equalsIgnoreCase("-sourceOnly")) {
      sourceOnly = true;
    } else {
      inputFile = arg;
    }
  }
  InputStream in =
      inputFile == null ? System.in : new FileInputStream(inputFile);
  BufferedReader reader = new BufferedReader(new InputStreamReader(in));
  try {
    List<JobContext> submitted = submitJobs(reader, filesPerJob, conf);
    waitForJobs(submitted, conf);
    printResult(submitted, conf);
  } finally {
    // BUGFIX: the input stream was never closed. Only close streams we
    // opened ourselves — never System.in.
    if (inputFile != null) {
      reader.close();
    }
  }
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.