gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* (C) Copyright IBM Corp. 2012, 2016 All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.jaggr.core.deps;
import com.ibm.jaggr.core.util.BooleanTerm;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
/**
 * Unit tests for {@code ModuleDepInfo}, covering constructor validation,
 * has-plugin prefix generation, boolean term containment, and the
 * add/subtract set operations (including comment propagation).
 */
public class ModuleDepInfoTest {
	@BeforeClass
	public static void setUpBeforeClass() throws Exception {
	}

	@AfterClass
	public static void tearDownAfterClass() throws Exception {
	}

	@Before
	public void setUp() throws Exception {
	}

	@After
	public void tearDown() throws Exception {
	}

	/**
	 * A non-null boolean term with a null plugin name must be rejected with
	 * a {@link NullPointerException}; the comment passed to the constructor
	 * must be returned verbatim by {@code getComment()}.
	 */
	@Test
	public void testModuleDepInfo() {
		ModuleDepInfo depInfo = null;
		boolean exceptionCaught = false;
		try {
			depInfo = new ModuleDepInfo(null, new BooleanTerm("A"), null);
		} catch (NullPointerException ex) {
			exceptionCaught = true;
		}
		Assert.assertTrue(exceptionCaught);
		depInfo = new ModuleDepInfo(null, null, null);
		Assert.assertNull(depInfo.getComment());
		depInfo = new ModuleDepInfo(null, null, "Comment 1");
		Assert.assertEquals("Comment 1", depInfo.getComment());
	}

	/**
	 * Prefixes reflect the disjunction of added terms; a formula that reduces
	 * to FALSE yields an empty collection, and one that reduces to TRUE
	 * yields null (no conditional loader prefix needed).
	 */
	@Test
	public void testGetHasPluginPrefixes() {
		ModuleDepInfo depInfo = new ModuleDepInfo("has", new BooleanTerm("B"), null);
		depInfo.add(new ModuleDepInfo("has", new BooleanTerm("A*C"), null));
		depInfo.add(new ModuleDepInfo("has", new BooleanTerm("(!A*!C)"), null));
		Collection<String> plugins = depInfo.getHasPluginPrefixes();
		Assert.assertEquals(
				new HashSet<String>(Arrays.asList("has!B?", "has!A?C?", "has!A?:C?:")),
				plugins);
		// Contradictory terms (!A*A, B*!B) reduce to FALSE -> no prefixes.
		depInfo = new ModuleDepInfo("has", new BooleanTerm("!A*A"), null);
		depInfo.add(new ModuleDepInfo("has", new BooleanTerm("B*!B"), null));
		Assert.assertEquals(0, depInfo.getHasPluginPrefixes().size());
		// !A + A covers all cases (TRUE) -> null result.
		depInfo = new ModuleDepInfo("has", new BooleanTerm("!A"), null);
		depInfo.add(new ModuleDepInfo("has", new BooleanTerm("A"), null));
		Assert.assertNull(depInfo.getHasPluginPrefixes());
	}

	/**
	 * {@code containsTerm} semantics: an unconditional (null/TRUE) instance
	 * contains every term; a FALSE instance contains only FALSE; a
	 * conjunction contains equal or more-specific terms.
	 */
	@Test
	public void testContainsTerm() {
		ModuleDepInfo depInfo =
				new ModuleDepInfo(null, null, null);
		Assert.assertTrue(depInfo.containsTerm(BooleanTerm.FALSE));
		Assert.assertTrue(depInfo.containsTerm(BooleanTerm.TRUE));
		Assert.assertTrue(depInfo.containsTerm(new BooleanTerm("A")));
		depInfo = new ModuleDepInfo("has", BooleanTerm.TRUE, null);
		Assert.assertTrue(depInfo.containsTerm(BooleanTerm.TRUE));
		Assert.assertTrue(depInfo.containsTerm(BooleanTerm.FALSE));
		Assert.assertTrue(depInfo.containsTerm(new BooleanTerm("A")));
		depInfo = new ModuleDepInfo("has", BooleanTerm.FALSE, null);
		Assert.assertFalse(depInfo.containsTerm(BooleanTerm.TRUE));
		Assert.assertTrue(depInfo.containsTerm(BooleanTerm.FALSE));
		Assert.assertFalse(depInfo.containsTerm(new BooleanTerm("A")));
		depInfo = new ModuleDepInfo("has", new BooleanTerm("A*B"), null);
		Assert.assertTrue(depInfo.containsTerm(BooleanTerm.FALSE));
		Assert.assertFalse(depInfo.containsTerm(BooleanTerm.TRUE));
		Assert.assertFalse(depInfo.containsTerm(new BooleanTerm("A")));
		// Term ordering is irrelevant: B*A matches A*B.
		Assert.assertTrue(depInfo.containsTerm(new BooleanTerm("B*A")));
		depInfo = new ModuleDepInfo("has", new BooleanTerm("A"), null);
		// A more specific term (A*B) is contained by the broader term (A).
		Assert.assertTrue(depInfo.containsTerm(new BooleanTerm("A*B")));
	}

	/**
	 * {@code add} returns true only when it changes the receiver; an
	 * unconditional add subsumes all conditional terms. Also verifies the
	 * isPluginNameDeclared flag (declared names win when merging) and that
	 * comments follow a "smallest comment wins, unconditional add replaces"
	 * pattern as asserted below.
	 */
	@Test
	public void testAdd() {
		ModuleDepInfo depInfo =
				new ModuleDepInfo(null, null, null);
		// Adding to an unconditional instance is always a no-op.
		Assert.assertFalse(depInfo.add(new ModuleDepInfo(null, null, null)));
		Assert.assertFalse(depInfo.add(new ModuleDepInfo("has", BooleanTerm.TRUE, null)));
		Assert.assertFalse(depInfo.add(new ModuleDepInfo("has", new BooleanTerm("A"), null)));
		depInfo = new ModuleDepInfo("has", BooleanTerm.TRUE, null);
		Assert.assertFalse(depInfo.add(new ModuleDepInfo("has", BooleanTerm.TRUE, null)));
		Assert.assertFalse(depInfo.add(new ModuleDepInfo("has", new BooleanTerm("A"), null)));
		depInfo = new ModuleDepInfo("has", new BooleanTerm("A"), null);
		// An unconditional add subsumes the conditional term.
		Assert.assertTrue(depInfo.add(new ModuleDepInfo()));
		Assert.assertEquals(new ModuleDepInfo(), depInfo);
		depInfo = new ModuleDepInfo("has", BooleanTerm.TRUE, null);
		Assert.assertFalse(depInfo.add(new ModuleDepInfo()));
		Assert.assertEquals(new ModuleDepInfo(), depInfo);
		// Test isPluginNameDeclared constructor option
		depInfo = new ModuleDepInfo("has1", new BooleanTerm("A*B"), null);
		Assert.assertTrue(depInfo.add(new ModuleDepInfo("has2", new BooleanTerm("B*C"), null, true)));
		Collection<String> prefixes = depInfo.getHasPluginPrefixes();
		for (String prefix : prefixes) {
			Assert.assertTrue(prefix.startsWith("has2!"));
		}
		// A later non-declared name does not displace the declared one.
		depInfo.add(new ModuleDepInfo("has3", new BooleanTerm("C"), null));
		for (String prefix : prefixes) {
			Assert.assertTrue(prefix.startsWith("has2!"));
		}
		depInfo = new ModuleDepInfo("has1", new BooleanTerm("A*B"), null, true);
		Assert.assertTrue(depInfo.add(new ModuleDepInfo("has2", new BooleanTerm("B*C"), null)));
		prefixes = depInfo.getHasPluginPrefixes();
		for (String prefix : prefixes) {
			Assert.assertTrue(prefix.startsWith("has1!"));
		}
		// test comments
		depInfo = new ModuleDepInfo(null, null, null);
		Assert.assertFalse(depInfo.add(new ModuleDepInfo(null, null, null)));
		Assert.assertNull(depInfo.getComment());
		Assert.assertFalse(depInfo.add(new ModuleDepInfo(null, null, "Comment")));
		Assert.assertNull(depInfo.getComment());
		Assert.assertFalse(depInfo.add(new ModuleDepInfo("has", new BooleanTerm("A"), "Comment 2")));
		Assert.assertNull(depInfo.getComment());
		depInfo = new ModuleDepInfo("has", new BooleanTerm("A*B"), "Comment 2");
		Assert.assertEquals("Comment 2", depInfo.getComment());
		Assert.assertTrue(depInfo.add(new ModuleDepInfo("has", new BooleanTerm("B*C"), "Comment 2.1")));
		Assert.assertEquals("Comment 2", depInfo.getComment());
		Assert.assertTrue(depInfo.add(new ModuleDepInfo("has", new BooleanTerm("C"), "Comment 1")));
		Assert.assertEquals("Comment 1", depInfo.getComment());
		Assert.assertTrue(depInfo.add(new ModuleDepInfo("has", new BooleanTerm("D"), "Comment 1.1")));
		Assert.assertEquals("Comment 1", depInfo.getComment());
		Assert.assertEquals(
				new HashSet<String>(Arrays.asList("has!A?B?", "has!C?", "has!D?")),
				depInfo.getHasPluginPrefixes()
				);
		// Unconditional add (null term) clears prefixes and takes its comment.
		Assert.assertTrue(depInfo.add(new ModuleDepInfo("has", null, "Comment")));
		Assert.assertEquals("Comment", depInfo.getComment());
		Assert.assertNull(depInfo.getHasPluginPrefixes());
	}

	/**
	 * {@code subtract} removes matching (or broader) terms; subtracting an
	 * unconditional instance removes everything. The comment is retained
	 * until the last term is removed.
	 */
	@Test
	public void testSubtract() {
		ModuleDepInfo depInfo = new ModuleDepInfo("has", new BooleanTerm("A*B"), null);
		Assert.assertTrue(depInfo.add(new ModuleDepInfo("has", new BooleanTerm("B*C"), null)));
		Assert.assertTrue(depInfo.add(new ModuleDepInfo("has", new BooleanTerm("C*D"), null)));
		Assert.assertTrue(depInfo.subtract(new ModuleDepInfo("has", new BooleanTerm("B*C"), null)));
		Assert.assertEquals(
				new HashSet<String>(Arrays.asList("has!A?B?", "has!C?D?")),
				depInfo.getHasPluginPrefixes()
				);
		// Subtracting the broader term A removes the more specific A*B.
		Assert.assertTrue(depInfo.subtract(new ModuleDepInfo("has", new BooleanTerm("A"), null)));
		Assert.assertEquals(
				new HashSet<String>(Arrays.asList("has!C?D?")),
				depInfo.getHasPluginPrefixes()
				);
		Assert.assertTrue(depInfo.subtract(new ModuleDepInfo("has", new BooleanTerm("C*D"), null)));
		Assert.assertEquals(0, depInfo.getHasPluginPrefixes().size());
		depInfo = new ModuleDepInfo("has", new BooleanTerm("B*C"), null);
		Assert.assertTrue(depInfo.add(new ModuleDepInfo("has", new BooleanTerm("C*D"), null)));
		Assert.assertTrue(depInfo.subtract(new ModuleDepInfo("has", new BooleanTerm("C"), null)));
		Assert.assertEquals(0, depInfo.getHasPluginPrefixes().size());
		depInfo = new ModuleDepInfo("has", null, null);
		// Subtracting a conditional term from an unconditional instance is a no-op.
		Assert.assertFalse(depInfo.subtract(new ModuleDepInfo("has", new BooleanTerm("B*C"), null)));
		// Subtracting an unconditional instance removes everything.
		Assert.assertTrue(depInfo.subtract(new ModuleDepInfo(null, null, null)));
		Assert.assertEquals(0, depInfo.getHasPluginPrefixes().size());
		depInfo = new ModuleDepInfo("has", new BooleanTerm("A*B"), null);
		Assert.assertTrue(depInfo.subtract(new ModuleDepInfo(null, null, null)));
		Assert.assertEquals(0, depInfo.getHasPluginPrefixes().size());
		depInfo = new ModuleDepInfo("has", new BooleanTerm("A"), "Comment A");
		Assert.assertTrue(depInfo.add(new ModuleDepInfo("has", new BooleanTerm("B"), "Comment B")));
		Assert.assertTrue(depInfo.add(new ModuleDepInfo("has", new BooleanTerm("C"), "Comment C")));
		Assert.assertEquals("Comment A", depInfo.getComment());
		Assert.assertTrue(depInfo.subtract(new ModuleDepInfo("has", new BooleanTerm("A"), null)));
		Assert.assertEquals("Comment A", depInfo.getComment());
		Assert.assertTrue(depInfo.subtract(new ModuleDepInfo("has", new BooleanTerm("C"), null)));
		Assert.assertEquals("Comment A", depInfo.getComment());
		// Comment is cleared only once the last term has been removed.
		Assert.assertTrue(depInfo.subtract(new ModuleDepInfo("has", new BooleanTerm("B"), null)));
		Assert.assertNull(depInfo.getComment());
	}
}
| |
/*
* Copyright 2015 peter.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.trainwatch.io.ftp;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.function.Consumer;
import uk.trainwatch.io.IOAction;
import uk.trainwatch.io.IOBiConsumer;
import uk.trainwatch.io.IOConsumer;
import uk.trainwatch.io.IOFunction;
import uk.trainwatch.io.IOPredicate;
import uk.trainwatch.io.IOSupplier;
import uk.trainwatch.util.Consumers;
/**
 * Fluent builder for configuring and creating {@code FTPClient} instances.
 * <p>
 * Configuration (proxy, keep-alive, transfer mode, connect/login behavior)
 * is collected by the chained setters; {@link #build()} hands this builder
 * to a {@code DefaultFTPClient}, while {@link #buildLazyClient()} wraps it
 * so the connection is only established when first used.
 * <p>
 * NOTE(review): this builder is mutable and not thread-safe; confirm callers
 * configure and build from a single thread.
 */
public class FTPClientBuilder
{
    // Proxy host; null means no proxy.
    String proxy = null;
    int proxyPort;
    String proxyUser;
    String proxyPass;
    // Action that performs the connect; set by one of the connect(...) overloads.
    IOConsumer<DefaultFTPClient> connect;
    // Predicate that performs the login and reports success; set by login(...).
    IOPredicate<DefaultFTPClient> login;
    // Log message sink; see setLogger(...) (replaces) vs logger(...) (chains).
    Consumer<String> consumer;
    boolean printCommands;
    boolean debuggingEnabled;
    // TCP keep-alive timeouts; 0 means disabled/default.
    int keepAliveTimeout = 0;
    int controlKeepAliveReplyTimeout = 0;
    boolean useEpsvWithIPv4 = false;
    // false = passive mode (default), true = local active mode.
    boolean localActive = false;
    // true = binary transfer (default), false = ASCII.
    boolean binaryTransfer = true;
    boolean listHiddenFiles = false;
    // Arbitrary user attributes attached to the client; see setAttribute(...).
    final Map<String, Object> attributes = new HashMap<>();
    // Chain of actions to run against the client; null until addAction is called.
    IOBiConsumer<FTPClient, IOAction.Builder> action = null;

    /**
     * Append an action to the action chain. Actions are run in the order
     * they were added.
     *
     * @param action action receiving the client and an IOAction builder
     *
     * @return this builder
     */
    public FTPClientBuilder addAction( IOBiConsumer<FTPClient, IOAction.Builder> action )
    {
        this.action = this.action == null ? action : this.action.andThen( action );
        return this;
    }

    /**
     * Perform some action on the FTPClient
     *
     * @param action
     *
     * @return
     */
    public FTPClientBuilder invoke( IOConsumer<FTPClient> action )
    {
        return addAction( ( c, b ) -> action.accept( c ) );
    }

    /**
     * Apply a mapping function on the FTPClient (possibly performing some action on it) and if it returns an IOAction add that to the actions to be
     * invoked after the client has been closed.
     *
     * @param action
     *
     * @return
     */
    public FTPClientBuilder invokeLater( IOFunction<FTPClient, IOAction> action )
    {
        return addAction( ( c, b ) -> b.add( action.apply( c ) ) );
    }

    /**
     * Set the logger, replacing any previously configured one.
     *
     * @param consumer log message sink
     *
     * @return this builder
     */
    public FTPClientBuilder setLogger( Consumer<String> consumer )
    {
        this.consumer = consumer;
        return this;
    }

    /**
     * Add a logger, chaining it after any previously configured one.
     *
     * @param consumer log message sink
     *
     * @return this builder
     */
    public FTPClientBuilder logger( Consumer<String> consumer )
    {
        this.consumer = Consumers.andThen( this.consumer, consumer );
        return this;
    }

    /**
     * Enable debugging output.
     *
     * @return this builder
     */
    public FTPClientBuilder enableDebugging()
    {
        debuggingEnabled = true;
        return this;
    }

    /**
     * Enable printing of FTP commands.
     *
     * @return this builder
     */
    public FTPClientBuilder printCommands()
    {
        printCommands = true;
        return this;
    }

    /**
     * HTTP proxy
     * <p>
     * @param proxy proxy host; port defaults to 80
     * <p>
     * @return this builder
     */
    public FTPClientBuilder proxy( String proxy )
    {
        return proxy( proxy, 80 );
    }

    /**
     * HTTP proxy with explicit port.
     *
     * @param proxy     proxy host
     * @param proxyPort proxy port
     *
     * @return this builder
     */
    public FTPClientBuilder proxy( String proxy, int proxyPort )
    {
        this.proxy = proxy;
        this.proxyPort = proxyPort;
        return this;
    }

    public FTPClientBuilder proxyPort( int proxyPort )
    {
        this.proxyPort = proxyPort;
        return this;
    }

    public FTPClientBuilder proxyUser( String proxyUser )
    {
        this.proxyUser = proxyUser;
        return this;
    }

    public FTPClientBuilder proxyPass( String proxyPass )
    {
        this.proxyPass = proxyPass;
        return this;
    }

    /**
     * TCP Keep Alive Timeout
     * <p>
     * @param keepAliveTimeout
     * <p>
     * @return
     */
    public FTPClientBuilder keepAliveTimeout( int keepAliveTimeout )
    {
        this.keepAliveTimeout = keepAliveTimeout;
        return this;
    }

    /**
     * TCP Keep Alive Timeout for replies
     * <p>
     * @param controlKeepAliveReplyTimeout
     * <p>
     * @return
     */
    public FTPClientBuilder controlKeepAliveReplyTimeout( int controlKeepAliveReplyTimeout )
    {
        this.controlKeepAliveReplyTimeout = controlKeepAliveReplyTimeout;
        return this;
    }

    /**
     * Use EPSV with IPv4.
     *
     * @return this builder
     */
    public FTPClientBuilder useEpsvWithIPv4()
    {
        useEpsvWithIPv4 = true;
        return this;
    }

    /**
     * Local or remote (behind firewall) server
     * <p>
     * @return
     */
    public FTPClientBuilder localActive()
    {
        localActive = true;
        return this;
    }

    /**
     * Use Passive mode (default) for when behind a firewall
     * <p>
     * @return
     */
    public FTPClientBuilder passive()
    {
        localActive = false;
        return this;
    }

    /**
     * Set binary mode (default)
     * <p>
     * @return
     */
    public FTPClientBuilder binary()
    {
        binaryTransfer = true;
        return this;
    }

    /**
     * Set ASCII transfer mode.
     *
     * @return this builder
     */
    public FTPClientBuilder ascii()
    {
        binaryTransfer = false;
        return this;
    }

    /**
     * List hidden files
     * <p>
     * @return this builder
     */
    public FTPClientBuilder listHiddenFiles()
    {
        listHiddenFiles = true;
        return this;
    }

    /**
     * Connect to the given host on the default port. A later call to any
     * connect(...) overload replaces this setting.
     *
     * @param s host name
     *
     * @return this builder
     */
    public FTPClientBuilder connect( String s )
    {
        connect = c -> c.getDelegate().connect( s );
        return this;
    }

    /**
     * Connect to the given host and port.
     *
     * @param s host name
     * @param p port
     *
     * @return this builder
     */
    public FTPClientBuilder connect( String s, int p )
    {
        connect = c -> c.getDelegate().connect( s, p );
        return this;
    }

    /**
     * Login with user and password. A later call to any login(...) overload
     * replaces this setting.
     *
     * @param user     user name
     * @param password password
     *
     * @return this builder
     */
    public FTPClientBuilder login( String user, String password )
    {
        login = c -> c.getDelegate().login( user, password );
        return this;
    }

    /**
     * Login with user, password and account.
     *
     * @param user     user name
     * @param password password
     * @param account  account name
     *
     * @return this builder
     */
    public FTPClientBuilder login( String user, String password, String account )
    {
        login = c -> c.getDelegate().login( user, password, account );
        return this;
    }

    /**
     * Attach an arbitrary named attribute to the client being built.
     *
     * @param n attribute name
     * @param v attribute value
     *
     * @return this builder
     */
    public FTPClientBuilder setAttribute( String n, Object v )
    {
        attributes.put( n, v );
        return this;
    }

    /**
     * Build an FTPClient instance
     *
     * @return
     */
    public FTPClient build()
    {
        return new DefaultFTPClient( this );
    }

    /**
     * Builds a lazy FTPClient instance. This client will only connect if an action is required upon it.
     *
     * @return
     */
    public FTPClient buildLazyClient()
    {
        return new LazyFTPClient( build() );
    }

    /**
     * Build a consumer that runs the configured action chain against a lazy
     * client, closing the client afterwards even if the chain fails.
     *
     * @return consumer accepting an IOAction builder
     *
     * @throws IOException          if building fails
     * @throws NullPointerException if no action chain has been defined
     */
    public IOConsumer<IOAction.Builder> buildIOActionChain()
            throws IOException
    {
        Objects.requireNonNull( action, "No action chain defined" );
        FTPClient client = buildLazyClient();
        return b -> {
            try {
                action.accept( client, b );
            }
            finally {
                // Always release the client, even when the action chain throws.
                client.close();
            }
        };
    }

    /**
     * Builds an IOAction which will execute any actions applied to this builder and return an IOAction of any actions to run once the client has closed.
     *
     * @return
     *
     * @throws IOException
     */
    public IOSupplier<IOAction> buildIOSupplier()
            throws IOException
    {
        IOConsumer<IOAction.Builder> c = buildIOActionChain();
        return () -> IOAction.builder().invoke( c ).build();
    }

    /**
     * Execute the configured action chain immediately, including any deferred
     * actions collected for after the client has closed.
     *
     * @throws IOException on failure
     */
    public void execute()
            throws IOException
    {
        buildIOSupplier().get().invoke();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.datatorrent.api;
import java.io.Serializable;
import java.util.Collection;
import java.util.Map;
import javax.annotation.Nonnull;
import org.apache.hadoop.classification.InterfaceStability;
import com.datatorrent.api.Context.DAGContext;
import com.datatorrent.api.annotation.OperatorAnnotation;
/**
* DAG contains the logical declarations of operators and streams.
* <p>
* Operators have ports that are connected through streams. Ports can be
* mandatory or optional with respect to their need to connect a stream to it.
* Each port can be connected to a single stream only. A stream has to be
* connected to one output port and can go to multiple input ports.
* <p>
* The DAG will be serialized and deployed to the cluster, where it is translated
* into the physical plan.
*
* @since 0.3.2
*/
public interface DAG extends DAGContext, Serializable
{
  /**
   * Meta object describing an operator input port in the logical plan.
   */
  interface InputPortMeta extends Serializable, PortContext, NameableEntity
  {
    /**
     * Return port object represented by this InputPortMeta
     * @return the input port
     */
    Operator.InputPort<?> getPort();

    /**
     * Return the meta object of the operator that owns this port.
     * @param <T> concrete OperatorMeta type
     * @return owning operator meta
     */
    <T extends OperatorMeta> T getOperatorMeta();
  }

  /**
   * Meta object describing an operator output port in the logical plan.
   */
  interface OutputPortMeta extends Serializable, PortContext, NameableEntity
  {
    /**
     * Return the meta object of the unifier associated with this output port.
     * @return unifier operator meta
     */
    OperatorMeta getUnifierMeta();

    /**
     * Return port object represented by this OutputPortMeta
     * @return the output port
     */
    Operator.OutputPort<?> getPort();

    /**
     * Return the meta object of the operator that owns this port.
     * @param <T> concrete OperatorMeta type
     * @return owning operator meta
     */
    <T extends OperatorMeta> T getOperatorMeta();
  }

  /**
   * Locality setting affects how operators are scheduled for deployment by
   * the platform. The setting serves as hint to the planner and can yield
   * significant performance gains. Optimizations are subject to resource
   * availability.
   */
  enum Locality
  {
    /**
     * Adjacent operators should be deployed into the same executing thread,
     * effectively serializing the computation. This setting is beneficial
     * where the cost of intermediate queuing exceeds the benefit of parallel
     * processing. An example could be chaining of multiple operators with low
     * compute requirements in a parallel partition setup.
     */
    THREAD_LOCAL,
    /**
     * Adjacent operators should be deployed into the same process, executing
     * in different threads. Useful when interprocess communication is a
     * limiting factor and sufficient resources can be provisioned in a single
     * container. Eliminates data serialization and networking stack overhead.
     */
    CONTAINER_LOCAL,
    /**
     * Adjacent operators should be deployed into processes on the same machine.
     * Eliminates network as bottleneck, as the loop back interface can be used
     * instead.
     */
    NODE_LOCAL,
    /**
     * Adjacent operators should be deployed into processes on nodes in the same
     * rack. Best effort to not have allocation on same node.
     * Not implemented yet.
     */
    RACK_LOCAL
  }

  /**
   * Representation of streams in the logical layer. Instances are created through {@link DAG#addStream}.
   */
  interface StreamMeta extends Serializable
  {
    String getName();

    /**
     * Returns the locality for this stream.
     * @return locality for this stream, default is null.
     */
    Locality getLocality();

    /**
     * Set locality for the stream. The setting is best-effort, engine can
     * override due to other settings or constraints.
     *
     * @param locality the desired locality
     * @return Object that describes the meta for the stream.
     */
    StreamMeta setLocality(Locality locality);

    StreamMeta setSource(Operator.OutputPort<?> port);

    StreamMeta addSink(Operator.InputPort<?> port);

    /**
     * Persist entire stream using operator passed.
     *
     * @param name persist operator name
     * @param persistOperator operator to use for persisting
     * @param persistOperatorInputPort input port to use for persisting
     * @return Object that describes the meta for the stream.
     */
    StreamMeta persistUsing(String name, Operator persistOperator, Operator.InputPort<?> persistOperatorInputPort);

    /**
     * Persist entire stream using operator passed.
     *
     * @param name persist operator name
     * @param persistOperator operator to use for persisting
     * @return Object that describes the meta for the stream.
     */
    StreamMeta persistUsing(String name, Operator persistOperator);

    /**
     * Persist a single sink of this stream using the operator passed.
     *
     * @param name persist operator name
     * @param persistOperator operator to use for persisting
     * @param persistOperatorInputPort input port to use for persisting
     * @param sinkToPersist sink to persist
     * @return Object that describes the meta for the stream.
     */
    StreamMeta persistUsing(String name, Operator persistOperator, Operator.InputPort<?> persistOperatorInputPort, Operator.InputPort<?> sinkToPersist);

    /**
     * Return source of the stream.
     * @param <T> concrete OutputPortMeta type
     * @return meta of the output port feeding this stream
     */
    <T extends OutputPortMeta> T getSource();

    /**
     * Return all sinks connected to this stream.
     * @param <T> concrete InputPortMeta type
     * @return metas of the input ports connected to this stream
     */
    <T extends InputPortMeta> Collection<T> getSinks();
  }

  /**
   * Operator meta object.
   */
  interface OperatorMeta extends Serializable, Context, NameableEntity
  {
    String getName();

    Operator getOperator();

    InputPortMeta getMeta(Operator.InputPort<?> port);

    OutputPortMeta getMeta(Operator.OutputPort<?> port);

    OperatorAnnotation getOperatorAnnotation();

    /**
     * Return collection of stream which are connected to this operator's
     * input ports.
     * @param <K> concrete InputPortMeta key type
     * @param <V> concrete StreamMeta value type
     * @return map of input port meta to connected stream meta
     */
    <K extends InputPortMeta, V extends StreamMeta> Map<K, V> getInputStreams();

    /**
     * Return collection of stream which are connected to this operator's
     * output ports.
     * @param <K> concrete OutputPortMeta key type
     * @param <V> concrete StreamMeta value type
     * @return map of output port meta to connected stream meta
     */
    <K extends OutputPortMeta, V extends StreamMeta> Map<K, V> getOutputStreams();
  }

  /**
   * Add new instance of operator under given name to the DAG.
   * The operator class must have a default constructor.
   * If the class extends {@link BaseOperator}, the name is passed on to the instance.
   * Throws exception if the name is already linked to another operator instance.
   *
   * @param <T> Concrete type of the operator
   * @param name Logical name of the operator used to identify the operator in the DAG
   * @param clazz Concrete class with default constructor so that instance of it can be initialized and added to the DAG.
   * @return Instance of the operator that has been added to the DAG.
   */
  <T extends Operator> T addOperator(@Nonnull String name, Class<T> clazz);

  /**
   * <p>addOperator.</p>
   * @param <T> Concrete type of the operator
   * @param name Logical name of the operator used to identify the operator in the DAG
   * @param operator Instance of the operator that needs to be added to the DAG
   * @return Instance of the operator that has been added to the DAG.
   */
  <T extends Operator> T addOperator(@Nonnull String name, T operator);

  @InterfaceStability.Evolving
  <T extends Module> T addModule(@Nonnull String name, Class<T> moduleClass);

  @InterfaceStability.Evolving
  <T extends Module> T addModule(@Nonnull String name, T module);

  /**
   * <p>addStream.</p>
   * @param id Identifier of the stream that will be used to identify stream in DAG
   * @return StreamMeta builder object for the new stream
   */
  StreamMeta addStream(@Nonnull String id);

  /**
   * Add identified stream for given source and sinks. Multiple sinks can be
   * connected to a stream, but each port can only be connected to a single
   * stream. Attempt to add stream to an already connected port will throw an
   * error.
   * <p>
   * This method allows to connect all interested ports to a stream at
   * once. Alternatively, use the returned {@link StreamMeta} builder object to
   * add more sinks and set other stream properties.
   *
   * @param <T> tuple type carried by the stream
   * @param id stream identifier
   * @param source output port feeding the stream
   * @param sinks input ports consuming the stream
   * @return StreamMeta
   */
  <T> StreamMeta addStream(@Nonnull String id, Operator.OutputPort<? extends T> source, Operator.InputPort<? super T>... sinks);

  /**
   * Overload varargs version to avoid generic array type safety warnings in calling code.
   * "Type safety: A generic array of Operator.InputPort<> is created for a varargs parameter"
   *
   * @param <T> tuple type carried by the stream
   * @link <a href=http://www.angelikalanger.com/GenericsFAQ/FAQSections/ProgrammingIdioms.html#FAQ300>Programming Idioms</a>
   * @param id stream identifier
   * @param source output port feeding the stream
   * @param sink1 input port consuming the stream
   * @return StreamMeta
   */
  <T> StreamMeta addStream(@Nonnull String id, Operator.OutputPort<? extends T> source, Operator.InputPort<? super T> sink1);

  /**
   * <p>addStream.</p>
   */
  <T> StreamMeta addStream(@Nonnull String id, Operator.OutputPort<? extends T> source, Operator.InputPort<? super T> sink1, Operator.InputPort<? super T> sink2);

  /**
   * <p>setAttribute.</p>
   */
  <T> void setAttribute(Attribute<T> key, T value);

  /**
   * Set an attribute for an operator.
   *
   * @deprecated Use {@link #setOperatorAttribute} instead
   */
  @Deprecated
  <T> void setAttribute(Operator operator, Attribute<T> key, T value);

  /**
   * Set an attribute for an operator.
   * @param <T> Value type of the attribute.
   * @param operator The Operator for which the attribute is being set.
   * @param key The attribute which needs to be tuned.
   * @param value The new value of the attribute.
   */
  <T> void setOperatorAttribute(Operator operator, Attribute<T> key, T value);

  /**
   * <p>setOutputPortAttribute.</p>
   */
  <T> void setOutputPortAttribute(Operator.OutputPort<?> port, Attribute<T> key, T value);

  /**
   * Set an attribute on the unifier for an output of an operator.
   * @param <T> Object type of the attribute
   * @param port The port for which the unifier is needed.
   * @param key The attribute which needs to be tuned.
   * @param value The new value of the attribute.
   */
  <T> void setUnifierAttribute(Operator.OutputPort<?> port, Attribute<T> key, T value);

  /**
   * <p>setInputPortAttribute.</p>
   */
  <T> void setInputPortAttribute(Operator.InputPort<?> port, Attribute<T> key, T value);

  /**
   * <p>getOperatorMeta.</p>
   */
  OperatorMeta getOperatorMeta(String operatorId);

  /**
   * <p>getMeta.</p>
   */
  OperatorMeta getMeta(Operator operator);

  /**
   * Return all operators present in the DAG.
   * @param <T> concrete OperatorMeta type
   * @return metas of all operators in the DAG
   */
  <T extends OperatorMeta> Collection<T> getAllOperatorsMeta();

  /**
   * Get all input operators in the DAG. This method returns operators which are
   * not connected to any upstream operator. i.e the operators which do not have
   * any input ports or operators which is not connected through any input ports
   * in the DAG.
   *
   * @param <T> concrete OperatorMeta type
   * @return list of {@see OperatorMeta} for root operators in the DAG.
   */
  <T extends OperatorMeta> Collection<T> getRootOperatorsMeta();

  /**
   * Returns all Streams present in the DAG.
   * @param <T> concrete StreamMeta type
   * @return metas of all streams in the DAG
   */
  <T extends StreamMeta> Collection<T> getAllStreamsMeta();

  /**
   * Marker interface for the Node in the DAG. Any object which can be added as a Node in the DAG
   * needs to implement this interface.
   */
  interface GenericOperator
  {
  }
}
| |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.tools.hiero;
import java.awt.Font;
import java.awt.font.GlyphMetrics;
import java.awt.font.GlyphVector;
import java.awt.image.BufferedImage;
import java.awt.image.WritableRaster;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.imageio.ImageIO;
import org.lwjgl.BufferUtils;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL12;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.tools.hiero.unicodefont.Glyph;
import com.badlogic.gdx.tools.hiero.unicodefont.GlyphPage;
import com.badlogic.gdx.tools.hiero.unicodefont.UnicodeFont;
/** @author Nathan Sweet */
public class BMFontUtil {
// Source font whose glyph pages are exported to BMFont format.
private final UnicodeFont unicodeFont;

/** @param unicodeFont the font to export; used by {@link #save(File)}. */
public BMFontUtil (UnicodeFont unicodeFont) {
	this.unicodeFont = unicodeFont;
}
/**
 * Writes the font in BMFont text format: a .fnt descriptor file plus one PNG
 * image per glyph page, all placed next to the given output file.
 * <p>
 * Glyph page textures are read back from OpenGL, so a current GL context is
 * required when this is called.
 *
 * @param outputBMFontFile target .fnt file; a trailing ".fnt" extension is
 *        stripped to derive the base name for the descriptor and page images
 * @throws IOException if the descriptor or a page image cannot be written
 */
public void save (File outputBMFontFile) throws IOException {
	File outputDir = outputBMFontFile.getParentFile();
	String outputName = outputBMFontFile.getName();
	if (outputName.endsWith(".fnt")) outputName = outputName.substring(0, outputName.length() - 4);
	unicodeFont.loadGlyphs();
	// NOTE(review): out is not closed if an exception is thrown before
	// out.close() below; consider wrapping the writes in try/finally.
	PrintStream out = new PrintStream(new FileOutputStream(new File(outputDir, outputName + ".fnt")));
	Font font = unicodeFont.getFont();
	int pageWidth = unicodeFont.getGlyphPageWidth();
	int pageHeight = unicodeFont.getGlyphPageHeight();
	// "info" and "common" header lines of the BMFont descriptor.
	out.println("info face=\"" + font.getFontName() + "\" size=" + font.getSize() + " bold=" + (font.isBold() ? 1 : 0)
		+ " italic=" + (font.isItalic() ? 1 : 0) + " charset=\"\" unicode=0 stretchH=100 smooth=1 aa=1 padding="
		+ unicodeFont.getPaddingTop() + "," + unicodeFont.getPaddingLeft() + "," + unicodeFont.getPaddingBottom() + ","
		+ unicodeFont.getPaddingRight() + " spacing=" + unicodeFont.getPaddingAdvanceX() + ","
		+ unicodeFont.getPaddingAdvanceY());
	out.println("common lineHeight=" + unicodeFont.getLineHeight() + " base=" + unicodeFont.getAscent() + " scaleW="
		+ pageWidth + " scaleH=" + pageHeight + " pages=" + unicodeFont.getGlyphPages().size() + " packed=0");
	// One "page" line per glyph page; a single page uses the plain base name,
	// multiple pages get a 1-based numeric suffix.
	int pageIndex = 0, glyphCount = 0;
	for (Iterator pageIter = unicodeFont.getGlyphPages().iterator(); pageIter.hasNext();) {
		GlyphPage page = (GlyphPage)pageIter.next();
		String fileName;
		if (pageIndex == 0 && !pageIter.hasNext())
			fileName = outputName + ".png";
		else
			fileName = outputName + (pageIndex + 1) + ".png";
		out.println("page id=" + pageIndex + " file=\"" + fileName + "\"");
		glyphCount += page.getGlyphs().size();
		pageIndex++;
	}
	out.println("chars count=" + glyphCount);
	// Always output space entry (codepoint 32).
	int[] glyphMetrics = getGlyphMetrics(font, 32);
	int xAdvance = glyphMetrics[1];
	out.println("char id=32 x=0 y=0 width=0 height=0 xoffset=0 yoffset=" + unicodeFont.getAscent()
		+ " xadvance=" + xAdvance + " page=0 chnl=0 ");
	// One "char" line per glyph, with texture coordinates scaled to pixels.
	pageIndex = 0;
	List allGlyphs = new ArrayList(512);
	for (Iterator pageIter = unicodeFont.getGlyphPages().iterator(); pageIter.hasNext();) {
		GlyphPage page = (GlyphPage)pageIter.next();
		for (Iterator glyphIter = page.getGlyphs().iterator(); glyphIter.hasNext();) {
			Glyph glyph = (Glyph)glyphIter.next();
			glyphMetrics = getGlyphMetrics(font, glyph.getCodePoint());
			int xOffset = glyphMetrics[0];
			xAdvance = glyphMetrics[1];
			out.println("char id=" + glyph.getCodePoint() + " " + "x=" + (int)(glyph.getU() * pageWidth) + " y="
				+ (int)(glyph.getV() * pageHeight) + " width=" + glyph.getWidth() + " height=" + glyph.getHeight()
				+ " xoffset=" + xOffset + " yoffset=" + glyph.getYOffset() + " xadvance=" + xAdvance + " page="
				+ pageIndex + " chnl=0 ");
		}
		allGlyphs.addAll(page.getGlyphs());
		pageIndex++;
	}
	// Kerning pairs require the raw TTF file; skip with a notice if absent.
	String ttfFileRef = unicodeFont.getFontFile();
	if (ttfFileRef == null)
		System.out.println("Kerning information could not be output because a TTF font file was not specified.");
	else {
		Kerning kerning = new Kerning();
		try {
			kerning.load(Gdx.files.internal(ttfFileRef).read(), font.getSize());
		} catch (IOException ex) {
			System.out.println("Unable to read kerning information from font: " + ttfFileRef);
		}
		// Kerning values are keyed by glyph code; map them back to code points.
		Map glyphCodeToCodePoint = new HashMap();
		for (Iterator iter = allGlyphs.iterator(); iter.hasNext();) {
			Glyph glyph = (Glyph)iter.next();
			glyphCodeToCodePoint.put(new Integer(getGlyphCode(font, glyph.getCodePoint())), new Integer(glyph.getCodePoint()));
		}
		List kernings = new ArrayList(256);
		// Holds one resolved kerning entry for output below.
		class KerningPair {
			public int firstCodePoint, secondCodePoint, offset;
		}
		for (Iterator iter1 = allGlyphs.iterator(); iter1.hasNext();) {
			Glyph firstGlyph = (Glyph)iter1.next();
			int firstGlyphCode = getGlyphCode(font, firstGlyph.getCodePoint());
			int[] values = kerning.getValues(firstGlyphCode);
			if (values == null) continue;
			for (int i = 0; i < values.length; i++) {
				// Each value packs the second glyph code in the low 16 bits
				// and the kerning offset in the high 16 bits.
				Integer secondCodePoint = (Integer)glyphCodeToCodePoint.get(new Integer(values[i] & 0xffff));
				if (secondCodePoint == null) continue; // We may not be outputting the second character.
				int offset = values[i] >> 16;
				KerningPair pair = new KerningPair();
				pair.firstCodePoint = firstGlyph.getCodePoint();
				pair.secondCodePoint = secondCodePoint.intValue();
				pair.offset = offset;
				kernings.add(pair);
			}
		}
		out.println("kernings count=" + kerning.getCount());
		for (Iterator iter = kernings.iterator(); iter.hasNext();) {
			KerningPair pair = (KerningPair)iter.next();
			out.println("kerning first=" + pair.firstCodePoint + " second=" + pair.secondCodePoint + " amount=" + pair.offset);
		}
	}
	out.close();
	// Read each glyph page texture back from GL and write it out as a PNG.
	int width = unicodeFont.getGlyphPageWidth();
	int height = unicodeFont.getGlyphPageHeight();
	IntBuffer buffer = BufferUtils.createIntBuffer(width * height);
	BufferedImage pageImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
	int[] row = new int[width];
	pageIndex = 0;
	for (Iterator pageIter = unicodeFont.getGlyphPages().iterator(); pageIter.hasNext();) {
		GlyphPage page = (GlyphPage)pageIter.next();
		String fileName;
		// Must match the page file naming used in the descriptor above.
		if (pageIndex == 0 && !pageIter.hasNext())
			fileName = outputName + ".png";
		else
			fileName = outputName + (pageIndex + 1) + ".png";
		page.getTexture().bind();
		buffer.clear();
		GL11.glGetTexImage(GL11.GL_TEXTURE_2D, 0, GL12.GL_BGRA, GL11.GL_UNSIGNED_BYTE, buffer);
		WritableRaster raster = pageImage.getRaster();
		// Copy the texture into the BufferedImage one row at a time.
		for (int y = 0; y < height; y++) {
			buffer.get(row);
			raster.setDataElements(0, y, width, 1, row);
		}
		File imageOutputFile = new File(outputDir, fileName);
		ImageIO.write(pageImage, "png", imageOutputFile);
		pageIndex++;
	}
}
private int getGlyphCode (Font font, int codePoint) {
	// Lay out the single code point and return the font-internal glyph code
	// (the identifier the TTF kerning table uses), as opposed to the Unicode
	// code point.
	char[] codePointChars = Character.toChars(codePoint);
	GlyphVector glyphVector = font.layoutGlyphVector(GlyphPage.renderContext, codePointChars, 0,
		codePointChars.length, Font.LAYOUT_LEFT_TO_RIGHT);
	return glyphVector.getGlyphCode(0);
}
private int[] getGlyphMetrics (Font font, int codePoint) {
	// Returns {xOffset, xAdvance} for the glyph, in pixels, for BMFont output.
	// xOffset and xAdvance will be incorrect for unicode characters such as combining marks or non-spacing characters
	// (eg Punjabi's "\u0A1C\u0A47") that require the context of surrounding glyphs to determine spacing, but this is the
	// best we can do with the BMFont format.
	char[] chars = Character.toChars(codePoint);
	GlyphVector vector = font.layoutGlyphVector(GlyphPage.renderContext, chars, 0, chars.length, Font.LAYOUT_LEFT_TO_RIGHT);
	GlyphMetrics metrics = vector.getGlyphMetrics(0);
	// Pixel bounds of the rendered glyph, shifted left by the configured padding.
	int xOffset = vector.getGlyphPixelBounds(0, GlyphPage.renderContext, 0.5f, 0).x - unicodeFont.getPaddingLeft();
	// Natural advance plus the user-configured advance/left/right padding.
	int xAdvance = (int)(metrics.getAdvanceX() + unicodeFont.getPaddingAdvanceX() + unicodeFont.getPaddingLeft() + unicodeFont
	.getPaddingRight());
	return new int[] {xOffset, xAdvance};
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.beanutils2;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
* Test case for BeanUtils when the underlying bean is actually a DynaBean.
*
*/
public class DynaBeanUtilsTestCase extends TestCase {
/**
 * The basic test bean for each test.
 */
protected DynaBean bean = null;
/**
 * The nested bean pointed at by the "nested" property.
 */
protected TestBean nested = null;
/**
 * The set of properties that should be described.
 */
protected String[] describes =
{ "booleanProperty",
"booleanSecond",
"byteProperty",
"doubleProperty",
"dupProperty",
"floatProperty",
"intArray",
"intIndexed",
"intProperty",
"listIndexed",
"longProperty",
"mapProperty",
"mappedProperty",
"mappedIntProperty",
"nested",
"nullProperty",
// "readOnlyProperty",
// NOTE(review): "readOnlyProperty" above is intentionally excluded from the
// expected set -- presumably not exposed by the DynaClass; confirm.
"shortProperty",
"stringArray",
"stringIndexed",
"stringProperty"
};
/**
* Constructs a new instance of this test case.
*
* @param name Name of the test case
*/
public DynaBeanUtilsTestCase(final String name) {
    // JUnit 3 style: the test method name is passed to the TestCase constructor.
    super(name);
}
/**
* Sets up instance variables required by this test case.
*/
@Override
public void setUp() throws Exception {
    // Reset any converters registered by a previous test.
    ConvertUtils.deregister();
    // Instantiate a new DynaBean instance
    final DynaClass dynaClass = createDynaClass();
    bean = dynaClass.newInstance();
    // Initialize the DynaBean's property values (like TestBean). Boxed values
    // use valueOf()/constants: the primitive wrapper constructors are
    // deprecated since Java 9 and removed in Java 16+.
    bean.set("booleanProperty", Boolean.TRUE);
    bean.set("booleanSecond", Boolean.TRUE);
    bean.set("byteProperty", Byte.valueOf((byte) 121));
    bean.set("doubleProperty", Double.valueOf(321.0));
    bean.set("floatProperty", Float.valueOf(123.0f));
    final String[] dupProperty = { "Dup 0", "Dup 1", "Dup 2", "Dup 3", "Dup 4" };
    bean.set("dupProperty", dupProperty);
    final int[] intArray = { 0, 10, 20, 30, 40 };
    bean.set("intArray", intArray);
    final int[] intIndexed = { 0, 10, 20, 30, 40 };
    bean.set("intIndexed", intIndexed);
    bean.set("intProperty", Integer.valueOf(123));
    final List<String> listIndexed = new ArrayList<>();
    listIndexed.add("String 0");
    listIndexed.add("String 1");
    listIndexed.add("String 2");
    listIndexed.add("String 3");
    listIndexed.add("String 4");
    bean.set("listIndexed", listIndexed);
    bean.set("longProperty", Long.valueOf(321));
    final HashMap<String, Object> mapProperty = new HashMap<>();
    mapProperty.put("First Key", "First Value");
    mapProperty.put("Second Key", "Second Value");
    bean.set("mapProperty", mapProperty);
    final HashMap<String, Object> mappedProperty = new HashMap<>();
    mappedProperty.put("First Key", "First Value");
    mappedProperty.put("Second Key", "Second Value");
    bean.set("mappedProperty", mappedProperty);
    final HashMap<String, Integer> mappedIntProperty = new HashMap<>();
    mappedIntProperty.put("One", Integer.valueOf(1));
    mappedIntProperty.put("Two", Integer.valueOf(2));
    bean.set("mappedIntProperty", mappedIntProperty);
    nested = new TestBean();
    bean.set("nested", nested);
    // Property "nullProperty" is not initialized, so it should return null
    bean.set("shortProperty", Short.valueOf((short) 987));
    final String[] stringArray =
            { "String 0", "String 1", "String 2", "String 3", "String 4" };
    bean.set("stringArray", stringArray);
    final String[] stringIndexed =
            { "String 0", "String 1", "String 2", "String 3", "String 4" };
    bean.set("stringIndexed", stringIndexed);
    bean.set("stringProperty", "This is a string");
}
/**
* Creates the tests included in this test suite.
*/
public static Test suite() {
    // Build the suite reflectively from all public testXxx() methods.
    final TestSuite suite = new TestSuite(DynaBeanUtilsTestCase.class);
    return suite;
}
/**
* Tear down instance variables required by this test case.
*/
@Override
public void tearDown() {
    // Drop fixture references so each test starts from a clean slate.
    nested = null;
    bean = null;
}
/**
* Test the cloneBean() method from a DynaBean.
*/
public void testCloneDynaBean() {
    // Set up an origin bean with customized properties. valueOf() replaces the
    // wrapper constructors, which are deprecated since Java 9 / removed in 16+.
    final DynaClass dynaClass = DynaBeanUtilsTestCase.createDynaClass();
    DynaBean orig = null;
    try {
        orig = dynaClass.newInstance();
    } catch (final Exception e) {
        fail("newInstance(): " + e);
    }
    orig.set("booleanProperty", Boolean.FALSE);
    orig.set("byteProperty", Byte.valueOf((byte) 111));
    orig.set("doubleProperty", Double.valueOf(333.33));
    orig.set("dupProperty", new String[] { "New 0", "New 1", "New 2" });
    orig.set("intArray", new int[] { 100, 200, 300 });
    orig.set("intProperty", Integer.valueOf(333));
    orig.set("longProperty", Long.valueOf(3333));
    orig.set("shortProperty", Short.valueOf((short) 33));
    orig.set("stringArray", new String[] { "New 0", "New 1" });
    orig.set("stringProperty", "Custom string");
    // Clone the origin bean
    DynaBean clonedBean = null;
    try {
        clonedBean = (DynaBean) BeanUtils.cloneBean(orig);
    } catch (final Exception e) {
        fail("Threw exception: " + e);
    }
    // Validate the results for scalar properties
    assertEquals("Cloned boolean property", false, ((Boolean) clonedBean.get("booleanProperty")).booleanValue());
    assertEquals("Cloned byte property", (byte) 111, ((Byte) clonedBean.get("byteProperty")).byteValue());
    assertEquals("Cloned double property", 333.33, ((Double) clonedBean.get("doubleProperty")).doubleValue(), 0.005);
    assertEquals("Cloned int property", 333, ((Integer) clonedBean.get("intProperty")).intValue());
    assertEquals("Cloned long property", 3333, ((Long) clonedBean.get("longProperty")).longValue());
    assertEquals("Cloned short property", (short) 33, ((Short) clonedBean.get("shortProperty")).shortValue());
    assertEquals("Cloned string property", "Custom string", (String) clonedBean.get("stringProperty"));
    // Validate the results for array properties
    final String[] dupProperty = (String[]) clonedBean.get("dupProperty");
    assertNotNull("dupProperty present", dupProperty);
    assertEquals("dupProperty length", 3, dupProperty.length);
    assertEquals("dupProperty[0]", "New 0", dupProperty[0]);
    assertEquals("dupProperty[1]", "New 1", dupProperty[1]);
    assertEquals("dupProperty[2]", "New 2", dupProperty[2]);
    final int[] intArray = (int[]) clonedBean.get("intArray");
    assertNotNull("intArray present", intArray);
    assertEquals("intArray length", 3, intArray.length);
    assertEquals("intArray[0]", 100, intArray[0]);
    assertEquals("intArray[1]", 200, intArray[1]);
    assertEquals("intArray[2]", 300, intArray[2]);
    final String[] stringArray = (String[]) clonedBean.get("stringArray");
    assertNotNull("stringArray present", stringArray);
    assertEquals("stringArray length", 2, stringArray.length);
    assertEquals("stringArray[0]", "New 0", stringArray[0]);
    assertEquals("stringArray[1]", "New 1", stringArray[1]);
}
/**
* Test the copyProperties() method from a DynaBean.
*/
public void testCopyPropertiesDynaBean() {
    // Set up an origin bean with customized properties. valueOf() replaces the
    // wrapper constructors, which are deprecated since Java 9 / removed in 16+.
    final DynaClass dynaClass = DynaBeanUtilsTestCase.createDynaClass();
    DynaBean orig = null;
    try {
        orig = dynaClass.newInstance();
    } catch (final Exception e) {
        fail("newInstance(): " + e);
    }
    orig.set("booleanProperty", Boolean.FALSE);
    orig.set("byteProperty", Byte.valueOf((byte) 111));
    orig.set("doubleProperty", Double.valueOf(333.33));
    orig.set("dupProperty", new String[] { "New 0", "New 1", "New 2" });
    orig.set("intArray", new int[] { 100, 200, 300 });
    orig.set("intProperty", Integer.valueOf(333));
    orig.set("longProperty", Long.valueOf(3333));
    orig.set("shortProperty", Short.valueOf((short) 33));
    orig.set("stringArray", new String[] { "New 0", "New 1" });
    orig.set("stringProperty", "Custom string");
    // Copy the origin bean to our destination test bean
    try {
        BeanUtils.copyProperties(bean, orig);
    } catch (final Exception e) {
        fail("Threw exception: " + e);
    }
    // Validate the results for scalar properties
    assertEquals("Copied boolean property", false, ((Boolean) bean.get("booleanProperty")).booleanValue());
    assertEquals("Copied byte property", (byte) 111, ((Byte) bean.get("byteProperty")).byteValue());
    assertEquals("Copied double property", 333.33, ((Double) bean.get("doubleProperty")).doubleValue(), 0.005);
    assertEquals("Copied int property", 333, ((Integer) bean.get("intProperty")).intValue());
    assertEquals("Copied long property", 3333, ((Long) bean.get("longProperty")).longValue());
    assertEquals("Copied short property", (short) 33, ((Short) bean.get("shortProperty")).shortValue());
    assertEquals("Copied string property", "Custom string", (String) bean.get("stringProperty"));
    // Validate the results for array properties
    final String[] dupProperty = (String[]) bean.get("dupProperty");
    assertNotNull("dupProperty present", dupProperty);
    assertEquals("dupProperty length", 3, dupProperty.length);
    assertEquals("dupProperty[0]", "New 0", dupProperty[0]);
    assertEquals("dupProperty[1]", "New 1", dupProperty[1]);
    assertEquals("dupProperty[2]", "New 2", dupProperty[2]);
    final int[] intArray = (int[]) bean.get("intArray");
    assertNotNull("intArray present", intArray);
    assertEquals("intArray length", 3, intArray.length);
    assertEquals("intArray[0]", 100, intArray[0]);
    assertEquals("intArray[1]", 200, intArray[1]);
    assertEquals("intArray[2]", 300, intArray[2]);
    final String[] stringArray = (String[]) bean.get("stringArray");
    assertNotNull("stringArray present", stringArray);
    assertEquals("stringArray length", 2, stringArray.length);
    assertEquals("stringArray[0]", "New 0", stringArray[0]);
    assertEquals("stringArray[1]", "New 1", stringArray[1]);
}
/**
* Test copyProperties() when the origin is a a {@code Map}.
*/
public void testCopyPropertiesMap() {
    // Origin values are Strings; copyProperties() must convert them to the
    // destination DynaBean property types.
    final Map<String, Object> origin = new HashMap<>();
    origin.put("booleanProperty", "false");
    origin.put("byteProperty", "111");
    origin.put("doubleProperty", "333.0");
    origin.put("dupProperty", new String[] { "New 0", "New 1", "New 2" });
    origin.put("floatProperty", "222.0");
    origin.put("intArray", new String[] { "0", "100", "200" });
    origin.put("intProperty", "111");
    origin.put("longProperty", "444");
    origin.put("shortProperty", "555");
    origin.put("stringProperty", "New String Property");
    try {
        BeanUtils.copyProperties(bean, origin);
    } catch (final Throwable t) {
        fail("Threw " + t.toString());
    }
    // Scalar properties
    assertEquals("booleanProperty", false, ((Boolean) bean.get("booleanProperty")).booleanValue());
    assertEquals("byteProperty", (byte) 111, ((Byte) bean.get("byteProperty")).byteValue());
    assertEquals("doubleProperty", 333.0, ((Double) bean.get("doubleProperty")).doubleValue(), 0.005);
    assertEquals("floatProperty", (float) 222.0, ((Float) bean.get("floatProperty")).floatValue(), (float) 0.005);
    assertEquals("intProperty", 111, ((Integer) bean.get("intProperty")).intValue());
    assertEquals("longProperty", 444, ((Long) bean.get("longProperty")).longValue());
    assertEquals("shortProperty", (short) 555, ((Short) bean.get("shortProperty")).shortValue());
    assertEquals("stringProperty", "New String Property", (String) bean.get("stringProperty"));
    // Indexed Properties
    final String[] dupProperty = (String[]) bean.get("dupProperty");
    assertNotNull("dupProperty present", dupProperty);
    assertEquals("dupProperty length", 3, dupProperty.length);
    for (int i = 0; i < 3; i++) {
        assertEquals("dupProperty[" + i + "]", "New " + i, dupProperty[i]);
    }
    final int[] intArray = (int[]) bean.get("intArray");
    assertNotNull("intArray present", intArray);
    assertEquals("intArray length", 3, intArray.length);
    for (int i = 0; i < 3; i++) {
        assertEquals("intArray[" + i + "]", i * 100, intArray[i]);
    }
}
/**
* Test the copyProperties() method from a standard JavaBean.
*/
public void testCopyPropertiesStandard() {
    // Set up an origin JavaBean (not a DynaBean) with customized properties.
    final TestBean origin = new TestBean();
    origin.setBooleanProperty(false);
    origin.setByteProperty((byte) 111);
    origin.setDoubleProperty(333.33);
    origin.setDupProperty(new String[] { "New 0", "New 1", "New 2" });
    origin.setIntArray(new int[] { 100, 200, 300 });
    origin.setIntProperty(333);
    origin.setLongProperty(3333);
    origin.setShortProperty((short) 33);
    origin.setStringArray(new String[] { "New 0", "New 1" });
    origin.setStringProperty("Custom string");
    // Copy the origin bean to our destination test bean
    try {
        BeanUtils.copyProperties(bean, origin);
    } catch (final Exception e) {
        fail("Threw exception: " + e);
    }
    // Validate the results for scalar properties
    assertEquals("Copied boolean property", false, ((Boolean) bean.get("booleanProperty")).booleanValue());
    assertEquals("Copied byte property", (byte) 111, ((Byte) bean.get("byteProperty")).byteValue());
    assertEquals("Copied double property", 333.33, ((Double) bean.get("doubleProperty")).doubleValue(), 0.005);
    assertEquals("Copied int property", 333, ((Integer) bean.get("intProperty")).intValue());
    assertEquals("Copied long property", 3333, ((Long) bean.get("longProperty")).longValue());
    assertEquals("Copied short property", (short) 33, ((Short) bean.get("shortProperty")).shortValue());
    assertEquals("Copied string property", "Custom string", (String) bean.get("stringProperty"));
    // Validate the results for array properties
    final String[] dupProperty = (String[]) bean.get("dupProperty");
    assertNotNull("dupProperty present", dupProperty);
    assertEquals("dupProperty length", 3, dupProperty.length);
    for (int i = 0; i < 3; i++) {
        assertEquals("dupProperty[" + i + "]", "New " + i, dupProperty[i]);
    }
    final int[] intArray = (int[]) bean.get("intArray");
    assertNotNull("intArray present", intArray);
    assertEquals("intArray length", 3, intArray.length);
    for (int i = 0; i < 3; i++) {
        assertEquals("intArray[" + i + "]", (i + 1) * 100, intArray[i]);
    }
    final String[] stringArray = (String[]) bean.get("stringArray");
    assertNotNull("stringArray present", stringArray);
    assertEquals("stringArray length", 2, stringArray.length);
    for (int i = 0; i < 2; i++) {
        assertEquals("stringArray[" + i + "]", "New " + i, stringArray[i]);
    }
}
/**
* Test the describe() method.
*/
public void testDescribe() {
    Map<String, Object> map = null;
    try {
        map = PropertyUtils.describe(bean);
    } catch (final Exception e) {
        fail("Threw exception " + e);
    }
    // Verify existence of all the properties that should be present
    for (final String describe : describes) {
        assertTrue("Property '" + describe + "' is present", map.containsKey(describe));
    }
    assertFalse("Property 'writeOnlyProperty' is not present", map.containsKey("writeOnlyProperty"));
    // Verify the values of scalar properties. Expected values use valueOf():
    // the wrapper constructors are deprecated since Java 9 / removed in 16+.
    assertEquals("Value of 'booleanProperty'", Boolean.TRUE, map.get("booleanProperty"));
    assertEquals("Value of 'byteProperty'", Byte.valueOf((byte) 121), map.get("byteProperty"));
    assertEquals("Value of 'doubleProperty'", Double.valueOf(321.0), map.get("doubleProperty"));
    assertEquals("Value of 'floatProperty'", Float.valueOf(123.0f), map.get("floatProperty"));
    assertEquals("Value of 'intProperty'", Integer.valueOf(123), map.get("intProperty"));
    assertEquals("Value of 'longProperty'", Long.valueOf(321), map.get("longProperty"));
    assertEquals("Value of 'shortProperty'", Short.valueOf((short) 987), map.get("shortProperty"));
    assertEquals("Value of 'stringProperty'", "This is a string", (String) map.get("stringProperty"));
}
/**
* Test populate() method on array properties as a whole.
*/
public void testPopulateArrayProperties() {
    try {
        // populate() must convert the String values to the int[] element type.
        final HashMap<String, Object> map = new HashMap<>();
        map.put("intArray", new String[] { "123", "456", "789" });
        String[] stringArray = new String[] { "New String 0", "New String 1" };
        map.put("stringArray", stringArray);
        BeanUtils.populate(bean, map);
        final int[] intArray = (int[]) bean.get("intArray");
        assertNotNull("intArray is present", intArray);
        assertEquals("intArray length", 3, intArray.length);
        assertEquals("intArray[0]", 123, intArray[0]);
        assertEquals("intArray[1]", 456, intArray[1]);
        assertEquals("intArray[2]", 789, intArray[2]);
        stringArray = (String[]) bean.get("stringArray");
        assertNotNull("stringArray is present", stringArray);
        assertEquals("stringArray length", 2, stringArray.length);
        assertEquals("stringArray[0]", "New String 0", stringArray[0]);
        assertEquals("stringArray[1]", "New String 1", stringArray[1]);
    } catch (final IllegalAccessException e) {
        fail("IllegalAccessException");
    } catch (final InvocationTargetException e) {
        fail("InvocationTargetException");
    }
}
/**
* tests the string and int arrays of TestBean
*/
/*
 * Multi-catch replaces three identical catch blocks, and the failure message
 * now includes the actual exception instead of discarding it.
 */
public void testGetArrayProperty() {
    try {
        String[] arr = BeanUtils.getArrayProperty(bean, "stringArray");
        final String[] comp = (String[]) bean.get("stringArray");
        assertEquals("String array length = " + comp.length, comp.length, arr.length);
        arr = BeanUtils.getArrayProperty(bean, "intArray");
        final int[] iarr = (int[]) bean.get("intArray");
        assertEquals("String array length = " + iarr.length, iarr.length, arr.length);
    } catch (final IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
        fail("Threw " + e);
    }
}
/**
* tests getting an indexed property
*/
/*
 * Multi-catch replaces three identical catch blocks, and assertEquals()
 * reports both values on failure instead of assertTrue(equals).
 */
public void testGetIndexedProperty1() {
    try {
        String val = BeanUtils.getIndexedProperty(bean, "intIndexed[3]");
        String comp = String.valueOf(bean.get("intIndexed", 3));
        assertEquals("intIndexed[3] == " + comp, comp, val);
        val = BeanUtils.getIndexedProperty(bean, "stringIndexed[3]");
        comp = (String) bean.get("stringIndexed", 3);
        assertEquals("stringIndexed[3] == " + comp, comp, val);
    } catch (final IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
        fail("Threw " + e);
    }
}
/**
* tests getting an indexed property
*/
/*
 * Same as testGetIndexedProperty1 but using the (name, index) overload.
 * Multi-catch and assertEquals() improve failure diagnostics.
 */
public void testGetIndexedProperty2() {
    try {
        String val = BeanUtils.getIndexedProperty(bean, "intIndexed", 3);
        String comp = String.valueOf(bean.get("intIndexed", 3));
        assertEquals("intIndexed,3 == " + comp, comp, val);
        val = BeanUtils.getIndexedProperty(bean, "stringIndexed", 3);
        comp = (String) bean.get("stringIndexed", 3);
        assertEquals("stringIndexed,3 == " + comp, comp, val);
    } catch (final IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
        fail("Threw " + e);
    }
}
/**
* tests getting a nested property
*/
/*
 * Multi-catch and assertEquals() improve failure diagnostics over the
 * original three identical catch blocks and assertTrue(equals).
 */
public void testGetNestedProperty() {
    try {
        final String val = BeanUtils.getNestedProperty(bean, "nested.stringProperty");
        final String comp = nested.getStringProperty();
        assertEquals("nested.StringProperty == " + comp, comp, val);
    } catch (final IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
        fail("Threw " + e);
    }
}
/**
* tests getting a 'whatever' property
*/
/*
 * Multi-catch and assertEquals() improve failure diagnostics over the
 * original three identical catch blocks and assertTrue(equals).
 */
public void testGetGeneralProperty() {
    try {
        final String val = BeanUtils.getProperty(bean, "nested.intIndexed[2]");
        final String comp = String.valueOf(bean.get("intIndexed", 2));
        assertEquals("nested.intIndexed[2] == " + comp, comp, val);
    } catch (final IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
        fail("Threw " + e);
    }
}
/**
* tests getting a 'whatever' property
*/
/*
 * Multi-catch and assertEquals() improve failure diagnostics over the
 * original three identical catch blocks and assertTrue(equals).
 */
public void testGetSimpleProperty() {
    try {
        final String val = BeanUtils.getSimpleProperty(bean, "shortProperty");
        final String comp = String.valueOf(bean.get("shortProperty"));
        assertEquals("shortProperty == " + comp, comp, val);
    } catch (final IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
        fail("Threw " + e);
    }
}
/**
* Test populate() method on individual array elements.
*/
public void testPopulateArrayElements() {
    try {
        final HashMap<String, Object> map = new HashMap<>();
        map.put("intIndexed[0]", "100");
        map.put("intIndexed[2]", "120");
        map.put("intIndexed[4]", "140");
        BeanUtils.populate(bean, map);
        // Even indices were overwritten; odd indices keep their setUp() values.
        assertEquals("intIndexed[0] is 100", 100, ((Integer) bean.get("intIndexed", 0)).intValue());
        assertEquals("intIndexed[1] is 10", 10, ((Integer) bean.get("intIndexed", 1)).intValue());
        assertEquals("intIndexed[2] is 120", 120, ((Integer) bean.get("intIndexed", 2)).intValue());
        assertEquals("intIndexed[3] is 30", 30, ((Integer) bean.get("intIndexed", 3)).intValue());
        assertEquals("intIndexed[4] is 140", 140, ((Integer) bean.get("intIndexed", 4)).intValue());
        map.clear();
        map.put("stringIndexed[1]", "New String 1");
        map.put("stringIndexed[3]", "New String 3");
        BeanUtils.populate(bean, map);
        assertEquals("stringIndexed[0] is \"String 0\"", "String 0", (String) bean.get("stringIndexed", 0));
        assertEquals("stringIndexed[1] is \"New String 1\"", "New String 1", (String) bean.get("stringIndexed", 1));
        assertEquals("stringIndexed[2] is \"String 2\"", "String 2", (String) bean.get("stringIndexed", 2));
        assertEquals("stringIndexed[3] is \"New String 3\"", "New String 3", (String) bean.get("stringIndexed", 3));
        assertEquals("stringIndexed[4] is \"String 4\"", "String 4", (String) bean.get("stringIndexed", 4));
    } catch (final IllegalAccessException e) {
        fail("IllegalAccessException");
    } catch (final InvocationTargetException e) {
        fail("InvocationTargetException");
    }
}
/**
* Test populate() on mapped properties.
*/
public void testPopulateMapped() {
    try {
        final HashMap<String, Object> map = new HashMap<>();
        map.put("mappedProperty(First Key)", "New First Value");
        map.put("mappedProperty(Third Key)", "New Third Value");
        BeanUtils.populate(bean, map);
        // First was replaced, Third was added, Second is untouched, Fourth absent.
        assertEquals("mappedProperty(First Key)", "New First Value", (String) bean.get("mappedProperty", "First Key"));
        assertEquals("mappedProperty(Second Key)", "Second Value", (String) bean.get("mappedProperty", "Second Key"));
        assertEquals("mappedProperty(Third Key)", "New Third Value", (String) bean.get("mappedProperty", "Third Key"));
        assertNull("mappedProperty(Fourth Key", bean.get("mappedProperty", "Fourth Key"));
    } catch (final IllegalAccessException e) {
        fail("IllegalAccessException");
    } catch (final InvocationTargetException e) {
        fail("InvocationTargetException");
    }
}
/**
* Test populate() method on nested properties.
*/
public void testPopulateNested() {
    try {
        // Only a subset of nested.* keys is populated; the rest must keep
        // their TestBean defaults.
        final HashMap<String, Object> map = new HashMap<>();
        map.put("nested.booleanProperty", "false");
        map.put("nested.doubleProperty", "432.0");
        map.put("nested.intProperty", "543");
        map.put("nested.shortProperty", "654");
        BeanUtils.populate(bean, map);
        final TestBean nested = (TestBean) bean.get("nested");
        assertFalse("booleanProperty is false", nested.getBooleanProperty());
        assertTrue("booleanSecond is true", nested.isBooleanSecond());
        assertEquals("doubleProperty is 432.0", 432.0, nested.getDoubleProperty(), 0.005);
        assertEquals("floatProperty is 123.0", (float) 123.0, nested.getFloatProperty(), (float) 0.005);
        assertEquals("intProperty is 543", 543, nested.getIntProperty());
        assertEquals("longProperty is 321", 321, nested.getLongProperty());
        assertEquals("shortProperty is 654", (short) 654, nested.getShortProperty());
        assertEquals("stringProperty is \"This is a string\"", "This is a string", nested.getStringProperty());
    } catch (final IllegalAccessException e) {
        fail("IllegalAccessException");
    } catch (final InvocationTargetException e) {
        fail("InvocationTargetException");
    }
}
/**
* Test populate() method on scalar properties.
*/
public void testPopulateScalar() {
    try {
        // Seed nullProperty so the explicit null in the map can clear it.
        bean.set("nullProperty", "non-null value");
        final HashMap<String, Object> map = new HashMap<>();
        map.put("booleanProperty", "false");
        map.put("doubleProperty", "432.0");
        map.put("intProperty", "543");
        map.put("nullProperty", null);
        map.put("shortProperty", "654");
        BeanUtils.populate(bean, map);
        // Properties absent from the map keep their setUp() values.
        final Boolean booleanProperty = (Boolean) bean.get("booleanProperty");
        assertFalse("booleanProperty is false", booleanProperty.booleanValue());
        final Boolean booleanSecond = (Boolean) bean.get("booleanSecond");
        assertTrue("booleanSecond is true", booleanSecond.booleanValue());
        final Double doubleProperty = (Double) bean.get("doubleProperty");
        assertEquals("doubleProperty is 432.0", 432.0, doubleProperty.doubleValue(), 0.005);
        final Float floatProperty = (Float) bean.get("floatProperty");
        assertEquals("floatProperty is 123.0", (float) 123.0, floatProperty.floatValue(), (float) 0.005);
        final Integer intProperty = (Integer) bean.get("intProperty");
        assertEquals("intProperty is 543", 543, intProperty.intValue());
        final Long longProperty = (Long) bean.get("longProperty");
        assertEquals("longProperty is 321", 321, longProperty.longValue());
        assertNull("nullProperty is null", bean.get("nullProperty"));
        final Short shortProperty = (Short) bean.get("shortProperty");
        assertEquals("shortProperty is 654", (short) 654, shortProperty.shortValue());
        assertEquals("stringProperty is \"This is a string\"", "This is a string", (String) bean.get("stringProperty"));
    } catch (final IllegalAccessException e) {
        fail("IllegalAccessException");
    } catch (final InvocationTargetException e) {
        fail("InvocationTargetException");
    }
}
/**
* Test calling setProperty() with null property values.
*/
public void testSetPropertyNullValues() throws Exception {
    // Scalar null into an array property: converted to a one-element array.
    Object oldValue = PropertyUtils.getSimpleProperty(bean, "stringArray");
    BeanUtils.setProperty(bean, "stringArray", null);
    Object newValue = PropertyUtils.getSimpleProperty(bean, "stringArray");
    assertNotNull("stringArray is not null", newValue);
    assertTrue("stringArray of correct type", newValue instanceof String[]);
    assertEquals("stringArray length", 1, ((String[]) newValue).length);
    PropertyUtils.setProperty(bean, "stringArray", oldValue);
    // Indexed null into an array: only that element becomes null.
    oldValue = PropertyUtils.getSimpleProperty(bean, "stringArray");
    BeanUtils.setProperty(bean, "stringArray[2]", null);
    newValue = PropertyUtils.getSimpleProperty(bean, "stringArray");
    assertNotNull("stringArray is not null", newValue);
    assertTrue("stringArray of correct type", newValue instanceof String[]);
    assertEquals("stringArray length", 5, ((String[]) newValue).length);
    assertNull("stringArray[2] is null", ((String[]) newValue)[2]);
    PropertyUtils.setProperty(bean, "stringArray", oldValue);
    // Null into a scalar property clears it.
    BeanUtils.setProperty(bean, "stringProperty", null);
    assertNull("stringProperty is now null", BeanUtils.getProperty(bean, "stringProperty"));
}
/**
* Test converting to and from primitive wrapper types.
*/
public void testSetPropertyOnPrimitiveWrappers() throws Exception {
    // Integer.valueOf() replaces the Integer(int) constructor, deprecated
    // since Java 9 and removed in Java 16+.
    BeanUtils.setProperty(bean, "intProperty", Integer.valueOf(1));
    assertEquals(1, ((Integer) bean.get("intProperty")).intValue());
    // An Integer assigned to a String property must be converted to "1".
    BeanUtils.setProperty(bean, "stringProperty", Integer.valueOf(1));
    assertEquals(1, Integer.parseInt((String) bean.get("stringProperty")));
}
/**
* Test setting a null property value.
*/
public void testSetPropertyNull() throws Exception {
    // Seed the property first so setting null is an observable change.
    bean.set("nullProperty", "non-null value");
    BeanUtils.setProperty(bean, "nullProperty", null);
    assertNull("nullProperty is null", bean.get("nullProperty"));
}
/**
* Test narrowing and widening conversions on byte.
*/
public void testCopyPropertyByte() throws Exception {
    // Each source type must be converted to byte 123. valueOf() replaces the
    // wrapper constructors, deprecated since Java 9 / removed in 16+.
    BeanUtils.setProperty(bean, "byteProperty", Byte.valueOf((byte) 123));
    assertEquals((byte) 123, ((Byte) bean.get("byteProperty")).byteValue());
    /*
    BeanUtils.setProperty(bean, "byteProperty", Double.valueOf(123));
    assertEquals((byte) 123, ((Byte) bean.get("byteProperty")).byteValue());
    BeanUtils.setProperty(bean, "byteProperty", Float.valueOf(123));
    assertEquals((byte) 123, ((Byte) bean.get("byteProperty")).byteValue());
    */
    BeanUtils.setProperty(bean, "byteProperty", Integer.valueOf(123));
    assertEquals((byte) 123, ((Byte) bean.get("byteProperty")).byteValue());
    BeanUtils.setProperty(bean, "byteProperty", Long.valueOf(123));
    assertEquals((byte) 123, ((Byte) bean.get("byteProperty")).byteValue());
    BeanUtils.setProperty(bean, "byteProperty", Short.valueOf((short) 123));
    assertEquals((byte) 123, ((Byte) bean.get("byteProperty")).byteValue());
}
/**
* Test narrowing and widening conversions on double.
*/
public void testCopyPropertyDouble() throws Exception {
BeanUtils.setProperty(bean, "doubleProperty", new Byte((byte) 123));
assertEquals(123, ((Double) bean.get("doubleProperty")).doubleValue(), 0.005);
BeanUtils.setProperty(bean, "doubleProperty", new Double(123));
assertEquals(123, ((Double) bean.get("doubleProperty")).doubleValue(), 0.005);
BeanUtils.setProperty(bean, "doubleProperty", new Float(123));
assertEquals(123, ((Double) bean.get("doubleProperty")).doubleValue(), 0.005);
BeanUtils.setProperty(bean, "doubleProperty", new Integer(123));
assertEquals(123, ((Double) bean.get("doubleProperty")).doubleValue(), 0.005);
BeanUtils.setProperty(bean, "doubleProperty", new Long(123));
assertEquals(123, ((Double) bean.get("doubleProperty")).doubleValue(), 0.005);
BeanUtils.setProperty(bean, "doubleProperty", new Short((short) 123));
assertEquals(123, ((Double) bean.get("doubleProperty")).doubleValue(), 0.005);
}
/**
* Test narrowing and widening conversions on float.
*/
public void testCopyPropertyFloat() throws Exception {
BeanUtils.setProperty(bean, "floatProperty", new Byte((byte) 123));
assertEquals(123, ((Float) bean.get("floatProperty")).floatValue(), 0.005);
BeanUtils.setProperty(bean, "floatProperty", new Double(123));
assertEquals(123, ((Float) bean.get("floatProperty")).floatValue(), 0.005);
BeanUtils.setProperty(bean, "floatProperty", new Float(123));
assertEquals(123, ((Float) bean.get("floatProperty")).floatValue(), 0.005);
BeanUtils.setProperty(bean, "floatProperty", new Integer(123));
assertEquals(123, ((Float) bean.get("floatProperty")).floatValue(), 0.005);
BeanUtils.setProperty(bean, "floatProperty", new Long(123));
assertEquals(123, ((Float) bean.get("floatProperty")).floatValue(), 0.005);
BeanUtils.setProperty(bean, "floatProperty", new Short((short) 123));
assertEquals(123, ((Float) bean.get("floatProperty")).floatValue(), 0.005);
}
/**
* Test narrowing and widening conversions on int.
*/
public void testCopyPropertyInteger() throws Exception {
BeanUtils.setProperty(bean, "longProperty", new Byte((byte) 123));
assertEquals(123, ((Integer) bean.get("intProperty")).intValue());
/*
BeanUtils.setProperty(bean, "longProperty", new Double((double) 123));
assertEquals((int) 123, ((Integer) bean.get("intProperty")).intValue());
BeanUtils.setProperty(bean, "longProperty", new Float((float) 123));
assertEquals((int) 123, ((Integer) bean.get("intProperty")).intValue());
*/
BeanUtils.setProperty(bean, "longProperty", new Integer(123));
assertEquals(123, ((Integer) bean.get("intProperty")).intValue());
BeanUtils.setProperty(bean, "longProperty", new Long(123));
assertEquals(123, ((Integer) bean.get("intProperty")).intValue());
BeanUtils.setProperty(bean, "longProperty", new Short((short) 123));
assertEquals(123, ((Integer) bean.get("intProperty")).intValue());
}
/**
* Test narrowing and widening conversions on long.
*/
public void testCopyPropertyLong() throws Exception {
BeanUtils.setProperty(bean, "longProperty", new Byte((byte) 123));
assertEquals(123, ((Long) bean.get("longProperty")).longValue());
/*
BeanUtils.setProperty(bean, "longProperty", new Double((double) 123));
assertEquals((long) 123, ((Long) bean.get("longProperty")).longValue());
BeanUtils.setProperty(bean, "longProperty", new Float((float) 123));
assertEquals((long) 123, ((Long) bean.get("longProperty")).longValue());
*/
BeanUtils.setProperty(bean, "longProperty", new Integer(123));
assertEquals(123, ((Long) bean.get("longProperty")).longValue());
BeanUtils.setProperty(bean, "longProperty", new Long(123));
assertEquals(123, ((Long) bean.get("longProperty")).longValue());
BeanUtils.setProperty(bean, "longProperty", new Short((short) 123));
assertEquals(123, ((Long) bean.get("longProperty")).longValue());
}
    /**
     * Test copying a null property value.
     * Verifies that {@code copyProperty()} clears a previously non-null value.
     */
    public void testCopyPropertyNull() throws Exception {
        // Seed with a non-null value so the null assignment is observable.
        bean.set("nullProperty", "non-null value");
        BeanUtils.copyProperty(bean, "nullProperty", null);
        assertNull("nullProperty is null", bean.get("nullProperty"));
    }
/**
* Test narrowing and widening conversions on short.
*/
public void testCopyPropertyShort() throws Exception {
BeanUtils.setProperty(bean, "shortProperty", new Byte((byte) 123));
assertEquals((short) 123, ((Short) bean.get("shortProperty")).shortValue());
/*
BeanUtils.setProperty(bean, "shortProperty", new Double((double) 123));
assertEquals((short) 123, ((Short) bean.get("shortProperty")).shortValue());
BeanUtils.setProperty(bean, "shortProperty", new Float((float) 123));
assertEquals((short) 123, ((Short) bean.get("shortProperty")).shortValue());
*/
BeanUtils.setProperty(bean, "shortProperty", new Integer(123));
assertEquals((short) 123, ((Short) bean.get("shortProperty")).shortValue());
BeanUtils.setProperty(bean, "shortProperty", new Long(123));
assertEquals((short) 123, ((Short) bean.get("shortProperty")).shortValue());
BeanUtils.setProperty(bean, "shortProperty", new Short((short) 123));
assertEquals((short) 123, ((Short) bean.get("shortProperty")).shortValue());
}
/**
* Test copying a property using a nested indexed array expression,
* with and without conversions.
*/
public void testCopyPropertyNestedIndexedArray() throws Exception {
final int[] origArray = { 0, 10, 20, 30, 40};
final int[] intArray = { 0, 0, 0 };
((TestBean) bean.get("nested")).setIntArray(intArray);
final int[] intChanged = { 0, 0, 0 };
// No conversion required
BeanUtils.copyProperty(bean, "nested.intArray[1]", new Integer(1));
checkIntArray((int[]) bean.get("intArray"), origArray);
intChanged[1] = 1;
checkIntArray(((TestBean) bean.get("nested")).getIntArray(),
intChanged);
// Widening conversion required
BeanUtils.copyProperty(bean, "nested.intArray[1]", new Byte((byte) 2));
checkIntArray((int[]) bean.get("intArray"), origArray);
intChanged[1] = 2;
checkIntArray(((TestBean) bean.get("nested")).getIntArray(),
intChanged);
// Narrowing conversion required
BeanUtils.copyProperty(bean, "nested.intArray[1]", new Long(3));
checkIntArray((int[]) bean.get("intArray"), origArray);
intChanged[1] = 3;
checkIntArray(((TestBean) bean.get("nested")).getIntArray(),
intChanged);
// String conversion required
BeanUtils.copyProperty(bean, "nested.intArray[1]", "4");
checkIntArray((int[]) bean.get("intArray"), origArray);
intChanged[1] = 4;
checkIntArray(((TestBean) bean.get("nested")).getIntArray(),
intChanged);
}
    /**
     * Test copying a property using a nested mapped map property.
     * The write must change only the nested bean's map, not the top-level
     * "mapProperty" (which must still match the fixture contents).
     */
    public void testCopyPropertyNestedMappedMap() throws Exception {
        // Expected contents of the untouched top-level map (fixture values).
        final Map<String, Object> origMap = new HashMap<>();
        origMap.put("First Key", "First Value");
        origMap.put("Second Key", "Second Value");
        // Expected contents of the nested map after the copy below.
        final Map<String, Object> changedMap = new HashMap<>();
        changedMap.put("First Key", "First Value");
        changedMap.put("Second Key", "Second Value");
        // No conversion required
        BeanUtils.copyProperty(bean, "nested.mapProperty(Second Key)",
                "New Second Value");
        checkMap((Map<?, ?>) bean.get("mapProperty"), origMap);
        changedMap.put("Second Key", "New Second Value");
        checkMap(((TestBean) bean.get("nested")).getMapProperty(), changedMap);
    }
/**
* Test copying a property using a nested simple expression, with and
* without conversions.
*/
public void testCopyPropertyNestedSimple() throws Exception {
bean.set("intProperty", new Integer(0));
nested.setIntProperty(0);
// No conversion required
BeanUtils.copyProperty(bean, "nested.intProperty", new Integer(1));
assertEquals(0, ((Integer) bean.get("intProperty")).intValue());
assertEquals(1, nested.getIntProperty());
// Widening conversion required
BeanUtils.copyProperty(bean, "nested.intProperty", new Byte((byte) 2));
assertEquals(0, ((Integer) bean.get("intProperty")).intValue());
assertEquals(2, nested.getIntProperty());
// Narrowing conversion required
BeanUtils.copyProperty(bean, "nested.intProperty", new Long(3));
assertEquals(0, ((Integer) bean.get("intProperty")).intValue());
assertEquals(3, nested.getIntProperty());
// String conversion required
BeanUtils.copyProperty(bean, "nested.intProperty", "4");
assertEquals(0, ((Integer) bean.get("intProperty")).intValue());
assertEquals(4, nested.getIntProperty());
}
// Ensure that the nested intArray matches the specified values
protected void checkIntArray(final int[] actual, final int[] expected) {
assertNotNull("actual array not null", actual);
assertEquals("actual array length", expected.length, actual.length);
for (int i = 0; i < actual.length; i++) {
assertEquals("actual array value[" + i + "]",
expected[i], actual[i]);
}
}
// Ensure that the actual Map matches the expected Map
protected void checkMap(final Map<?, ?> actual, final Map<?, ?> expected) {
assertNotNull("actual map not null", actual);
assertEquals("actual map size", expected.size(), actual.size());
final Iterator<?> keys = expected.keySet().iterator();
while (keys.hasNext()) {
final Object key = keys.next();
assertEquals("actual map value(" + key + ")",
expected.get(key), actual.get(key));
}
}
/**
* Create and return a {@code DynaClass} instance for our test
* {@code DynaBean}.
*/
protected static DynaClass createDynaClass() {
final int[] intArray = new int[0];
final String[] stringArray = new String[0];
final DynaClass dynaClass = new BasicDynaClass
("TestDynaClass", null,
new DynaProperty[]{
new DynaProperty("booleanProperty", Boolean.TYPE),
new DynaProperty("booleanSecond", Boolean.TYPE),
new DynaProperty("byteProperty", Byte.TYPE),
new DynaProperty("doubleProperty", Double.TYPE),
new DynaProperty("dupProperty", stringArray.getClass()),
new DynaProperty("floatProperty", Float.TYPE),
new DynaProperty("intArray", intArray.getClass()),
new DynaProperty("intIndexed", intArray.getClass()),
new DynaProperty("intProperty", Integer.TYPE),
new DynaProperty("listIndexed", List.class),
new DynaProperty("longProperty", Long.TYPE),
new DynaProperty("mapProperty", Map.class),
new DynaProperty("mappedProperty", Map.class),
new DynaProperty("mappedIntProperty", Map.class),
new DynaProperty("nested", TestBean.class),
new DynaProperty("nullProperty", String.class),
new DynaProperty("shortProperty", Short.TYPE),
new DynaProperty("stringArray", stringArray.getClass()),
new DynaProperty("stringIndexed", stringArray.getClass()),
new DynaProperty("stringProperty", String.class),
});
return dynaClass;
}
}
| |
package de.prttstft.materialmensa.ui.activities.details;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.app.ShareCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.MenuItem;
import android.widget.ImageView;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.google.gson.Gson;
import net.opacapp.multilinecollapsingtoolbar.CollapsingToolbarLayout;
import java.util.List;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
import de.prttstft.materialmensa.R;
import de.prttstft.materialmensa.extras.Analytics;
import de.prttstft.materialmensa.extras.DateTimeUtilities;
import de.prttstft.materialmensa.extras.Utilities;
import de.prttstft.materialmensa.model.Meal;
import timber.log.Timber;
import static de.prttstft.materialmensa.constants.GeneralConstants.MEAL;
public class DetailsActivity extends AppCompatActivity {
private static final String LOCALE_DE = "Deutsch";
private static final String MIME_TYPE_TEXT = "text/plain";
private static final String PLAY_STORE_URL = "https://goo.gl/HD2ed2";
@Bind(R.id.activity_details_additives_allergens) TextView additivesAllergens;
@Bind(R.id.activity_details_image) ImageView mealImage;
@Bind(R.id.activity_details_collapsing_toolbar_layout) CollapsingToolbarLayout collapsingToolbarLayout;
@Bind(R.id.activity_details_toolbar) Toolbar toolbar;
@Bind(R.id.activity_details_floating_action_button) FloatingActionButton floatingActionButton;
private Meal meal;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_details);
ButterKnife.bind(this);
getExtras();
setUpToolbar();
setUpView();
Analytics.activityDetailsViewed();
}
private void getExtras() {
if (getIntent().getExtras() != null) {
if (getIntent().getExtras().getString(MEAL) != null) {
meal = new Gson().fromJson(getIntent().getStringExtra(MEAL), Meal.class);
}
} else {
finish();
}
}
private void setUpToolbar() {
setSupportActionBar(toolbar);
if (getSupportActionBar() != null) {
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
}
}
private void setUpView() {
if (Utilities.getSystemLanguage().equals(LOCALE_DE)) {
collapsingToolbarLayout.setTitle(meal.getNameDe());
} else {
collapsingToolbarLayout.setTitle(meal.getNameEn());
}
if (meal.getImage().isEmpty()) {
mealImage.setImageResource(R.drawable.placeholder_meal_blur);
} else {
Glide.with(this)
.load(meal.getThumbnail())
.fitCenter()
.into(mealImage);
}
additivesAllergens.setText(buildAdditiveAllergenString(meal.getAllergens()));
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
switch (id) {
default:
onBackPressed();
return true;
}
}
@OnClick(R.id.activity_details_floating_action_button)
public void onFloatingActionButtonClicked() {
shareMeal();
}
private String buildAdditiveAllergenString(List<String> additivesAllergens) {
String builder = "";
for (int i = 0; i < additivesAllergens.size(); i++) {
String additiveAllergenString = getAdditiveAllergenString(additivesAllergens.get(i));
if (!additiveAllergenString.isEmpty()) {
builder = builder + additiveAllergenString + "\n";
}
}
return builder;
}
private String buildShareString() {
String shareString = getString(R.string.share_string_prefix, DateTimeUtilities.getShareDayString(meal.getDate()));
if (Utilities.getSystemLanguage().equals(LOCALE_DE)) {
shareString = shareString + meal.getNameDe();
} else {
shareString = shareString + meal.getNameEn();
}
shareString = shareString + getString(R.string.share_string_suffix, PLAY_STORE_URL);
return shareString;
}
private String getAdditiveAllergenString(String additiveAllergen) {
String[] additives = getResources().getStringArray(R.array.activity_settings_preferences_additives_array);
String[] allergens = getResources().getStringArray(R.array.activity_settings_preferences_allergens_array);
switch (additiveAllergen) {
case "1":
return additives[0];
case "2":
return additives[1];
case "3":
return additives[2];
case "4":
return additives[3];
case "5":
return additives[4];
case "6":
return additives[5];
case "7":
return additives[6];
case "8":
return additives[7];
case "9":
return additives[8];
case "10":
return additives[9];
case "11":
return additives[10];
case "12":
return additives[11];
case "13":
return additives[12];
case "14":
return additives[13];
case "15":
return additives[14];
case "A1":
return allergens[0];
case "A2":
return allergens[1];
case "A3":
return allergens[2];
case "A4":
return allergens[3];
case "A5":
return allergens[4];
case "A6":
return allergens[5];
case "A7":
return allergens[6];
case "A8":
return allergens[7];
case "A9":
return allergens[8];
case "A10":
return allergens[9];
case "A11":
return allergens[10];
case "A12":
return allergens[11];
case "A13":
return allergens[12];
case "A14":
return allergens[13];
default:
Timber.w("Default AdditiveAllergene called: $additiveAllergen");
return "";
}
}
private void shareMeal() {
ShareCompat.IntentBuilder
.from(this)
.setType(MIME_TYPE_TEXT)
.setText(buildShareString())
.setChooserTitle(R.string.share_chooser_title)
.startChooser();
Analytics.mealShared();
}
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
import java.io.IOException;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Scanner;
import java.util.Set;
import java.util.TreeMap;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.favored.FavoredNodeAssignmentHelper;
import org.apache.hadoop.hbase.favored.FavoredNodesPlan;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.MunkresAssignment;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.hbase.thirdparty.org.apache.commons.cli.GnuParser;
import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse;
/**
* A tool that is used for manipulating and viewing favored nodes information
* for regions. Run with -h to get a list of the options
*/
@InterfaceAudience.Private
// TODO: Remove? Unused. Partially implemented only.
public class RegionPlacementMaintainer {
private static final Logger LOG = LoggerFactory.getLogger(RegionPlacementMaintainer.class
.getName());
//The cost of a placement that should never be assigned.
private static final float MAX_COST = Float.POSITIVE_INFINITY;
// The cost of a placement that is undesirable but acceptable.
private static final float AVOID_COST = 100000f;
// The amount by which the cost of a placement is increased if it is the
// last slot of the server. This is done to more evenly distribute the slop
// amongst servers.
private static final float LAST_SLOT_COST_PENALTY = 0.5f;
// The amount by which the cost of a primary placement is penalized if it is
// not the host currently serving the region. This is done to minimize moves.
private static final float NOT_CURRENT_HOST_PENALTY = 0.1f;
private static boolean USE_MUNKRES_FOR_PLACING_SECONDARY_AND_TERTIARY = false;
private Configuration conf;
private final boolean enforceLocality;
private final boolean enforceMinAssignmentMove;
private RackManager rackManager;
private Set<TableName> targetTableSet;
private final Connection connection;
  /**
   * Creates a maintainer that enforces both locality and minimal assignment
   * movement (the defaults).
   */
  public RegionPlacementMaintainer(Configuration conf) {
    this(conf, true, true);
  }
  /**
   * @param conf cluster configuration
   * @param enforceLocality whether block locality should influence placement
   * @param enforceMinAssignmentMove whether to penalize placements that move
   *          a region off its current host
   */
  public RegionPlacementMaintainer(Configuration conf, boolean enforceLocality,
      boolean enforceMinAssignmentMove) {
    this.conf = conf;
    this.enforceLocality = enforceLocality;
    this.enforceMinAssignmentMove = enforceMinAssignmentMove;
    this.targetTableSet = new HashSet<>();
    this.rackManager = new RackManager(conf);
    try {
      this.connection = ConnectionFactory.createConnection(this.conf);
    } catch (IOException e) {
      // The connection is required for all operations; surface the failure as
      // unchecked since the constructor declares no IOException.
      throw new RuntimeException(e);
    }
  }
private static void printHelp(Options opt) {
new HelpFormatter().printHelp(
"RegionPlacement < -w | -u | -n | -v | -t | -h | -overwrite -r regionName -f favoredNodes " +
"-diff>" +
" [-l false] [-m false] [-d] [-tables t1,t2,...tn] [-zk zk1,zk2,zk3]" +
" [-fs hdfs://a.b.c.d:9000] [-hbase_root /HBASE]", opt);
}
public void setTargetTableName(String[] tableNames) {
if (tableNames != null) {
for (String table : tableNames)
this.targetTableSet.add(TableName.valueOf(table));
}
}
  /**
   * Builds and initializes a fresh snapshot of the current region assignments
   * by scanning hbase:meta.
   * <p>
   * NOTE(review): this opens a brand-new Connection on every call (it does not
   * reuse {@code this.connection}) and never closes it — presumably the
   * snapshot takes ownership, but verify; this looks like a connection leak.
   * @return the new RegionAssignmentSnapshot
   * @throws IOException
   */
  public SnapshotOfRegionAssignmentFromMeta getRegionAssignmentSnapshot()
      throws IOException {
    SnapshotOfRegionAssignmentFromMeta currentAssignmentShapshot =
        new SnapshotOfRegionAssignmentFromMeta(ConnectionFactory.createConnection(conf));
    currentAssignmentShapshot.initialize();
    return currentAssignmentShapshot;
  }
/**
* Verify the region placement is consistent with the assignment plan
* @param isDetailMode
* @return reports
* @throws IOException
*/
public List<AssignmentVerificationReport> verifyRegionPlacement(boolean isDetailMode)
throws IOException {
System.out.println("Start to verify the region assignment and " +
"generate the verification report");
// Get the region assignment snapshot
SnapshotOfRegionAssignmentFromMeta snapshot = this.getRegionAssignmentSnapshot();
// Get all the tables
Set<TableName> tables = snapshot.getTableSet();
// Get the region locality map
Map<String, Map<String, Float>> regionLocalityMap = null;
if (this.enforceLocality == true) {
regionLocalityMap = FSUtils.getRegionDegreeLocalityMappingFromFS(conf);
}
List<AssignmentVerificationReport> reports = new ArrayList<>();
// Iterate all the tables to fill up the verification report
for (TableName table : tables) {
if (!this.targetTableSet.isEmpty() &&
!this.targetTableSet.contains(table)) {
continue;
}
AssignmentVerificationReport report = new AssignmentVerificationReport();
report.fillUp(table, snapshot, regionLocalityMap);
report.print(isDetailMode);
reports.add(report);
}
return reports;
}
  /**
   * Generate the assignment plan for the existing table.
   * <p>
   * The cost matrices are laid out as [region][slot], where each server owns
   * {@code slotsPerServer} consecutive slot columns; slot {@code j} therefore
   * belongs to server {@code j / slotsPerServer}.
   *
   * @param tableName table whose regions are being placed
   * @param assignmentSnapshot current snapshot of region assignments from meta
   * @param regionLocalityMap per-region map of hostname to HDFS block locality
   *          (may be null when locality enforcement is off)
   * @param plan the plan to populate with primary/secondary/tertiary servers
   * @param munkresForSecondaryAndTertiary if set on true the assignment plan
   * for the tertiary and secondary will be generated with Munkres algorithm,
   * otherwise will be generated using placeSecondaryAndTertiaryRS
   * @throws IOException
   */
  private void genAssignmentPlan(TableName tableName,
      SnapshotOfRegionAssignmentFromMeta assignmentSnapshot,
      Map<String, Map<String, Float>> regionLocalityMap, FavoredNodesPlan plan,
      boolean munkresForSecondaryAndTertiary) throws IOException {
    // Get the all the regions for the current table
    List<RegionInfo> regions =
        assignmentSnapshot.getTableToRegionMap().get(tableName);
    int numRegions = regions.size();
    // Get the current assignment map
    Map<RegionInfo, ServerName> currentAssignmentMap =
        assignmentSnapshot.getRegionToRegionServerMap();
    // Get the all the region servers (Admin is closed via try-with-resources)
    List<ServerName> servers = new ArrayList<>();
    try (Admin admin = this.connection.getAdmin()) {
      servers.addAll(admin.getRegionServers());
    }
    LOG.info("Start to generate assignment plan for " + numRegions +
        " regions from table " + tableName + " with " +
        servers.size() + " region servers");
    // Every server gets the same number of slots (ceiling), so the matrix is
    // square enough for the assignment algorithm; regionSlots >= numRegions.
    int slotsPerServer = (int) Math.ceil((float) numRegions /
        servers.size());
    int regionSlots = slotsPerServer * servers.size();
    // Compute the primary, secondary and tertiary costs for each region/server
    // pair. These costs are based only on node locality and rack locality, and
    // will be modified later.
    float[][] primaryCost = new float[numRegions][regionSlots];
    float[][] secondaryCost = new float[numRegions][regionSlots];
    float[][] tertiaryCost = new float[numRegions][regionSlots];
    if (this.enforceLocality && regionLocalityMap != null) {
      // Transform the locality mapping into a 2D array, assuming that any
      // unspecified locality value is 0.
      float[][] localityPerServer = new float[numRegions][regionSlots];
      for (int i = 0; i < numRegions; i++) {
        Map<String, Float> serverLocalityMap =
            regionLocalityMap.get(regions.get(i).getEncodedName());
        if (serverLocalityMap == null) {
          continue;
        }
        for (int j = 0; j < servers.size(); j++) {
          String serverName = servers.get(j).getHostname();
          if (serverName == null) {
            continue;
          }
          Float locality = serverLocalityMap.get(serverName);
          if (locality == null) {
            continue;
          }
          for (int k = 0; k < slotsPerServer; k++) {
            // If we can't find the locality of a region to a server, which occurs
            // because locality is only reported for servers which have some
            // blocks of a region local, then the locality for that pair is 0.
            localityPerServer[i][j * slotsPerServer + k] = locality.floatValue();
          }
        }
      }
      // Compute the total rack locality for each region in each rack. The total
      // rack locality is the sum of the localities of a region on all servers in
      // a rack.
      Map<String, Map<RegionInfo, Float>> rackRegionLocality = new HashMap<>();
      for (int i = 0; i < numRegions; i++) {
        RegionInfo region = regions.get(i);
        // Stepping j by slotsPerServer visits each server exactly once.
        for (int j = 0; j < regionSlots; j += slotsPerServer) {
          String rack = rackManager.getRack(servers.get(j / slotsPerServer));
          Map<RegionInfo, Float> rackLocality = rackRegionLocality.get(rack);
          if (rackLocality == null) {
            rackLocality = new HashMap<>();
            rackRegionLocality.put(rack, rackLocality);
          }
          Float localityObj = rackLocality.get(region);
          float locality = localityObj == null ? 0 : localityObj.floatValue();
          locality += localityPerServer[i][j];
          rackLocality.put(region, locality);
        }
      }
      for (int i = 0; i < numRegions; i++) {
        for (int j = 0; j < regionSlots; j++) {
          String rack = rackManager.getRack(servers.get(j / slotsPerServer));
          Float totalRackLocalityObj =
              rackRegionLocality.get(rack).get(regions.get(i));
          float totalRackLocality = totalRackLocalityObj == null ?
              0 : totalRackLocalityObj.floatValue();
          // Primary cost aims to favor servers with high node locality and low
          // rack locality, so that secondaries and tertiaries can be chosen for
          // nodes with high rack locality. This might give primaries with
          // slightly less locality at first compared to a cost which only
          // considers the node locality, but should be better in the long run.
          primaryCost[i][j] = 1 - (2 * localityPerServer[i][j] -
              totalRackLocality);
          // Secondary cost aims to favor servers with high node locality and high
          // rack locality since the tertiary will be chosen from the same rack as
          // the secondary. This could be negative, but that is okay.
          secondaryCost[i][j] = 2 - (localityPerServer[i][j] + totalRackLocality);
          // Tertiary cost is only concerned with the node locality. It will later
          // be restricted to only hosts on the same rack as the secondary.
          tertiaryCost[i][j] = 1 - localityPerServer[i][j];
        }
      }
    }
    if (this.enforceMinAssignmentMove && currentAssignmentMap != null) {
      // We want to minimize the number of regions which move as the result of a
      // new assignment. Therefore, slightly penalize any placement which is for
      // a host that is not currently serving the region.
      for (int i = 0; i < numRegions; i++) {
        for (int j = 0; j < servers.size(); j++) {
          ServerName currentAddress = currentAssignmentMap.get(regions.get(i));
          if (currentAddress != null &&
              !currentAddress.equals(servers.get(j))) {
            for (int k = 0; k < slotsPerServer; k++) {
              primaryCost[i][j * slotsPerServer + k] += NOT_CURRENT_HOST_PENALTY;
            }
          }
        }
      }
    }
    // Artificially increase cost of last slot of each server to evenly
    // distribute the slop, otherwise there will be a few servers with too few
    // regions and many servers with the max number of regions.
    // (Note: j steps by slotsPerServer, so this penalizes the FIRST slot
    // column of each server — one slot per server either way.)
    for (int i = 0; i < numRegions; i++) {
      for (int j = 0; j < regionSlots; j += slotsPerServer) {
        primaryCost[i][j] += LAST_SLOT_COST_PENALTY;
        secondaryCost[i][j] += LAST_SLOT_COST_PENALTY;
        tertiaryCost[i][j] += LAST_SLOT_COST_PENALTY;
      }
    }
    // Randomize the matrix to avoid assignment skew from the solver's
    // deterministic traversal order (see RandomizedMatrix).
    RandomizedMatrix randomizedMatrix = new RandomizedMatrix(numRegions,
        regionSlots);
    primaryCost = randomizedMatrix.transform(primaryCost);
    int[] primaryAssignment = new MunkresAssignment(primaryCost).solve();
    primaryAssignment = randomizedMatrix.invertIndices(primaryAssignment);
    // Modify the secondary and tertiary costs for each region/server pair to
    // prevent a region from being assigned to the same rack for both primary
    // and either one of secondary or tertiary.
    for (int i = 0; i < numRegions; i++) {
      int slot = primaryAssignment[i];
      String rack = rackManager.getRack(servers.get(slot / slotsPerServer));
      for (int k = 0; k < servers.size(); k++) {
        if (!rackManager.getRack(servers.get(k)).equals(rack)) {
          continue;
        }
        if (k == slot / slotsPerServer) {
          // Same node, do not place secondary or tertiary here ever.
          for (int m = 0; m < slotsPerServer; m++) {
            secondaryCost[i][k * slotsPerServer + m] = MAX_COST;
            tertiaryCost[i][k * slotsPerServer + m] = MAX_COST;
          }
        } else {
          // Same rack, do not place secondary or tertiary here if possible.
          for (int m = 0; m < slotsPerServer; m++) {
            secondaryCost[i][k * slotsPerServer + m] = AVOID_COST;
            tertiaryCost[i][k * slotsPerServer + m] = AVOID_COST;
          }
        }
      }
    }
    if (munkresForSecondaryAndTertiary) {
      randomizedMatrix = new RandomizedMatrix(numRegions, regionSlots);
      secondaryCost = randomizedMatrix.transform(secondaryCost);
      int[] secondaryAssignment = new MunkresAssignment(secondaryCost).solve();
      secondaryAssignment = randomizedMatrix.invertIndices(secondaryAssignment);
      // Modify the tertiary costs for each region/server pair to ensure that a
      // region is assigned to a tertiary server on the same rack as its secondary
      // server, but not the same server in that rack.
      for (int i = 0; i < numRegions; i++) {
        int slot = secondaryAssignment[i];
        String rack = rackManager.getRack(servers.get(slot / slotsPerServer));
        for (int k = 0; k < servers.size(); k++) {
          if (k == slot / slotsPerServer) {
            // Same node, do not place tertiary here ever.
            for (int m = 0; m < slotsPerServer; m++) {
              tertiaryCost[i][k * slotsPerServer + m] = MAX_COST;
            }
          } else {
            if (rackManager.getRack(servers.get(k)).equals(rack)) {
              continue;
            }
            // Different rack, do not place tertiary here if possible.
            for (int m = 0; m < slotsPerServer; m++) {
              tertiaryCost[i][k * slotsPerServer + m] = AVOID_COST;
            }
          }
        }
      }
      randomizedMatrix = new RandomizedMatrix(numRegions, regionSlots);
      tertiaryCost = randomizedMatrix.transform(tertiaryCost);
      int[] tertiaryAssignment = new MunkresAssignment(tertiaryCost).solve();
      tertiaryAssignment = randomizedMatrix.invertIndices(tertiaryAssignment);
      for (int i = 0; i < numRegions; i++) {
        // Favored nodes are recorded host:port only (NON_STARTCODE).
        List<ServerName> favoredServers
          = new ArrayList<>(FavoredNodeAssignmentHelper.FAVORED_NODES_NUM);
        ServerName s = servers.get(primaryAssignment[i] / slotsPerServer);
        favoredServers.add(ServerName.valueOf(s.getHostname(), s.getPort(),
            ServerName.NON_STARTCODE));
        s = servers.get(secondaryAssignment[i] / slotsPerServer);
        favoredServers.add(ServerName.valueOf(s.getHostname(), s.getPort(),
            ServerName.NON_STARTCODE));
        s = servers.get(tertiaryAssignment[i] / slotsPerServer);
        favoredServers.add(ServerName.valueOf(s.getHostname(), s.getPort(),
            ServerName.NON_STARTCODE));
        // Update the assignment plan
        plan.updateFavoredNodesMap(regions.get(i), favoredServers);
      }
      LOG.info("Generated the assignment plan for " + numRegions +
          " regions from table " + tableName + " with " +
          servers.size() + " region servers");
      LOG.info("Assignment plan for secondary and tertiary generated " +
          "using MunkresAssignment");
    } else {
      Map<RegionInfo, ServerName> primaryRSMap = new HashMap<>();
      for (int i = 0; i < numRegions; i++) {
        primaryRSMap.put(regions.get(i), servers.get(primaryAssignment[i] / slotsPerServer));
      }
      FavoredNodeAssignmentHelper favoredNodeHelper =
          new FavoredNodeAssignmentHelper(servers, conf);
      favoredNodeHelper.initialize();
      Map<RegionInfo, ServerName[]> secondaryAndTertiaryMap =
          favoredNodeHelper.placeSecondaryAndTertiaryWithRestrictions(primaryRSMap);
      for (int i = 0; i < numRegions; i++) {
        // Favored nodes are recorded host:port only (NON_STARTCODE).
        List<ServerName> favoredServers
          = new ArrayList<>(FavoredNodeAssignmentHelper.FAVORED_NODES_NUM);
        RegionInfo currentRegion = regions.get(i);
        ServerName s = primaryRSMap.get(currentRegion);
        favoredServers.add(ServerName.valueOf(s.getHostname(), s.getPort(),
            ServerName.NON_STARTCODE));
        ServerName[] secondaryAndTertiary =
            secondaryAndTertiaryMap.get(currentRegion);
        s = secondaryAndTertiary[0];
        favoredServers.add(ServerName.valueOf(s.getHostname(), s.getPort(),
            ServerName.NON_STARTCODE));
        s = secondaryAndTertiary[1];
        favoredServers.add(ServerName.valueOf(s.getHostname(), s.getPort(),
            ServerName.NON_STARTCODE));
        // Update the assignment plan
        plan.updateFavoredNodesMap(regions.get(i), favoredServers);
      }
      LOG.info("Generated the assignment plan for " + numRegions +
          " regions from table " + tableName + " with " +
          servers.size() + " region servers");
      LOG.info("Assignment plan for secondary and tertiary generated " +
          "using placeSecondaryAndTertiaryWithRestrictions method");
    }
  }
/**
 * Generate a new favored-nodes assignment plan for every table found in the
 * current META snapshot. When {@code targetTableSet} is non-empty, only the
 * tables in that set are planned; all others are skipped.
 *
 * @return the newly generated {@link FavoredNodesPlan}
 * @throws IOException if the region assignment snapshot cannot be read
 */
public FavoredNodesPlan getNewAssignmentPlan() throws IOException {
  // Get the current region assignment snapshot by scanning from the META
  SnapshotOfRegionAssignmentFromMeta assignmentSnapshot =
    this.getRegionAssignmentSnapshot();
  // Get the region locality map; only needed when locality is enforced
  Map<String, Map<String, Float>> regionLocalityMap = null;
  if (this.enforceLocality) {
    regionLocalityMap = FSUtils.getRegionDegreeLocalityMappingFromFS(conf);
  }
  // Initialize the assignment plan
  FavoredNodesPlan plan = new FavoredNodesPlan();
  // Get the table to region mapping
  Map<TableName, List<RegionInfo>> tableToRegionMap =
    assignmentSnapshot.getTableToRegionMap();
  // Fixed: the original concatenated a stray unary '+' before the table count.
  LOG.info("Start to generate the new assignment plan for the " +
      tableToRegionMap.keySet().size() + " tables");
  for (TableName table : tableToRegionMap.keySet()) {
    try {
      if (!this.targetTableSet.isEmpty() &&
          !this.targetTableSet.contains(table)) {
        continue;
      }
      // TODO: maybe run the placement in parallel for each table
      genAssignmentPlan(table, assignmentSnapshot, regionLocalityMap, plan,
          USE_MUNKRES_FOR_PLACING_SECONDARY_AND_TERTIARY);
    } catch (Exception e) {
      // Best effort per table: a failure on one table should not abort the rest.
      // Fixed: missing space between "server" and "for table" in the message.
      LOG.error("Got some exceptions for placing primary region server " +
          "for table " + table + " because " + e);
    }
  }
  LOG.info("Finish to generate the new assignment plan for the " +
      tableToRegionMap.keySet().size() + " tables");
  return plan;
}
/**
 * Some algorithms for solving the assignment problem may traverse workers or
 * jobs in linear order which may result in skewing the assignments of the
 * first jobs in the matrix toward the last workers in the matrix if the
 * costs are uniform. To avoid this kind of clumping, we can randomize the
 * rows and columns of the cost matrix in a reversible way, such that the
 * solution to the assignment problem can be interpreted in terms of the
 * original untransformed cost matrix. Rows and columns are transformed
 * independently such that the elements contained in any row of the input
 * matrix are the same as the elements in the corresponding output matrix,
 * and each row has its elements transformed in the same way. Similarly for
 * columns.
 */
protected static class RandomizedMatrix {
  private final int rows;
  private final int cols;
  // rowTransform[i] is the randomized index of original row i;
  // rowInverse is its inverse permutation (and likewise for columns).
  private final int[] rowTransform;
  private final int[] rowInverse;
  private final int[] colTransform;
  private final int[] colInverse;

  /**
   * Create a randomization scheme for a matrix of a given size.
   * @param rows the number of rows in the matrix
   * @param cols the number of columns in the matrix
   */
  public RandomizedMatrix(int rows, int cols) {
    this.rows = rows;
    this.cols = cols;
    Random random = new Random();
    // The row and column permutations are generated independently with the
    // same procedure; the duplicated shuffle/inverse code from the original
    // is factored into the two helpers below.
    rowTransform = randomPermutation(rows, random);
    rowInverse = invertPermutation(rowTransform);
    colTransform = randomPermutation(cols, random);
    colInverse = invertPermutation(colTransform);
  }

  /**
   * Build a uniformly random permutation of {@code 0..n-1} using a
   * Fisher-Yates shuffle.
   * @param n the permutation length
   * @param random the randomness source
   * @return a shuffled identity permutation
   */
  private static int[] randomPermutation(int n, Random random) {
    int[] p = new int[n];
    for (int i = 0; i < n; i++) {
      p[i] = i;
    }
    for (int i = n - 1; i >= 0; i--) {
      int r = random.nextInt(i + 1);
      int temp = p[r];
      p[r] = p[i];
      p[i] = temp;
    }
    return p;
  }

  /**
   * Compute the inverse of a permutation, i.e. the array q such that
   * {@code q[p[i]] == i} for all i.
   * @param p the permutation to invert
   * @return the inverse permutation
   */
  private static int[] invertPermutation(int[] p) {
    int[] inverse = new int[p.length];
    for (int i = 0; i < p.length; i++) {
      inverse[p[i]] = i;
    }
    return inverse;
  }

  /**
   * Copy a given matrix into a new matrix, transforming each row index and
   * each column index according to the randomization scheme that was created
   * at construction time.
   * @param matrix the cost matrix to transform
   * @return a new matrix with row and column indices transformed
   */
  public float[][] transform(float[][] matrix) {
    float[][] result = new float[rows][cols];
    for (int i = 0; i < rows; i++) {
      for (int j = 0; j < cols; j++) {
        result[rowTransform[i]][colTransform[j]] = matrix[i][j];
      }
    }
    return result;
  }

  /**
   * Copy a given matrix into a new matrix, transforming each row index and
   * each column index according to the inverse of the randomization scheme
   * that was created at construction time.
   * @param matrix the cost matrix to be inverted
   * @return a new matrix with row and column indices inverted
   */
  public float[][] invert(float[][] matrix) {
    float[][] result = new float[rows][cols];
    for (int i = 0; i < rows; i++) {
      for (int j = 0; j < cols; j++) {
        result[rowInverse[i]][colInverse[j]] = matrix[i][j];
      }
    }
    return result;
  }

  /**
   * Given an array where each element {@code indices[i]} represents the
   * randomized column index corresponding to randomized row index {@code i},
   * create a new array with the corresponding inverted indices.
   * @param indices an array of transformed indices to be inverted
   * @return an array of inverted indices
   */
  public int[] invertIndices(int[] indices) {
    int[] result = new int[indices.length];
    for (int i = 0; i < indices.length; i++) {
      result[rowInverse[i]] = colInverse[indices[i]];
    }
    return result;
  }
}
/**
 * Print the assignment plan to the system output stream, one region at a
 * time, sorted by region name.
 * @param plan the plan to print; {@code null} is silently ignored
 */
public static void printAssignmentPlan(FavoredNodesPlan plan) {
  if (plan == null) {
    return;
  }
  LOG.info("========== Start to print the assignment plan ================");
  // A TreeMap copy gives a deterministic, region-name-sorted iteration order.
  Map<String, List<ServerName>> sortedAssignments = new TreeMap<>(plan.getAssignmentMap());
  for (Map.Entry<String, List<ServerName>> assignment : sortedAssignments.entrySet()) {
    LOG.info("Region: " + assignment.getKey() );
    LOG.info("Its favored nodes: " +
        FavoredNodeAssignmentHelper.getFavoredNodesAsString(assignment.getValue()));
  }
  LOG.info("========== Finish to print the assignment plan ================");
}
/**
 * Update the assignment plan into hbase:meta
 * @param plan the assignments plan to be updated into hbase:meta
 * @throws IOException if cannot update assignment plan in hbase:meta
 */
public void updateAssignmentPlanToMeta(FavoredNodesPlan plan)
throws IOException {
  try {
    LOG.info("Start to update the hbase:meta with the new assignment plan");
    Map<String, List<ServerName>> assignmentMap = plan.getAssignmentMap();
    Map<RegionInfo, List<ServerName>> planToUpdate = new HashMap<>(assignmentMap.size());
    // The plan is keyed by region name; translate keys back to RegionInfo
    // via the current META snapshot before writing.
    Map<String, RegionInfo> regionToRegionInfoMap =
      getRegionAssignmentSnapshot().getRegionNameToRegionInfoMap();
    for (Map.Entry<String, List<ServerName>> entry : assignmentMap.entrySet()) {
      planToUpdate.put(regionToRegionInfoMap.get(entry.getKey()), entry.getValue());
    }
    FavoredNodeAssignmentHelper.updateMetaWithFavoredNodesInfo(planToUpdate, conf);
    LOG.info("Updated the hbase:meta with the new assignment plan");
  } catch (Exception e) {
    // Fixed: missing space between "assignment" and "plan" in the message;
    // also pass the exception so the stack trace is not lost.
    LOG.error("Failed to update hbase:meta with the new assignment " +
        "plan because " + e.getMessage(), e);
  }
}
/**
 * Push the favored-nodes assignment plan to every live region server, one
 * server at a time. Failures are collected per server and logged at the end
 * rather than aborting the whole update.
 * @param plan the assignment plan to distribute
 * @throws IOException if the region assignment snapshot cannot be read
 */
private void updateAssignmentPlanToRegionServers(FavoredNodesPlan plan)
throws IOException{
  LOG.info("Start to update the region servers with the new assignment plan");
  // Get the region to region server map
  Map<ServerName, List<RegionInfo>> currentAssignment =
    this.getRegionAssignmentSnapshot().getRegionServerToRegionMap();
  // track of the failed and succeeded updates
  int succeededNum = 0;
  Map<ServerName, Exception> failedUpdateMap = new HashMap<>();
  for (Map.Entry<ServerName, List<RegionInfo>> entry :
    currentAssignment.entrySet()) {
    List<Pair<RegionInfo, List<ServerName>>> regionUpdateInfos = new ArrayList<>();
    try {
      // Keep track of the favored updates for the current region server
      FavoredNodesPlan singleServerPlan = null;
      // Find out all the updates for the current region server
      for (RegionInfo region : entry.getValue()) {
        List<ServerName> favoredServerList = plan.getFavoredNodes(region);
        // Only push complete favored-node lists (primary+secondary+tertiary).
        if (favoredServerList != null &&
            favoredServerList.size() == FavoredNodeAssignmentHelper.FAVORED_NODES_NUM) {
          // Create the single server plan if necessary
          if (singleServerPlan == null) {
            singleServerPlan = new FavoredNodesPlan();
          }
          // Update the single server update
          singleServerPlan.updateFavoredNodesMap(region, favoredServerList);
          regionUpdateInfos.add(new Pair<>(region, favoredServerList));
        }
      }
      if (singleServerPlan != null) {
        // Update the current region server with its updated favored nodes
        BlockingInterface currentRegionServer =
          ((ClusterConnection)this.connection).getAdmin(entry.getKey());
        UpdateFavoredNodesRequest request =
          RequestConverter.buildUpdateFavoredNodesRequest(regionUpdateInfos);
        UpdateFavoredNodesResponse updateFavoredNodesResponse =
          currentRegionServer.updateFavoredNodes(null, request);
        LOG.info("Region server " +
            ProtobufUtil.getServerInfo(null, currentRegionServer).getServerName() +
            " has updated " + updateFavoredNodesResponse.getResponse() + " / " +
            singleServerPlan.size() +
            " regions with the assignment plan");
        succeededNum++;
      }
    } catch (Exception e) {
      // Record the failure and continue with the remaining servers.
      failedUpdateMap.put(entry.getKey(), e);
    }
  }
  // log the succeeded updates
  LOG.info("Updated " + succeededNum + " region servers with " +
      "the new assignment plan");
  // log the failed updates
  int failedNum = failedUpdateMap.size();
  if (failedNum != 0) {
    // Fixed: the original literal contained a stray "+ " which garbled the
    // log line ("the following + 3 region servers").
    LOG.error("Failed to update the following " + failedNum +
        " region servers with its corresponding favored nodes");
    for (Map.Entry<ServerName, Exception> entry :
        failedUpdateMap.entrySet() ) {
      LOG.error("Failed to update " + entry.getKey().getAddress() +
          " because of " + entry.getValue().getMessage());
    }
  }
}
/**
 * Apply the given favored-nodes plan everywhere it matters: first persist it
 * into hbase:meta, then push it out to the live region servers.
 * @param plan the assignment plan to apply
 * @throws IOException if updating the region servers fails
 */
public void updateAssignmentPlan(FavoredNodesPlan plan) throws IOException {
  LOG.info("Start to update the new assignment plan for the hbase:meta table and" +
      " the region servers");
  updateAssignmentPlanToMeta(plan);
  updateAssignmentPlanToRegionServers(plan);
  LOG.info("Finish to update the new assignment plan for the hbase:meta table and" +
      " the region servers");
}
/**
 * Return how many regions will move per table since their primary RS will
 * change.
 *
 * @param newPlan - new AssignmentPlan
 * @return how many primaries will move per table
 * @throws IOException if the region assignment snapshot cannot be read
 */
public Map<TableName, Integer> getRegionsMovement(FavoredNodesPlan newPlan)
    throws IOException {
  Map<TableName, Integer> movesPerTable = new HashMap<>();
  SnapshotOfRegionAssignmentFromMeta snapshot = this.getRegionAssignmentSnapshot();
  Map<TableName, List<RegionInfo>> tableToRegions = snapshot.getTableToRegionMap();
  FavoredNodesPlan oldPlan = snapshot.getExistingAssignmentPlan();
  for (TableName table : snapshot.getTableSet()) {
    // Honor the target-table restriction when one was supplied.
    if (!this.targetTableSet.isEmpty() && !this.targetTableSet.contains(table)) {
      continue;
    }
    int movedPrimaries = 0;
    for (RegionInfo region : tableToRegions.get(table)) {
      List<ServerName> oldServers = oldPlan.getFavoredNodes(region);
      List<ServerName> newServers = newPlan.getFavoredNodes(region);
      if (oldServers == null || newServers == null) {
        continue;
      }
      // A region "moves" when its primary (slot 0) differs between plans.
      if (oldServers.get(0).compareTo(newServers.get(0)) != 0) {
        movedPrimaries++;
      }
    }
    movesPerTable.put(table, movedPrimaries);
  }
  return movesPerTable;
}
/**
 * Compares two plans and check whether the locality dropped or increased
 * (prints the information as a string) also prints the baseline locality
 *
 * @param movesPerTable - how many primary regions will move per table
 * @param regionLocalityMap - locality map from FS
 * @param newPlan - new assignment plan
 * @throws IOException if the region assignment snapshot cannot be read
 */
public void checkDifferencesWithOldPlan(Map<TableName, Integer> movesPerTable,
Map<String, Map<String, Float>> regionLocalityMap, FavoredNodesPlan newPlan)
throws IOException {
// localities for primary, secondary and tertiary
SnapshotOfRegionAssignmentFromMeta snapshot = this.getRegionAssignmentSnapshot();
FavoredNodesPlan oldPlan = snapshot.getExistingAssignmentPlan();
Set<TableName> tables = snapshot.getTableSet();
Map<TableName, List<RegionInfo>> tableToRegionsMap = snapshot.getTableToRegionMap();
for (TableName table : tables) {
// Per-table accumulators, one slot per favored-node position
// (index 0=primary, 1=secondary, 2=tertiary, matching Position.values()).
float[] deltaLocality = new float[3];
float[] locality = new float[3];
// Skip tables outside the target set when one was supplied.
if (!this.targetTableSet.isEmpty()
&& !this.targetTableSet.contains(table)) {
continue;
}
List<RegionInfo> regions = tableToRegionsMap.get(table);
System.out.println("==================================================");
System.out.println("Assignment Plan Projection Report For Table: " + table);
System.out.println("\t Total regions: " + regions.size());
System.out.println("\t" + movesPerTable.get(table)
+ " primaries will move due to their primary has changed");
for (RegionInfo currentRegion : regions) {
// Regions with no locality data contribute nothing to the report.
Map<String, Float> regionLocality = regionLocalityMap.get(currentRegion
.getEncodedName());
if (regionLocality == null) {
continue;
}
List<ServerName> oldServers = oldPlan.getFavoredNodes(currentRegion);
List<ServerName> newServers = newPlan.getFavoredNodes(currentRegion);
if (newServers != null && oldServers != null) {
int i=0;
for (FavoredNodesPlan.Position p : FavoredNodesPlan.Position.values()) {
ServerName newServer = newServers.get(p.ordinal());
ServerName oldServer = oldServers.get(p.ordinal());
// Missing locality entries are treated as 0% locality.
Float oldLocality = 0f;
// NOTE(review): oldServers was already null-checked above, so this
// inner check is redundant; left as-is to preserve behavior.
if (oldServers != null) {
oldLocality = regionLocality.get(oldServer.getHostname());
if (oldLocality == null) {
oldLocality = 0f;
}
locality[i] += oldLocality;
}
Float newLocality = regionLocality.get(newServer.getHostname());
if (newLocality == null) {
newLocality = 0f;
}
deltaLocality[i] += newLocality - oldLocality;
i++;
}
}
}
DecimalFormat df = new java.text.DecimalFormat( "#.##");
for (int i = 0; i < deltaLocality.length; i++) {
System.out.print("\t\t Baseline locality for ");
if (i == 0) {
System.out.print("primary ");
} else if (i == 1) {
System.out.print("secondary ");
} else if (i == 2) {
System.out.print("tertiary ");
}
// NOTE(review): assumes regions is non-empty — a table with zero regions
// would divide by zero here; confirm upstream guarantees.
System.out.println(df.format(100 * locality[i] / regions.size()) + "%");
System.out.print("\t\t Locality will change with the new plan: ");
System.out.println(df.format(100 * deltaLocality[i] / regions.size())
+ "%");
}
// Print dispersion for the existing plan (baseline) and the new plan.
System.out.println("\t Baseline dispersion");
printDispersionScores(table, snapshot, regions.size(), null, true);
System.out.println("\t Projected dispersion");
printDispersionScores(table, snapshot, regions.size(), newPlan, true);
}
}
/**
 * Print the dispersion scores (average, maximum and minimum number of
 * distinct hosts among favored nodes) for one table, either to stdout in a
 * compact form or to the log.
 * @param table the table to report on; skipped when outside the target set
 * @param snapshot the current META assignment snapshot
 * @param numRegions total number of regions in the table (log form only)
 * @param newPlan the plan to evaluate, or null for the existing assignment
 * @param simplePrint true for the compact stdout form, false for the log form
 */
public void printDispersionScores(TableName table,
    SnapshotOfRegionAssignmentFromMeta snapshot, int numRegions, FavoredNodesPlan newPlan,
    boolean simplePrint) {
  if (!this.targetTableSet.isEmpty() && !this.targetTableSet.contains(table)) {
    return;
  }
  AssignmentVerificationReport report = new AssignmentVerificationReport();
  report.fillUpDispersion(table, snapshot, newPlan);
  List<Float> dispersion = report.getDispersionInformation();
  if (!simplePrint) {
    LOG.info("For Table: " + table + " ; #Total Regions: " + numRegions
        + " ; The average dispersion score is " + dispersion.get(0));
    return;
  }
  DecimalFormat df = new java.text.DecimalFormat("#.##");
  System.out.println("\tAvg dispersion score: "
      + df.format(dispersion.get(0)) + " hosts;\tMax dispersion score: "
      + df.format(dispersion.get(1)) + " hosts;\tMin dispersion score: "
      + df.format(dispersion.get(2)) + " hosts;");
}
/**
 * Log the average block locality (per favored-node position) and the
 * dispersion scores of the existing assignment plan, one table at a time.
 * @param regionLocalityMap region encoded name -> (hostname -> locality)
 * @throws IOException if the region assignment snapshot cannot be read
 */
public void printLocalityAndDispersionForCurrentPlan(
Map<String, Map<String, Float>> regionLocalityMap) throws IOException {
SnapshotOfRegionAssignmentFromMeta snapshot = this.getRegionAssignmentSnapshot();
FavoredNodesPlan assignmentPlan = snapshot.getExistingAssignmentPlan();
Set<TableName> tables = snapshot.getTableSet();
Map<TableName, List<RegionInfo>> tableToRegionsMap = snapshot
.getTableToRegionMap();
for (TableName table : tables) {
// Accumulated locality per favored-node position
// (index 0=primary, 1=secondary, 2=tertiary, matching Position.values()).
float[] locality = new float[3];
// Skip tables outside the target set when one was supplied.
if (!this.targetTableSet.isEmpty()
&& !this.targetTableSet.contains(table)) {
continue;
}
List<RegionInfo> regions = tableToRegionsMap.get(table);
for (RegionInfo currentRegion : regions) {
// Regions without locality data contribute nothing to the averages.
Map<String, Float> regionLocality = regionLocalityMap.get(currentRegion
.getEncodedName());
if (regionLocality == null) {
continue;
}
List<ServerName> servers = assignmentPlan.getFavoredNodes(currentRegion);
if (servers != null) {
int i = 0;
for (FavoredNodesPlan.Position p : FavoredNodesPlan.Position.values()) {
ServerName server = servers.get(p.ordinal());
// Missing locality entries are treated as 0% locality.
Float currentLocality = 0f;
// NOTE(review): servers was already null-checked above, so this
// inner check is redundant; left as-is to preserve behavior.
if (servers != null) {
currentLocality = regionLocality.get(server.getHostname());
if (currentLocality == null) {
currentLocality = 0f;
}
locality[i] += currentLocality;
}
i++;
}
}
}
for (int i = 0; i < locality.length; i++) {
String copy = null;
if (i == 0) {
copy = "primary";
} else if (i == 1) {
copy = "secondary";
} else if (i == 2) {
copy = "tertiary" ;
}
// NOTE(review): assumes regions is non-empty — a table with zero regions
// would divide by zero here; confirm upstream guarantees.
float avgLocality = 100 * locality[i] / regions.size();
LOG.info("For Table: " + table + " ; #Total Regions: " + regions.size()
+ " ; The average locality for " + copy+ " is " + avgLocality + " %");
}
printDispersionScores(table, snapshot, regions.size(), null, false);
}
}
/**
 * Parse a comma-separated "host:port,host:port,..." string into ServerName
 * instances with {@link ServerName#NON_STARTCODE}.
 * @param favoredNodesStr The String of favored nodes
 * @return the list of ServerName for the byte array of favored nodes, or
 *         {@code null} when the input yields no entries
 */
public static List<ServerName> getFavoredNodeList(String favoredNodesStr) {
  String[] favoredNodesArray = StringUtils.split(favoredNodesStr, ",");
  if (favoredNodesArray == null) {
    return null;
  }
  // Presize: the result has exactly one entry per parsed host:port token.
  List<ServerName> serverList = new ArrayList<>(favoredNodesArray.length);
  for (String hostNameAndPort : favoredNodesArray) {
    serverList.add(ServerName.valueOf(hostNameAndPort, ServerName.NON_STARTCODE));
  }
  return serverList;
}
/**
 * Command-line entry point. Parses the options, configures the cluster
 * connection, builds a {@code RegionPlacementMaintainer} and then dispatches
 * to exactly one mode: verify, dry-run, write, update, diff, locality report,
 * print, or single-region overwrite. Prints usage when no mode matches or
 * parsing fails.
 * @param args command-line arguments; see the option definitions below
 * @throws IOException if reading or updating assignment information fails
 */
public static void main(String args[]) throws IOException {
// Declare every supported option up front so printHelp can show them all.
Options opt = new Options();
opt.addOption("w", "write", false, "write the assignments to hbase:meta only");
opt.addOption("u", "update", false,
"update the assignments to hbase:meta and RegionServers together");
opt.addOption("n", "dry-run", false, "do not write assignments to META");
opt.addOption("v", "verify", false, "verify current assignments against META");
opt.addOption("p", "print", false, "print the current assignment plan in META");
opt.addOption("h", "help", false, "print usage");
opt.addOption("d", "verification-details", false,
"print the details of verification report");
opt.addOption("zk", true, "to set the zookeeper quorum");
opt.addOption("fs", true, "to set HDFS");
opt.addOption("hbase_root", true, "to set hbase_root directory");
opt.addOption("overwrite", false,
"overwrite the favored nodes for a single region," +
"for example: -update -r regionName -f server1:port,server2:port,server3:port");
opt.addOption("r", true, "The region name that needs to be updated");
opt.addOption("f", true, "The new favored nodes");
opt.addOption("tables", true,
"The list of table names splitted by ',' ;" +
"For example: -tables: t1,t2,...,tn");
opt.addOption("l", "locality", true, "enforce the maximum locality");
opt.addOption("m", "min-move", true, "enforce minimum assignment move");
opt.addOption("diff", false, "calculate difference between assignment plans");
opt.addOption("munkres", false,
"use munkres to place secondaries and tertiaries");
opt.addOption("ld", "locality-dispersion", false, "print locality and dispersion " +
"information for current plan");
try {
CommandLine cmd = new GnuParser().parse(opt, args);
Configuration conf = HBaseConfiguration.create();
// Locality and min-move default to enabled; "false" disables them.
boolean enforceMinAssignmentMove = true;
boolean enforceLocality = true;
boolean verificationDetails = false;
// Read all the options
if ((cmd.hasOption("l") &&
cmd.getOptionValue("l").equalsIgnoreCase("false")) ||
(cmd.hasOption("locality") &&
cmd.getOptionValue("locality").equalsIgnoreCase("false"))) {
enforceLocality = false;
}
if ((cmd.hasOption("m") &&
cmd.getOptionValue("m").equalsIgnoreCase("false")) ||
(cmd.hasOption("min-move") &&
cmd.getOptionValue("min-move").equalsIgnoreCase("false"))) {
enforceMinAssignmentMove = false;
}
// Optional overrides for zookeeper quorum, HDFS and the hbase root dir.
if (cmd.hasOption("zk")) {
conf.set(HConstants.ZOOKEEPER_QUORUM, cmd.getOptionValue("zk"));
LOG.info("Setting the zk quorum: " + conf.get(HConstants.ZOOKEEPER_QUORUM));
}
if (cmd.hasOption("fs")) {
conf.set(FileSystem.FS_DEFAULT_NAME_KEY, cmd.getOptionValue("fs"));
LOG.info("Setting the HDFS: " + conf.get(FileSystem.FS_DEFAULT_NAME_KEY));
}
if (cmd.hasOption("hbase_root")) {
conf.set(HConstants.HBASE_DIR, cmd.getOptionValue("hbase_root"));
LOG.info("Setting the hbase root directory: " + conf.get(HConstants.HBASE_DIR));
}
// Create the region placement obj
RegionPlacementMaintainer rp = new RegionPlacementMaintainer(conf, enforceLocality,
enforceMinAssignmentMove);
if (cmd.hasOption("d") || cmd.hasOption("verification-details")) {
verificationDetails = true;
}
if (cmd.hasOption("tables")) {
String tableNameListStr = cmd.getOptionValue("tables");
String[] tableNames = StringUtils.split(tableNameListStr, ",");
rp.setTargetTableName(tableNames);
}
if (cmd.hasOption("munkres")) {
USE_MUNKRES_FOR_PLACING_SECONDARY_AND_TERTIARY = true;
}
// Read all the modes; only the first matching mode runs.
if (cmd.hasOption("v") || cmd.hasOption("verify")) {
// Verify the region placement.
rp.verifyRegionPlacement(verificationDetails);
} else if (cmd.hasOption("n") || cmd.hasOption("dry-run")) {
// Generate the assignment plan only without updating the hbase:meta and RS
FavoredNodesPlan plan = rp.getNewAssignmentPlan();
printAssignmentPlan(plan);
} else if (cmd.hasOption("w") || cmd.hasOption("write")) {
// Generate the new assignment plan
FavoredNodesPlan plan = rp.getNewAssignmentPlan();
// Print the new assignment plan
printAssignmentPlan(plan);
// Write the new assignment plan to META
rp.updateAssignmentPlanToMeta(plan);
} else if (cmd.hasOption("u") || cmd.hasOption("update")) {
// Generate the new assignment plan
FavoredNodesPlan plan = rp.getNewAssignmentPlan();
// Print the new assignment plan
printAssignmentPlan(plan);
// Update the assignment to hbase:meta and Region Servers
rp.updateAssignmentPlan(plan);
} else if (cmd.hasOption("diff")) {
// Compare the new plan against the current one, then interactively
// offer to apply it.
FavoredNodesPlan newPlan = rp.getNewAssignmentPlan();
Map<String, Map<String, Float>> locality = FSUtils
.getRegionDegreeLocalityMappingFromFS(conf);
Map<TableName, Integer> movesPerTable = rp.getRegionsMovement(newPlan);
rp.checkDifferencesWithOldPlan(movesPerTable, locality, newPlan);
System.out.println("Do you want to update the assignment plan? [y/n]");
Scanner s = new Scanner(System.in);
String input = s.nextLine().trim();
if (input.equals("y")) {
System.out.println("Updating assignment plan...");
rp.updateAssignmentPlan(newPlan);
}
s.close();
} else if (cmd.hasOption("ld")) {
Map<String, Map<String, Float>> locality = FSUtils
.getRegionDegreeLocalityMappingFromFS(conf);
rp.printLocalityAndDispersionForCurrentPlan(locality);
} else if (cmd.hasOption("p") || cmd.hasOption("print")) {
FavoredNodesPlan plan = rp.getRegionAssignmentSnapshot().getExistingAssignmentPlan();
printAssignmentPlan(plan);
} else if (cmd.hasOption("overwrite")) {
// Overwrite favored nodes for a single region; requires both -r and -f.
if (!cmd.hasOption("f") || !cmd.hasOption("r")) {
throw new IllegalArgumentException("Please specify: " +
" -update -r regionName -f server1:port,server2:port,server3:port");
}
String regionName = cmd.getOptionValue("r");
String favoredNodesStr = cmd.getOptionValue("f");
LOG.info("Going to update the region " + regionName + " with the new favored nodes " +
favoredNodesStr);
List<ServerName> favoredNodes = null;
RegionInfo regionInfo =
rp.getRegionAssignmentSnapshot().getRegionNameToRegionInfoMap().get(regionName);
if (regionInfo == null) {
LOG.error("Cannot find the region " + regionName + " from the META");
} else {
try {
favoredNodes = getFavoredNodeList(favoredNodesStr);
} catch (IllegalArgumentException e) {
LOG.error("Cannot parse the invalid favored nodes because " + e);
}
FavoredNodesPlan newPlan = new FavoredNodesPlan();
newPlan.updateFavoredNodesMap(regionInfo, favoredNodes);
rp.updateAssignmentPlan(newPlan);
}
} else {
printHelp(opt);
}
} catch (ParseException e) {
// Any malformed command line falls back to printing usage.
printHelp(opt);
}
}
}
| |
package org.broadinstitute.hellbender.tools.walkers.annotator;
import htsjdk.variant.variantcontext.*;
import org.broadinstitute.hellbender.GATKBaseTest;
import org.broadinstitute.hellbender.utils.variant.GATKVCFConstants;
import org.broadinstitute.hellbender.utils.variant.GATKVCFHeaderLines;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.*;
import static org.mockito.ArgumentMatchers.refEq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public final class ExcessHetUnitTest extends GATKBaseTest {
// Tolerance used for all floating-point comparisons in these tests.
// NOTE(review): could be declared final — it is never reassigned here.
private static double DELTA_PRECISION = .001;
// Fixed test alleles: A is the reference; T and C are alternates.
private Allele Aref= Allele.create("A", true);
private Allele T = Allele.create("T");
private Allele C = Allele.create("C");
// Canonical biallelic PL vectors used to fabricate het and hom-ref genotypes.
private int[] hetPLs = {240, 0, 240};
private int[] homRefPLs= {0, 60, 600};
@Override
public String getToolTestDataDir() {
    // Test fixtures for annotator walkers live under this subdirectory.
    final String dataDir = toolsTestDir + "walkers/annotator/";
    return dataDir;
}
/** Build a diploid Genotype for {@code sample} with the given alleles and PL values. */
private Genotype makeG(String sample, Allele a1, Allele a2, int... pls) {
    final GenotypeBuilder builder = new GenotypeBuilder(sample, Arrays.asList(a1, a2));
    return builder.PL(pls).make();
}
/** Build an unfiltered single-base VariantContext on contig "1" at position 10. */
private VariantContext makeVC(String source, List<Allele> alleles, Genotype... genotypes) {
    final int start = 10;
    final int stop = start; // alleles.contains(ATC) ? start + 3 : start;
    final VariantContextBuilder builder =
            new VariantContextBuilder(source, "1", start, stop, alleles);
    builder.genotypes(Arrays.asList(genotypes));
    builder.unfiltered();
    return builder.make();
}
@Test
public void testExcessHetForMultiallelicVC_compondHets() {
// make sure that compound hets (with no ref) don't add to the het count:
// s3 is T/C, which is heterozygous but carries no reference allele.
VariantContext test1 = makeVC("1", Arrays.asList(Aref, T, C),
makeG("s1", Aref, T, 2530, 0, 7099, 366, 3056, 14931),
makeG("s2", T, T, 7099, 2530, 0, 7099, 366, 3056),
makeG("s3", T, C, 7099, 2530, 7099, 3056, 0, 14931),
makeG("s4", Aref, T, 2530, 0, 7099, 366, 3056, 14931),
makeG("s5", T, T, 7099, 2530, 0, 7099, 366, 3056),
makeG("s6", Aref, T, 2530, 0, 7099, 366, 3056, 14931),
makeG("s7", T, T, 7099, 2530, 0, 7099, 366, 3056),
makeG("s8", Aref, T, 2530, 0, 7099, 366, 3056, 14931),
makeG("s9", T, T, 7099, 2530, 0, 7099, 366, 3056),
makeG("s10", Aref, T, 2530, 0, 7099, 366, 3056, 14931));
// Expected phred-scaled ExcessHet value is a precomputed baseline for
// this genotype configuration.
final double result = ExcessHet.calculateEH(test1, test1.getGenotypes()).getValue();
Assert.assertEquals(result, 5.85, DELTA_PRECISION, "Pass");
}
@Test
public void testExcessHetForMultiallelicVC_compondHetsRefAltFlip() {
// Same configuration as the compound-het test above, except s4's alleles
// are listed alt-first (T/Aref); the result must be identical, proving
// that allele order within a genotype does not affect the het count.
VariantContext test1 = makeVC("1", Arrays.asList(Aref, T, C),
makeG("s1", Aref, T, 2530, 0, 7099, 366, 3056, 14931),
makeG("s2", T, T, 7099, 2530, 0, 7099, 366, 3056),
makeG("s3", T, C, 7099, 2530, 7099, 3056, 0, 14931),
makeG("s4", T, Aref, 2530, 0, 7099, 366, 3056, 14931),
makeG("s5", T, T, 7099, 2530, 0, 7099, 366, 3056),
makeG("s6", Aref, T, 2530, 0, 7099, 366, 3056, 14931),
makeG("s7", T, T, 7099, 2530, 0, 7099, 366, 3056),
makeG("s8", Aref, T, 2530, 0, 7099, 366, 3056, 14931),
makeG("s9", T, T, 7099, 2530, 0, 7099, 366, 3056),
makeG("s10", Aref, T, 2530, 0, 7099, 366, 3056, 14931));
// Must equal the un-flipped test's expected value.
final double result = ExcessHet.calculateEH(test1, test1.getGenotypes()).getValue();
Assert.assertEquals(result, 5.85, DELTA_PRECISION, "Pass");
}
@Test
public void testExcessHetForMultiallelicVC_differentAlts() {
// make sure that hets with different alternate alleles all get counted:
// every sample is ref/alt heterozygous, with a mix of A/T and A/C calls.
VariantContext test2 = makeVC("2", Arrays.asList(Aref, T, C),
makeG("s1", Aref, C, 4878, 1623, 11297, 0, 7970, 8847),
makeG("s2", Aref, T, 2530, 0, 7099, 366, 3056, 14931),
makeG("s3", Aref, T, 3382, 0, 6364, 1817, 5867, 12246),
makeG("s4", Aref, T, 2488, 0, 9110, 3131, 9374, 12505),
makeG("s5", Aref, C, 4530, 2006, 18875, 0, 6847, 23949),
makeG("s6", Aref, T, 5325, 0, 18692, 389, 16014, 24570),
makeG("s7", Aref, T, 2936, 0, 29743, 499, 21979, 38630),
makeG("s8", Aref, T, 6902, 0, 8976, 45, 5844, 9061),
makeG("s9", Aref, T, 5732, 0, 10876, 6394, 11408, 17802),
makeG("s10", Aref, T, 2780, 0, 25045, 824, 23330, 30939));
// Precomputed baseline for 10/10 heterozygous samples.
final double result2 = ExcessHet.calculateEH(test2, test2.getGenotypes()).getValue();
final double result = 25.573;
Assert.assertEquals(result2, result, DELTA_PRECISION, "Pass");
// test the annotate method: it should emit exactly one key (ExcessHet)
// whose string value parses back to the same number.
final Map<String, Object> annots = new ExcessHet().annotate(null, test2, null);
Assert.assertEquals(annots.keySet(), Collections.singleton(GATKVCFConstants.EXCESS_HET_KEY), "annots");
Assert.assertEquals(annots.values().size(), 1, "size");
Assert.assertEquals(Double.parseDouble((String)annots.values().iterator().next()), result, DELTA_PRECISION, "het");
}
@Test
public void testFounderIDsAndPedigreeFile() {
// Restrict the ExcessHet computation to a subset of founder samples,
// supplied either as an explicit ID set or via a pedigree file; both
// routes must produce the same (subset-only) value.
VariantContext test2 = makeVC("2", Arrays.asList(Aref, T, C),
makeG("s1", Aref, C, 4878, 1623, 11297, 0, 7970, 8847),
makeG("s2", Aref, T, 2530, 0, 7099, 366, 3056, 14931),
makeG("s3", Aref, T, 3382, 0, 6364, 1817, 5867, 12246),
makeG("s4", Aref, T, 2488, 0, 9110, 3131, 9374, 12505),
makeG("s5", Aref, C, 4530, 2006, 18875, 0, 6847, 23949),
makeG("s6", Aref, T, 5325, 0, 18692, 389, 16014, 24570),
makeG("s7", Aref, T, 2936, 0, 29743, 499, 21979, 38630),
makeG("s8", Aref, T, 6902, 0, 8976, 45, 5844, 9061),
makeG("s9", Aref, T, 5732, 0, 10876, 6394, 11408, 17802),
makeG("s10", Aref, T, 2780, 0, 25045, 824, 23330, 30939));
// Only the first five samples count as founders.
Set<String> founderIDs = new HashSet<String>();
founderIDs.addAll(Arrays.asList("s1","s2","s3","s4","s5"));
// Precomputed baseline for the 5-founder subset (all five are het).
final double result2 = ExcessHet.calculateEH(test2, test2.getGenotypes(founderIDs)).getValue();
final double result = 11.972;
Assert.assertEquals(result2, result, DELTA_PRECISION, "Pass");
// test the annotate method with FounderIDs
Map<String, Object> annots = new ExcessHet(founderIDs).annotate(null, test2, null);
Assert.assertEquals(annots.keySet(), Collections.singleton(GATKVCFConstants.EXCESS_HET_KEY), "annots");
Assert.assertEquals(annots.values().size(), 1, "size");
Assert.assertEquals(Double.parseDouble((String)annots.values().iterator().next()), result, DELTA_PRECISION, "het");
// test the annotate method with a Pedigree File
// (testPedigree.ped is assumed to name the same five founders — TODO confirm)
annots = new ExcessHet(getTestFileGATKPath("testPedigree.ped")).annotate(null, test2, null);
Assert.assertEquals(annots.keySet(), Collections.singleton(GATKVCFConstants.EXCESS_HET_KEY), "annots");
Assert.assertEquals(annots.values().size(), 1, "size");
Assert.assertEquals(Double.parseDouble((String)annots.values().iterator().next()), result, DELTA_PRECISION, "het");
}
@Test
public void testSingletonVsCommonAllele() {
// A single het among many hom-refs (a singleton) should yield a smaller
// |ExcessHet| than a site where the het allele is common (20 hets).
final List<Genotype> allGTs = new ArrayList<>();
final int numHomRefGTs = 10000;
for (int i = 0; i < numHomRefGTs; i++) {
allGTs.add(makeG("ref" + i, Aref, Aref, homRefPLs));
}
allGTs.add(makeG("het0", Aref, T, hetPLs));
int numHetGTs = 1;
final VariantContext singleton = makeVC("singleton", Arrays.asList(Aref, T), allGTs.toArray(new Genotype[allGTs.size()]));
final double singletonValue = ExcessHet.calculateEH(singleton, singleton.getGenotypes()).getValue();
// Grow the same cohort to 20 hets and recompute.
final int targetNumHetGTs = 20;
for (int i = numHetGTs; i < targetNumHetGTs; i++) {
allGTs.add(makeG("het" + i, Aref, T, hetPLs));
}
final VariantContext common = makeVC("common", Arrays.asList(Aref, T), allGTs.toArray(new Genotype[allGTs.size()]));
final double EHcommon = ExcessHet.calculateEH(common, common.getGenotypes()).getValue();
Assert.assertTrue(Math.abs(singletonValue) < Math.abs(EHcommon), String.format("singleton=%f common=%f", singletonValue, EHcommon));
}
@Test
public void testLargeCohorts() {
// With one million samples, |ExcessHet| should increase monotonically as
// the number of het genotypes grows: 1 het < 100 hets < ~all het.
final List<Genotype> allGTs = new ArrayList<>();
final int numHomRefGTs = 1000000;
for (int i = 0; i < numHomRefGTs; i++) {
allGTs.add(makeG("ref" + i, Aref, Aref, homRefPLs));
}
allGTs.add(makeG("het0", Aref, T, hetPLs));
int numHetGTs = 1;
final VariantContext singleton = makeVC("singleton", Arrays.asList(Aref, T), allGTs.toArray(new Genotype[allGTs.size()]));
final double singletonValue = ExcessHet.calculateEH(singleton, singleton.getGenotypes()).getValue();
// Grow the cohort to 100 hets.
for (int i = numHetGTs; i < 100; i++) {
allGTs.add(makeG("het" + i, Aref, T, hetPLs));
numHetGTs++;
}
final VariantContext hundredton = makeVC("hundredton", Arrays.asList(Aref, T), allGTs.toArray(new Genotype[allGTs.size()]));
final double hundredtonValue = ExcessHet.calculateEH(hundredton, hundredton.getGenotypes()).getValue();
Assert.assertTrue(Math.abs(singletonValue) < Math.abs(hundredtonValue), String.format("singleton=%f hundredton=%f", singletonValue, hundredtonValue));
// Add roughly as many hets as there are hom-refs.
for (int i = numHetGTs; i < numHomRefGTs; i++)
allGTs.add(makeG("het" + i, Aref, T, hetPLs));
final VariantContext common = makeVC("common", Arrays.asList(Aref, T), allGTs.toArray(new Genotype[allGTs.size()]));
final double commonValue = ExcessHet.calculateEH(common, common.getGenotypes()).getValue();
Assert.assertTrue(Math.abs(hundredtonValue) < Math.abs(commonValue), String.format("hundredton=%f common=%f", hundredtonValue, commonValue));
}
@Test
public void testAllHetsForLargeCohorts() {
    final int numGTs = 1000000;

    // A large cohort with a single het carrier: the weakest possible ExcessHet signal.
    final List<Genotype> singletonGTs = new ArrayList<>();
    for (int i = 0; i < numGTs; i++) {
        singletonGTs.add(makeG("ref" + i, Aref, Aref, homRefPLs));
    }
    singletonGTs.add(makeG("het0", Aref, T, hetPLs));
    final VariantContext singleton = makeVC("singleton", Arrays.asList(Aref, T), singletonGTs.toArray(new Genotype[singletonGTs.size()]));
    final double singletonValue = ExcessHet.calculateEH(singleton, singleton.getGenotypes()).getValue();

    // The pathological extreme: every sample in the cohort is het.
    final List<Genotype> allHetGTs = new ArrayList<>();
    for (int i = 0; i < numGTs; i++) {
        allHetGTs.add(makeG("het" + i, Aref, T, hetPLs));
    }
    final VariantContext allHet = makeVC("allHet", Arrays.asList(Aref, T), allHetGTs.toArray(new Genotype[allHetGTs.size()]));
    final double hetsValue = ExcessHet.calculateEH(allHet, allHet.getGenotypes()).getValue();

    Assert.assertTrue(Math.abs(singletonValue) < Math.abs(hetsValue), String.format("singleton=%f allHets=%f", singletonValue, hetsValue));

    // Since all hets is such an extreme case and the sample size is large here, we know that the p-value should be 0.
    // Fix: the original passed a plain concatenated message (no format specifiers) to
    // String.format, which would throw if the embedded value ever contained '%'.  Use a
    // real %s specifier instead; the rendered message is unchanged.
    Assert.assertEquals(hetsValue, ExcessHet.PHRED_SCALED_MIN_P_VALUE, DELTA_PRECISION, String.format("P-value of 0 should be phred scaled to %s", ExcessHet.PHRED_SCALED_MIN_P_VALUE));
}
// Small hand-computed cases for the exact test.
// Columns: {hetCount, homrefCount, homvarCount, expected p-value}.
@DataProvider(name = "smallSets")
public Object[][] counts() {
    return new Object[][]{
            {1, 0, 0, 0.5},
            {1, 1, 0, 0.5},
            {1, 1, 1, 0.7},
            {4, 0, 0, 0.114},
            {2, 1, 1, 0.571},
            {0, 2, 2, 0.957},
            {1, 1, 40, 0.982},
            {3, 0, 39, 0.482},
    };
}
// Verifies ExcessHet.exactTest against hand-computed p-values from the data provider.
@Test(dataProvider = "smallSets")
public void smallSets(int hetCount, int homrefCount, int homvarCount, double expected) {
    Assert.assertEquals(ExcessHet.exactTest(hetCount, homrefCount, homvarCount), expected, DELTA_PRECISION, "Pass");
}
// Negative genotype counts, one per position, each of which must be rejected.
// Columns: {hetCount, refCount, homCount}.
@DataProvider(name = "illegalArgsForExactTest")
public Object[][] illegalArgsForExactTest() {
    return new Object[][]{
            {-1, 1, 1},
            {1, -1, 1},
            {1, 1, -1},
    };
}
// Any negative count must make exactTest throw IllegalArgumentException.
@Test(expectedExceptions = IllegalArgumentException.class, dataProvider = "illegalArgsForExactTest")
public void testIllegalArgs(final int hetCount, final int refCount, final int homCount){
    ExcessHet.exactTest(hetCount, refCount, homCount);
}
// The annotation must advertise exactly the ExcessHet INFO key and its header line.
@Test
public void testLabels(){
    final ExcessHet annotation = new ExcessHet();
    Assert.assertEquals(annotation.getKeyNames(), Collections.singletonList(GATKVCFConstants.EXCESS_HET_KEY));
    Assert.assertEquals(annotation.getDescriptions(), Collections.singletonList(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.EXCESS_HET_KEY)));
}
// A variant context with no genotypes must yield no annotation at all.
@Test
public void testEmptyIfNoGenotypes() throws Exception {
    final VariantContext noGenotypes = mock(VariantContext.class);
    when(noGenotypes.getGenotypesOrderedByName()).thenReturn(Collections.<Genotype>emptyList());
    final Map<String, Object> annotate = new ExcessHet().annotate(null, noGenotypes, null);
    Assert.assertTrue(annotate.isEmpty());
}
}
| |
/*
* Copyright (c) 2001, 2004, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package com.sun.corba.se.impl.encoding;
import java.util.Map;
import java.util.HashMap;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.MalformedInputException;
import java.nio.charset.UnsupportedCharsetException;
import java.nio.charset.UnmappableCharacterException;
import com.sun.corba.se.impl.logging.ORBUtilSystemException;
import com.sun.corba.se.impl.logging.OMGSystemException;
import com.sun.corba.se.spi.logging.CORBALogDomains;
/**
* Collection of classes, interfaces, and factory methods for
* CORBA code set conversion.
*
* This is mainly used to shield other code from the sun.io
* converters which might change, as well as provide some basic
* translation from conversion to CORBA error exceptions. Some
* extra work is required here to facilitate the way CORBA
* says it uses UTF-16 as of the 00-11-03 spec.
*
* REVISIT - Since the nio.Charset and nio.Charset.Encoder/Decoder
* use NIO ByteBuffer and NIO CharBuffer, the interaction
* and interface between this class and the CDR streams
* should be looked at more closely for optimizations to
* avoid unnecessary copying of data between char[] &
* CharBuffer and byte[] & ByteBuffer, especially
* DirectByteBuffers.
*
*/
public class CodeSetConversion
{
    /**
     * Abstraction for char to byte conversion.
     *
     * Must be used in the proper sequence:
     *
     * 1) convert
     * 2) Optional getNumBytes and/or getAlignment (if necessary)
     * 3) getBytes (see warning)
     */
    public abstract static class CTBConverter
    {
        // Perform the conversion of the provided char or String,
        // allowing the caller to query for more information
        // before writing.
        public abstract void convert(char chToConvert);
        public abstract void convert(String strToConvert);

        // How many bytes resulted from the conversion?
        public abstract int getNumBytes();

        // What's the maximum number of bytes per character?
        public abstract float getMaxBytesPerChar();

        public abstract boolean isFixedWidthEncoding();

        // What byte boundary should the stream align to before
        // calling writeBytes?  For instance, a fixed width
        // encoding with 2 bytes per char in a stream which
        // doesn't encapsulate the char's bytes should align
        // on a 2 byte boundary.  (Ex: UTF16 in GIOP1.1)
        //
        // Note: This has no effect on the converted bytes.  It
        // is just information available to the caller.
        public abstract int getAlignment();

        // Get the resulting bytes.  Warning: You must use getNumBytes()
        // to determine the end of the data in the byte array instead
        // of array.length!  The array may be used internally, so don't
        // save references.
        public abstract byte[] getBytes();
    }

    /**
     * Abstraction for byte to char conversion.
     */
    public abstract static class BTCConverter
    {
        // In GIOP 1.1, interoperability can only be achieved with
        // fixed width encodings like UTF-16.  This is because wstrings
        // specified how many code points follow rather than specifying
        // the length in octets.
        public abstract boolean isFixedWidthEncoding();
        public abstract int getFixedCharWidth();

        // Called after getChars to determine the true size of the
        // converted array.
        public abstract int getNumChars();

        // Perform the conversion using length bytes from the given
        // input stream.  Warning: You must use getNumChars() to
        // determine the correct length of the resulting array.
        // The same array may be used internally over multiple
        // calls.
        public abstract char[] getChars(byte[] bytes, int offset, int length);
    }

    /**
     * Implementation of CTBConverter which uses a nio.Charset.CharsetEncoder
     * to do the real work.  Handles translation of exceptions to the
     * appropriate CORBA versions.
     */
    private class JavaCTBConverter extends CTBConverter
    {
        private ORBUtilSystemException wrapper = ORBUtilSystemException.get(
            CORBALogDomains.RPC_ENCODING ) ;

        private OMGSystemException omgWrapper = OMGSystemException.get(
            CORBALogDomains.RPC_ENCODING ) ;

        // nio.Charset.CharsetEncoder actually does the work here
        // have to use it directly rather than through String's interface
        // because we want to know when errors occur during the conversion.
        private CharsetEncoder ctb;

        // Proper alignment for this type of converter.  For instance,
        // ASCII has alignment of 1 (1 byte per char) but UTF16 has
        // alignment of 2 (2 bytes per char)
        private int alignment;

        // Char buffer to hold the input.
        private char[] chars = null;

        // How many bytes are generated from the conversion?
        private int numBytes = 0;

        // How many characters were converted (temporary variable
        // for cross method communication)
        private int numChars = 0;

        // ByteBuffer holding the converted input.  This is necessary
        // since we have to do calculations that require the conversion
        // before writing the array to the stream.
        private ByteBuffer buffer;

        // What code set are we using?
        private OSFCodeSetRegistry.Entry codeset;

        public JavaCTBConverter(OSFCodeSetRegistry.Entry codeset,
                                int alignmentForEncoding) {
            try {
                // Look for a cached encoder first; create and cache one
                // on the first use of this code set.
                ctb = cache.getCharToByteConverter(codeset.getName());
                if (ctb == null) {
                    Charset tmpCharset = Charset.forName(codeset.getName());
                    ctb = tmpCharset.newEncoder();
                    cache.setConverter(codeset.getName(), ctb);
                }
            } catch(IllegalCharsetNameException icne) {
                // This can only happen if one of our Entries has
                // an invalid name.
                throw wrapper.invalidCtbConverterName(icne,codeset.getName());
            } catch(UnsupportedCharsetException ucne) {
                // This can only happen if one of our Entries has
                // an unsupported name.
                throw wrapper.invalidCtbConverterName(ucne,codeset.getName());
            }
            this.codeset = codeset;
            alignment = alignmentForEncoding;
        }

        public final float getMaxBytesPerChar() {
            return ctb.maxBytesPerChar();
        }

        public void convert(char chToConvert) {
            if (chars == null)
                chars = new char[1];

            // The CharToByteConverter only takes a char[]
            chars[0] = chToConvert;
            numChars = 1;
            convertCharArray();
        }

        public void convert(String strToConvert) {
            // Try to save a memory allocation if possible.  Usual
            // space/time trade off.  If we could get the char[] out of
            // the String without copying, that would be great, but
            // it's forbidden since String is immutable.
            if (chars == null || chars.length < strToConvert.length())
                chars = new char[strToConvert.length()];

            numChars = strToConvert.length();
            strToConvert.getChars(0, numChars, chars, 0);
            convertCharArray();
        }

        public final int getNumBytes() {
            return numBytes;
        }

        public final int getAlignment() {
            return alignment;
        }

        public final boolean isFixedWidthEncoding() {
            return codeset.isFixedWidth();
        }

        public byte[] getBytes() {
            // Note that you can't use buffer.length since the buffer might
            // be larger than the actual number of converted bytes depending
            // on the encoding.
            return buffer.array();
        }

        private void convertCharArray() {
            try {
                // Possible optimization of directly converting into the CDR buffer.
                // However, that means the CDR code would have to reserve
                // a 4 byte string length ahead of time, and we'd need a
                // confusing partial conversion scheme for when we couldn't
                // fit everything in the buffer but needed to know the
                // converted length before proceeding due to fragmentation.
                // Then there's the issue of the chunking code.
                //
                // For right now, this is less messy and basic tests don't
                // show more than a 1 ms penalty worst case.  Less than a
                // factor of 2 increase.

                // Convert the characters
                buffer = ctb.encode(CharBuffer.wrap(chars,0,numChars));

                // ByteBuffer returned by the encoder will set its limit
                // to byte immediately after the last written byte.
                numBytes = buffer.limit();

            } catch (IllegalStateException ise) {
                // An encoding operation was already in progress
                throw wrapper.ctbConverterFailure( ise ) ;
            } catch (MalformedInputException mie) {
                // There were illegal Unicode char pairs
                throw wrapper.badUnicodePair( mie ) ;
            } catch (UnmappableCharacterException uce) {
                // A character doesn't map to the desired code set
                // CORBA formal 00-11-03.
                throw omgWrapper.charNotInCodeset( uce ) ;
            } catch (CharacterCodingException cce) {
                // If this happens, then some other encoding error occurred
                throw wrapper.ctbConverterFailure( cce ) ;
            }
        }
    }

    /**
     * Special UTF16 converter which can either always write a BOM
     * or use a specified byte order without one.
     */
    private class UTF16CTBConverter extends JavaCTBConverter
    {
        // Using this constructor, we will always write a BOM
        public UTF16CTBConverter() {
            super(OSFCodeSetRegistry.UTF_16, 2);
        }

        // Using this constructor, we don't use a BOM and use the
        // byte order specified
        public UTF16CTBConverter(boolean littleEndian) {
            super(littleEndian ?
                  OSFCodeSetRegistry.UTF_16LE :
                  OSFCodeSetRegistry.UTF_16BE,
                  2);
        }
    }

    /**
     * Implementation of BTCConverter which uses a nio.Charset.CharsetDecoder
     * for the real work.  Handles translation of exceptions to the
     * appropriate CORBA versions.
     */
    private class JavaBTCConverter extends BTCConverter
    {
        private ORBUtilSystemException wrapper = ORBUtilSystemException.get(
            CORBALogDomains.RPC_ENCODING ) ;

        private OMGSystemException omgWrapper = OMGSystemException.get(
            CORBALogDomains.RPC_ENCODING ) ;

        protected CharsetDecoder btc;
        private char[] buffer;
        private int resultingNumChars;
        private OSFCodeSetRegistry.Entry codeset;

        public JavaBTCConverter(OSFCodeSetRegistry.Entry codeset) {
            // Obtain a Decoder
            btc = this.getConverter(codeset.getName());

            this.codeset = codeset;
        }

        public final boolean isFixedWidthEncoding() {
            return codeset.isFixedWidth();
        }

        // Should only be called if isFixedWidthEncoding is true
        // IMPORTANT: This calls OSFCodeSetRegistry.Entry, not
        // CharsetDecoder.maxCharsPerByte().
        public final int getFixedCharWidth() {
            return codeset.getMaxBytesPerChar();
        }

        public final int getNumChars() {
            return resultingNumChars;
        }

        public char[] getChars(byte[] bytes, int offset, int numBytes) {
            // Possible optimization of reading directly from the CDR
            // byte buffer.  The decoder supposedly can handle
            // incremental conversions in which a char is broken across
            // two convert calls.
            //
            // Basic tests didn't show more than a 1 ms increase
            // worst case.  It's less than a factor of 2 increase.
            // Also makes the interface more difficult.
            try {
                ByteBuffer byteBuf = ByteBuffer.wrap(bytes, offset, numBytes);
                CharBuffer charBuf = btc.decode(byteBuf);

                // CharBuffer returned by the decoder will set its limit
                // to byte immediately after the last written byte.
                resultingNumChars = charBuf.limit();

                // IMPORTANT - It's possible the underlying char[] in the
                //             CharBuffer returned by btc.decode(byteBuf)
                //             is longer in length than the number of characters
                //             decoded.  Hence, the check below to ensure the
                //             char[] returned contains all the chars that have
                //             been decoded and no more.
                if (charBuf.limit() == charBuf.capacity()) {
                    buffer = charBuf.array();
                } else {
                    buffer = new char[charBuf.limit()];
                    charBuf.get(buffer, 0, charBuf.limit()).position(0);
                }

                return buffer;

            } catch (IllegalStateException ile) {
                // A decoding operation was already in progress
                throw wrapper.btcConverterFailure( ile ) ;
            } catch (MalformedInputException mie) {
                // There were illegal Unicode char pairs
                throw wrapper.badUnicodePair( mie ) ;
            } catch (UnmappableCharacterException uce) {
                // A character doesn't map to the desired code set.
                // CORBA formal 00-11-03.
                throw omgWrapper.charNotInCodeset( uce ) ;
            } catch (CharacterCodingException cce) {
                // If this happens, then a character decoding error occurred.
                throw wrapper.btcConverterFailure( cce ) ;
            }
        }

        /**
         * Utility method to find a CharsetDecoder in the
         * cache or create a new one if necessary.  Throws an
         * INTERNAL if the code set is unknown.
         */
        protected CharsetDecoder getConverter(String javaCodeSetName) {

            CharsetDecoder result = null;
            try {
                result = cache.getByteToCharConverter(javaCodeSetName);

                if (result == null) {
                    Charset tmpCharset = Charset.forName(javaCodeSetName);
                    result = tmpCharset.newDecoder();
                    cache.setConverter(javaCodeSetName, result);
                }
            } catch(IllegalCharsetNameException icne) {
                // This can only happen if one of our charset entries has
                // an illegal name.
                throw wrapper.invalidBtcConverterName( icne, javaCodeSetName ) ;
            }

            return result ;
        }
    }

    /**
     * Special converter for UTF16 since it's required to optionally
     * support a byte order marker while the internal Java converters
     * either require it or require that it isn't there.
     *
     * The solution is to check for the byte order marker, and if we
     * need to do something differently, switch internal converters.
     */
    private class UTF16BTCConverter extends JavaBTCConverter
    {
        private boolean defaultToLittleEndian;
        private boolean converterUsesBOM = true;

        private static final char UTF16_BE_MARKER = (char) 0xfeff;
        private static final char UTF16_LE_MARKER = (char) 0xfffe;

        // When there isn't a byte order marker, use the byte
        // order specified.
        public UTF16BTCConverter(boolean defaultToLittleEndian) {
            super(OSFCodeSetRegistry.UTF_16);

            this.defaultToLittleEndian = defaultToLittleEndian;
        }

        public char[] getChars(byte[] bytes, int offset, int numBytes) {
            if (hasUTF16ByteOrderMarker(bytes, offset, numBytes)) {
                if (!converterUsesBOM)
                    switchToConverter(OSFCodeSetRegistry.UTF_16);

                converterUsesBOM = true;

                return super.getChars(bytes, offset, numBytes);
            } else {
                if (converterUsesBOM) {
                    if (defaultToLittleEndian)
                        switchToConverter(OSFCodeSetRegistry.UTF_16LE);
                    else
                        switchToConverter(OSFCodeSetRegistry.UTF_16BE);

                    converterUsesBOM = false;
                }

                return super.getChars(bytes, offset, numBytes);
            }
        }

        /**
         * Utility method for determining if a UTF-16 byte order marker is present.
         */
        private boolean hasUTF16ByteOrderMarker(byte[] array, int offset, int length) {
            // If there aren't enough bytes to represent the marker and data,
            // return false.
            if (length >= 4) {

                int b1 = array[offset] & 0x00FF;
                int b2 = array[offset + 1] & 0x00FF;

                char marker = (char)((b1 << 8) | (b2 << 0));

                return (marker == UTF16_BE_MARKER || marker == UTF16_LE_MARKER);
            } else
                return false;
        }

        /**
         * The current solution for dealing with UTF-16 in CORBA
         * is that if our converter requires byte order markers,
         * and then we see a CORBA wstring/wchar without them, we
         * switch to the converter that doesn't require them.
         */
        private void switchToConverter(OSFCodeSetRegistry.Entry newCodeSet) {

            // Use the getConverter method from our superclass.
            btc = super.getConverter(newCodeSet.getName());
        }
    }

    /**
     * CTB converter factory for single byte or variable length encodings.
     */
    public CTBConverter getCTBConverter(OSFCodeSetRegistry.Entry codeset) {
        int alignment = (!codeset.isFixedWidth() ?
                         1 :
                         codeset.getMaxBytesPerChar());

        return new JavaCTBConverter(codeset, alignment);
    }

    /**
     * CTB converter factory for multibyte (mainly fixed) encodings.
     *
     * Because of the awkwardness with byte order markers and the possibility of
     * using UCS-2, you must specify both the endianness of the stream as well as
     * whether or not to use byte order markers if applicable.  UCS-2 has no byte
     * order markers.  UTF-16 has optional markers.
     *
     * If you select useByteOrderMarkers, there is no guarantee that the encoding
     * will use the endianness specified.
     *
     */
    public CTBConverter getCTBConverter(OSFCodeSetRegistry.Entry codeset,
                                        boolean littleEndian,
                                        boolean useByteOrderMarkers) {

        // UCS2 doesn't have byte order markers, and we're encoding it
        // as UTF-16 since UCS2 isn't available in all Java platforms.
        // They should be identical with only minor differences in
        // negative cases.
        if (codeset == OSFCodeSetRegistry.UCS_2)
            return new UTF16CTBConverter(littleEndian);

        // We can write UTF-16 with or without a byte order marker.
        if (codeset == OSFCodeSetRegistry.UTF_16) {
            if (useByteOrderMarkers)
                return new UTF16CTBConverter();
            else
                return new UTF16CTBConverter(littleEndian);
        }

        // Everything else uses the generic JavaCTBConverter.
        //
        // Variable width encodings are aligned on 1 byte boundaries.
        // A fixed width encoding with a max. of 4 bytes/char should
        // align on a 4 byte boundary.  Note that UTF-16 is a special
        // case because of the optional byte order marker, so it's
        // handled above.
        //
        // This doesn't matter for GIOP 1.2 wchars and wstrings
        // since the encoded bytes are treated as an encapsulation.
        int alignment = (!codeset.isFixedWidth() ?
                         1 :
                         codeset.getMaxBytesPerChar());

        return new JavaCTBConverter(codeset, alignment);
    }

    /**
     * BTCConverter factory for single byte or variable width encodings.
     */
    public BTCConverter getBTCConverter(OSFCodeSetRegistry.Entry codeset) {
        return new JavaBTCConverter(codeset);
    }

    /**
     * BTCConverter factory for fixed width multibyte encodings.
     */
    public BTCConverter getBTCConverter(OSFCodeSetRegistry.Entry codeset,
                                        boolean defaultToLittleEndian) {

        if (codeset == OSFCodeSetRegistry.UTF_16 ||
            codeset == OSFCodeSetRegistry.UCS_2) {

            return new UTF16BTCConverter(defaultToLittleEndian);
        } else {
            return new JavaBTCConverter(codeset);
        }
    }

    /**
     * Follows the code set negotiation algorithm in CORBA formal 99-10-07 13.7.2.
     *
     * Returns the proper negotiated OSF character encoding number or
     * CodeSetConversion.FALLBACK_CODESET.
     */
    private int selectEncoding(CodeSetComponentInfo.CodeSetComponent client,
                               CodeSetComponentInfo.CodeSetComponent server) {

        // A "null" value for the server's nativeCodeSet means that
        // the server desired not to indicate one.  We'll take that
        // to mean that it wants the first thing in its conversion list.
        // If its conversion list is empty, too, then use the fallback
        // codeset.
        int serverNative = server.nativeCodeSet;

        if (serverNative == 0) {
            if (server.conversionCodeSets.length > 0)
                serverNative = server.conversionCodeSets[0];
            else
                return CodeSetConversion.FALLBACK_CODESET;
        }

        if (client.nativeCodeSet == serverNative) {
            // Best case -- client and server don't have to convert
            return serverNative;
        }

        // Is this client capable of converting to the server's
        // native code set?
        for (int i = 0; i < client.conversionCodeSets.length; i++) {
            if (serverNative == client.conversionCodeSets[i]) {
                // The client will convert to the server's
                // native code set.
                return serverNative;
            }
        }

        // Is the server capable of converting to the client's
        // native code set?
        for (int i = 0; i < server.conversionCodeSets.length; i++) {
            if (client.nativeCodeSet == server.conversionCodeSets[i]) {
                // The server will convert to the client's
                // native code set.
                return client.nativeCodeSet;
            }
        }

        // See if there are any code sets that both the server and client
        // support (giving preference to the server).  The order
        // of conversion sets is from most to least desired.
        for (int i = 0; i < server.conversionCodeSets.length; i++) {
            for (int y = 0; y < client.conversionCodeSets.length; y++) {
                if (server.conversionCodeSets[i] == client.conversionCodeSets[y]) {
                    return server.conversionCodeSets[i];
                }
            }
        }

        // Before using the fallback codesets, the spec calls for a
        // compatibility check on the native code sets.  It doesn't make
        // sense because loss free communication is always possible with
        // UTF8 and UTF16, the fall back code sets.  It's also a lot
        // of work to implement.  In the case of incompatibility, the
        // spec says to throw a CODESET_INCOMPATIBLE exception.

        // Use the fallback
        return CodeSetConversion.FALLBACK_CODESET;
    }

    /**
     * Perform the code set negotiation algorithm and come up with
     * the two encodings to use.
     */
    public CodeSetComponentInfo.CodeSetContext negotiate(CodeSetComponentInfo client,
                                                         CodeSetComponentInfo server) {
        int charData
            = selectEncoding(client.getCharComponent(),
                             server.getCharComponent());

        if (charData == CodeSetConversion.FALLBACK_CODESET) {
            charData = OSFCodeSetRegistry.UTF_8.getNumber();
        }

        int wcharData
            = selectEncoding(client.getWCharComponent(),
                             server.getWCharComponent());

        if (wcharData == CodeSetConversion.FALLBACK_CODESET) {
            wcharData = OSFCodeSetRegistry.UTF_16.getNumber();
        }

        return new CodeSetComponentInfo.CodeSetContext(charData,
                                                       wcharData);
    }

    // No one should instantiate a CodeSetConversion but the singleton
    // instance method
    private CodeSetConversion() {}

    // Initialize-on-demand holder: the singleton is created lazily and
    // thread-safely on the first call to impl().
    private static class CodeSetConversionHolder {
        static final CodeSetConversion csc = new CodeSetConversion() ;
    }

    /**
     * CodeSetConversion is a singleton, and this is the access point.
     */
    public static final CodeSetConversion impl() {
        return CodeSetConversionHolder.csc ;
    }

    // NOTE(review): the original also declared an unused
    // "private static CodeSetConversion implementation" field labeled
    // "Singleton instance"; it was never read or written (the holder
    // above is the real singleton), so it has been removed.

    // Number used internally to indicate the fallback code
    // set.
    private static final int FALLBACK_CODESET = 0;

    // Provides a thread local cache for the encoders/decoders.
    private CodeSetCache cache = new CodeSetCache();
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.lib.output;
import java.io.IOException;
import java.text.NumberFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.mapred.FileAlreadyExistsException;
import org.apache.hadoop.mapred.InvalidJobConfException;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskInputOutputContext;
import org.apache.hadoop.mapreduce.security.TokenCache;
/** A base class for {@link OutputFormat}s that read from {@link FileSystem}s.*/
public abstract class FileOutputFormat<K, V> extends OutputFormat<K, V> {
// Configuration key for overriding the base name of output files.
protected static final String BASE_OUTPUT_NAME = "mapreduce.output.basename";
// Default base name for output files ("part-m-00000" etc.).
protected static final String PART = "part";

// Counters maintained by this output format.
public static enum Counter {
    BYTES_WRITTEN
}

/** Construct output file names so that, when an output directory listing is
 * sorted lexicographically, positions correspond to output partitions.*/
private static final NumberFormat NUMBER_FORMAT = NumberFormat.getInstance();
static {
    // Zero-pad partition numbers to 5 digits and disable grouping separators
    // so that "part-r-00010" sorts after "part-r-00009".
    NUMBER_FORMAT.setMinimumIntegerDigits(5);
    NUMBER_FORMAT.setGroupingUsed(false);
}

// Cached committer instance; presumably created lazily by the committer
// accessor, which is not visible in this chunk — TODO confirm.
private FileOutputCommitter committer = null;
/**
* Set whether the output of the job is compressed.
* @param job the job to modify
* @param compress should the output of the job be compressed?
*/
public static void setCompressOutput(Job job, boolean compress) {
    // Record the compression flag in the job's configuration.
    final Configuration conf = job.getConfiguration();
    conf.setBoolean("mapred.output.compress", compress);
}
/**
* Is the job output compressed?
* @param job the Job to look in
* @return <code>true</code> if the job output should be compressed,
* <code>false</code> otherwise
*/
public static boolean getCompressOutput(JobContext job) {
    // Defaults to false when the flag has never been set.
    final Configuration conf = job.getConfiguration();
    return conf.getBoolean("mapred.output.compress", false);
}
/**
* Set the {@link CompressionCodec} to be used to compress job outputs.
* @param job the job to modify
* @param codecClass the {@link CompressionCodec} to be used to
* compress the job outputs
*/
public static void
setOutputCompressorClass(Job job,
                         Class<? extends CompressionCodec> codecClass) {
    // Choosing a codec implies that compression is wanted, so turn it on too.
    setCompressOutput(job, true);
    final Configuration conf = job.getConfiguration();
    conf.setClass("mapred.output.compression.codec",
                  codecClass,
                  CompressionCodec.class);
}
/**
* Get the {@link CompressionCodec} for compressing the job outputs.
* @param job the {@link Job} to look in
* @param defaultValue the {@link CompressionCodec} to return if not set
* @return the {@link CompressionCodec} to be used to compress the
* job outputs
* @throws IllegalArgumentException if the class was specified, but not found
*/
public static Class<? extends CompressionCodec>
getOutputCompressorClass(JobContext job,
                         Class<? extends CompressionCodec> defaultValue) {
    final Configuration conf = job.getConfiguration();
    final String name = conf.get("mapred.output.compression.codec");
    // No codec configured: fall back to the caller-supplied default.
    if (name == null) {
        return defaultValue;
    }
    try {
        return conf.getClassByName(name).asSubclass(CompressionCodec.class);
    } catch (ClassNotFoundException e) {
        // A codec was named but is not on the classpath.
        throw new IllegalArgumentException("Compression codec " + name +
                                           " was not found.", e);
    }
}
/**
 * Create a {@link RecordWriter} for writing this task's output.
 *
 * @param job the task attempt context
 * @return the record writer to use for the task's output
 * @throws IOException on I/O failure
 * @throws InterruptedException if the calling thread is interrupted
 */
public abstract RecordWriter<K, V>
getRecordWriter(TaskAttemptContext job
                ) throws IOException, InterruptedException;
public void checkOutputSpecs(JobContext job
                             ) throws FileAlreadyExistsException, IOException{
    // The output directory must be configured and must not already exist.
    final Path outputPath = getOutputPath(job);
    if (outputPath == null) {
        throw new InvalidJobConfException("Output directory not set.");
    }

    // Acquire a delegation token for the output directory's file system
    // before touching it.
    final Configuration conf = job.getConfiguration();
    TokenCache.obtainTokensForNamenodes(job.getCredentials(),
                                        new Path[] {outputPath},
                                        conf);

    final FileSystem fs = outputPath.getFileSystem(conf);
    if (fs.exists(outputPath)) {
        throw new FileAlreadyExistsException("Output directory " + outputPath +
                                             " already exists");
    }
}
/**
* Set the {@link Path} of the output directory for the map-reduce job.
*
* @param job The job to modify
* @param outputDir the {@link Path} of the output directory for
* the map-reduce job.
*/
public static void setOutputPath(Job job, Path outputDir) {
    // Store the output directory as a string in the job configuration.
    final Configuration conf = job.getConfiguration();
    conf.set("mapred.output.dir", outputDir.toString());
}
/**
* Get the {@link Path} to the output directory for the map-reduce job.
*
* @return the {@link Path} to the output directory for the map-reduce job.
* @see FileOutputFormat#getWorkOutputPath(TaskInputOutputContext)
*/
public static Path getOutputPath(JobContext job) {
    final String dir = job.getConfiguration().get("mapred.output.dir");
    // null means the output directory was never configured.
    if (dir == null) {
        return null;
    }
    return new Path(dir);
}
/**
* Get the {@link Path} to the task's temporary output directory
* for the map-reduce job
*
* <h4 id="SideEffectFiles">Tasks' Side-Effect Files</h4>
*
* <p>Some applications need to create/write-to side-files, which differ from
* the actual job-outputs.
*
* <p>In such cases there could be issues with 2 instances of the same TIP
* (running simultaneously e.g. speculative tasks) trying to open/write-to the
* same file (path) on HDFS. Hence the application-writer will have to pick
* unique names per task-attempt (e.g. using the attemptid, say
* <tt>attempt_200709221812_0001_m_000000_0</tt>), not just per TIP.</p>
*
* <p>To get around this the Map-Reduce framework helps the application-writer
* out by maintaining a special
* <tt>${mapred.output.dir}/_temporary/_${taskid}</tt>
* sub-directory for each task-attempt on HDFS where the output of the
* task-attempt goes. On successful completion of the task-attempt the files
* in the <tt>${mapred.output.dir}/_temporary/_${taskid}</tt> (only)
* are <i>promoted</i> to <tt>${mapred.output.dir}</tt>. Of course, the
* framework discards the sub-directory of unsuccessful task-attempts. This
* is completely transparent to the application.</p>
*
* <p>The application-writer can take advantage of this by creating any
* side-files required in a work directory during execution
* of his task i.e. via
* {@link #getWorkOutputPath(TaskInputOutputContext)}, and
* the framework will move them out similarly - thus she doesn't have to pick
* unique paths per task-attempt.</p>
*
* <p>The entire discussion holds true for maps of jobs with
* reducer=NONE (i.e. 0 reduces) since output of the map, in that case,
* goes directly to HDFS.</p>
*
* @return the {@link Path} to the task's temporary output directory
* for the map-reduce job.
*/
public static Path getWorkOutputPath(TaskInputOutputContext<?,?,?,?> context
) throws IOException,
InterruptedException {
FileOutputCommitter committer = (FileOutputCommitter)
context.getOutputCommitter();
return committer.getWorkPath();
}
  /**
   * Helper function to generate a {@link Path} for a file that is unique for
   * the task within the job output directory.
   *
   * <p>The path can be used to create custom files from within the map and
   * reduce tasks. The path name will be unique for each task. The path parent
   * will be the job output directory.</p>
   *
   * <p>This method uses the {@link #getUniqueFile} method to make the file name
   * unique for the task.</p>
   *
   * @param context the context for the task.
   * @param name the name for the file.
   * @param extension the extension for the file
   * @return a unique path across all tasks of the job.
   */
  public
  static Path getPathForWorkFile(TaskInputOutputContext<?,?,?,?> context,
                                 String name,
                                 String extension
                                ) throws IOException, InterruptedException {
    return new Path(getWorkOutputPath(context),
                    getUniqueFile(context, name, extension));
  }
/**
* Generate a unique filename, based on the task id, name, and extension
* @param context the task that is calling this
* @param name the base filename
* @param extension the filename extension
* @return a string like $name-[mr]-$id$extension
*/
public synchronized static String getUniqueFile(TaskAttemptContext context,
String name,
String extension) {
TaskID taskId = context.getTaskAttemptID().getTaskID();
int partition = taskId.getId();
StringBuilder result = new StringBuilder();
result.append(name);
result.append('-');
result.append(taskId.isMap() ? 'm' : 'r');
result.append('-');
result.append(NUMBER_FORMAT.format(partition));
result.append(extension);
return result.toString();
}
/**
* Get the default path and filename for the output format.
* @param context the task context
* @param extension an extension to add to the filename
* @return a full path $output/_temporary/$taskid/part-[mr]-$id
* @throws IOException
*/
public Path getDefaultWorkFile(TaskAttemptContext context,
String extension) throws IOException{
FileOutputCommitter committer =
(FileOutputCommitter) getOutputCommitter(context);
return new Path(committer.getWorkPath(), getUniqueFile(context,
getOutputName(context), extension));
}
  /**
   * Get the base output name for the output file.
   *
   * @param job the job context to read the configuration from
   * @return the configured base name, or the default {@code PART} value when
   *         none has been set
   */
  protected static String getOutputName(JobContext job) {
    return job.getConfiguration().get(BASE_OUTPUT_NAME, PART);
  }
  /**
   * Set the base output name for output file to be created.
   *
   * @param job the job context whose configuration is updated
   * @param name the base name to use for output files
   */
  protected static void setOutputName(JobContext job, String name) {
    job.getConfiguration().set(BASE_OUTPUT_NAME, name);
  }
  /**
   * Returns the {@link OutputCommitter} for this format, creating a
   * {@link FileOutputCommitter} over the configured output path on first use.
   * Synchronized so that the lazily created committer is initialized at most
   * once and safely published to concurrent callers.
   */
  public synchronized
     OutputCommitter getOutputCommitter(TaskAttemptContext context
                                        ) throws IOException {
    if (committer == null) {
      // Lazily create the committer rooted at the job's output directory.
      Path output = getOutputPath(context);
      committer = new FileOutputCommitter(output, context);
    }
    return committer;
  }
}
| |
/*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.input;
import com.google.common.collect.Queues;
import org.terasology.config.ControllerConfig.ControllerInfo;
import org.terasology.config.facade.InputDeviceConfiguration;
import org.terasology.engine.Time;
import org.terasology.engine.subsystem.DisplayDevice;
import org.terasology.engine.subsystem.config.BindsManager;
import org.terasology.entitySystem.entity.EntityRef;
import org.terasology.entitySystem.systems.BaseComponentSystem;
import org.terasology.input.cameraTarget.CameraTargetSystem;
import org.terasology.input.device.ControllerAction;
import org.terasology.input.device.KeyboardAction;
import org.terasology.input.device.KeyboardDevice;
import org.terasology.input.device.MouseAction;
import org.terasology.input.device.MouseDevice;
import org.terasology.input.device.nulldevices.NullControllerDevice;
import org.terasology.input.device.nulldevices.NullKeyboardDevice;
import org.terasology.input.device.nulldevices.NullMouseDevice;
import org.terasology.input.events.InputEvent;
import org.terasology.input.events.KeyDownEvent;
import org.terasology.input.events.KeyEvent;
import org.terasology.input.events.KeyRepeatEvent;
import org.terasology.input.events.KeyUpEvent;
import org.terasology.input.events.LeftMouseDownButtonEvent;
import org.terasology.input.events.LeftMouseUpButtonEvent;
import org.terasology.input.events.MouseAxisEvent;
import org.terasology.input.events.MouseAxisEvent.MouseAxis;
import org.terasology.input.events.MouseButtonEvent;
import org.terasology.input.events.MouseDownButtonEvent;
import org.terasology.input.events.MouseUpButtonEvent;
import org.terasology.input.events.MouseWheelEvent;
import org.terasology.input.events.RightMouseDownButtonEvent;
import org.terasology.input.events.RightMouseUpButtonEvent;
import org.terasology.input.internal.AbstractBindableAxis;
import org.terasology.input.internal.BindableRealAxis;
import org.terasology.logic.players.LocalPlayer;
import org.terasology.math.geom.Vector2i;
import org.terasology.registry.In;
import java.util.Queue;
/**
* This system processes input, sending it out as events against the LocalPlayer entity.
* <br><br>
* In addition to raw keyboard and mouse input, the system handles Bind Buttons and Bind Axis, which can be mapped
* to one or more inputs.
*/
public class InputSystem extends BaseComponentSystem {
    @In
    private InputDeviceConfiguration inputDeviceConfig;
    @In
    private BindsManager bindsManager;
    @In
    private Time time;
    @In
    private DisplayDevice display;
    @In
    private LocalPlayer localPlayer;
    @In
    private CameraTargetSystem targetSystem;
    // Devices default to null implementations so the system is safe to run
    // before real hardware devices are attached (e.g. headless).
    private MouseDevice mouse = new NullMouseDevice();
    private KeyboardDevice keyboard = new NullKeyboardDevice();
    private ControllerDevice controllers = new NullControllerDevice();
    // Key actions injected via the simulate*KeyStroke methods, merged into the
    // real keyboard queue on the next processKeyboardInput pass.
    private Queue<KeyboardAction> simulatedKeys = Queues.newArrayDeque();
    // Entities that receive input events, refreshed every update: the local
    // player's client entity first, then its character entity.
    private EntityRef[] inputEntities;
    public void setMouseDevice(MouseDevice mouseDevice) {
        this.mouse = mouseDevice;
    }
    public void setKeyboardDevice(KeyboardDevice keyboardDevice) {
        this.keyboard = keyboardDevice;
    }
    public MouseDevice getMouseDevice() {
        return mouse;
    }
    public KeyboardDevice getKeyboard() {
        return keyboard;
    }
    public ControllerDevice getControllerDevice() {
        return controllers;
    }
    public void setControllerDevice(ControllerDevice controllerDevice) {
        this.controllers = controllerDevice;
    }
    @Override
    public void initialise() {
        bindsManager.registerBinds();
    }
    /**
     * Per-frame entry point: refreshes the target entities, then drains each
     * device queue (mouse, keyboard, controller) before servicing bind
     * repeats and axis updates.
     *
     * @param delta time step in seconds, forwarded to every generated event
     */
    public void update(float delta) {
        updateInputEntities();
        processMouseInput(delta);
        processKeyboardInput(delta);
        processControllerInput(delta);
        processBindRepeats(delta);
        processBindAxis(delta);
    }
    // Mouse/controller input is only processed while the display has focus.
    public boolean isCapturingMouse() {
        return display.hasFocus();
    }
    private void updateInputEntities() {
        inputEntities = new EntityRef[] {localPlayer.getClientEntity(), localPlayer.getCharacterEntity()};
    }
    /**
     * Converts raw mouse state into axis events (movement) and button/wheel
     * events (clicks/scrolls). Skipped entirely when the display lacks focus.
     */
    private void processMouseInput(float delta) {
        if (!isCapturingMouse()) {
            return;
        }
        Vector2i deltaMouse = mouse.getDelta();
        //process mouse movement x axis
        if (deltaMouse.x != 0) {
            float xValue = deltaMouse.x * inputDeviceConfig.getMouseSensitivity();
            MouseAxisEvent event = MouseAxisEvent.create(MouseAxis.X, xValue, delta);
            send(event);
        }
        //process mouse movement y axis; optionally inverted per user config
        if (deltaMouse.y != 0) {
            int yMovement = inputDeviceConfig.isMouseYAxisInverted() ? deltaMouse.y * -1 : deltaMouse.y;
            float yValue = yMovement * inputDeviceConfig.getMouseSensitivity();
            MouseAxisEvent event = MouseAxisEvent.create(MouseAxis.Y, yValue, delta);
            send(event);
        }
        //process mouse clicks
        for (MouseAction action : mouse.getInputQueue()) {
            switch (action.getInput().getType()) {
                case MOUSE_BUTTON:
                    processMouseButtonInput(delta, action);
                    break;
                case MOUSE_WHEEL:
                    processMouseWheelInput(delta, action);
                    break;
                default:
                    break;
            }
        }
    }
    /**
     * Sends a button event for a single mouse action and, if the button is
     * bound, updates the bind with whether the raw event was consumed.
     */
    private void processMouseButtonInput(float delta, MouseAction action) {
        int id = action.getInput().getId();
        if (id != MouseInput.NONE.getId()) {
            MouseInput button = MouseInput.find(action.getInput().getType(), action.getInput().getId());
            boolean consumed = sendMouseEvent(button, action.getState().isDown(), action.getMousePosition(), delta);
            BindableButton bind = bindsManager.getMouseButtonBinds().get(button);
            if (bind != null) {
                updateBindState(bind, action.getInput(), action.getState().isDown(), delta, consumed);
            }
        }
    }
    /**
     * Sends a wheel event and pulses the matching wheel bind (a down+up pair)
     * once per wheel turn so button-style binds see discrete presses.
     */
    private void processMouseWheelInput(float delta, MouseAction action) {
        int dir = action.getInput().getId();
        if (dir != 0 && action.getTurns() != 0) {
            boolean consumed = sendMouseWheelEvent(action.getMousePosition(), dir * action.getTurns(), delta);
            BindableButton bind = (dir == 1) ? bindsManager.getMouseWheelUpBind() : bindsManager.getMouseWheelDownBind();
            if (bind != null) {
                for (int i = 0; i < action.getTurns(); ++i) {
                    updateBindState(bind, action.getInput(), true, delta, consumed);
                    updateBindState(bind, action.getInput(), false, delta, consumed);
                }
            }
        }
    }
    /**
     * Routes controller actions to button binds or axis binds. Note that raw
     * controller actions are not sent as entity events (see TODO below).
     */
    private void processControllerInput(float delta) {
        if (!isCapturingMouse()) {
            return;
        }
        for (ControllerAction action : controllers.getInputQueue()) {
            // TODO: send event to entity system
            boolean consumed = false;
            Input input = action.getInput();
            if (input.getType() == InputType.CONTROLLER_BUTTON) {
                processControllerButtonInput(delta, action, consumed, input);
            } else if (input.getType() == InputType.CONTROLLER_AXIS) {
                processControllerAxisInput(action, input);
            }
        }
    }
    private void processControllerButtonInput(float delta, ControllerAction action, boolean consumed, Input input) {
        BindableButton bind = bindsManager.getControllerBinds().get(input);
        if (bind != null) {
            boolean pressed = action.getState() == ButtonState.DOWN;
            updateBindState(bind, input, pressed, delta, consumed);
        }
    }
    /**
     * Feeds a controller axis value into its bound axis, flipping the sign
     * when the per-controller config marks that axis as inverted.
     */
    private void processControllerAxisInput(ControllerAction action, Input input) {
        BindableRealAxis axis = bindsManager.getControllerAxisBinds().get(input);
        if (axis != null) {
            ControllerInfo info = inputDeviceConfig.getController(action.getController());
            boolean isX = action.getInput().getId() == ControllerId.X_AXIS;
            boolean isY = action.getInput().getId() == ControllerId.Y_AXIS;
            boolean isZ = action.getInput().getId() == ControllerId.Z_AXIS;
            float f = (isX && info.isInvertX() || isY && info.isInvertY() || isZ && info.isInvertZ()) ? -1 : 1;
            axis.setTargetValue(action.getAxisValue() * f);
        }
    }
    // Forwards a state change to a bind, bundling the current camera-target
    // details so bind handlers see what the player is looking at.
    private void updateBindState(BindableButton bind, Input input, boolean pressed, float delta, boolean consumed) {
        bind.updateBindState(
                input,
                pressed,
                delta, inputEntities,
                targetSystem.getTarget(),
                targetSystem.getTargetBlockPosition(),
                targetSystem.getHitPosition(),
                targetSystem.getHitNormal(),
                consumed,
                time.getGameTimeInMs());
    }
    /**
     * Simulated key strokes: To simulate input from a keyboard, we simply have to extract the Input associated to the action
     * and this function adds it to the keyboard's input queue.
     * @param key The key to be simulated.
     */
    public void simulateSingleKeyStroke(Input key) {
        /* TODO: Perhaps there is a better way to extract the character.
            All the simulate functions extract keyChar by getting the first character from its display string.
            While it works for normal character buttons, might not work for special buttons if required later.
         */
        char keyChar = key.getDisplayName().charAt(0);
        KeyboardAction action = new KeyboardAction(key, ButtonState.DOWN, keyChar);
        simulatedKeys.add(action);
    }
    // Queues a REPEAT-state key action; see simulateSingleKeyStroke for the
    // keyChar extraction caveat.
    public void simulateRepeatedKeyStroke(Input key) {
        char keyChar = key.getDisplayName().charAt(0);
        KeyboardAction action = new KeyboardAction(key, ButtonState.REPEAT, keyChar);
        simulatedKeys.add(action);
    }
    // Queues an UP-state key action to release a previously simulated key.
    public void cancelSimulatedKeyStroke(Input key) {
        char keyChar = key.getDisplayName().charAt(0);
        KeyboardAction action = new KeyboardAction(key, ButtonState.UP, keyChar);
        simulatedKeys.add(action);
    }
    /**
     * Processes real and simulated key actions: sends the key event, then
     * updates any bound button. REPEAT states do not toggle binds here —
     * repeats are handled separately by processBindRepeats.
     */
    private void processKeyboardInput(float delta) {
        Queue<KeyboardAction> keyQueue = keyboard.getInputQueue();
        keyQueue.addAll(simulatedKeys);
        simulatedKeys.clear();
        for (KeyboardAction action : keyQueue) {
            boolean consumed = sendKeyEvent(action.getInput(), action.getInputChar(), action.getState(), delta);
            // Update bind
            BindableButton bind = bindsManager.getKeyBinds().get(action.getInput().getId());
            if (bind != null && action.getState() != ButtonState.REPEAT) {
                boolean pressed = action.getState() == ButtonState.DOWN;
                updateBindState(bind, action.getInput(), pressed, delta, consumed);
            }
        }
    }
    private void processBindAxis(float delta) {
        for (AbstractBindableAxis axis : bindsManager.getAxisBinds()) {
            axis.update(inputEntities, delta, targetSystem.getTarget(), targetSystem.getTargetBlockPosition(),
                    targetSystem.getHitPosition(), targetSystem.getHitNormal());
        }
    }
    private void processBindRepeats(float delta) {
        for (BindableButton button : bindsManager.getButtonBinds()) {
            button.update(inputEntities,
                    delta,
                    targetSystem.getTarget(),
                    targetSystem.getTargetBlockPosition(),
                    targetSystem.getHitPosition(),
                    targetSystem.getHitNormal(),
                    time.getGameTimeInMs());
        }
    }
    /**
     * Creates and dispatches the key event matching the button state.
     *
     * @return whether the event was consumed by a receiving entity
     */
    private boolean sendKeyEvent(Input key, char keyChar, ButtonState state, float delta) {
        KeyEvent event;
        switch (state) {
            case UP:
                event = KeyUpEvent.create(key, keyChar, delta);
                break;
            case DOWN:
                event = KeyDownEvent.create(key, keyChar, delta);
                break;
            case REPEAT:
                event = KeyRepeatEvent.create(key, keyChar, delta);
                break;
            default:
                return false;
        }
        boolean consumed = send(event);
        // Events are pooled via create(); reset after dispatch.
        event.reset();
        return consumed;
    }
    /**
     * Creates and dispatches the mouse button event; left and right buttons
     * have dedicated event types, other buttons use the generic ones.
     *
     * @return whether the event was consumed by a receiving entity
     */
    private boolean sendMouseEvent(MouseInput button, boolean buttonDown, Vector2i position, float delta) {
        MouseButtonEvent event;
        switch (button) {
            case NONE:
                return false;
            case MOUSE_LEFT:
                event = (buttonDown) ? LeftMouseDownButtonEvent.create(position, delta) : LeftMouseUpButtonEvent.create(position, delta);
                break;
            case MOUSE_RIGHT:
                event = (buttonDown) ? RightMouseDownButtonEvent.create(position, delta) : RightMouseUpButtonEvent.create(position, delta);
                break;
            default:
                event = (buttonDown) ? MouseDownButtonEvent.create(button, position, delta) : MouseUpButtonEvent.create(button, position, delta);
                break;
        }
        boolean consumed = send(event);
        event.reset();
        return consumed;
    }
    private boolean sendMouseWheelEvent(Vector2i pos, int wheelTurns, float delta) {
        MouseWheelEvent mouseWheelEvent = new MouseWheelEvent(pos, wheelTurns, delta);
        return send(mouseWheelEvent);
    }
    /**
     * Dispatches an event to the input entities in order (client entity, then
     * character entity), stopping as soon as one consumes it.
     *
     * @return whether the event ended up consumed
     */
    private boolean send(InputEvent event) {
        setupTarget(event);
        for (EntityRef entity : inputEntities) {
            entity.send(event);
            if (event.isConsumed()) {
                break;
            }
        }
        return event.isConsumed();
    }
    private void setupTarget(InputEvent event) {
        if (targetSystem.isTargetAvailable()) {
            event.setTargetInfo(targetSystem.getTarget(), targetSystem.getTargetBlockPosition(), targetSystem.getHitPosition(), targetSystem.getHitNormal());
        }
    }
    /**
     * Drop all pending/unprocessed input events.
     */
    public void drainQueues() {
        // NOTE(review): relies on getInputQueue() draining the device queue as
        // a side effect; the returned queues are discarded — confirm this is
        // the device contract.
        mouse.getInputQueue();
        keyboard.getInputQueue();
        controllers.getInputQueue();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.distribution.packaging.impl;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.io.IOUtils;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.distribution.DistributionRequest;
import org.apache.sling.distribution.DistributionRequestType;
import org.apache.sling.distribution.common.DistributionException;
import org.apache.sling.distribution.packaging.DistributionPackage;
import org.apache.sling.distribution.packaging.DistributionPackageBuilder;
import org.apache.sling.distribution.packaging.DistributionPackageInfo;
import org.apache.sling.distribution.serialization.impl.vlt.VltUtils;
import org.apache.sling.distribution.util.DistributionJcrUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* base abstract implementation of a JCR based {@link DistributionPackageBuilder}
*/
public abstract class AbstractDistributionPackageBuilder implements DistributionPackageBuilder {

    /** Id prefix marking a package id that wraps another (local) package id. */
    private static final String REFERENCE_PREFIX = "reference-";

    private final Logger log = LoggerFactory.getLogger(getClass());

    /** Package type handled by this builder; guards install of foreign packages. */
    private final String type;

    protected AbstractDistributionPackageBuilder(String type) {
        this.type = type;
    }

    public String getType() {
        return type;
    }

    /**
     * Creates a package for the given request. ADD requests are delegated to
     * the concrete builder; DELETE/PULL/TEST requests carry no content payload
     * and are represented by a lightweight {@link SimpleDistributionPackage}.
     *
     * @throws DistributionException for an unknown request type
     */
    @Nonnull
    public DistributionPackage createPackage(@Nonnull ResourceResolver resourceResolver, @Nonnull DistributionRequest request)
            throws DistributionException {
        DistributionPackage distributionPackage;
        request = VltUtils.sanitizeRequest(request);
        DistributionRequestType requestType = request.getRequestType();
        if (DistributionRequestType.ADD.equals(requestType)) {
            // NOTE(review): createPackageForAdd is @CheckForNull but this method is
            // declared @Nonnull and dereferences the result below — confirm the
            // concrete builders never return null for ADD.
            distributionPackage = createPackageForAdd(resourceResolver, request);
        } else if (DistributionRequestType.DELETE.equals(requestType)
                || DistributionRequestType.PULL.equals(requestType)
                || DistributionRequestType.TEST.equals(requestType)) {
            distributionPackage = new SimpleDistributionPackage(request, type);
        } else {
            throw new DistributionException("unknown action type " + requestType);
        }
        DistributionPackageUtils.fillInfo(distributionPackage.getInfo(), request);
        return distributionPackage;
    }

    /**
     * Reads a package from a stream: first the header info, then either a
     * {@link SimpleDistributionPackage} or, failing that, the concrete
     * builder's own format. The stream is rewound between attempts.
     */
    @Nonnull
    public DistributionPackage readPackage(@Nonnull ResourceResolver resourceResolver, @Nonnull InputStream stream) throws DistributionException {
        if (!stream.markSupported()) {
            stream = new BufferedInputStream(stream);
        }
        Map<String, Object> headerInfo = new HashMap<String, Object>();
        DistributionPackageUtils.readInfo(stream, headerInfo);
        resetQuietly(stream);
        DistributionPackage distributionPackage = SimpleDistributionPackage.fromStream(stream, type);
        resetQuietly(stream);
        // not a simple package
        if (distributionPackage == null) {
            distributionPackage = readPackageInternal(resourceResolver, stream);
        }
        distributionPackage.getInfo().putAll(headerInfo);
        return distributionPackage;
    }

    /**
     * Rewinds the stream to its mark, ignoring failures: a stream without a
     * valid mark simply stays where it is, which callers tolerate.
     */
    private static void resetQuietly(InputStream stream) {
        try {
            stream.reset();
        } catch (IOException e) {
            // do nothing
        }
    }

    /**
     * Installs a package according to its request type: DELETE removes the
     * listed paths, TEST is a no-op success, ADD delegates to the concrete
     * builder. Any other type yields {@code false}.
     *
     * @throws DistributionException if the package type does not match this builder
     */
    public boolean installPackage(@Nonnull ResourceResolver resourceResolver, @Nonnull DistributionPackage distributionPackage) throws DistributionException {
        DistributionRequestType actionType = distributionPackage.getInfo().getRequestType();
        if (!type.equals(distributionPackage.getType())) {
            throw new DistributionException("not supported package type " + distributionPackage.getType());
        }
        boolean installed = false;
        if (DistributionRequestType.DELETE.equals(actionType)) {
            installed = installDeletePackage(resourceResolver, distributionPackage);
        } else if (DistributionRequestType.TEST.equals(actionType)) {
            // do nothing for test packages
            installed = true;
        } else if (DistributionRequestType.ADD.equals(actionType)) {
            installed = installAddPackage(resourceResolver, distributionPackage);
        }
        return installed;
    }

    /**
     * Installs a package directly from a stream, returning the collected
     * package info on success.
     *
     * @throws DistributionException when the install fails
     */
    @Nonnull
    @Override
    public DistributionPackageInfo installPackage(@Nonnull ResourceResolver resourceResolver, @Nonnull InputStream stream) throws DistributionException {
        if (!stream.markSupported()) {
            stream = new BufferedInputStream(stream);
        }
        DistributionPackageInfo packageInfo = new DistributionPackageInfo(type);
        DistributionPackageUtils.readInfo(stream, packageInfo);
        DistributionPackage distributionPackage = SimpleDistributionPackage.fromStream(stream, type);
        boolean installed;
        // not a simple package
        if (distributionPackage == null) {
            installed = installPackageInternal(resourceResolver, stream);
        } else {
            installed = installPackage(resourceResolver, distributionPackage);
            packageInfo.putAll(distributionPackage.getInfo());
        }
        if (installed) {
            return packageInfo;
        } else {
            throw new DistributionException("could not install package from stream");
        }
    }

    /** Removes every path listed in the package; true only when a package was given. */
    private boolean installDeletePackage(@Nonnull ResourceResolver resourceResolver, @CheckForNull DistributionPackage distributionPackage) throws DistributionException {
        Session session = null;
        try {
            if (distributionPackage != null) {
                session = getSession(resourceResolver);
                for (String path : distributionPackage.getInfo().getPaths()) {
                    if (session.itemExists(path)) {
                        session.removeItem(path);
                    }
                }
                return true;
            }
        } catch (Exception e) {
            throw new DistributionException(e);
        } finally {
            ungetSession(session);
        }
        return false;
    }

    /** Streams the package content into the concrete builder's installer. */
    private boolean installAddPackage(@Nonnull ResourceResolver resourceResolver, @Nonnull DistributionPackage distributionPackage)
            throws DistributionException {
        InputStream inputStream = null;
        try {
            inputStream = distributionPackage.createInputStream();
            return installPackageInternal(resourceResolver, inputStream);
        } catch (IOException e) {
            throw new DistributionException(e);
        } finally {
            IOUtils.closeQuietly(inputStream);
        }
    }

    /**
     * Resolves a package by id: simple ids first, then {@code reference-}
     * wrapped ids, otherwise the concrete builder's lookup.
     * <p>
     * Fixed: the prefix check previously used {@code "reference"} while the
     * substring used {@code "reference-".length()}, so an id of exactly
     * {@code "reference"} threw {@link StringIndexOutOfBoundsException} and
     * ids like {@code "referenceX"} were mis-parsed; both now require the
     * full {@code "reference-"} prefix.
     */
    @CheckForNull
    public DistributionPackage getPackage(@Nonnull ResourceResolver resourceResolver, @Nonnull String id) {
        DistributionPackage distributionPackage = SimpleDistributionPackage.fromIdString(id, type);
        // not a simple package
        if (distributionPackage == null) {
            if (id.startsWith(REFERENCE_PREFIX)) {
                String localId = id.substring(REFERENCE_PREFIX.length());
                distributionPackage = new ReferencePackage(getPackageInternal(resourceResolver, localId));
            } else {
                distributionPackage = getPackageInternal(resourceResolver, id);
            }
        }
        return distributionPackage;
    }

    /**
     * Adapts the resolver to a JCR session marked not to re-distribute.
     *
     * @throws RepositoryException when no session can be obtained
     */
    protected Session getSession(ResourceResolver resourceResolver) throws RepositoryException {
        Session session = resourceResolver.adaptTo(Session.class);
        if (session != null) {
            DistributionJcrUtils.setDoNotDistribute(session);
        } else {
            throw new RepositoryException("could not obtain a session from calling user " + resourceResolver.getUserID());
        }
        return session;
    }

    /** Flushes pending changes of the session, if any; save failures are only logged. */
    protected void ungetSession(Session session) {
        if (session != null) {
            try {
                if (session.hasPendingChanges()) {
                    session.save();
                }
            } catch (RepositoryException e) {
                log.debug("Cannot save session", e);
            }
        }
    }

    /** Builds the concrete package for an ADD request. */
    @CheckForNull
    protected abstract DistributionPackage createPackageForAdd(@Nonnull ResourceResolver resourceResolver, @Nonnull DistributionRequest request)
            throws DistributionException;

    /** Reads a package in the concrete builder's own serialization format. */
    @CheckForNull
    protected abstract DistributionPackage readPackageInternal(@Nonnull ResourceResolver resourceResolver, @Nonnull InputStream stream)
            throws DistributionException;

    /** Installs content from a stream in the concrete builder's own format. */
    protected abstract boolean installPackageInternal(@Nonnull ResourceResolver resourceResolver, @Nonnull InputStream stream)
            throws DistributionException;

    /** Looks up a locally stored package by its (unwrapped) id. */
    @CheckForNull
    protected abstract DistributionPackage getPackageInternal(@Nonnull ResourceResolver resourceResolver, @Nonnull String id);
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.fst.*;
import org.apache.lucene.util.fst.FST.Arc;
import org.apache.lucene.util.fst.FST.BytesReader;
import org.elasticsearch.common.util.BigIntArray;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.ordinals.EmptyOrdinals;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;
import java.io.IOException;
/**
*/
public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> {

    public static FSTBytesAtomicFieldData empty(int numDocs) {
        return new Empty(numDocs);
    }

    // 0 ordinal in values means no value (its null)
    protected final Ordinals ordinals;

    // Lazily built per-ordinal hash codes; volatile so a fully built array is
    // safely published to other readers (see getBytesValues).
    private volatile BigIntArray hashes;
    // Cached memory estimate; -1 means "not computed yet".
    private long size = -1;
    // FST mapping ordinal -> term bytes; null only for the Empty subclass.
    private final FST<Long> fst;

    public FSTBytesAtomicFieldData(FST<Long> fst, Ordinals ordinals) {
        this.ordinals = ordinals;
        this.fst = fst;
    }

    @Override
    public void close() {
    }

    @Override
    public boolean isMultiValued() {
        return ordinals.isMultiValued();
    }

    @Override
    public int getNumDocs() {
        return ordinals.getNumDocs();
    }

    @Override
    public long getNumberUniqueValues() {
        return ordinals.getNumOrds();
    }

    @Override
    public boolean isValuesOrdered() {
        return true;
    }

    @Override
    public long getMemorySizeInBytes() {
        // Computed lazily and cached. NOTE(review): `size` is not volatile, so
        // concurrent callers may each recompute it — presumably a benign race
        // since the result is deterministic; confirm.
        if (size == -1) {
            long size = ordinals.getMemorySizeInBytes();
            // FST
            size += fst == null ? 0 : fst.sizeInBytes();
            this.size = size;
        }
        return size;
    }

    /**
     * Returns an ordinal-based bytes view over the FST. When hashes are
     * requested, a per-ordinal hash table is built once (by enumerating the
     * FST in order) and cached in the volatile {@code hashes} field.
     */
    @Override
    public BytesValues.WithOrdinals getBytesValues(boolean needsHashes) {
        assert fst != null;
        if (needsHashes) {
            if (hashes == null) {
                BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<Long>(fst);
                BigIntArray hashes = new BigIntArray(ordinals.getMaxOrd());
                // we don't store an ord 0 in the FST since we could have an empty string in there and FST don't support
                // empty strings twice. ie. them merge fails for long output.
                hashes.set(0, new BytesRef().hashCode());
                try {
                    for (long i = 1, maxOrd = ordinals.getMaxOrd(); i < maxOrd; ++i) {
                        hashes.set(i, fstEnum.next().input.hashCode());
                    }
                    assert fstEnum.next() == null;
                } catch (IOException e) {
                    // Don't use new "AssertionError("Cannot happen", e)" directly as this is a Java 1.7-only API
                    final AssertionError error = new AssertionError("Cannot happen");
                    error.initCause(e);
                    throw error;
                }
                this.hashes = hashes;
            }
            return new HashedBytesValues(fst, ordinals.ordinals(), hashes);
        } else {
            return new BytesValues(fst, ordinals.ordinals());
        }
    }

    @Override
    public ScriptDocValues.Strings getScriptValues() {
        assert fst != null;
        return new ScriptDocValues.Strings(getBytesValues(false));
    }

    /** Ordinal-addressed term lookup backed by the FST. */
    static class BytesValues extends org.elasticsearch.index.fielddata.BytesValues.WithOrdinals {

        protected final FST<Long> fst;
        protected final Ordinals.Docs ordinals;

        // per-thread resources
        protected final BytesReader in;
        protected final Arc<Long> firstArc = new Arc<Long>();
        protected final Arc<Long> scratchArc = new Arc<Long>();
        protected final IntsRef scratchInts = new IntsRef();

        BytesValues(FST<Long> fst, Ordinals.Docs ordinals) {
            super(ordinals);
            this.fst = fst;
            this.ordinals = ordinals;
            in = fst.getBytesReader();
        }

        @Override
        public BytesRef getValueByOrd(long ord) {
            assert ord != Ordinals.MISSING_ORDINAL;
            in.setPosition(0);
            fst.getFirstArc(firstArc);
            try {
                // Walk the FST by output value (the ordinal) to recover the
                // term's int sequence, then copy it into the shared scratch.
                IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts);
                scratch.length = scratch.offset = 0;
                scratch.grow(output.length);
                Util.toBytesRef(output, scratch);
            } catch (IOException ex) {
                //bogus
            }
            // NOTE(review): on IOException the (stale or empty) scratch is
            // returned silently — presumably the in-memory FST cannot throw;
            // confirm.
            return scratch;
        }
    }

    /** BytesValues variant that also serves the precomputed per-ordinal hashes. */
    static final class HashedBytesValues extends BytesValues {
        private final BigIntArray hashes;

        HashedBytesValues(FST<Long> fst, Docs ordinals, BigIntArray hashes) {
            super(fst, ordinals);
            this.hashes = hashes;
        }

        @Override
        public int currentValueHash() {
            assert ordinals.currentOrd() >= 0;
            return hashes.get(ordinals.currentOrd());
        }
    }

    /** Field data with no values: null FST, empty ordinals. */
    final static class Empty extends FSTBytesAtomicFieldData {

        Empty(int numDocs) {
            super(null, new EmptyOrdinals(numDocs));
        }

        @Override
        public boolean isMultiValued() {
            return false;
        }

        @Override
        public int getNumDocs() {
            return ordinals.getNumDocs();
        }

        @Override
        public boolean isValuesOrdered() {
            return true;
        }

        @Override
        public BytesValues.WithOrdinals getBytesValues(boolean needsHashes) {
            return new EmptyByteValuesWithOrdinals(ordinals.ordinals());
        }

        @Override
        public ScriptDocValues.Strings getScriptValues() {
            return ScriptDocValues.EMPTY_STRINGS;
        }
    }
}
| |
/*
* Copyright 2016 Pinterest, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pinterest.deployservice;
import com.pinterest.deployservice.chat.ChatManager;
import com.pinterest.deployservice.dao.*;
import com.pinterest.deployservice.email.MailManager;
import com.pinterest.deployservice.events.EventSender;
import com.pinterest.deployservice.group.HostGroupManager;
import com.pinterest.deployservice.rodimus.RodimusManager;
import com.pinterest.deployservice.scm.SourceControlManager;
import org.apache.commons.dbcp.BasicDataSource;
import java.util.concurrent.ExecutorService;
public class ServiceContext {
    // Central wiring object: holds the DAOs, managers and settings assembled at
    // startup and handed to the service components via the accessors below.

    // JDBC connection pool backing the DAOs.
    private BasicDataSource dataSource;

    // Persistence DAOs.
    private BuildDAO buildDAO;
    private AgentDAO agentDAO;
    private AgentErrorDAO agentErrorDAO;
    private DeployDAO deployDAO;
    private EnvironDAO environDAO;
    private HostDAO hostDAO;
    private HotfixDAO hotfixDAO;
    private DataDAO dataDAO;
    private UtilDAO utilDAO;
    private RatingDAO ratingDAO;
    private EventSender eventSender;
    private PromoteDAO promoteDAO;
    private GroupDAO groupDAO;
    private HostGroupManager hostGroupDAO;
    private UserRolesDAO userRolesDAO;
    private GroupRolesDAO groupRolesDAO;
    private TokenRolesDAO tokenRolesDAO;
    private ConfigHistoryDAO configHistoryDAO;
    private TagDAO tagDAO;
    private ScheduleDAO scheduleDAO;

    // External integrations and runtime settings.
    private String serviceStage;
    private MailManager mailManager;
    private SourceControlManager sourceControlManager;
    private ChatManager chatManager;
    private ExecutorService jobPool;
    private RodimusManager rodimusManager;
    private boolean buildCacheEnabled;
    private String buildCacheSpec;
    private String deployCacheSpec;
    private boolean deployCacheEnabled;
    private String deployBoardUrlPrefix;
    private String changeFeedUrl;
    // Plain bean-style accessors for the collaborators above. Each getter
    // returns the instance injected via the matching setter; no validation or
    // defaulting is performed here.

    public GroupRolesDAO getGroupRolesDAO() {
        return groupRolesDAO;
    }

    public void setGroupRolesDAO(GroupRolesDAO groupRolesDAO) {
        this.groupRolesDAO = groupRolesDAO;
    }

    public BuildDAO getBuildDAO() {
        return buildDAO;
    }

    public void setBuildDAO(BuildDAO buildDAO) {
        this.buildDAO = buildDAO;
    }

    public AgentDAO getAgentDAO() {
        return agentDAO;
    }

    public void setAgentDAO(AgentDAO agentDAO) {
        this.agentDAO = agentDAO;
    }

    public AgentErrorDAO getAgentErrorDAO() {
        return agentErrorDAO;
    }

    public void setAgentErrorDAO(AgentErrorDAO agentErrorDAO) {
        this.agentErrorDAO = agentErrorDAO;
    }

    public DeployDAO getDeployDAO() {
        return deployDAO;
    }

    public void setDeployDAO(DeployDAO deployDAO) {
        this.deployDAO = deployDAO;
    }

    public EnvironDAO getEnvironDAO() {
        return environDAO;
    }

    public void setEnvironDAO(EnvironDAO environDAO) {
        this.environDAO = environDAO;
    }

    public HotfixDAO getHotfixDAO() {
        return hotfixDAO;
    }

    public void setHotfixDAO(HotfixDAO hotfixDAO) {
        this.hotfixDAO = hotfixDAO;
    }

    public MailManager getMailManager() {
        return mailManager;
    }

    public void setMailManager(MailManager mailManager) {
        this.mailManager = mailManager;
    }

    public DataDAO getDataDAO() {
        return dataDAO;
    }

    public void setDataDAO(DataDAO dataDAO) {
        this.dataDAO = dataDAO;
    }

    public HostDAO getHostDAO() {
        return hostDAO;
    }

    public void setHostDAO(HostDAO hostDAO) {
        this.hostDAO = hostDAO;
    }

    public UtilDAO getUtilDAO() {
        return utilDAO;
    }

    public void setUtilDAO(UtilDAO utilDAO) {
        this.utilDAO = utilDAO;
    }

    public void setDataSource(BasicDataSource dataSource) {
        this.dataSource = dataSource;
    }

    public BasicDataSource getDataSource() {
        return dataSource;
    }

    public PromoteDAO getPromoteDAO() {
        return promoteDAO;
    }

    public void setPromoteDAO(PromoteDAO promoteDAO) {
        this.promoteDAO = promoteDAO;
    }

    public GroupDAO getGroupDAO() {
        return groupDAO;
    }

    public void setGroupDAO(GroupDAO groupDAO) {
        this.groupDAO = groupDAO;
    }

    // Named "DAO" for consistency with the other accessors, but the field is
    // actually a HostGroupManager.
    public void setHostGroupDAO(HostGroupManager hostGroupDAO) {
        this.hostGroupDAO = hostGroupDAO;
    }

    public HostGroupManager getHostGroupDAO() {
        return hostGroupDAO;
    }

    public void setConfigHistoryDAO(ConfigHistoryDAO configHistoryDAO) {
        this.configHistoryDAO = configHistoryDAO;
    }

    public ConfigHistoryDAO getConfigHistoryDAO() {
        return configHistoryDAO;
    }

    public void setEventSender(EventSender sender) {
        this.eventSender = sender;
    }

    public EventSender getEventSender() {
        return this.eventSender;
    }

    public void setServiceStage(String serviceStage) {
        this.serviceStage = serviceStage;
    }

    public String getServiceStage() {
        return this.serviceStage;
    }

    public SourceControlManager getSourceControlManager() {
        return sourceControlManager;
    }

    public void setSourceControlManager(SourceControlManager sourceControlManager) {
        this.sourceControlManager = sourceControlManager;
    }

    public ChatManager getChatManager() {
        return chatManager;
    }
public void setChatManager(ChatManager chatManager) {
this.chatManager = chatManager;
}
public ExecutorService getJobPool() {
return jobPool;
}
public void setJobPool(ExecutorService jobPool) {
this.jobPool = jobPool;
}
public RatingDAO getRatingDAO() {
return ratingDAO;
}
public void setRatingDAO(RatingDAO ratingDAO) {
this.ratingDAO = ratingDAO;
}
public UserRolesDAO getUserRolesDAO() {
return userRolesDAO;
}
public void setUserRolesDAO(UserRolesDAO userRolesDAO) {
this.userRolesDAO = userRolesDAO;
}
public TokenRolesDAO getTokenRolesDAO() {
return tokenRolesDAO;
}
public void setTokenRolesDAO(TokenRolesDAO tokenRolesDAO) {
this.tokenRolesDAO = tokenRolesDAO;
}
public RodimusManager getRodimusManager() {
return rodimusManager;
}
public void setRodimusManager(RodimusManager rodimusManager) {
this.rodimusManager = rodimusManager;
}
public void setBuildCacheEnabled(boolean buildCacheEnabled) {
this.buildCacheEnabled = buildCacheEnabled;
}
public void setBuildCacheSpec(String buildCacheSpec) {
this.buildCacheSpec = buildCacheSpec;
}
public void setDeployCacheSpec(String deployCacheSpec) {
this.deployCacheSpec = deployCacheSpec;
}
public void setDeployCacheEnabled(boolean deployCacheEnabled) {
this.deployCacheEnabled = deployCacheEnabled;
}
public void setDeployBoardUrlPrefix(String deployBoardUrlPrefix) {
this.deployBoardUrlPrefix = deployBoardUrlPrefix;
}
public boolean isBuildCacheEnabled() {
return buildCacheEnabled;
}
public String getBuildCacheSpec() {
return buildCacheSpec;
}
public boolean isDeployCacheEnabled() {
return deployCacheEnabled;
}
public String getDeployCacheSpec() {
return deployCacheSpec;
}
public String getDeployBoardUrlPrefix() {
return deployBoardUrlPrefix;
}
public String getChangeFeedUrl() {
return changeFeedUrl;
}
public void setChangeFeedUrl(String changeFeedUrl) {
this.changeFeedUrl = changeFeedUrl;
}
public TagDAO getTagDAO() {
return tagDAO;
}
public void setTagDAO(TagDAO tagDAO) {
this.tagDAO = tagDAO;
}
public ScheduleDAO getScheduleDAO() {
return scheduleDAO;
}
public void setScheduleDAO(ScheduleDAO scheduleDAO) {
this.scheduleDAO = scheduleDAO;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created on 17.10.2005
* Last modification G.Seryakova
* Last modified on 17.10.2005
*
* Tests hashCode() for StackTraceElement and constructors for some Errors and Exceptions.
* scenario
*/
package org.apache.harmony.test.func.api.java.lang.F_ExceptionTest_03;
import org.apache.harmony.test.func.share.ScenarioTest;
import java.math.BigInteger;
/**
 * Minimal open-addressing (linear probing) hash table mapping arbitrary keys
 * to int counters. Slot for shift s is ((key.hashCode() + s) mod N), computed
 * via BigInteger.mod so the result is always non-negative.
 *
 * <p>Fixes over the previous version: {@code put} no longer inflates
 * {@code size} when overwriting an existing key, probing is bounded by N so a
 * full table throws instead of looping forever, {@code findKeyIndex} skips
 * DISABLED slots instead of dereferencing their null key, and {@code clear}
 * resets the size counter.
 */
class SimpleHashTable {
    /** Tombstone for removed slots; lookups must probe past it. */
    private static Item DISABLED = new Item(null, 0);
    /** Table capacity (number of slots). */
    public int N = 0;
    /** Number of occupied (non-null, non-tombstone) slots. */
    private int size = 0;
    private Item table[];

    /** A single key/value slot. */
    static class Item {
        Object key;
        int value;

        Item(Object key, int value) {
            this.key = key;
            this.value = value;
        }
    }

    SimpleHashTable(int N) {
        this.N = N;
        table = new Item[N];
    }

    /**
     * Inserts or overwrites the value stored for {@code key}.
     *
     * @throws IllegalStateException if the table is full and the key is absent
     */
    void put(Object key, int value) {
        int ind = findIndex(key);
        // Only count genuinely new entries; overwriting must not grow size.
        if (table[ind] == null || table[ind] == DISABLED) {
            size++;
        }
        table[ind] = new Item(key, value);
    }

    /** Returns the value stored for {@code key}, or 0 when the key is absent. */
    int get(Object key) {
        int ind = findKeyIndex(key);
        if (ind != -1) {
            return table[ind].value;
        } else {
            return 0;
        }
    }

    /** Removes every entry and resets the size counter. */
    void clear() {
        for (int i = 0; i < table.length; i++) {
            table[i] = null;
        }
        size = 0;
    }

    /** Probe index for (key, shift): (hashCode + shift) mod N, always in [0, N). */
    private int getHashValue(Object key, int shift) {
        BigInteger value = BigInteger.valueOf(key.hashCode() + shift);
        value = value.mod(BigInteger.valueOf(N));
        return value.intValue();
    }

    /**
     * Returns the slot already holding {@code key}, or the first free slot for
     * it. Probing is bounded by N attempts to avoid an infinite loop when the
     * table is full.
     */
    private int findIndex(Object key) {
        int ind = findKeyIndex(key);
        if (ind != -1) {
            return ind;
        }
        for (int shift = 0; shift < N; shift++) {
            ind = getHashValue(key, shift);
            if (table[ind] == null || table[ind] == DISABLED) {
                return ind;
            }
        }
        throw new IllegalStateException("hash table is full");
    }

    /**
     * Returns the slot index of {@code key}, or -1 when it is absent. A null
     * slot terminates the probe sequence; DISABLED slots are skipped (their
     * key is null, so they must not be dereferenced).
     */
    private int findKeyIndex(Object key) {
        for (int shift = 0; shift < N; shift++) {
            int ind = getHashValue(key, shift);
            if (table[ind] == null) {
                return -1;
            }
            if (table[ind] != DISABLED && table[ind].key.equals(key)) {
                return ind;
            }
        }
        return -1;
    }
}
/**
 * Tests hashCode() for StackTraceElement and constructors for some Errors and
 * Exceptions: the same StackTraceElement keys are hit repeatedly and the
 * per-key counters kept in SimpleHashTable must stay in sync with a plain
 * array, which only works if hashCode()/equals() are consistent.
 */
public class F_ExceptionTest_03 extends ScenarioTest {
    // Counts hits per distinct stack-trace element, keyed by hashCode/equals.
    private SimpleHashTable table;
    // Number of failed checks recorded so far.
    private int stat = 0;

    /** Concrete subclass used to exercise the protected VirtualMachineError constructors. */
    class MyVirtualMachineError extends VirtualMachineError {
        MyVirtualMachineError() {
            super();
        }

        MyVirtualMachineError(String msg) {
            super(msg);
        }
    }

    public static void main(String[] args) {
        System.exit(new F_ExceptionTest_03().test(args));
    }

    /**
     * Returns the top stack-trace element of the throwable produced by the
     * num-th test method (0..17). Replaces the identical 18-way switch that
     * was previously duplicated three times inside test().
     */
    private StackTraceElement topFrame(int num) {
        switch (num) {
            case 0: return testAssertionError1().getStackTrace()[0];
            case 1: return testAssertionError2().getStackTrace()[0];
            case 2: return testAssertionError3().getStackTrace()[0];
            case 3: return testAssertionError4().getStackTrace()[0];
            case 4: return testAssertionError5().getStackTrace()[0];
            case 5: return testAssertionError6().getStackTrace()[0];
            case 6: return testAssertionError7().getStackTrace()[0];
            case 7: return testAssertionError8().getStackTrace()[0];
            case 8: return testUnsupportedClassVersionError1().getStackTrace()[0];
            case 9: return testUnsupportedClassVersionError2().getStackTrace()[0];
            case 10: return testVirtualMachineError1().getStackTrace()[0];
            case 11: return testVirtualMachineError2().getStackTrace()[0];
            case 12: return testUnsupportedOperationException1().getStackTrace()[0];
            case 13: return testUnsupportedOperationException2().getStackTrace()[0];
            case 14: return testError1().getStackTrace()[0];
            case 15: return testError2().getStackTrace()[0];
            case 16: return testRuntimeException1().getStackTrace()[0];
            case 17: return testRuntimeException2().getStackTrace()[0];
            default: throw new IllegalArgumentException("unexpected test index: " + num);
        }
    }

    public int test() {
        table = new SimpleHashTable(50);
        table.clear();
        int arr[] = new int[18];
        // Seed one hit per distinct stack-trace element, in both structures.
        for (int i = 0; i < arr.length; i++) {
            table.put(topFrame(i), 1);
            arr[i] = 1;
        }
        // Randomly bump counters in the array and the hash table; consistent
        // StackTraceElement.hashCode()/equals() keeps the two in sync.
        for (int i = 0; i < 1000; i++) {
            int num = (int) Math.round(Math.random() * 17);
            arr[num]++;
            StackTraceElement elem = topFrame(num);
            table.put(elem, table.get(elem) + 1);
        }
        for (int i = 0; i < arr.length; i++) {
            if (table.get(topFrame(i)) != arr[i]) {
                fail("FAIL: fail for " + i + " element.");
                stat++;
            }
        }
        if (stat == 0) {
            return pass();
        } else {
            return fail("test failed.");
        }
    }

    // --- AssertionError: one test per constructor overload ---

    private Throwable testAssertionError1() {
        AssertionError as = new AssertionError();
        if (!(as instanceof AssertionError)) {
            fail("FAIL: In testAssertionError1() is not expected class");
            stat++;
        }
        if (as.getMessage() != null) {
            fail("FAIL: In testAssertionError1() getMessage() is not null");
            stat++;
        }
        return as;
    }

    private Throwable testAssertionError2() {
        String str = "test.";
        AssertionError as = new AssertionError(str);
        if (!(as instanceof AssertionError)) {
            fail("FAIL: In testAssertionError2() is not expected class");
            stat++;
        }
        if (!as.getMessage().trim().equals(str)) {
            fail("FAIL: In testAssertionError2() getMessage() is not \"test.\"");
            stat++;
        }
        return as;
    }

    private Throwable testAssertionError3() {
        boolean b = true;
        AssertionError as = new AssertionError(b);
        if (!(as instanceof AssertionError)) {
            fail("FAIL: In testAssertionError3() is not expected class");
            stat++;
        }
        if (!as.getMessage().trim().equals("true")) {
            fail("FAIL: In testAssertionError3() getMessage() is not \"true\"");
            stat++;
        }
        return as;
    }

    private Throwable testAssertionError4() {
        char c = 'q';
        AssertionError as = new AssertionError(c);
        if (!(as instanceof AssertionError)) {
            fail("FAIL: In testAssertionError4() is not expected class");
            stat++;
        }
        if (!as.getMessage().trim().equals("q")) {
            fail("FAIL: In testAssertionError4() getMessage() is not \"q\"");
            stat++;
        }
        return as;
    }

    private Throwable testAssertionError5() {
        double d = 3.256d;
        AssertionError as = new AssertionError(d);
        if (!(as instanceof AssertionError)) {
            fail("FAIL: In testAssertionError5() is not expected class");
            stat++;
        }
        if (!as.getMessage().trim().equals("3.256")) {
            fail("FAIL: In testAssertionError5() getMessage() is not \"3.256\"");
            stat++;
        }
        return as;
    }

    private Throwable testAssertionError6() {
        float f = 10.4566f;
        AssertionError as = new AssertionError(f);
        if (!(as instanceof AssertionError)) {
            fail("FAIL: In testAssertionError6() is not expected class");
            stat++;
        }
        if (!as.getMessage().trim().equals("10.4566")) {
            fail("FAIL: In testAssertionError6() getMessage() is not \"10.4566\"");
            stat++;
        }
        return as;
    }

    private Throwable testAssertionError7() {
        AssertionError as = new AssertionError(2467);
        if (!(as instanceof AssertionError)) {
            fail("FAIL: In testAssertionError7() is not expected class");
            stat++;
        }
        if (!as.getMessage().trim().equals("2467")) {
            fail("FAIL: In testAssertionError7() getMessage() is not \"2467\"");
            stat++;
        }
        return as;
    }

    private Throwable testAssertionError8() {
        AssertionError as = new AssertionError(457475979L);
        if (!(as instanceof AssertionError)) {
            // Diagnostics previously mislabeled this method as testAssertionError3().
            fail("FAIL: In testAssertionError8() is not expected class");
            stat++;
        }
        if (!as.getMessage().trim().equals("457475979")) {
            fail("FAIL: In testAssertionError8() getMessage() is not \"457475979\"");
            stat++;
        }
        return as;
    }

    // --- UnsupportedClassVersionError ---

    private Throwable testUnsupportedClassVersionError1() {
        UnsupportedClassVersionError as = new UnsupportedClassVersionError();
        if (!(as instanceof UnsupportedClassVersionError)) {
            fail("FAIL: In testUnsupportedClassVersionError1() is not expected class");
            stat++;
        }
        if (as.getMessage() != null) {
            fail("FAIL: In testUnsupportedClassVersionError1() getMessage() is not null");
            stat++;
        }
        return as;
    }

    private Throwable testUnsupportedClassVersionError2() {
        UnsupportedClassVersionError as = new UnsupportedClassVersionError("this is test.");
        if (!(as instanceof UnsupportedClassVersionError)) {
            fail("FAIL: In testUnsupportedClassVersionError2() is not expected class");
            stat++;
        }
        if (!as.getMessage().trim().equals("this is test.")) {
            fail("FAIL: In testUnsupportedClassVersionError2() getMessage() is not \"this is test.\"");
            stat++;
        }
        return as;
    }

    // --- VirtualMachineError (via the local concrete subclass) ---

    private Throwable testVirtualMachineError1() {
        MyVirtualMachineError as = new MyVirtualMachineError();
        if (!(as instanceof VirtualMachineError)) {
            fail("FAIL: In testVirtualMachineError1() is not expected class");
            stat++;
        }
        if (as.getMessage() != null) {
            fail("FAIL: In testVirtualMachineError1() getMessage() is not null");
            stat++;
        }
        return as;
    }

    private Throwable testVirtualMachineError2() {
        MyVirtualMachineError as = new MyVirtualMachineError("this is test2.");
        if (!(as instanceof VirtualMachineError)) {
            fail("FAIL: In testVirtualMachineError2() is not expected class");
            stat++;
        }
        if (!as.getMessage().trim().equals("this is test2.")) {
            fail("FAIL: In testVirtualMachineError2() getMessage() is not \"this is test2.\"");
            stat++;
        }
        return as;
    }

    // --- UnsupportedOperationException ---

    private Throwable testUnsupportedOperationException1() {
        UnsupportedOperationException as = new UnsupportedOperationException();
        if (!(as instanceof UnsupportedOperationException)) {
            fail("FAIL: In testUnsupportedOperationException1() is not expected class");
            stat++;
        }
        if (as.getMessage() != null) {
            fail("FAIL: In testUnsupportedOperationException1() getMessage() is not null");
            stat++;
        }
        return as;
    }

    private Throwable testUnsupportedOperationException2() {
        UnsupportedOperationException as = new UnsupportedOperationException("this is test3.");
        if (!(as instanceof UnsupportedOperationException)) {
            fail("FAIL: In testUnsupportedOperationException2() is not expected class");
            stat++;
        }
        if (!as.getMessage().trim().equals("this is test3.")) {
            fail("FAIL: In testUnsupportedOperationException2() getMessage() is not \"this is test3.\"");
            stat++;
        }
        return as;
    }

    // --- Error / RuntimeException cause-carrying constructors ---

    private Throwable testError1() {
        Exception ex = new Exception();
        Error as = new Error(ex);
        if (!(as instanceof Error)) {
            fail("FAIL: In testError1() is not expected class");
            stat++;
        }
        if (!as.getMessage().equals(ex.toString())) {
            fail("FAIL: In testError1() getMessage() is not equal to cause.toString().");
            stat++;
        }
        if (!as.getCause().equals(ex)) {
            fail("FAIL: In testError1() Error.getCause() is unexpected");
            stat++;
        }
        return as;
    }

    private Throwable testError2() {
        Exception ex = new Exception();
        Error as = new Error("this is test4.", ex);
        if (!(as instanceof Error)) {
            fail("FAIL: In testError2() is not expected class");
            stat++;
        }
        if (!as.getMessage().trim().equals("this is test4.")) {
            fail("FAIL: In testError2() getMessage() is not \"this is test4.\"");
            stat++;
        }
        if (!as.getCause().equals(ex)) {
            fail("FAIL: In testError2() Error.getCause() is unexpected");
            stat++;
        }
        return as;
    }

    private Throwable testRuntimeException1() {
        Exception ex = new Exception("t");
        RuntimeException as = new RuntimeException(ex);
        if (!(as instanceof RuntimeException)) {
            fail("FAIL: In testRuntimeException1() is not expected class");
            stat++;
        }
        if (!as.getMessage().equals(ex.toString())) {
            fail("FAIL: In testRuntimeException1() getMessage() is not equal to cause.toString().");
            stat++;
        }
        if (!as.getCause().equals(ex)) {
            fail("FAIL: In testRuntimeException1() RuntimeException.getCause() is unexpected");
            stat++;
        }
        return as;
    }

    private Throwable testRuntimeException2() {
        Exception ex = new Exception();
        RuntimeException as = new RuntimeException("this is test5.", ex);
        if (!(as instanceof RuntimeException)) {
            fail("FAIL: In testRuntimeException2() is not expected class");
            stat++;
        }
        if (!as.getMessage().trim().equals("this is test5.")) {
            fail("FAIL: In testRuntimeException2() getMessage() is not \"this is test5.\"");
            stat++;
        }
        if (!as.getCause().equals(ex)) {
            fail("FAIL: In testRuntimeException2() RuntimeException.getCause() is unexpected");
            stat++;
        }
        return as;
    }
}
| |
package hudson.plugins.analysis.core;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Collection;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import hudson.FilePath;
import hudson.Launcher;
import hudson.matrix.MatrixAggregatable;
import hudson.model.BuildListener;
import hudson.model.Result;
import hudson.model.AbstractBuild;
import hudson.model.Project;
import hudson.plugins.analysis.util.PluginLogger;
import hudson.plugins.analysis.util.model.AbstractAnnotation;
import hudson.plugins.analysis.util.model.AnnotationContainer;
import hudson.plugins.analysis.util.model.DefaultAnnotationContainer;
import hudson.plugins.analysis.util.model.FileAnnotation;
import hudson.plugins.analysis.util.model.Priority;
import hudson.plugins.analysis.util.model.WorkspaceFile;
import hudson.remoting.VirtualChannel;
import hudson.tasks.BuildStep;
import hudson.tasks.BuildStepMonitor;
import hudson.tasks.Builder;
import hudson.tasks.Recorder;
import hudson.tasks.Ant;
import hudson.tasks.Maven;
/**
* A base class for publishers with the following two characteristics:
* <ul>
 * <li>It provides an unstable threshold that can be enabled and set in the
* configuration screen. If the number of annotations in a build exceeds this
* value then the build is considered as {@link Result#UNSTABLE UNSTABLE}.
* </li>
* <li>It provides thresholds for the build health, that could be adjusted in
* the configuration screen. These values are used by the
* {@link HealthReportBuilder} to compute the health and the health trend graph.</li>
* </ul>
*
* @author Ulli Hafner
*/
// CHECKSTYLE:COUPLING-OFF
@SuppressWarnings("PMD.TooManyFields")
public abstract class HealthAwarePublisher extends Recorder implements HealthDescriptor, MatrixAggregatable {
private static final long serialVersionUID = -7945220365563528457L;
private static final String SLASH = "/";
/** Default threshold priority limit. */
private static final String DEFAULT_PRIORITY_THRESHOLD_LIMIT = "low";
/** Report health as 100% when the number of warnings is less than this value. */
private final String healthy;
/** Report health as 0% when the number of warnings is greater than this value. */
private final String unHealthy;
/** Determines which warning priorities should be considered when evaluating the build health. */
private String thresholdLimit;
/** The name of the plug-in. */
private final String pluginName;
/** The default encoding to be used when reading and parsing files. */
private final String defaultEncoding;
/** Determines whether the plug-in should run for failed builds, too. @since 1.6 */
private final boolean canRunOnFailed;
/**
* Determines whether the absolute annotations delta or the actual
* annotations set difference should be used to evaluate the build stability.
*
* @since 1.4
*/
private final boolean useDeltaValues;
/**
* Thresholds for build status unstable and failed, resp. and priorities
* all, high, normal, and low, resp.
*
* @since 1.14
*/
private Thresholds thresholds = new Thresholds();
/**
* Determines whether module names should be derived from Maven POM or Ant build files.
*
* @since 1.19
*/
private final boolean shouldDetectModules;
/**
* Determines whether new warnings should be computed (with respect to baseline).
*
* @since 1.34
*/
private final boolean dontComputeNew;
/**
* Determines whether relative paths in warnings should be resolved using a
* time expensive operation that scans the whole workspace for matching
* files.
*
* @since 1.43
*/
private final boolean doNotResolveRelativePaths;
/**
* Creates a new instance of {@link HealthAwarePublisher}.
*
* @param healthy
* Report health as 100% when the number of open tasks is less
* than this value
* @param unHealthy
* Report health as 0% when the number of open tasks is greater
* than this value
* @param thresholdLimit
* determines which warning priorities should be considered when
* evaluating the build stability and health
* @param defaultEncoding
* the default encoding to be used when reading and parsing files
* @param useDeltaValues
* determines whether the absolute annotations delta or the
* actual annotations set difference should be used to evaluate
* the build stability
* @param unstableTotalAll
* annotation threshold
* @param unstableTotalHigh
* annotation threshold
* @param unstableTotalNormal
* annotation threshold
* @param unstableTotalLow
* annotation threshold
* @param unstableNewAll
* annotation threshold
* @param unstableNewHigh
* annotation threshold
* @param unstableNewNormal
* annotation threshold
* @param unstableNewLow
* annotation threshold
* @param failedTotalAll
* annotation threshold
* @param failedTotalHigh
* annotation threshold
* @param failedTotalNormal
* annotation threshold
* @param failedTotalLow
* annotation threshold
* @param failedNewAll
* annotation threshold
* @param failedNewHigh
* annotation threshold
* @param failedNewNormal
* annotation threshold
* @param failedNewLow
* annotation threshold
* @param canRunOnFailed
* determines whether the plug-in can run for failed builds, too
* @param shouldDetectModules
* determines whether module names should be derived from Maven
* POM or Ant build files
* @param canComputeNew
* determines whether new warnings should be computed (with
* respect to baseline)
* @param canResolveRelativePaths
* determines whether relative paths in warnings should be
* resolved using a time expensive operation that scans the whole
* workspace for matching files.
* @param pluginName
* the name of the plug-in
*/
// CHECKSTYLE:OFF
    @SuppressWarnings("PMD")
    public HealthAwarePublisher(final String healthy, final String unHealthy, final String thresholdLimit,
            final String defaultEncoding, final boolean useDeltaValues,
            final String unstableTotalAll, final String unstableTotalHigh, final String unstableTotalNormal, final String unstableTotalLow,
            final String unstableNewAll, final String unstableNewHigh, final String unstableNewNormal, final String unstableNewLow,
            final String failedTotalAll, final String failedTotalHigh, final String failedTotalNormal, final String failedTotalLow,
            final String failedNewAll, final String failedNewHigh, final String failedNewNormal, final String failedNewLow,
            final boolean canRunOnFailed, final boolean shouldDetectModules, final boolean canComputeNew, final boolean canResolveRelativePaths,
            final String pluginName) {
        super();
        this.healthy = healthy;
        this.unHealthy = unHealthy;
        this.thresholdLimit = thresholdLimit;
        this.defaultEncoding = defaultEncoding;
        this.useDeltaValues = useDeltaValues;
        // The public parameters are positive ("can..."), the fields negative
        // ("doNot.../dont..."), so both are inverted on assignment.
        doNotResolveRelativePaths = !canResolveRelativePaths;
        dontComputeNew = !canComputeNew;
        // Copy the sixteen threshold strings into the Thresholds value object.
        thresholds.unstableTotalAll = unstableTotalAll;
        thresholds.unstableTotalHigh = unstableTotalHigh;
        thresholds.unstableTotalNormal = unstableTotalNormal;
        thresholds.unstableTotalLow = unstableTotalLow;
        thresholds.unstableNewAll = unstableNewAll;
        thresholds.unstableNewHigh = unstableNewHigh;
        thresholds.unstableNewNormal = unstableNewNormal;
        thresholds.unstableNewLow = unstableNewLow;
        thresholds.failedTotalAll = failedTotalAll;
        thresholds.failedTotalHigh = failedTotalHigh;
        thresholds.failedTotalNormal = failedTotalNormal;
        thresholds.failedTotalLow = failedTotalLow;
        thresholds.failedNewAll = failedNewAll;
        thresholds.failedNewHigh = failedNewHigh;
        thresholds.failedNewNormal = failedNewNormal;
        thresholds.failedNewLow = failedNewLow;
        this.canRunOnFailed = canRunOnFailed;
        this.shouldDetectModules = shouldDetectModules;
        // Stored pre-formatted as a log-line prefix, e.g. "[checkstyle] ".
        this.pluginName = "[" + pluginName + "] ";
    }
/**
* Creates a new instance of {@link HealthAwarePublisher}.
*
* @param healthy
* Report health as 100% when the number of open tasks is less
* than this value
* @param unHealthy
* Report health as 0% when the number of open tasks is greater
* than this value
* @param thresholdLimit
* determines which warning priorities should be considered when
* evaluating the build stability and health
* @param defaultEncoding
* the default encoding to be used when reading and parsing files
* @param useDeltaValues
* determines whether the absolute annotations delta or the
* actual annotations set difference should be used to evaluate
* the build stability
* @param unstableTotalAll
* annotation threshold
* @param unstableTotalHigh
* annotation threshold
* @param unstableTotalNormal
* annotation threshold
* @param unstableTotalLow
* annotation threshold
* @param unstableNewAll
* annotation threshold
* @param unstableNewHigh
* annotation threshold
* @param unstableNewNormal
* annotation threshold
* @param unstableNewLow
* annotation threshold
* @param failedTotalAll
* annotation threshold
* @param failedTotalHigh
* annotation threshold
* @param failedTotalNormal
* annotation threshold
* @param failedTotalLow
* annotation threshold
* @param failedNewAll
* annotation threshold
* @param failedNewHigh
* annotation threshold
* @param failedNewNormal
* annotation threshold
* @param failedNewLow
* annotation threshold
* @param canRunOnFailed
* determines whether the plug-in can run for failed builds, too
* @param shouldDetectModules
* determines whether module names should be derived from Maven POM or Ant build files
* @param canComputeNew
* determines whether new warnings should be computed (with respect to baseline)
* @param pluginName
* the name of the plug-in
*/
    @SuppressWarnings("PMD")
    public HealthAwarePublisher(final String healthy, final String unHealthy, final String thresholdLimit,
            final String defaultEncoding, final boolean useDeltaValues,
            final String unstableTotalAll, final String unstableTotalHigh, final String unstableTotalNormal, final String unstableTotalLow,
            final String unstableNewAll, final String unstableNewHigh, final String unstableNewNormal, final String unstableNewLow,
            final String failedTotalAll, final String failedTotalHigh, final String failedTotalNormal, final String failedTotalLow,
            final String failedNewAll, final String failedNewHigh, final String failedNewNormal, final String failedNewLow,
            final boolean canRunOnFailed, final boolean shouldDetectModules, final boolean canComputeNew,
            final String pluginName) {
        // Delegates to the full constructor with canResolveRelativePaths = true,
        // presumably the behavior before that flag existed (field is @since 1.43).
        this(healthy, unHealthy, thresholdLimit, defaultEncoding, useDeltaValues,
                unstableTotalAll, unstableTotalHigh, unstableTotalNormal, unstableTotalLow,
                unstableNewAll, unstableNewHigh, unstableNewNormal, unstableNewLow,
                failedTotalAll, failedTotalHigh, failedTotalNormal, failedTotalLow,
                failedNewAll, failedNewHigh, failedNewNormal, failedNewLow,
                canRunOnFailed, shouldDetectModules, canComputeNew, true, pluginName);
    }
// CHECKSTYLE:ON
    /**
     * Returns whether relative paths in warnings should be resolved using a
     * time expensive operation that scans the whole workspace for matching
     * files.
     *
     * @return <code>true</code> if relative paths can be resolved,
     *         <code>false</code> otherwise
     */
    public boolean getCanResolveRelativePaths() {
        // Field is stored negated — presumably so deserialized old instances
        // (missing field defaults to false) keep resolving paths; same pattern
        // as dontComputeNew.
        return !doNotResolveRelativePaths;
    }
    /**
     * Returns whether relative paths in warnings should be resolved using a
     * time expensive operation that scans the whole workspace for matching
     * files. Alias for {@link #getCanResolveRelativePaths()}.
     *
     * @return <code>true</code> if relative paths can be resolved,
     *         <code>false</code> otherwise
     */
    public boolean canResolveRelativePaths() {
        return getCanResolveRelativePaths();
    }
    /**
     * Initializes new fields that are not serialized yet.
     *
     * <p>Invoked by Java deserialization: fills in a default
     * {@code thresholdLimit} and migrates the legacy single-value threshold
     * fields ({@code threshold}, {@code newThreshold}, {@code failureThreshold},
     * {@code newFailureThreshold} — declared elsewhere in this class) into the
     * {@code Thresholds} object, nulling them out afterwards.
     *
     * @return the object
     */
    protected Object readResolve() {
        if (thresholdLimit == null) {
            thresholdLimit = DEFAULT_PRIORITY_THRESHOLD_LIMIT;
        }
        // thresholds == null means the instance predates the Thresholds object
        // (added in 1.14): migrate each legacy field that was set.
        if (thresholds == null) {
            thresholds = new Thresholds();
            if (threshold != null) {
                thresholds.unstableTotalAll = threshold;
                threshold = null; // NOPMD
            }
            if (newThreshold != null) {
                thresholds.unstableNewAll = newThreshold;
                newThreshold = null; // NOPMD
            }
            if (failureThreshold != null) {
                thresholds.failedTotalAll = failureThreshold;
                failureThreshold = null; //NOPMD
            }
            if (newFailureThreshold != null) {
                thresholds.failedNewAll = newFailureThreshold;
                newFailureThreshold = null; // NOPMD
            }
        }
        return this;
    }
    /** {@inheritDoc} */
    @SuppressWarnings("deprecation") // Eclipse bug #298563
    @Override
    public final boolean perform(final AbstractBuild<?, ?> build, final Launcher launcher, final BuildListener listener) throws InterruptedException, IOException {
        PluginLogger logger = new PluginLogger(listener.getLogger(), pluginName);
        // canContinue is defined elsewhere in this class — presumably it checks
        // the build result against canRunOnFailed; TODO confirm.
        if (canContinue(build.getResult())) {
            BuildResult result;
            try {
                // Template method: subclasses do the actual parsing/collection here.
                result = perform(build, logger);
                AbstractBuild<?, ?> referenceBuild = result.getHistory().getReferenceBuild();
                if (referenceBuild != null) {
                    logger.log("Computing warning deltas based on reference build " + referenceBuild.getDisplayName());
                }
            }
            catch (InterruptedException exception) {
                // Aborted: log and report failure of this step instead of propagating.
                // NOTE(review): does not re-interrupt via Thread.currentThread().interrupt().
                logger.log(exception.getMessage());
                return false;
            }
            if (new NullHealthDescriptor(this).isThresholdEnabled()) {
                updateBuildResult(result, logger);
            }
            copyFilesWithAnnotationsToBuildFolder(build.getRootDir(), launcher.getChannel(), result.getAnnotations());
        }
        else {
            logger.log("Skipping publisher since build result is " + build.getResult());
        }
        // Always true here: threshold violations are recorded on the build
        // result (updateBuildResult) rather than by failing this step.
        return true;
    }
    /**
     * Will be invoked after the build result has been evaluated. Evaluates the
     * configured thresholds against the result, which may mark the build
     * unstable or failed.
     *
     * @param result
     *            the evaluated build result
     * @param logger
     *            the logger
     */
    protected void updateBuildResult(final BuildResult result, final PluginLogger logger) {
        String baseUrl = getDescriptor().getPluginResultUrlName();
        result.evaluateStatus(getThresholds(), useDeltaValues, canComputeNew(), logger, baseUrl);
    }
    /** Returns the descriptor of this publisher, narrowed to {@code PluginDescriptor}. */
    @Override
    public PluginDescriptor getDescriptor() {
        return (PluginDescriptor)super.getDescriptor();
    }
/**
* Copies all files with annotations from the workspace to the build folder.
*
* @param rootDir
* directory to store the copied files in
* @param channel
* channel to get the files from
* @param annotations
* annotations determining the actual files to copy
* @throws IOException
* if the files could not be written
* @throws FileNotFoundException
* if the files could not be written
* @throws InterruptedException
* if the user cancels the processing
*/
private void copyFilesWithAnnotationsToBuildFolder(final File rootDir,
final VirtualChannel channel, final Collection<FileAnnotation> annotations) throws IOException,
FileNotFoundException, InterruptedException {
File directory = new File(rootDir, AbstractAnnotation.WORKSPACE_FILES);
if (!directory.exists() && !directory.mkdir()) {
throw new IOException("Can't create directory for workspace files that contain annotations: " + directory.getAbsolutePath());
}
AnnotationContainer container = new DefaultAnnotationContainer(annotations);
for (WorkspaceFile file : container.getFiles()) {
File masterFile = new File(directory, file.getTempName());
if (!masterFile.exists()) {
try {
FileOutputStream outputStream = new FileOutputStream(masterFile);
new FilePath(channel, file.getName()).copyTo(outputStream);
}
catch (IOException exception) {
logExceptionToFile(exception, masterFile, file.getName());
}
}
}
}
    /**
     * Logs the specified exception in the specified file. The file is
     * overwritten with a human readable explanation of why the source file
     * could not be copied, followed by the stack trace of the exception.
     * Failures while writing this log file are deliberately ignored.
     * @param exception
     *            the exception
     * @param masterFile
     *            the file on the master
     * @param slaveFileName
     *            the file name of the slave
     */
    private void logExceptionToFile(final IOException exception, final File masterFile, final String slaveFileName) {
        FileOutputStream outputStream = null;
        try {
            outputStream = new FileOutputStream(masterFile);
            print(outputStream,
                    "Copying the source file '%s' from the workspace to the build folder '%s' on the Hudson master failed.%n",
                    slaveFileName, masterFile.getAbsolutePath());
            // Heuristic: a name with no leading slash and no drive separator is relative.
            if (!slaveFileName.startsWith(SLASH) && !slaveFileName.contains(":")) {
                print(outputStream, "Seems that the path is relative, however an absolute path is required when copying the sources.%n");
                String base;
                if (slaveFileName.contains(SLASH)) {
                    base = StringUtils.substringAfterLast(slaveFileName, SLASH);
                }
                else {
                    base = slaveFileName;
                }
                print(outputStream, "Is the file '%s' contained more than once in your workspace?%n", base);
            }
            print(outputStream, "Is the file '%s' a valid filename?%n", slaveFileName);
            print(outputStream, "If you are building on a slave: please check if the file is accessible under '$JENKINS_HOME/[job-name]/%s'%n", slaveFileName);
            print(outputStream, "If you are building on the master: please check if the file is accessible under '$JENKINS_HOME/[job-name]/workspace/%s'%n", slaveFileName);
            exception.printStackTrace(new PrintStream(outputStream, false, getDefaultEncoding()));
        }
        catch (IOException error) {
            // ignore: this method is best-effort diagnostics only
        }
        finally {
            IOUtils.closeQuietly(outputStream);
        }
    }
    /**
     * Formats the message with the specified arguments (see
     * {@link String#format}) and writes it to the output stream using the
     * configured default encoding.
     *
     * @param outputStream
     *            the stream to write to
     * @param message
     *            the format string
     * @param arguments
     *            the format arguments
     * @throws IOException
     *             if the message could not be written
     */
    private void print(final FileOutputStream outputStream, final String message, final Object... arguments) throws IOException {
        IOUtils.write(String.format(message, arguments), outputStream, getDefaultEncoding());
    }
    /**
     * Returns whether new warnings should be computed (with respect to
     * baseline).
     *
     * @return <code>true</code> if new warnings should be computed (with
     *         respect to baseline), <code>false</code> otherwise
     */
    public boolean getCanComputeNew() {
        return canComputeNew();
    }
    /**
     * Returns whether new warnings should be computed (with respect to
     * baseline).
     *
     * @return <code>true</code> if new warnings should be computed (with
     *         respect to baseline), <code>false</code> otherwise
     */
    public boolean canComputeNew() {
        return !dontComputeNew;
    }
    /**
     * Returns whether this plug-in can run for failed builds, too.
     *
     * @return <code>true</code> if this plug-in can run for failed builds,
     *         <code>false</code> otherwise
     */
    public boolean getCanRunOnFailed() {
        return canRunOnFailed;
    }
    /**
     * Returns whether module names should be derived from Maven POM or Ant build files.
     *
     * @return <code>true</code> if module names should be derived from Maven
     *         POM or Ant build files, <code>false</code> otherwise
     */
    public boolean getShouldDetectModules() {
        return shouldDetectModules;
    }
    /**
     * Returns whether module names should be derived from Maven POM or Ant build files.
     *
     * @return <code>true</code> if module names should be derived from Maven
     *         POM or Ant build files, <code>false</code> otherwise
     */
    public boolean shouldDetectModules() {
        return shouldDetectModules;
    }
/**
* Returns whether this publisher can continue processing. This default
* implementation returns <code>true</code> if the property
* <code>canRunOnFailed</code> is set or if the build is not aborted or
* failed.
*
* @param result
* build result
* @return <code>true</code> if the build can continue
*/
protected boolean canContinue(final Result result) {
if (canRunOnFailed) {
return result != Result.ABORTED;
}
else {
return result != Result.ABORTED && result != Result.FAILURE;
}
}
    /**
     * Performs the publishing of the results of this plug-in.
     *
     * @param build
     *            the build
     * @param logger
     *            the logger to report the progress to
     * @return the java project containing the found annotations
     * @throws InterruptedException
     *             If the build is interrupted by the user (in an attempt to
     *             abort the build.) Normally the {@link BuildStep}
     *             implementations may simply forward the exception it got from
     *             its lower-level functions.
     * @throws IOException
     *             If the implementation wants to abort the processing when an
     *             {@link IOException} happens, it can simply propagate the
     *             exception to the caller. This will cause the build to fail,
     *             with the default error message. Implementations are
     *             encouraged to catch {@link IOException} on its own to provide
     *             a better error message, if it can do so, so that users have
     *             better understanding on why it failed.
     */
    protected abstract BuildResult perform(AbstractBuild<?, ?> build, PluginLogger logger) throws InterruptedException, IOException;
    /**
     * {@inheritDoc}
     *
     * @return the thresholds used to evaluate the build status
     */
    public Thresholds getThresholds() {
        return thresholds;
    }
    /**
     * Returns whether absolute annotations delta or the actual annotations set
     * difference should be used to evaluate the build stability.
     *
     * @return <code>true</code> if the annotation count should be used,
     *         <code>false</code> if the actual (set) difference should be
     *         computed
     */
    public boolean getUseDeltaValues() {
        return useDeltaValues;
    }
    /**
     * Returns the healthy threshold, i.e. when health is reported as 100%.
     *
     * @return the 100% healthiness
     */
    public String getHealthy() {
        return healthy;
    }
    /**
     * Returns the unhealthy threshold, i.e. when health is reported as 0%.
     *
     * @return the 0% unhealthiness
     */
    public String getUnHealthy() {
        return unHealthy;
    }
    /**
     * Returns the defined default encoding.
     *
     * @return the default encoding
     *         (NOTE(review): presumably empty/<code>null</code> means the
     *         platform default — confirm against the descriptor)
     */
    public String getDefaultEncoding() {
        return defaultEncoding;
    }
/**
* Returns whether the current build uses maven.
*
* @param build
* the current build
* @return <code>true</code> if the current build uses maven,
* <code>false</code> otherwise
*/
protected boolean isMavenBuild(final AbstractBuild<?, ?> build) {
if (build.getProject() instanceof Project) {
Project<?, ?> project = (Project<?, ?>)build.getProject();
for (Builder builder : project.getBuilders()) {
if (builder instanceof Maven) {
return true;
}
}
}
return false;
}
/**
* Returns whether the current build uses ant.
*
* @param build
* the current build
* @return <code>true</code> if the current build uses ant,
* <code>false</code> otherwise
*/
protected boolean isAntBuild(final AbstractBuild<?, ?> build) {
if (build.getProject() instanceof Project) {
Project<?, ?> project = (Project<?, ?>)build.getProject();
for (Builder builder : project.getBuilders()) {
if (builder instanceof Ant) {
return true;
}
}
}
return false;
}
    /** {@inheritDoc} */
    public Priority getMinimumPriority() {
        // NOTE(review): StringUtils.upperCase uses the default locale; under a
        // Turkish locale a limit containing 'i' would not match the enum
        // constant — confirm whether an explicit Locale is needed here.
        return Priority.valueOf(StringUtils.upperCase(getThresholdLimit()));
    }
    /**
     * Returns the threshold limit.
     *
     * @return the threshold limit
     */
    public String getThresholdLimit() {
        return thresholdLimit;
    }
    /**
     * {@inheritDoc}
     *
     * Requires {@link BuildStepMonitor#STEP} only when new warnings are
     * computed; presumably because the delta computation reads results of the
     * previous build — confirm.
     */
    public BuildStepMonitor getRequiredMonitorService() {
        return canComputeNew() ? BuildStepMonitor.STEP : BuildStepMonitor.NONE;
    }
    // CHECKSTYLE:OFF
    // The following transient fields are retained solely so configurations
    // persisted by older plug-in versions can still be deserialized;
    // presumably their values are migrated and cleared during deserialization
    // (see the threshold migration code above) — TODO confirm.
    /** Backward compatibility. @deprecated */
    @Deprecated
    private transient String threshold;
    /** Backward compatibility. @deprecated */
    @Deprecated
    private transient String newThreshold;
    /** Backward compatibility. @deprecated */
    @Deprecated
    private transient String failureThreshold;
    /** Backward compatibility. @deprecated */
    @Deprecated
    private transient String newFailureThreshold;
    /** Backward compatibility. @deprecated */
    @SuppressWarnings("unused")
    @Deprecated
    private transient boolean thresholdEnabled;
    /** Backward compatibility. @deprecated */
    @SuppressWarnings("unused")
    @Deprecated
    private transient int minimumAnnotations;
    /** Backward compatibility. @deprecated */
    @SuppressWarnings("unused")
    @Deprecated
    private transient int healthyAnnotations;
    /** Backward compatibility. @deprecated */
    @SuppressWarnings("unused")
    @Deprecated
    private transient int unHealthyAnnotations;
    /** Backward compatibility. @deprecated */
    @SuppressWarnings("unused")
    @Deprecated
    private transient boolean healthyReportEnabled;
    /** Backward compatibility. @deprecated */
    @SuppressWarnings("unused")
    @Deprecated
    private transient String height;
    /**
     * Creates a new instance of {@code HealthAwarePublisher}. Delegates to the
     * primary constructor with relative-path resolution enabled
     * (<code>canResolveRelativePaths = true</code>).
     *
     * @deprecated retained for backward compatibility with older callers
     */
    @Deprecated
    @SuppressWarnings("PMD.ExcessiveParameterList")
    public HealthAwarePublisher(final String healthy, final String unHealthy, final String thresholdLimit,
            final String defaultEncoding, final boolean useDeltaValues,
            final String unstableTotalAll, final String unstableTotalHigh, final String unstableTotalNormal, final String unstableTotalLow,
            final String unstableNewAll, final String unstableNewHigh, final String unstableNewNormal, final String unstableNewLow,
            final String failedTotalAll, final String failedTotalHigh, final String failedTotalNormal, final String failedTotalLow,
            final String failedNewAll, final String failedNewHigh, final String failedNewNormal, final String failedNewLow,
            final boolean canRunOnFailed, final boolean shouldDetectModules, final String pluginName) {
        this(healthy, unHealthy, thresholdLimit, defaultEncoding, useDeltaValues,
                unstableTotalAll, unstableTotalHigh, unstableTotalNormal, unstableTotalLow,
                unstableNewAll, unstableNewHigh, unstableNewNormal, unstableNewLow,
                failedTotalAll, failedTotalHigh, failedTotalNormal, failedTotalLow,
                failedNewAll, failedNewHigh, failedNewNormal, failedNewLow,
                canRunOnFailed, shouldDetectModules, true, pluginName);
    }
    /**
     * Creates a new instance of {@code HealthAwarePublisher}. Delegates to the
     * constructor above with module detection disabled
     * (<code>shouldDetectModules = false</code>).
     *
     * @deprecated retained for backward compatibility with older callers
     */
    @Deprecated
    @SuppressWarnings("PMD.ExcessiveParameterList")
    public HealthAwarePublisher(final String healthy, final String unHealthy, final String thresholdLimit,
            final String defaultEncoding, final boolean useDeltaValues,
            final String unstableTotalAll, final String unstableTotalHigh, final String unstableTotalNormal, final String unstableTotalLow,
            final String unstableNewAll, final String unstableNewHigh, final String unstableNewNormal, final String unstableNewLow,
            final String failedTotalAll, final String failedTotalHigh, final String failedTotalNormal, final String failedTotalLow,
            final String failedNewAll, final String failedNewHigh, final String failedNewNormal, final String failedNewLow,
            final boolean canRunOnFailed, final String pluginName) {
        this(healthy, unHealthy, thresholdLimit,
                defaultEncoding, useDeltaValues,
                unstableTotalAll, unstableTotalHigh, unstableTotalNormal, unstableTotalLow,
                unstableNewAll, unstableNewHigh, unstableNewNormal, unstableNewLow,
                failedTotalAll, failedTotalHigh, failedTotalNormal, failedTotalLow,
                failedNewAll, failedNewHigh, failedNewNormal, failedNewLow,
                canRunOnFailed, false, pluginName);
    }
    /**
     * Creates a new instance of {@code HealthAwarePublisher} from the legacy
     * single-threshold configuration: the four threshold strings are mapped
     * onto the corresponding "total all"/"new all" fields of the
     * {@link Thresholds} structure.
     *
     * @deprecated retained for backward compatibility with older callers
     */
    @SuppressWarnings("PMD")
    @Deprecated
    public HealthAwarePublisher(final String threshold, final String newThreshold,
            final String failureThreshold, final String newFailureThreshold, final String healthy,
            final String unHealthy, final String thresholdLimit,
            final String defaultEncoding, final boolean useDeltaValues, final boolean canRunOnFailed,
            final String pluginName) {
        super();
        thresholds.unstableTotalAll = threshold;
        thresholds.unstableNewAll = newThreshold;
        thresholds.failedTotalAll = failureThreshold;
        thresholds.failedNewAll = newFailureThreshold;
        doNotResolveRelativePaths = false;
        this.healthy = healthy;
        this.unHealthy = unHealthy;
        this.thresholdLimit = thresholdLimit;
        this.defaultEncoding = defaultEncoding;
        this.useDeltaValues = useDeltaValues;
        this.canRunOnFailed = canRunOnFailed;
        dontComputeNew = false;
        shouldDetectModules = false;
        // The plug-in name is stored pre-formatted as a log prefix.
        this.pluginName = "[" + pluginName + "] ";
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.sumBucket;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.core.IsNull.notNullValue;
/**
 * Integration tests for the {@code sum_bucket} pipeline aggregation, covering
 * top-level and nested usage over document counts and metric values.
 */
@ESIntegTestCase.SuiteScopeTestCase
public class SumBucketIT extends ESIntegTestCase {
    private static final String SINGLE_VALUED_FIELD_NAME = "l_value";
    // Randomized suite-scope fixture shared by all tests in this class.
    static int numDocs;
    static int interval;
    static int minRandomValue;
    static int maxRandomValue;
    static int numValueBuckets;
    // Expected document count per histogram bucket, indexed by bucket ordinal.
    static long[] valueCounts;
    /**
     * Indexes randomized documents into "idx" (tracking expected per-bucket
     * counts in {@link #valueCounts}) plus the auxiliary "idx_unmapped" and
     * "empty_bucket_idx" indices used by individual tests.
     */
    @Override
    public void setupSuiteScopeCluster() throws Exception {
        assertAcked(client().admin().indices().prepareCreate("idx")
                .addMapping("type", "tag", "type=keyword").get());
        createIndex("idx_unmapped");
        numDocs = randomIntBetween(6, 20);
        interval = randomIntBetween(2, 5);
        minRandomValue = 0;
        maxRandomValue = 20;
        numValueBuckets = ((maxRandomValue - minRandomValue) / interval) + 1;
        valueCounts = new long[numValueBuckets];
        List<IndexRequestBuilder> builders = new ArrayList<>();
        for (int i = 0; i < numDocs; i++) {
            int fieldValue = randomIntBetween(minRandomValue, maxRandomValue);
            builders.add(client().prepareIndex("idx", "type").setSource(
                    jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, fieldValue).field("tag", "tag" + (i % interval))
                            .endObject()));
            // Track the expected count of the histogram bucket this doc falls into.
            final int bucket = (fieldValue / interval); // + (fieldValue < 0 ? -1 : 0) - (minRandomValue / interval - 1);
            valueCounts[bucket]++;
        }
        assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer"));
        for (int i = 0; i < 2; i++) {
            builders.add(client().prepareIndex("empty_bucket_idx", "type", "" + i).setSource(
                    jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()));
        }
        indexRandom(true, builders);
        ensureSearchable();
    }
    /** Sums histogram doc counts with a top-level sum_bucket aggregation. */
    public void testDocCountTopLevel() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
                        .extendedBounds(minRandomValue, maxRandomValue))
                .addAggregation(sumBucket("sum_bucket", "histo>_count")).execute().actionGet();
        assertSearchResponse(response);
        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();
        assertThat(buckets.size(), equalTo(numValueBuckets));
        // Recompute the expected sum independently from the tracked counts.
        double sum = 0;
        for (int i = 0; i < numValueBuckets; ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            assertThat(bucket, notNullValue());
            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
            assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
            sum += bucket.getDocCount();
        }
        InternalSimpleValue sumBucketValue = response.getAggregations().get("sum_bucket");
        assertThat(sumBucketValue, notNullValue());
        assertThat(sumBucketValue.getName(), equalTo("sum_bucket"));
        assertThat(sumBucketValue.value(), equalTo(sum));
    }
    /** Sums histogram doc counts with sum_bucket nested under a terms aggregation. */
    public void testDocCountAsSubAgg() throws Exception {
        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        terms("terms")
                                .field("tag")
                                .order(Order.term(true))
                                .subAggregation(
                                        histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
                                                .extendedBounds(minRandomValue, maxRandomValue))
                                .subAggregation(sumBucket("sum_bucket", "histo>_count"))).execute().actionGet();
        assertSearchResponse(response);
        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        List<Terms.Bucket> termsBuckets = terms.getBuckets();
        assertThat(termsBuckets.size(), equalTo(interval));
        for (int i = 0; i < interval; ++i) {
            Terms.Bucket termsBucket = termsBuckets.get(i);
            assertThat(termsBucket, notNullValue());
            assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));
            Histogram histo = termsBucket.getAggregations().get("histo");
            assertThat(histo, notNullValue());
            assertThat(histo.getName(), equalTo("histo"));
            List<? extends Bucket> buckets = histo.getBuckets();
            double sum = 0;
            for (int j = 0; j < numValueBuckets; ++j) {
                Histogram.Bucket bucket = buckets.get(j);
                assertThat(bucket, notNullValue());
                assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
                sum += bucket.getDocCount();
            }
            InternalSimpleValue sumBucketValue = termsBucket.getAggregations().get("sum_bucket");
            assertThat(sumBucketValue, notNullValue());
            assertThat(sumBucketValue.getName(), equalTo("sum_bucket"));
            assertThat(sumBucketValue.value(), equalTo(sum));
        }
    }
    /** Sums a metric (sum) value across terms buckets with a top-level sum_bucket. */
    public void testMetricTopLevel() throws Exception {
        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
                .addAggregation(sumBucket("sum_bucket", "terms>sum")).execute().actionGet();
        assertSearchResponse(response);
        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        List<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(interval));
        double bucketSum = 0;
        for (int i = 0; i < interval; ++i) {
            Terms.Bucket bucket = buckets.get(i);
            assertThat(bucket, notNullValue());
            assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval)));
            assertThat(bucket.getDocCount(), greaterThan(0L));
            Sum sum = bucket.getAggregations().get("sum");
            assertThat(sum, notNullValue());
            bucketSum += sum.value();
        }
        InternalSimpleValue sumBucketValue = response.getAggregations().get("sum_bucket");
        assertThat(sumBucketValue, notNullValue());
        assertThat(sumBucketValue.getName(), equalTo("sum_bucket"));
        assertThat(sumBucketValue.value(), equalTo(bucketSum));
    }
    /** Sums a metric across histogram buckets, skipping empty buckets (default gap policy). */
    public void testMetricAsSubAgg() throws Exception {
        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        terms("terms")
                                .field("tag")
                                .order(Order.term(true))
                                .subAggregation(
                                        histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
                                                .extendedBounds(minRandomValue, maxRandomValue)
                                                .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
                                .subAggregation(sumBucket("sum_bucket", "histo>sum"))).execute().actionGet();
        assertSearchResponse(response);
        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        List<Terms.Bucket> termsBuckets = terms.getBuckets();
        assertThat(termsBuckets.size(), equalTo(interval));
        for (int i = 0; i < interval; ++i) {
            Terms.Bucket termsBucket = termsBuckets.get(i);
            assertThat(termsBucket, notNullValue());
            assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));
            Histogram histo = termsBucket.getAggregations().get("histo");
            assertThat(histo, notNullValue());
            assertThat(histo.getName(), equalTo("histo"));
            List<? extends Bucket> buckets = histo.getBuckets();
            double bucketSum = 0;
            for (int j = 0; j < numValueBuckets; ++j) {
                Histogram.Bucket bucket = buckets.get(j);
                assertThat(bucket, notNullValue());
                assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
                // Empty buckets contribute nothing under the default (skip) gap policy.
                if (bucket.getDocCount() != 0) {
                    Sum sum = bucket.getAggregations().get("sum");
                    assertThat(sum, notNullValue());
                    bucketSum += sum.value();
                }
            }
            InternalSimpleValue sumBucketValue = termsBucket.getAggregations().get("sum_bucket");
            assertThat(sumBucketValue, notNullValue());
            assertThat(sumBucketValue.getName(), equalTo("sum_bucket"));
            assertThat(sumBucketValue.value(), equalTo(bucketSum));
        }
    }
    /** Same as above but with GapPolicy.INSERT_ZEROS, so empty buckets count as zero. */
    public void testMetricAsSubAggWithInsertZeros() throws Exception {
        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        terms("terms")
                                .field("tag")
                                .order(Order.term(true))
                                .subAggregation(
                                        histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
                                                .extendedBounds(minRandomValue, maxRandomValue)
                                                .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
                                .subAggregation(sumBucket("sum_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS)))
                .execute().actionGet();
        assertSearchResponse(response);
        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        List<Terms.Bucket> termsBuckets = terms.getBuckets();
        assertThat(termsBuckets.size(), equalTo(interval));
        for (int i = 0; i < interval; ++i) {
            Terms.Bucket termsBucket = termsBuckets.get(i);
            assertThat(termsBucket, notNullValue());
            assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));
            Histogram histo = termsBucket.getAggregations().get("histo");
            assertThat(histo, notNullValue());
            assertThat(histo.getName(), equalTo("histo"));
            List<? extends Bucket> buckets = histo.getBuckets();
            double bucketSum = 0;
            for (int j = 0; j < numValueBuckets; ++j) {
                Histogram.Bucket bucket = buckets.get(j);
                assertThat(bucket, notNullValue());
                assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
                Sum sum = bucket.getAggregations().get("sum");
                assertThat(sum, notNullValue());
                bucketSum += sum.value();
            }
            InternalSimpleValue sumBucketValue = termsBucket.getAggregations().get("sum_bucket");
            assertThat(sumBucketValue, notNullValue());
            assertThat(sumBucketValue.getName(), equalTo("sum_bucket"));
            assertThat(sumBucketValue.value(), equalTo(bucketSum));
        }
    }
    /** With no matching terms buckets the sum_bucket value must be 0. */
    public void testNoBuckets() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*"))
                        .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
                .addAggregation(sumBucket("sum_bucket", "terms>sum")).execute().actionGet();
        assertSearchResponse(response);
        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        List<Terms.Bucket> buckets = terms.getBuckets();
        assertThat(buckets.size(), equalTo(0));
        InternalSimpleValue sumBucketValue = response.getAggregations().get("sum_bucket");
        assertThat(sumBucketValue, notNullValue());
        assertThat(sumBucketValue.getName(), equalTo("sum_bucket"));
        assertThat(sumBucketValue.value(), equalTo(0.0));
    }
    /** A sum_bucket whose input path points at another sum_bucket (two levels of nesting). */
    public void testNested() throws Exception {
        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        terms("terms")
                                .field("tag")
                                .order(Order.term(true))
                                .subAggregation(
                                        histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
                                                .extendedBounds(minRandomValue, maxRandomValue))
                                .subAggregation(sumBucket("sum_histo_bucket", "histo>_count")))
                .addAggregation(sumBucket("sum_terms_bucket", "terms>sum_histo_bucket")).execute().actionGet();
        assertSearchResponse(response);
        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        List<Terms.Bucket> termsBuckets = terms.getBuckets();
        assertThat(termsBuckets.size(), equalTo(interval));
        double aggTermsSum = 0;
        for (int i = 0; i < interval; ++i) {
            Terms.Bucket termsBucket = termsBuckets.get(i);
            assertThat(termsBucket, notNullValue());
            assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));
            Histogram histo = termsBucket.getAggregations().get("histo");
            assertThat(histo, notNullValue());
            assertThat(histo.getName(), equalTo("histo"));
            List<? extends Bucket> buckets = histo.getBuckets();
            double aggHistoSum = 0;
            for (int j = 0; j < numValueBuckets; ++j) {
                Histogram.Bucket bucket = buckets.get(j);
                assertThat(bucket, notNullValue());
                assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
                aggHistoSum += bucket.getDocCount();
            }
            InternalSimpleValue sumBucketValue = termsBucket.getAggregations().get("sum_histo_bucket");
            assertThat(sumBucketValue, notNullValue());
            assertThat(sumBucketValue.getName(), equalTo("sum_histo_bucket"));
            assertThat(sumBucketValue.value(), equalTo(aggHistoSum));
            aggTermsSum += aggHistoSum;
        }
        InternalSimpleValue sumBucketValue = response.getAggregations().get("sum_terms_bucket");
        assertThat(sumBucketValue, notNullValue());
        assertThat(sumBucketValue.getName(), equalTo("sum_terms_bucket"));
        assertThat(sumBucketValue.value(), equalTo(aggTermsSum));
    }
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.integration.indices.settings;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
import org.elasticsearch.action.count.CountResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.test.integration.AbstractNodesTests;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static org.elasticsearch.client.AdminRequests.createIndexRequest;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
/**
*
*/
public class UpdateNumberOfReplicasTests extends AbstractNodesTests {
    // Clients for the two nodes started before each test method.
    protected Client client1;
    protected Client client2;
    /**
     * Starts a two-node cluster and obtains a client for each node.
     */
    @BeforeMethod
    public void startNodes() {
        startNode("node1");
        startNode("node2");
        client1 = getClient1();
        client2 = getClient2();
    }
    /**
     * Closes both clients and shuts down every node started during the test.
     */
    @AfterMethod
    public void closeNodes() {
        client1.close();
        client2.close();
        closeAllNodes();
    }
    /**
     * Returns the client connected to node1.
     *
     * @return the client for node1
     */
    protected Client getClient1() {
        return client("node1");
    }
    /**
     * Returns the client connected to node2.
     *
     * @return the client for node2
     */
    protected Client getClient2() {
        return client("node2");
    }
    /**
     * Indexes documents into a default index (5 shards, 1 replica), then
     * increases the replica count to 2, adds a node so the new replicas can be
     * allocated, and finally decreases the replica count to 0 — verifying the
     * cluster health and document counts after each step.
     */
    @Test
    public void simpleUpdateNumberOfReplicasTests() throws Exception {
        logger.info("Creating index test");
        client1.admin().indices().create(createIndexRequest("test")).actionGet();
        logger.info("Running Cluster Health");
        ClusterHealthResponse clusterHealth = client1.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
        logger.info("Done Cluster Health, status " + clusterHealth.status());
        assertThat(clusterHealth.timedOut(), equalTo(false));
        assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.GREEN));
        // Default index settings: 5 primaries, 1 replica => 10 active shards.
        assertThat(clusterHealth.indices().get("test").activePrimaryShards(), equalTo(5));
        assertThat(clusterHealth.indices().get("test").numberOfReplicas(), equalTo(1));
        assertThat(clusterHealth.indices().get("test").activeShards(), equalTo(10));
        for (int i = 0; i < 10; i++) {
            client1.prepareIndex("test", "type1", Integer.toString(i)).setSource(jsonBuilder().startObject()
                    .field("value", "test" + i)
                    .endObject()).execute().actionGet();
        }
        client1.admin().indices().prepareRefresh().execute().actionGet();
        for (int i = 0; i < 10; i++) {
            CountResponse countResponse = client1.prepareCount().setQuery(matchAllQuery()).execute().actionGet();
            assertThat(countResponse.count(), equalTo(10l));
        }
        logger.info("Increasing the number of replicas from 1 to 2");
        client1.admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put("index.number_of_replicas", 2)).execute().actionGet();
        // NOTE(review): fixed sleeps like this are a flakiness risk; presumably
        // the following health call with wait conditions should suffice — confirm.
        Thread.sleep(200);
        logger.info("Running Cluster Health");
        clusterHealth = client1.admin().cluster().prepareHealth().setWaitForYellowStatus().setWaitForActiveShards(10).execute().actionGet();
        logger.info("Done Cluster Health, status " + clusterHealth.status());
        assertThat(clusterHealth.timedOut(), equalTo(false));
        // Only YELLOW: with 2 replicas and 2 nodes, one replica set cannot be allocated.
        assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.YELLOW));
        assertThat(clusterHealth.indices().get("test").activePrimaryShards(), equalTo(5));
        assertThat(clusterHealth.indices().get("test").numberOfReplicas(), equalTo(2));
        assertThat(clusterHealth.indices().get("test").activeShards(), equalTo(10));
        logger.info("starting another node to new replicas will be allocated to it");
        startNode("node3");
        Thread.sleep(100);
        logger.info("Running Cluster Health");
        clusterHealth = client1.admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForNodes("3").execute().actionGet();
        logger.info("Done Cluster Health, status " + clusterHealth.status());
        assertThat(clusterHealth.timedOut(), equalTo(false));
        assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.GREEN));
        assertThat(clusterHealth.indices().get("test").activePrimaryShards(), equalTo(5));
        assertThat(clusterHealth.indices().get("test").numberOfReplicas(), equalTo(2));
        // 5 primaries + 2 replicas each = 15 active shards on 3 nodes.
        assertThat(clusterHealth.indices().get("test").activeShards(), equalTo(15));
        for (int i = 0; i < 10; i++) {
            CountResponse countResponse = client1.prepareCount().setQuery(matchAllQuery()).execute().actionGet();
            assertThat(countResponse.count(), equalTo(10l));
        }
        logger.info("Decreasing number of replicas from 2 to 0");
        client1.admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put("index.number_of_replicas", 0)).execute().actionGet();
        Thread.sleep(200);
        logger.info("Running Cluster Health");
        clusterHealth = client1.admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForNodes("3").execute().actionGet();
        logger.info("Done Cluster Health, status " + clusterHealth.status());
        assertThat(clusterHealth.timedOut(), equalTo(false));
        assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.GREEN));
        assertThat(clusterHealth.indices().get("test").activePrimaryShards(), equalTo(5));
        assertThat(clusterHealth.indices().get("test").numberOfReplicas(), equalTo(0));
        assertThat(clusterHealth.indices().get("test").activeShards(), equalTo(5));
        for (int i = 0; i < 10; i++) {
            CountResponse countResponse = client1.prepareCount().setQuery(matchAllQuery()).execute().actionGet();
            assertThat(countResponse.shardFailures().toString(), countResponse.failedShards(), equalTo(0));
            assertThat(countResponse.count(), equalTo(10l));
        }
    }
@Test
public void testAutoExpandNumberOfReplicas0ToData() {
logger.info("--> creating index test with auto expand replicas");
client1.admin().indices().prepareCreate("test").setSettings(settingsBuilder().put("number_of_shards", 2).put("auto_expand_replicas", "0-all")).execute().actionGet();
logger.info("--> running cluster health");
ClusterHealthResponse clusterHealth = client1.admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForActiveShards(4).execute().actionGet();
logger.info("--> done cluster health, status " + clusterHealth.status());
assertThat(clusterHealth.timedOut(), equalTo(false));
assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.GREEN));
assertThat(clusterHealth.indices().get("test").activePrimaryShards(), equalTo(2));
assertThat(clusterHealth.indices().get("test").numberOfReplicas(), equalTo(1));
assertThat(clusterHealth.indices().get("test").activeShards(), equalTo(4));
logger.info("--> add another node, should increase the number of replicas");
startNode("node3");
logger.info("--> running cluster health");
clusterHealth = client1.admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForActiveShards(6).execute().actionGet();
logger.info("--> done cluster health, status " + clusterHealth.status());
assertThat(clusterHealth.timedOut(), equalTo(false));
assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.GREEN));
assertThat(clusterHealth.indices().get("test").activePrimaryShards(), equalTo(2));
assertThat(clusterHealth.indices().get("test").numberOfReplicas(), equalTo(2));
assertThat(clusterHealth.indices().get("test").activeShards(), equalTo(6));
logger.info("--> closing one node");
closeNode("node3");
logger.info("--> running cluster health");
clusterHealth = client1.admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForActiveShards(4).execute().actionGet();
logger.info("--> done cluster health, status " + clusterHealth.status());
assertThat(clusterHealth.timedOut(), equalTo(false));
assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.GREEN));
assertThat(clusterHealth.indices().get("test").activePrimaryShards(), equalTo(2));
assertThat(clusterHealth.indices().get("test").numberOfReplicas(), equalTo(1));
assertThat(clusterHealth.indices().get("test").activeShards(), equalTo(4));
logger.info("--> closing another node");
closeNode("node2");
logger.info("--> running cluster health");
clusterHealth = client1.admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForActiveShards(2).execute().actionGet();
logger.info("--> done cluster health, status " + clusterHealth.status());
assertThat(clusterHealth.timedOut(), equalTo(false));
assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.GREEN));
assertThat(clusterHealth.indices().get("test").activePrimaryShards(), equalTo(2));
assertThat(clusterHealth.indices().get("test").numberOfReplicas(), equalTo(0));
assertThat(clusterHealth.indices().get("test").activeShards(), equalTo(2));
}
@Test
public void testAutoExpandNumberReplicas1ToData() {
logger.info("--> creating index test with auto expand replicas");
client1.admin().indices().prepareCreate("test").setSettings(settingsBuilder().put("number_of_shards", 2).put("auto_expand_replicas", "1-all")).execute().actionGet();
logger.info("--> running cluster health");
ClusterHealthResponse clusterHealth = client1.admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForActiveShards(4).execute().actionGet();
logger.info("--> done cluster health, status " + clusterHealth.status());
assertThat(clusterHealth.timedOut(), equalTo(false));
assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.GREEN));
assertThat(clusterHealth.indices().get("test").activePrimaryShards(), equalTo(2));
assertThat(clusterHealth.indices().get("test").numberOfReplicas(), equalTo(1));
assertThat(clusterHealth.indices().get("test").activeShards(), equalTo(4));
logger.info("--> add another node, should increase the number of replicas");
startNode("node3");
logger.info("--> running cluster health");
clusterHealth = client1.admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForActiveShards(6).execute().actionGet();
logger.info("--> done cluster health, status " + clusterHealth.status());
assertThat(clusterHealth.timedOut(), equalTo(false));
assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.GREEN));
assertThat(clusterHealth.indices().get("test").activePrimaryShards(), equalTo(2));
assertThat(clusterHealth.indices().get("test").numberOfReplicas(), equalTo(2));
assertThat(clusterHealth.indices().get("test").activeShards(), equalTo(6));
logger.info("--> closing one node");
closeNode("node3");
logger.info("--> running cluster health");
clusterHealth = client1.admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForActiveShards(4).execute().actionGet();
logger.info("--> done cluster health, status " + clusterHealth.status());
assertThat(clusterHealth.timedOut(), equalTo(false));
assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.GREEN));
assertThat(clusterHealth.indices().get("test").activePrimaryShards(), equalTo(2));
assertThat(clusterHealth.indices().get("test").numberOfReplicas(), equalTo(1));
assertThat(clusterHealth.indices().get("test").activeShards(), equalTo(4));
logger.info("--> closing another node");
closeNode("node2");
logger.info("--> running cluster health");
clusterHealth = client1.admin().cluster().prepareHealth().setWaitForYellowStatus().setWaitForActiveShards(2).execute().actionGet();
logger.info("--> done cluster health, status " + clusterHealth.status());
assertThat(clusterHealth.timedOut(), equalTo(false));
assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.YELLOW));
assertThat(clusterHealth.indices().get("test").activePrimaryShards(), equalTo(2));
assertThat(clusterHealth.indices().get("test").numberOfReplicas(), equalTo(1));
assertThat(clusterHealth.indices().get("test").activeShards(), equalTo(2));
}
@Test
public void testAutoExpandNumberReplicas2() {
logger.info("--> add another node");
startNode("node3");
logger.info("--> creating index test with auto expand replicas set to 0-2");
client1.admin().indices().prepareCreate("test").setSettings(settingsBuilder().put("number_of_shards", 2).put("auto_expand_replicas", "0-2")).execute().actionGet();
logger.info("--> running cluster health");
ClusterHealthResponse clusterHealth = client1.admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForActiveShards(6).execute().actionGet();
logger.info("--> done cluster health, status " + clusterHealth.status());
assertThat(clusterHealth.timedOut(), equalTo(false));
assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.GREEN));
assertThat(clusterHealth.indices().get("test").activePrimaryShards(), equalTo(2));
assertThat(clusterHealth.indices().get("test").numberOfReplicas(), equalTo(2));
assertThat(clusterHealth.indices().get("test").activeShards(), equalTo(6));
logger.info("--> add two more nodes");
startNode("node4");
startNode("node5");
logger.info("--> update the auto expand replicas to 0-3");
client1.admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put("auto_expand_replicas", "0-3")).execute().actionGet();
logger.info("--> running cluster health");
clusterHealth = client1.admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForActiveShards(8).execute().actionGet();
logger.info("--> done cluster health, status " + clusterHealth.status());
assertThat(clusterHealth.timedOut(), equalTo(false));
assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.GREEN));
assertThat(clusterHealth.indices().get("test").activePrimaryShards(), equalTo(2));
assertThat(clusterHealth.indices().get("test").numberOfReplicas(), equalTo(3));
assertThat(clusterHealth.indices().get("test").activeShards(), equalTo(8));
}
}
| |
package proj.me.bitframe;
import android.content.Context;
import android.graphics.Bitmap;
import android.os.Handler;
import android.os.Message;
import android.text.TextUtils;
import com.squareup.picasso.MemoryPolicy;
import com.squareup.picasso.NetworkPolicy;
import com.squareup.picasso.Picasso;
import java.lang.ref.SoftReference;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import proj.me.bitframe.exceptions.FrameException;
import proj.me.bitframe.helper.Utils;
import proj.me.bitframe.shading_four.ImageShadingFour;
import proj.me.bitframe.shading_one.ImageShadingOne;
import proj.me.bitframe.shading_three.ImageShadingThree;
import proj.me.bitframe.shading_two.ImageShadingTwo;
/**
 * Drives the sequential loading of a frame's images and hands the results to
 * the matching ImageShadingOne..Four implementation for rendering.
 *
 * Fixes in this revision:
 * - the duplicated load-one-image code in mapUnframedImages()/callNextCycle()
 *   is extracted into loadImage();
 * - mapUnframedImages() no longer throws IndexOutOfBoundsException on an
 *   empty list;
 * - RecycleHandler's reference field is renamed (it is a WeakReference, not a
 *   SoftReference) and a no-op loop-variable null assignment is removed.
 *
 * Created by Deepak.Tiwari on 28-09-2015.
 */
final class ImageShading implements ImageResult{
    Context context;
    // Bitmaps successfully loaded for the frame currently being built.
    List<Bitmap> images;
    int totalImages;
    ImageCallback layoutCallback;
    // Becomes true once any image in the current batch loads successfully.
    boolean result;
    boolean doneLoading;
    List<BeanImage> loadedBeanImages;
    FrameModel frameModel;
    int unframedImageCounter;
    FrameHandler frameHandler;
    RecycleHandler recycleHandler;
    // Work queue: beans still waiting to be loaded (consumed from the front).
    List<BeanImage> beanImages = new ArrayList<>();
    // Picasso targets kept referenced so they are not garbage collected mid-load.
    List<UnframedPicassoTargetNew> targets;
    Picasso picasso;
    // NOTE(review): static but written from the instance constructor, so the most
    // recently created ImageShading wins — confirm two frames never need
    // different thresholds at the same time.
    static int SORT_DIFFERENCE_THRESHOLD = 0;

    ImageShading(Context context, ImageCallback layoutCallback, FrameModel frameModel, Picasso picasso){
        this.context = context;
        images = new ArrayList<>();
        loadedBeanImages = new ArrayList<>();
        this.layoutCallback = layoutCallback;
        this.frameModel = frameModel;
        frameHandler = new FrameHandler(this);
        recycleHandler = new RecycleHandler(this);
        this.picasso = picasso;
        SORT_DIFFERENCE_THRESHOLD = frameModel.getSortDifferenceThreshold();
    }

    /**
     * Starts the sequential load of the given beans. When there are more images
     * than frames and sorting is enabled, sorts them first, then kicks off the
     * load of the first bean; callNextCycle() advances through the rest.
     */
    void mapUnframedImages(List<BeanImage> beanImages, List<UnframedPicassoTargetNew> targets){
        totalImages = beanImages.size();
        this.targets = targets;
        // Guard: the original threw IndexOutOfBoundsException for an empty list.
        if(totalImages == 0) return;
        if(totalImages > frameModel.getMaxFrameCount() && frameModel.isShouldSortImages()){
            //sort image a/c to primary and secondary value
            Collections.sort(beanImages);
        }
        this.beanImages.addAll(beanImages);
        unframedImageCounter = 0;
        loadImage(beanImages.get(0));
    }

    /**
     * Loads a single bean: local paths through an AsyncTask, remote links
     * through a Picasso target with a scale transformation. Shared by
     * mapUnframedImages() and callNextCycle(), which previously duplicated it.
     */
    private void loadImage(BeanImage beanImage){
        if(Utils.isLocalPath(beanImage.getImageLink())){
            Utils.logVerbose("LADING AS : "+"local image " + beanImage.getImageLink());
            new UnframedLocalTask(this).execute(beanImage);
        } else {
            Utils.logVerbose("LADING AS : "+"server image " + beanImage.getImageLink());
            UnframedPicassoTargetNew target = new UnframedPicassoTargetNew(this, beanImage);
            targets.add(target);
            Utils.getPicassoRequestCreator(picasso, beanImage.getImageLink()).memoryPolicy(MemoryPolicy.NO_STORE)
                    .networkPolicy(NetworkPolicy.NO_STORE)
                    .noPlaceholder()
                    .transform(new ScaleTransformation(frameModel.getMaxContainerWidth(),
                            frameModel.getMaxContainerHeight(), totalImages, beanImage.getImageLink(),
                            beanImage, this))
                    .into(target);
        }
    }

    /**
     * Advances the load queue: invalidates the previous image in Picasso's
     * cache, drops the consumed bean/target, and loads the next bean if any.
     */
    @Override
    public void callNextCycle(String lastImagePath) {
        if(!TextUtils.isEmpty(lastImagePath)) picasso.invalidate(lastImagePath);
        if(beanImages != null && beanImages.size() > 0) beanImages.remove(0);
        //because targets are also running sequential, in case of parallel need to shift it to respective class
        if(targets != null && targets.size() > 0) targets.remove(0);
        if(beanImages.size() == 0) return;
        loadImage(beanImages.get(0));
    }

    /**
     * Forwards a transformed bitmap to the frame handler. A null bean signals a
     * bitmap-only message (what=3); otherwise bitmap and bean travel together
     * in a BeanHandler (what=1).
     */
    @Override
    public void handleTransformedResult(Bitmap bitmap, BeanImage beanImage) {
        //caller responsibility
        if(beanImage == null){
            Message message = frameHandler.obtainMessage(3, bitmap);
            message.sendToTarget();
            return;
        }
        BeanHandler beanHandler = new BeanHandler();
        beanHandler.setBitmap(bitmap);
        beanHandler.setBeanImage(beanImage);
        Message message = frameHandler.obtainMessage(1, beanHandler);
        message.sendToTarget();
    }

    @Override
    public List<Bitmap> getImages() {
        return images;
    }

    /**
     * Collects one load result. Once the batch is done: if nothing loaded at
     * all, renders a single failed frame via ImageShadingOne; otherwise picks
     * the shading implementation matching the number of loaded bitmaps (1-4),
     * renders, then recycles (or clears) state for the next batch.
     */
    @Override
    public void onImageLoaded(boolean result, Bitmap bitmap, BeanImage beanImage) throws FrameException{
        if(result) {
            images.add(bitmap);
            loadedBeanImages.add(beanImage);
        }
        if(!this.result) this.result = result;
        if(doneLoading && !this.result){
            // every image failed — show the last bean as a single failed frame
            loadedBeanImages.add(beanImage);
            ImageShades imageShades = new ImageShadingOne(context, totalImages, frameModel);
            imageShades.setImageCallback(layoutCallback);
            imageShades.setCurrentFramePicasso(picasso);
            imageShades.setResult(false);
            imageShades.updateFrameUi(null, loadedBeanImages, false);
            images.clear();
            loadedBeanImages.clear();
            this.result = false;
            doneLoading = false;
        }else if(doneLoading){
            ImageShades imageShades = null;
            switch(images.size()){
                case 1:
                    Utils.logMessage("going to load 1");
                    imageShades = new ImageShadingOne(context, totalImages, frameModel);
                    imageShades.setResult(true);
                    break;
                case 2:
                    Utils.logMessage("going to load 2");
                    imageShades = new ImageShadingTwo(context, totalImages, frameModel);
                    break;
                case 3:
                    Utils.logMessage("going to load 3");
                    imageShades = new ImageShadingThree(context, totalImages, frameModel);
                    break;
                case 4:
                    Utils.logMessage("going to load 4");
                    imageShades = new ImageShadingFour(context, totalImages, frameModel);
                    break;
            }
            if(imageShades != null){
                imageShades.setImageCallback(layoutCallback);
                imageShades.setCurrentFramePicasso(picasso);
                imageShades.updateFrameUi(images, loadedBeanImages, false);
            }
            //RecycleHandler responsibility
            if(frameModel.isShouldRecycleBitmaps()) recycleHandler.sendEmptyMessageDelayed(2, 500);
            else images.clear();
            loadedBeanImages.clear();
            this.result = false;
            doneLoading = false;
        }
    }

    @Override
    public void setDoneLoading(boolean doneLoading) {
        this.doneLoading = doneLoading;
    }

    @Override
    public FrameModel getFrameModel() {
        return frameModel;
    }

    @Override
    public ImageCallback getImageCallback() {
        return layoutCallback;
    }

    @Override
    public int getTotalImages() {
        return totalImages;
    }

    @Override
    public Context getContext() {
        return context.getApplicationContext();
    }

    @Override
    public void updateCounter() {
        unframedImageCounter++;
    }

    @Override
    public int getCounter() {
        return unframedImageCounter;
    }

    /**
     * picasso will be loaded in last when the mapping
     * of the images is done and decided that which image will go in which frame
     * */
    void mapFramedImages(List<BeanImage> beanBitFrames) throws FrameException{
        totalImages = beanBitFrames.size();
        if(totalImages > frameModel.getMaxFrameCount()){
            if(frameModel.isShouldSortImages()){
                //sort image a/c to primary and secondary value
                Collections.sort(beanBitFrames);
            }
            //go to mapping with top max frame count images
            for(int i = 0; i < frameModel.getMaxFrameCount(); i++){
                loadedBeanImages.add(beanBitFrames.get(i));
            }
            ImageShades imageShades = new ImageShadingFour(context, totalImages, frameModel);
            imageShades.setImageCallback(layoutCallback);
            imageShades.setCurrentFramePicasso(picasso);
            imageShades.updateFrameUi(null, loadedBeanImages, true);
        }else{
            //go to mapping directly
            loadedBeanImages.addAll(beanBitFrames);
            ImageShades imageShades = null;
            switch(totalImages){
                case 1:
                    imageShades = new ImageShadingOne(context, totalImages, frameModel);
                    imageShades.setResult(true);
                    break;
                case 2:
                    imageShades = new ImageShadingTwo(context, totalImages, frameModel);
                    break;
                case 3:
                    imageShades = new ImageShadingThree(context, totalImages, frameModel);
                    break;
                case 4:
                    imageShades = new ImageShadingFour(context, totalImages, frameModel);
                    break;
            }
            if(imageShades != null){
                imageShades.setImageCallback(layoutCallback);
                imageShades.setCurrentFramePicasso(picasso);
                imageShades.updateFrameUi(null, loadedBeanImages, true);
            }
        }
    }

    /**
     * Recycles loaded bitmaps after rendering. Holds only a WeakReference to
     * the ImageResult so a pending delayed message cannot leak it.
     */
    static class RecycleHandler extends Handler{
        // Renamed from imageResultSoftReference: this is a WeakReference, not a SoftReference.
        WeakReference<ImageResult> imageResultRef;

        RecycleHandler(ImageResult imageResult){
            imageResultRef = new WeakReference<>(imageResult);
        }

        @Override
        public void handleMessage(Message msg) {
            ImageResult imageResult = imageResultRef.get();
            if(imageResult == null) {
                super.handleMessage(msg);
                return;
            }
            switch(msg.what){
                case 2:
                    List<Bitmap> bitmaps = imageResult.getImages();
                    for (Bitmap bmp : bitmaps) {
                        Utils.logVerbose("exppp normal width " + bmp.getWidth() + " height " + bmp.getHeight());
                        if (!bmp.isRecycled()) bmp.recycle();
                        // (dropped the original "bitmap1 = null": reassigning the
                        // enhanced-for loop variable had no effect)
                    }
                    bitmaps.clear();
                    break;
            }
        }
    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.indexer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import io.druid.data.input.impl.CSVParseSpec;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.InputRowParser;
import io.druid.data.input.impl.JSONParseSpec;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;
import io.druid.java.util.common.RE;
import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.granularity.Granularities;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import io.druid.segment.indexing.DataSchema;
import io.druid.segment.indexing.granularity.UniformGranularitySpec;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.HashBasedNumberedShardSpec;
import io.druid.timeline.partition.NumberedShardSpec;
import io.druid.timeline.partition.ShardSpec;
import io.druid.timeline.partition.SingleDimensionShardSpec;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Writer;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.joda.time.DateTime;
import org.joda.time.DateTimeComparator;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RunWith(Parameterized.class)
public class IndexGeneratorJobTest
{
  // Aggregators for the "website" cases: a long sum of visited_num plus a
  // HyperLogLog cardinality sketch over the "host" dimension.
  final private static AggregatorFactory[] aggs1 = {
      new LongSumAggregatorFactory("visited_num", "visited_num"),
      new HyperUniquesAggregatorFactory("unique_hosts", "host")
  };
  // Plain row count, used by the dimension-ordering ("inherit_dims*") cases.
  final private static AggregatorFactory[] aggs2 = {
      new CountAggregatorFactory("count")
  };
  /**
   * Builds the parameterized fixtures. Each base row supplies the first ten
   * constructor arguments (useCombiner .. datasourceName); every row is then
   * emitted twice, once with forceExtendableShardSpecs = true and once false.
   */
  @Parameterized.Parameters(name = "useCombiner={0}, partitionType={1}, interval={2}, shardInfoForEachSegment={3}, " +
                                   "data={4}, inputFormatName={5}, inputRowParser={6}, maxRowsInMemory={7}, " +
                                   "aggs={8}, datasourceName={9}, forceExtendableShardSpecs={10}")
  public static Collection<Object[]> constructFeed()
  {
    final List<Object[]> baseConstructors = Arrays.asList(
        new Object[][]{
            {
                // Single-dimension partitioning on "host": two day-segments, five
                // shards each, defined by [start, end) host ranges (null = open end).
                false,
                "single",
                "2014-10-22T00:00:00Z/P2D",
                new String[][][]{
                    {
                        {null, "c.example.com"},
                        {"c.example.com", "e.example.com"},
                        {"e.example.com", "g.example.com"},
                        {"g.example.com", "i.example.com"},
                        {"i.example.com", null}
                    },
                    {
                        {null, "c.example.com"},
                        {"c.example.com", "e.example.com"},
                        {"e.example.com", "g.example.com"},
                        {"g.example.com", "i.example.com"},
                        {"i.example.com", null}
                    }
                },
                ImmutableList.of(
                    "2014102200,a.example.com,100",
                    "2014102200,b.exmaple.com,50",
                    "2014102200,c.example.com,200",
                    "2014102200,d.example.com,250",
                    "2014102200,e.example.com,123",
                    "2014102200,f.example.com,567",
                    "2014102200,g.example.com,11",
                    "2014102200,h.example.com,251",
                    "2014102200,i.example.com,963",
                    "2014102200,j.example.com,333",
                    "2014102300,a.example.com,100",
                    "2014102300,b.exmaple.com,50",
                    "2014102300,c.example.com,200",
                    "2014102300,d.example.com,250",
                    "2014102300,e.example.com,123",
                    "2014102300,f.example.com,567",
                    "2014102300,g.example.com,11",
                    "2014102300,h.example.com,251",
                    "2014102300,i.example.com,963",
                    "2014102300,j.example.com,333"
                ),
                null,
                new StringInputRowParser(
                    new CSVParseSpec(
                        new TimestampSpec("timestamp", "yyyyMMddHH", null),
                        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null),
                        null,
                        ImmutableList.of("timestamp", "host", "visited_num"),
                        false,
                        0
                    ),
                    null
                ),
                null,
                aggs1,
                "website"
            },
            {
                // Hashed partitioning: one day-segment with four {partitionNum, partitions} shards.
                false,
                "hashed",
                "2014-10-22T00:00:00Z/P1D",
                new Integer[][][]{
                    {
                        {0, 4},
                        {1, 4},
                        {2, 4},
                        {3, 4}
                    }
                },
                ImmutableList.of(
                    "2014102200,a.example.com,100",
                    "2014102201,b.exmaple.com,50",
                    "2014102202,c.example.com,200",
                    "2014102203,d.example.com,250",
                    "2014102204,e.example.com,123",
                    "2014102205,f.example.com,567",
                    "2014102206,g.example.com,11",
                    "2014102207,h.example.com,251",
                    "2014102208,i.example.com,963",
                    "2014102209,j.example.com,333",
                    "2014102210,k.example.com,253",
                    "2014102211,l.example.com,321",
                    "2014102212,m.example.com,3125",
                    "2014102213,n.example.com,234",
                    "2014102214,o.example.com,325",
                    "2014102215,p.example.com,3533",
                    "2014102216,q.example.com,500",
                    "2014102216,q.example.com,87"
                ),
                null,
                new HadoopyStringInputRowParser(
                    new CSVParseSpec(
                        new TimestampSpec("timestamp", "yyyyMMddHH", null),
                        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null),
                        null,
                        ImmutableList.of("timestamp", "host", "visited_num"),
                        false,
                        0
                    )
                ),
                null,
                aggs1,
                "website"
            },
            {
                // Same hashed case but exercising the combiner path.
                true,
                "hashed",
                "2014-10-22T00:00:00Z/P1D",
                new Integer[][][]{
                    {
                        {0, 4},
                        {1, 4},
                        {2, 4},
                        {3, 4}
                    }
                },
                ImmutableList.of(
                    "2014102200,a.example.com,100",
                    "2014102201,b.exmaple.com,50",
                    "2014102202,c.example.com,200",
                    "2014102203,d.example.com,250",
                    "2014102204,e.example.com,123",
                    "2014102205,f.example.com,567",
                    "2014102206,g.example.com,11",
                    "2014102207,h.example.com,251",
                    "2014102208,i.example.com,963",
                    "2014102209,j.example.com,333",
                    "2014102210,k.example.com,253",
                    "2014102211,l.example.com,321",
                    "2014102212,m.example.com,3125",
                    "2014102213,n.example.com,234",
                    "2014102214,o.example.com,325",
                    "2014102215,p.example.com,3533",
                    "2014102216,q.example.com,500",
                    "2014102216,q.example.com,87"
                ),
                null,
                new StringInputRowParser(
                    new CSVParseSpec(
                        new TimestampSpec("timestamp", "yyyyMMddHH", null),
                        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null),
                        null,
                        ImmutableList.of("timestamp", "host", "visited_num"),
                        false,
                        0
                    ),
                    null
                ),
                null,
                aggs1,
                "website"
            },
            {
                // Single-dimension partitioning read back through a SequenceFile input format.
                false,
                "single",
                "2014-10-22T00:00:00Z/P2D",
                new String[][][]{
                    {
                        {null, "c.example.com"},
                        {"c.example.com", "e.example.com"},
                        {"e.example.com", "g.example.com"},
                        {"g.example.com", "i.example.com"},
                        {"i.example.com", null}
                    },
                    {
                        {null, "c.example.com"},
                        {"c.example.com", "e.example.com"},
                        {"e.example.com", "g.example.com"},
                        {"g.example.com", "i.example.com"},
                        {"i.example.com", null}
                    }
                },
                ImmutableList.of(
                    "2014102200,a.example.com,100",
                    "2014102200,b.exmaple.com,50",
                    "2014102200,c.example.com,200",
                    "2014102200,d.example.com,250",
                    "2014102200,e.example.com,123",
                    "2014102200,f.example.com,567",
                    "2014102200,g.example.com,11",
                    "2014102200,h.example.com,251",
                    "2014102200,i.example.com,963",
                    "2014102200,j.example.com,333",
                    "2014102300,a.example.com,100",
                    "2014102300,b.exmaple.com,50",
                    "2014102300,c.example.com,200",
                    "2014102300,d.example.com,250",
                    "2014102300,e.example.com,123",
                    "2014102300,f.example.com,567",
                    "2014102300,g.example.com,11",
                    "2014102300,h.example.com,251",
                    "2014102300,i.example.com,963",
                    "2014102300,j.example.com,333"
                ),
                SequenceFileInputFormat.class.getName(),
                new HadoopyStringInputRowParser(
                    new CSVParseSpec(
                        new TimestampSpec("timestamp", "yyyyMMddHH", null),
                        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null),
                        null,
                        ImmutableList.of("timestamp", "host", "visited_num"),
                        false,
                        0
                    )
                ),
                null,
                aggs1,
                "website"
            },
            {
                // Tests that new indexes inherit the dimension order from previous index
                false,
                "hashed",
                "2014-10-22T00:00:00Z/P1D",
                new Integer[][][]{
                    {
                        {0, 1} // use a single partition, dimension order inheritance is not supported across partitions
                    }
                },
                ImmutableList.of(
                    "{\"ts\":\"2014102200\", \"X\":\"x.example.com\"}",
                    "{\"ts\":\"2014102201\", \"Y\":\"y.example.com\"}",
                    "{\"ts\":\"2014102202\", \"M\":\"m.example.com\"}",
                    "{\"ts\":\"2014102203\", \"Q\":\"q.example.com\"}",
                    "{\"ts\":\"2014102204\", \"B\":\"b.example.com\"}",
                    "{\"ts\":\"2014102205\", \"F\":\"f.example.com\"}"
                ),
                null,
                new StringInputRowParser(
                    new JSONParseSpec(
                        new TimestampSpec("ts", "yyyyMMddHH", null),
                        new DimensionsSpec(null, null, null),
                        null,
                        null
                    ),
                    null
                ),
                1, // force 1 row max per index for easier testing
                aggs2,
                "inherit_dims"
            },
            {
                // Tests that pre-specified dim order is maintained across indexes.
                false,
                "hashed",
                "2014-10-22T00:00:00Z/P1D",
                new Integer[][][]{
                    {
                        {0, 1}
                    }
                },
                ImmutableList.of(
                    "{\"ts\":\"2014102200\", \"X\":\"x.example.com\"}",
                    "{\"ts\":\"2014102201\", \"Y\":\"y.example.com\"}",
                    "{\"ts\":\"2014102202\", \"M\":\"m.example.com\"}",
                    "{\"ts\":\"2014102203\", \"Q\":\"q.example.com\"}",
                    "{\"ts\":\"2014102204\", \"B\":\"b.example.com\"}",
                    "{\"ts\":\"2014102205\", \"F\":\"f.example.com\"}"
                ),
                null,
                new StringInputRowParser(
                    new JSONParseSpec(
                        new TimestampSpec("ts", "yyyyMMddHH", null),
                        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("B", "F", "M", "Q", "X", "Y")), null, null),
                        null,
                        null
                    ),
                    null
                ),
                1, // force 1 row max per index for easier testing
                aggs2,
                "inherit_dims2"
            }
        }
    );
    // Run each baseConstructor with/without forceExtendableShardSpecs.
    final List<Object[]> constructors = Lists.newArrayList();
    for (Object[] baseConstructor : baseConstructors) {
      for (int forceExtendableShardSpecs = 0; forceExtendableShardSpecs < 2 ; forceExtendableShardSpecs++) {
        final Object[] fullConstructor = new Object[baseConstructor.length + 1];
        System.arraycopy(baseConstructor, 0, fullConstructor, 0, baseConstructor.length);
        fullConstructor[baseConstructor.length] = forceExtendableShardSpecs == 0;
        constructors.add(fullConstructor);
      }
    }
    return constructors;
  }
  @Rule
  public final TemporaryFolder temporaryFolder = new TemporaryFolder();

  // One parameter row from constructFeed(), injected by the Parameterized runner.
  private final boolean useCombiner;
  private final String partitionType;
  private final Interval interval;
  private final Object[][][] shardInfoForEachSegment;
  private final List<String> data;
  // May be null: setUp() only sets "inputFormat" in the input spec when non-null.
  private final String inputFormatName;
  private final InputRowParser inputRowParser;
  // May be null: passed straight through to HadoopTuningConfig.
  private final Integer maxRowsInMemory;
  private final AggregatorFactory[] aggs;
  private final String datasourceName;
  private final boolean forceExtendableShardSpecs;

  // Mutable state rebuilt by setUp() for every test run.
  private ObjectMapper mapper;
  private HadoopDruidIndexerConfig config;
  private File dataFile;
  private File tmpDir;
public IndexGeneratorJobTest(
boolean useCombiner,
String partitionType,
String interval,
Object[][][] shardInfoForEachSegment,
List<String> data,
String inputFormatName,
InputRowParser inputRowParser,
Integer maxRowsInMemory,
AggregatorFactory[] aggs,
String datasourceName,
boolean forceExtendableShardSpecs
) throws IOException
{
this.useCombiner = useCombiner;
this.partitionType = partitionType;
this.shardInfoForEachSegment = shardInfoForEachSegment;
this.interval = new Interval(interval);
this.data = data;
this.inputFormatName = inputFormatName;
this.inputRowParser = inputRowParser;
this.maxRowsInMemory = maxRowsInMemory;
this.aggs = aggs;
this.datasourceName = datasourceName;
this.forceExtendableShardSpecs = forceExtendableShardSpecs;
}
private void writeDataToLocalSequenceFile(File outputFile, List<String> data) throws IOException
{
Configuration conf = new Configuration();
LocalFileSystem fs = FileSystem.getLocal(conf);
Writer fileWriter = SequenceFile.createWriter(
fs,
conf,
new Path(outputFile.getAbsolutePath()),
BytesWritable.class,
BytesWritable.class,
SequenceFile.CompressionType.NONE,
(CompressionCodec) null
);
int keyCount = 10;
for (String line : data) {
ByteBuffer buf = ByteBuffer.allocate(4);
buf.putInt(keyCount);
BytesWritable key = new BytesWritable(buf.array());
BytesWritable value = new BytesWritable(StringUtils.toUtf8(line));
fileWriter.append(key, value);
keyCount += 1;
}
fileWriter.close();
}
  @Before
  public void setUp() throws Exception
  {
    // Register the shard-spec subtypes referenced by the partitionType strings.
    mapper = HadoopDruidIndexerConfig.JSON_MAPPER;
    mapper.registerSubtypes(new NamedType(HashBasedNumberedShardSpec.class, "hashed"));
    mapper.registerSubtypes(new NamedType(SingleDimensionShardSpec.class, "single"));

    dataFile = temporaryFolder.newFile();
    tmpDir = temporaryFolder.newFolder();

    // Static-path input spec pointing at the data file written below.
    HashMap<String, Object> inputSpec = new HashMap<String, Object>();
    inputSpec.put("paths", dataFile.getCanonicalPath());
    inputSpec.put("type", "static");
    if (inputFormatName != null) {
      inputSpec.put("inputFormat", inputFormatName);
    }

    // SequenceFile cases need binary key/value records; everything else is plain text.
    if (SequenceFileInputFormat.class.getName().equals(inputFormatName)) {
      writeDataToLocalSequenceFile(dataFile, data);
    } else {
      FileUtils.writeLines(dataFile, data);
    }

    config = new HadoopDruidIndexerConfig(
        new HadoopIngestionSpec(
            new DataSchema(
                datasourceName,
                // Parser is passed as a generic Map so the schema round-trips through JSON.
                mapper.convertValue(
                    inputRowParser,
                    Map.class
                ),
                aggs,
                new UniformGranularitySpec(
                    Granularities.DAY, Granularities.NONE, ImmutableList.of(this.interval)
                ),
                mapper
            ),
            new HadoopIOConfig(
                ImmutableMap.copyOf(inputSpec),
                null,
                tmpDir.getCanonicalPath()
            ),
            new HadoopTuningConfig(
                tmpDir.getCanonicalPath(),
                null,
                null,
                null,
                null,
                maxRowsInMemory,
                false,
                false,
                false,
                false,
                ImmutableMap.of(JobContext.NUM_REDUCES, "0"), //verifies that set num reducers is ignored
                false,
                useCombiner,
                null,
                true,
                null,
                forceExtendableShardSpecs,
                false,
                null
            )
        )
    );
    config.setShardSpecs(loadShardSpecs(partitionType, shardInfoForEachSegment));
    // Round-trip through the schema to exercise serialization of the full config.
    config = HadoopDruidIndexerConfig.fromSpec(config.getSchema());
  }
private List<ShardSpec> constructShardSpecFromShardInfo(String partitionType, Object[][] shardInfoForEachShard)
{
List<ShardSpec> specs = Lists.newArrayList();
if (partitionType.equals("hashed")) {
for (Integer[] shardInfo : (Integer[][]) shardInfoForEachShard) {
specs.add(new HashBasedNumberedShardSpec(shardInfo[0], shardInfo[1], null, HadoopDruidIndexerConfig.JSON_MAPPER));
}
} else if (partitionType.equals("single")) {
int partitionNum = 0;
for (String[] shardInfo : (String[][]) shardInfoForEachShard) {
specs.add(new SingleDimensionShardSpec("host", shardInfo[0], shardInfo[1], partitionNum++));
}
} else {
throw new RE("Invalid partition type:[%s]", partitionType);
}
return specs;
}
/**
 * Expands the raw shard-info tables into per-segment {@link HadoopyShardSpec} lists, keyed by
 * the segment interval's start millis. The shard counter is global across all segments, matching
 * how the indexer numbers shards.
 *
 * @param partitionType "hashed" or "single"; passed through to shard-spec construction
 * @param shardInfoForEachShard one raw shard-info table per segment granularity interval
 */
private Map<Long, List<HadoopyShardSpec>> loadShardSpecs(
    String partitionType,
    Object[][][] shardInfoForEachShard
)
{
  // NOTE(review): keys are Longs but the comparator is a DateTimeComparator; Joda converts
  // Long millis, so ordering works — confirm if this map is ever given non-Long keys.
  Map<Long, List<HadoopyShardSpec>> result = Maps.newTreeMap(DateTimeComparator.getInstance());
  int globalShardCount = 0;
  int segmentIndex = 0;
  for (Interval segmentInterval : config.getSegmentGranularIntervals().get()) {
    List<ShardSpec> rawSpecs = constructShardSpecFromShardInfo(partitionType, shardInfoForEachShard[segmentIndex++]);
    List<HadoopyShardSpec> hadoopySpecs = Lists.newArrayListWithExpectedSize(rawSpecs.size());
    for (ShardSpec rawSpec : rawSpecs) {
      hadoopySpecs.add(new HadoopyShardSpec(rawSpec, globalShardCount++));
    }
    result.put(segmentInterval.getStartMillis(), hadoopySpecs);
  }
  return result;
}
@Test
public void testIndexGeneratorJob() throws IOException
{
  // Run the full index-generation job for the parameterized config and verify its output segments.
  IndexGeneratorJob job = new IndexGeneratorJob(config);
  verifyJob(job);
}
/**
 * Runs the given job and verifies the segments it wrote to local disk: one output folder per
 * day of the test interval, one numbered subfolder per expected shard, each containing a
 * descriptor.json / index.zip pair whose deserialized {@link DataSegment} matches the
 * parameterized datasource, version, interval, and shard-spec expectations.
 */
private void verifyJob(IndexGeneratorJob job) throws IOException
{
  JobHelper.runJobs(ImmutableList.<Jobby>of(job), config);
  int segmentNum = 0;
  // One segment (and one shardInfoForEachSegment row) is expected per day of the interval.
  for (DateTime currTime = interval.getStart(); currTime.isBefore(interval.getEnd()); currTime = currTime.plusDays(1)) {
    Object[][] shardInfo = shardInfoForEachSegment[segmentNum++];
    // Output layout: <segmentOutputPath>/<dataSource>/<start>_<end>/<version>
    File segmentOutputFolder = new File(
        StringUtils.format(
            "%s/%s/%s_%s/%s",
            config.getSchema().getIOConfig().getSegmentOutputPath(),
            config.getSchema().getDataSchema().getDataSource(),
            currTime.toString(),
            currTime.plusDays(1).toString(),
            config.getSchema().getTuningConfig().getVersion()
        )
    );
    Assert.assertTrue(segmentOutputFolder.exists());
    // Exactly one partition directory per expected shard.
    Assert.assertEquals(shardInfo.length, segmentOutputFolder.list().length);
    for (int partitionNum = 0; partitionNum < shardInfo.length; ++partitionNum) {
      File individualSegmentFolder = new File(segmentOutputFolder, Integer.toString(partitionNum));
      Assert.assertTrue(individualSegmentFolder.exists());
      File descriptor = new File(individualSegmentFolder, "descriptor.json");
      File indexZip = new File(individualSegmentFolder, "index.zip");
      Assert.assertTrue(descriptor.exists());
      Assert.assertTrue(indexZip.exists());
      // The descriptor must deserialize into a DataSegment consistent with the test config.
      DataSegment dataSegment = mapper.readValue(descriptor, DataSegment.class);
      Assert.assertEquals(config.getSchema().getTuningConfig().getVersion(), dataSegment.getVersion());
      Assert.assertEquals(new Interval(currTime, currTime.plusDays(1)), dataSegment.getInterval());
      Assert.assertEquals("local", dataSegment.getLoadSpec().get("type"));
      Assert.assertEquals(indexZip.getCanonicalPath(), dataSegment.getLoadSpec().get("path"));
      Assert.assertEquals(Integer.valueOf(9), dataSegment.getBinaryVersion());
      // Dimension/metric expectations depend on which test datasource was parameterized in.
      if (datasourceName.equals("website")) {
        Assert.assertEquals("website", dataSegment.getDataSource());
        Assert.assertEquals("host", dataSegment.getDimensions().get(0));
        Assert.assertEquals("visited_num", dataSegment.getMetrics().get(0));
        Assert.assertEquals("unique_hosts", dataSegment.getMetrics().get(1));
      } else if (datasourceName.equals("inherit_dims")) {
        Assert.assertEquals("inherit_dims", dataSegment.getDataSource());
        Assert.assertEquals(ImmutableList.of("X", "Y", "M", "Q", "B", "F"), dataSegment.getDimensions());
        Assert.assertEquals("count", dataSegment.getMetrics().get(0));
      } else if (datasourceName.equals("inherit_dims2")) {
        Assert.assertEquals("inherit_dims2", dataSegment.getDataSource());
        Assert.assertEquals(ImmutableList.of("B", "F", "M", "Q", "X", "Y"), dataSegment.getDimensions());
        Assert.assertEquals("count", dataSegment.getMetrics().get(0));
      } else {
        Assert.fail("Test did not specify supported datasource name");
      }
      // forceExtendableShardSpecs overrides the partition-type-specific shard spec with a
      // plain NumberedShardSpec; otherwise the spec type must match the partition type.
      if (forceExtendableShardSpecs) {
        NumberedShardSpec spec = (NumberedShardSpec) dataSegment.getShardSpec();
        Assert.assertEquals(partitionNum, spec.getPartitionNum());
        Assert.assertEquals(shardInfo.length, spec.getPartitions());
      } else if (partitionType.equals("hashed")) {
        Integer[] hashShardInfo = (Integer[]) shardInfo[partitionNum];
        HashBasedNumberedShardSpec spec = (HashBasedNumberedShardSpec) dataSegment.getShardSpec();
        Assert.assertEquals((int) hashShardInfo[0], spec.getPartitionNum());
        Assert.assertEquals((int) hashShardInfo[1], spec.getPartitions());
      } else if (partitionType.equals("single")) {
        String[] singleDimensionShardInfo = (String[]) shardInfo[partitionNum];
        SingleDimensionShardSpec spec = (SingleDimensionShardSpec) dataSegment.getShardSpec();
        Assert.assertEquals(singleDimensionShardInfo[0], spec.getStart());
        Assert.assertEquals(singleDimensionShardInfo[1], spec.getEnd());
      } else {
        throw new RE("Invalid partition type:[%s]", partitionType);
      }
    }
  }
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2018_07_01.implementation;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.Validator;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.HTTP;
import retrofit2.http.Path;
import retrofit2.http.PUT;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in ServiceEndpointPolicyDefinitions.
*/
public class ServiceEndpointPolicyDefinitionsInner {
/** The Retrofit service to perform REST calls. */
private ServiceEndpointPolicyDefinitionsService service;
/** The service client containing this operation class. */
private NetworkManagementClientImpl client;
/**
 * Creates a ServiceEndpointPolicyDefinitionsInner bound to the given Retrofit instance.
 *
 * @param retrofit the Retrofit instance built from a Retrofit Builder.
 * @param client the instance of the service client containing this operation class.
 */
public ServiceEndpointPolicyDefinitionsInner(Retrofit retrofit, NetworkManagementClientImpl client) {
    this.client = client;
    this.service = retrofit.create(ServiceEndpointPolicyDefinitionsService.class);
}
/**
 * The interface defining all the services for ServiceEndpointPolicyDefinitions to be
 * used by Retrofit to perform actually REST calls.
 */
interface ServiceEndpointPolicyDefinitionsService {
    // DELETE on a specific policy definition; @HTTP with hasBody=true is used because
    // Retrofit's @DELETE does not allow a request body.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2018_07_01.ServiceEndpointPolicyDefinitions delete" })
    @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}", method = "DELETE", hasBody = true)
    Observable<Response<ResponseBody>> delete(@Path("resourceGroupName") String resourceGroupName, @Path("serviceEndpointPolicyName") String serviceEndpointPolicyName, @Path("serviceEndpointPolicyDefinitionName") String serviceEndpointPolicyDefinitionName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    // Same DELETE, used by the non-polling "begin" variant of the long-running operation.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2018_07_01.ServiceEndpointPolicyDefinitions beginDelete" })
    @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}", method = "DELETE", hasBody = true)
    Observable<Response<ResponseBody>> beginDelete(@Path("resourceGroupName") String resourceGroupName, @Path("serviceEndpointPolicyName") String serviceEndpointPolicyName, @Path("serviceEndpointPolicyDefinitionName") String serviceEndpointPolicyDefinitionName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    // GET a single policy definition.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2018_07_01.ServiceEndpointPolicyDefinitions get" })
    @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}")
    Observable<Response<ResponseBody>> get(@Path("resourceGroupName") String resourceGroupName, @Path("serviceEndpointPolicyName") String serviceEndpointPolicyName, @Path("serviceEndpointPolicyDefinitionName") String serviceEndpointPolicyDefinitionName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    // PUT to create or update a policy definition (long-running operation).
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2018_07_01.ServiceEndpointPolicyDefinitions createOrUpdate" })
    @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}")
    Observable<Response<ResponseBody>> createOrUpdate(@Path("resourceGroupName") String resourceGroupName, @Path("serviceEndpointPolicyName") String serviceEndpointPolicyName, @Path("serviceEndpointPolicyDefinitionName") String serviceEndpointPolicyDefinitionName, @Path("subscriptionId") String subscriptionId, @Body ServiceEndpointPolicyDefinitionInner serviceEndpointPolicyDefinitions, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    // Same PUT, used by the non-polling "begin" variant.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2018_07_01.ServiceEndpointPolicyDefinitions beginCreateOrUpdate" })
    @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}")
    Observable<Response<ResponseBody>> beginCreateOrUpdate(@Path("resourceGroupName") String resourceGroupName, @Path("serviceEndpointPolicyName") String serviceEndpointPolicyName, @Path("serviceEndpointPolicyDefinitionName") String serviceEndpointPolicyDefinitionName, @Path("subscriptionId") String subscriptionId, @Body ServiceEndpointPolicyDefinitionInner serviceEndpointPolicyDefinitions, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    // GET all definitions in a policy (first page).
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2018_07_01.ServiceEndpointPolicyDefinitions listByResourceGroup" })
    @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions")
    Observable<Response<ResponseBody>> listByResourceGroup(@Path("resourceGroupName") String resourceGroupName, @Path("serviceEndpointPolicyName") String serviceEndpointPolicyName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    // Follows the server-provided nextLink URL for subsequent pages.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2018_07_01.ServiceEndpointPolicyDefinitions listByResourceGroupNext" })
    @GET
    Observable<Response<ResponseBody>> listByResourceGroupNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
}
/**
 * Deletes the specified ServiceEndpoint policy definitions, blocking until the
 * long-running operation completes.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the Service Endpoint Policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 */
public void delete(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName) {
    Observable<ServiceResponse<Void>> observable =
        deleteWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName);
    // last() waits for the terminal emission of the polling sequence.
    observable.toBlocking().last().body();
}
/**
 * Deletes the specified ServiceEndpoint policy definitions, reporting the outcome
 * through the supplied callback.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the Service Endpoint Policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<Void> deleteAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName, final ServiceCallback<Void> serviceCallback) {
    Observable<ServiceResponse<Void>> observable =
        deleteWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName);
    return ServiceFuture.fromResponse(observable, serviceCallback);
}
/**
 * Deletes the specified ServiceEndpoint policy definitions, exposing the operation
 * as an observable that emits once the body (Void) when complete.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the Service Endpoint Policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<Void> deleteAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName) {
    // Strip the ServiceResponse wrapper, keeping only the (Void) body.
    Func1<ServiceResponse<Void>, Void> extractBody = new Func1<ServiceResponse<Void>, Void>() {
        @Override
        public Void call(ServiceResponse<Void> response) {
            return response.body();
        }
    };
    return deleteWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName).map(extractBody);
}
/**
 * Deletes the specified ServiceEndpoint policy definitions. This variant drives the
 * long-running operation to completion via the Azure client's polling helper.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the Service Endpoint Policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<ServiceResponse<Void>> deleteWithServiceResponseAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName) {
    // Validate required parameters before issuing the request.
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serviceEndpointPolicyName == null) {
        throw new IllegalArgumentException("Parameter serviceEndpointPolicyName is required and cannot be null.");
    }
    if (serviceEndpointPolicyDefinitionName == null) {
        throw new IllegalArgumentException("Parameter serviceEndpointPolicyDefinitionName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    final String apiVersion = "2018-07-01";
    // Hand the raw DELETE call to the LRO helper, which polls until a terminal state.
    return client.getAzureClient().getPostOrDeleteResultAsync(
        service.delete(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent()),
        new TypeToken<Void>() { }.getType());
}
/**
 * Issues the delete request for the specified ServiceEndpoint policy definitions
 * without polling, blocking for the initial response only.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the Service Endpoint Policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 */
public void beginDelete(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName) {
    Observable<ServiceResponse<Void>> observable =
        beginDeleteWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName);
    // single() — exactly one response is expected from the non-polling call.
    observable.toBlocking().single().body();
}
/**
 * Issues the delete request for the specified ServiceEndpoint policy definitions,
 * reporting the initial response through the supplied callback.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the Service Endpoint Policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<Void> beginDeleteAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName, final ServiceCallback<Void> serviceCallback) {
    Observable<ServiceResponse<Void>> observable =
        beginDeleteWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName);
    return ServiceFuture.fromResponse(observable, serviceCallback);
}
/**
 * Issues the delete request for the specified ServiceEndpoint policy definitions,
 * exposing the initial response as an observable emitting its (Void) body.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the Service Endpoint Policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceResponse} object if successful.
 */
public Observable<Void> beginDeleteAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName) {
    // Strip the ServiceResponse wrapper, keeping only the (Void) body.
    Func1<ServiceResponse<Void>, Void> extractBody = new Func1<ServiceResponse<Void>, Void>() {
        @Override
        public Void call(ServiceResponse<Void> response) {
            return response.body();
        }
    };
    return beginDeleteWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName).map(extractBody);
}
/**
 * Issues the raw delete request for the specified ServiceEndpoint policy definitions
 * and maps the HTTP response into a typed ServiceResponse (no LRO polling).
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the Service Endpoint Policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceResponse} object if successful.
 */
public Observable<ServiceResponse<Void>> beginDeleteWithServiceResponseAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName) {
    // Validate required parameters before issuing the request.
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serviceEndpointPolicyName == null) {
        throw new IllegalArgumentException("Parameter serviceEndpointPolicyName is required and cannot be null.");
    }
    if (serviceEndpointPolicyDefinitionName == null) {
        throw new IllegalArgumentException("Parameter serviceEndpointPolicyDefinitionName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    final String apiVersion = "2018-07-01";
    Observable<Response<ResponseBody>> rawCall =
        service.beginDelete(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent());
    return rawCall.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
        @Override
        public Observable<ServiceResponse<Void>> call(Response<ResponseBody> rawResponse) {
            try {
                // Deserialization/status-code handling is delegated; failures become onError.
                return Observable.just(beginDeleteDelegate(rawResponse));
            } catch (Throwable t) {
                return Observable.error(t);
            }
        }
    });
}
// Maps the raw delete response to a typed ServiceResponse: 200/202/204 succeed with no
// body; anything else is raised as a CloudException.
private ServiceResponse<Void> beginDeleteDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient()
        .responseBuilderFactory()
        .<Void, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<Void>() { }.getType())
        .register(202, new TypeToken<Void>() { }.getType())
        .register(204, new TypeToken<Void>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}
/**
 * Get the specified service endpoint policy definitions from service endpoint policy,
 * blocking for the response.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy name.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition name.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the ServiceEndpointPolicyDefinitionInner object if successful.
 */
public ServiceEndpointPolicyDefinitionInner get(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName) {
    Observable<ServiceResponse<ServiceEndpointPolicyDefinitionInner>> observable =
        getWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName);
    return observable.toBlocking().single().body();
}
/**
 * Get the specified service endpoint policy definitions from service endpoint policy,
 * reporting the result through the supplied callback.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy name.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition name.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<ServiceEndpointPolicyDefinitionInner> getAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName, final ServiceCallback<ServiceEndpointPolicyDefinitionInner> serviceCallback) {
    Observable<ServiceResponse<ServiceEndpointPolicyDefinitionInner>> observable =
        getWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName);
    return ServiceFuture.fromResponse(observable, serviceCallback);
}
/**
 * Get the specified service endpoint policy definitions from service endpoint policy,
 * as an observable emitting the definition body.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy name.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition name.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ServiceEndpointPolicyDefinitionInner object
 */
public Observable<ServiceEndpointPolicyDefinitionInner> getAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName) {
    // Strip the ServiceResponse wrapper, keeping only the definition body.
    Func1<ServiceResponse<ServiceEndpointPolicyDefinitionInner>, ServiceEndpointPolicyDefinitionInner> extractBody =
        new Func1<ServiceResponse<ServiceEndpointPolicyDefinitionInner>, ServiceEndpointPolicyDefinitionInner>() {
            @Override
            public ServiceEndpointPolicyDefinitionInner call(ServiceResponse<ServiceEndpointPolicyDefinitionInner> response) {
                return response.body();
            }
        };
    return getWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName).map(extractBody);
}
/**
 * Get the specified service endpoint policy definitions from service endpoint policy,
 * mapping the HTTP response into a typed ServiceResponse.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy name.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition name.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ServiceEndpointPolicyDefinitionInner object
 */
public Observable<ServiceResponse<ServiceEndpointPolicyDefinitionInner>> getWithServiceResponseAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName) {
    // Validate required parameters before issuing the request.
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serviceEndpointPolicyName == null) {
        throw new IllegalArgumentException("Parameter serviceEndpointPolicyName is required and cannot be null.");
    }
    if (serviceEndpointPolicyDefinitionName == null) {
        throw new IllegalArgumentException("Parameter serviceEndpointPolicyDefinitionName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    final String apiVersion = "2018-07-01";
    Observable<Response<ResponseBody>> rawCall =
        service.get(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent());
    return rawCall.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ServiceEndpointPolicyDefinitionInner>>>() {
        @Override
        public Observable<ServiceResponse<ServiceEndpointPolicyDefinitionInner>> call(Response<ResponseBody> rawResponse) {
            try {
                // Deserialization/status-code handling is delegated; failures become onError.
                return Observable.just(getDelegate(rawResponse));
            } catch (Throwable t) {
                return Observable.error(t);
            }
        }
    });
}
// Maps the raw GET response to a typed ServiceResponse: 200 deserializes the body;
// anything else is raised as a CloudException.
private ServiceResponse<ServiceEndpointPolicyDefinitionInner> getDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient()
        .responseBuilderFactory()
        .<ServiceEndpointPolicyDefinitionInner, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<ServiceEndpointPolicyDefinitionInner>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}
/**
 * Creates or updates a service endpoint policy definition in the specified service endpoint
 * policy, blocking until the long-running operation completes.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition name.
 * @param serviceEndpointPolicyDefinitions Parameters supplied to the create or update service endpoint policy operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the ServiceEndpointPolicyDefinitionInner object if successful.
 */
public ServiceEndpointPolicyDefinitionInner createOrUpdate(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName, ServiceEndpointPolicyDefinitionInner serviceEndpointPolicyDefinitions) {
    Observable<ServiceResponse<ServiceEndpointPolicyDefinitionInner>> observable =
        createOrUpdateWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName, serviceEndpointPolicyDefinitions);
    // last() waits for the terminal emission of the polling sequence.
    return observable.toBlocking().last().body();
}
/**
 * Creates or updates a service endpoint policy definition in the specified service endpoint
 * policy, reporting the outcome through the supplied callback.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition name.
 * @param serviceEndpointPolicyDefinitions Parameters supplied to the create or update service endpoint policy operation.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<ServiceEndpointPolicyDefinitionInner> createOrUpdateAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName, ServiceEndpointPolicyDefinitionInner serviceEndpointPolicyDefinitions, final ServiceCallback<ServiceEndpointPolicyDefinitionInner> serviceCallback) {
    Observable<ServiceResponse<ServiceEndpointPolicyDefinitionInner>> observable =
        createOrUpdateWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName, serviceEndpointPolicyDefinitions);
    return ServiceFuture.fromResponse(observable, serviceCallback);
}
/**
 * Creates or updates a service endpoint policy definition in the specified service endpoint
 * policy, as an observable emitting the resulting definition body.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition name.
 * @param serviceEndpointPolicyDefinitions Parameters supplied to the create or update service endpoint policy operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<ServiceEndpointPolicyDefinitionInner> createOrUpdateAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName, ServiceEndpointPolicyDefinitionInner serviceEndpointPolicyDefinitions) {
    // Strip the ServiceResponse wrapper, keeping only the definition body.
    Func1<ServiceResponse<ServiceEndpointPolicyDefinitionInner>, ServiceEndpointPolicyDefinitionInner> extractBody =
        new Func1<ServiceResponse<ServiceEndpointPolicyDefinitionInner>, ServiceEndpointPolicyDefinitionInner>() {
            @Override
            public ServiceEndpointPolicyDefinitionInner call(ServiceResponse<ServiceEndpointPolicyDefinitionInner> response) {
                return response.body();
            }
        };
    return createOrUpdateWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName, serviceEndpointPolicyDefinitions).map(extractBody);
}
/**
 * Creates or updates a service endpoint policy definition in the specified service endpoint
 * policy. This variant drives the long-running PUT to completion via the Azure client's
 * polling helper.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition name.
 * @param serviceEndpointPolicyDefinitions Parameters supplied to the create or update service endpoint policy operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<ServiceResponse<ServiceEndpointPolicyDefinitionInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName, ServiceEndpointPolicyDefinitionInner serviceEndpointPolicyDefinitions) {
    // Validate required parameters before issuing the request.
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serviceEndpointPolicyName == null) {
        throw new IllegalArgumentException("Parameter serviceEndpointPolicyName is required and cannot be null.");
    }
    if (serviceEndpointPolicyDefinitionName == null) {
        throw new IllegalArgumentException("Parameter serviceEndpointPolicyDefinitionName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (serviceEndpointPolicyDefinitions == null) {
        throw new IllegalArgumentException("Parameter serviceEndpointPolicyDefinitions is required and cannot be null.");
    }
    // Run client-side validation on the payload before sending it.
    Validator.validate(serviceEndpointPolicyDefinitions);
    final String apiVersion = "2018-07-01";
    // Hand the raw PUT call to the LRO helper, which polls until a terminal state.
    return client.getAzureClient().getPutOrPatchResultAsync(
        service.createOrUpdate(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName, this.client.subscriptionId(), serviceEndpointPolicyDefinitions, apiVersion, this.client.acceptLanguage(), this.client.userAgent()),
        new TypeToken<ServiceEndpointPolicyDefinitionInner>() { }.getType());
}
/**
 * Issues the create-or-update request for a service endpoint policy definition without
 * polling, blocking for the initial response only.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition name.
 * @param serviceEndpointPolicyDefinitions Parameters supplied to the create or update service endpoint policy operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the ServiceEndpointPolicyDefinitionInner object if successful.
 */
public ServiceEndpointPolicyDefinitionInner beginCreateOrUpdate(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName, ServiceEndpointPolicyDefinitionInner serviceEndpointPolicyDefinitions) {
    Observable<ServiceResponse<ServiceEndpointPolicyDefinitionInner>> observable =
        beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName, serviceEndpointPolicyDefinitions);
    // single() — exactly one response is expected from the non-polling call.
    return observable.toBlocking().single().body();
}
/**
 * Creates or updates a service endpoint policy definition in the specified service endpoint policy,
 * reporting the outcome through the supplied callback.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition name.
 * @param serviceEndpointPolicyDefinitions Parameters supplied to the create or update service endpoint policy operation.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<ServiceEndpointPolicyDefinitionInner> beginCreateOrUpdateAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName, ServiceEndpointPolicyDefinitionInner serviceEndpointPolicyDefinitions, final ServiceCallback<ServiceEndpointPolicyDefinitionInner> serviceCallback) {
    // Bridge the observable pipeline to the callback-style ServiceFuture API.
    Observable<ServiceResponse<ServiceEndpointPolicyDefinitionInner>> responseObservable =
        beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName, serviceEndpointPolicyDefinitions);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Creates or updates a service endpoint policy definition in the specified service endpoint policy.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition name.
 * @param serviceEndpointPolicyDefinitions Parameters supplied to the create or update service endpoint policy operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ServiceEndpointPolicyDefinitionInner object
 */
public Observable<ServiceEndpointPolicyDefinitionInner> beginCreateOrUpdateAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName, ServiceEndpointPolicyDefinitionInner serviceEndpointPolicyDefinitions) {
    // Projection that strips the ServiceResponse wrapper and surfaces only the payload.
    Func1<ServiceResponse<ServiceEndpointPolicyDefinitionInner>, ServiceEndpointPolicyDefinitionInner> unwrapBody =
        new Func1<ServiceResponse<ServiceEndpointPolicyDefinitionInner>, ServiceEndpointPolicyDefinitionInner>() {
            @Override
            public ServiceEndpointPolicyDefinitionInner call(ServiceResponse<ServiceEndpointPolicyDefinitionInner> serviceResponse) {
                return serviceResponse.body();
            }
        };
    return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName, serviceEndpointPolicyDefinitions)
        .map(unwrapBody);
}
/**
 * Creates or updates a service endpoint policy definition in the specified service endpoint policy.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy.
 * @param serviceEndpointPolicyDefinitionName The name of the service endpoint policy definition name.
 * @param serviceEndpointPolicyDefinitions Parameters supplied to the create or update service endpoint policy operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ServiceEndpointPolicyDefinitionInner object
 */
public Observable<ServiceResponse<ServiceEndpointPolicyDefinitionInner>> beginCreateOrUpdateWithServiceResponseAsync(String resourceGroupName, String serviceEndpointPolicyName, String serviceEndpointPolicyDefinitionName, ServiceEndpointPolicyDefinitionInner serviceEndpointPolicyDefinitions) {
    // Fail fast on every missing required argument before any network call is made.
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serviceEndpointPolicyName == null) {
        throw new IllegalArgumentException("Parameter serviceEndpointPolicyName is required and cannot be null.");
    }
    if (serviceEndpointPolicyDefinitionName == null) {
        throw new IllegalArgumentException("Parameter serviceEndpointPolicyDefinitionName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (serviceEndpointPolicyDefinitions == null) {
        throw new IllegalArgumentException("Parameter serviceEndpointPolicyDefinitions is required and cannot be null.");
    }
    // Structural validation of the request payload (annotation-driven).
    Validator.validate(serviceEndpointPolicyDefinitions);
    final String apiVersion = "2018-07-01";
    return service.beginCreateOrUpdate(resourceGroupName, serviceEndpointPolicyName, serviceEndpointPolicyDefinitionName, this.client.subscriptionId(), serviceEndpointPolicyDefinitions, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ServiceEndpointPolicyDefinitionInner>>>() {
            @Override
            public Observable<ServiceResponse<ServiceEndpointPolicyDefinitionInner>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize the raw HTTP response; non-2xx codes surface as CloudException here.
                    ServiceResponse<ServiceEndpointPolicyDefinitionInner> clientResponse = beginCreateOrUpdateDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    // Propagate deserialization/service errors through the observable, not the stack.
                    return Observable.error(t);
                }
            }
        });
}
/**
 * Deserializes the raw create-or-update HTTP response.
 * Accepts 200 (updated) and 201 (created) as success; anything else is mapped to CloudException.
 *
 * @param response the raw Retrofit response to decode
 * @return the typed service response
 * @throws CloudException if the service rejected the request
 * @throws IOException if the response body cannot be read
 * @throws IllegalArgumentException if the response is malformed
 */
private ServiceResponse<ServiceEndpointPolicyDefinitionInner> beginCreateOrUpdateDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<ServiceEndpointPolicyDefinitionInner, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<ServiceEndpointPolicyDefinitionInner>() { }.getType())
        .register(201, new TypeToken<ServiceEndpointPolicyDefinitionInner>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}
/**
 * Gets all service endpoint policy definitions in a service end point policy.
 * Blocks for the first page; subsequent pages are fetched lazily as the list is traversed.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy name.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList<ServiceEndpointPolicyDefinitionInner> object if successful.
 */
public PagedList<ServiceEndpointPolicyDefinitionInner> listByResourceGroup(final String resourceGroupName, final String serviceEndpointPolicyName) {
    ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>> firstPage =
        listByResourceGroupSinglePageAsync(resourceGroupName, serviceEndpointPolicyName)
            .toBlocking()
            .single();
    // PagedList pulls further pages on demand via the next-link callback below.
    return new PagedList<ServiceEndpointPolicyDefinitionInner>(firstPage.body()) {
        @Override
        public Page<ServiceEndpointPolicyDefinitionInner> nextPage(String nextPageLink) {
            return listByResourceGroupNextSinglePageAsync(nextPageLink).toBlocking().single().body();
        }
    };
}
/**
 * Gets all service endpoint policy definitions in a service end point policy,
 * reporting each page through the supplied callback.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy name.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<ServiceEndpointPolicyDefinitionInner>> listByResourceGroupAsync(final String resourceGroupName, final String serviceEndpointPolicyName, final ListOperationCallback<ServiceEndpointPolicyDefinitionInner> serviceCallback) {
    // Callback used by the paging machinery to follow the service-supplied next links.
    Func1<String, Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>>> fetchNextPage =
        new Func1<String, Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>> call(String nextPageLink) {
                return listByResourceGroupNextSinglePageAsync(nextPageLink);
            }
        };
    return AzureServiceFuture.fromPageResponse(
        listByResourceGroupSinglePageAsync(resourceGroupName, serviceEndpointPolicyName),
        fetchNextPage,
        serviceCallback);
}
/**
 * Gets all service endpoint policy definitions in a service end point policy.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy name.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList<ServiceEndpointPolicyDefinitionInner> object
 */
public Observable<Page<ServiceEndpointPolicyDefinitionInner>> listByResourceGroupAsync(final String resourceGroupName, final String serviceEndpointPolicyName) {
    // Strip the ServiceResponse envelope so subscribers see only the page payloads.
    Func1<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>, Page<ServiceEndpointPolicyDefinitionInner>> extractPage =
        new Func1<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>, Page<ServiceEndpointPolicyDefinitionInner>>() {
            @Override
            public Page<ServiceEndpointPolicyDefinitionInner> call(ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>> serviceResponse) {
                return serviceResponse.body();
            }
        };
    return listByResourceGroupWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName)
        .map(extractPage);
}
/**
 * Gets all service endpoint policy definitions in a service end point policy.
 * Emits the first page, then keeps concatenating follow-up pages while a next link exists.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy name.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList<ServiceEndpointPolicyDefinitionInner> object
 */
public Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>> listByResourceGroupWithServiceResponseAsync(final String resourceGroupName, final String serviceEndpointPolicyName) {
    return listByResourceGroupSinglePageAsync(resourceGroupName, serviceEndpointPolicyName)
        .concatMap(new Func1<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>, Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>> call(ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                if (nextPageLink != null) {
                    // More pages remain: emit this page followed by the rest of the chain.
                    return Observable.just(page).concatWith(listByResourceGroupNextWithServiceResponseAsync(nextPageLink));
                }
                return Observable.just(page);
            }
        });
}
/**
 * Gets all service endpoint policy definitions in a service end point policy.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceEndpointPolicyName The name of the service endpoint policy name.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList<ServiceEndpointPolicyDefinitionInner> object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>> listByResourceGroupSinglePageAsync(final String resourceGroupName, final String serviceEndpointPolicyName) {
    // Fail fast on any missing required argument before issuing the request.
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (serviceEndpointPolicyName == null) {
        throw new IllegalArgumentException("Parameter serviceEndpointPolicyName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    final String apiVersion = "2018-07-01";
    return service.listByResourceGroup(resourceGroupName, serviceEndpointPolicyName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize the raw response into a page, then re-wrap as the public Page type.
                    ServiceResponse<PageImpl<ServiceEndpointPolicyDefinitionInner>> result = listByResourceGroupDelegate(response);
                    return Observable.just(new ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
/**
 * Deserializes the raw list HTTP response.
 * Only 200 is a success; anything else is mapped to CloudException.
 *
 * @param response the raw Retrofit response to decode
 * @return the typed page response
 * @throws CloudException if the service rejected the request
 * @throws IOException if the response body cannot be read
 * @throws IllegalArgumentException if the response is malformed
 */
private ServiceResponse<PageImpl<ServiceEndpointPolicyDefinitionInner>> listByResourceGroupDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PageImpl<ServiceEndpointPolicyDefinitionInner>, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<PageImpl<ServiceEndpointPolicyDefinitionInner>>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}
/**
 * Gets all service endpoint policy definitions in a service end point policy.
 * Blocks for the requested page; further pages are fetched lazily via the next link.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList<ServiceEndpointPolicyDefinitionInner> object if successful.
 */
public PagedList<ServiceEndpointPolicyDefinitionInner> listByResourceGroupNext(final String nextPageLink) {
    ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>> firstPage =
        listByResourceGroupNextSinglePageAsync(nextPageLink)
            .toBlocking()
            .single();
    // PagedList keeps following next links on demand through the callback below.
    return new PagedList<ServiceEndpointPolicyDefinitionInner>(firstPage.body()) {
        @Override
        public Page<ServiceEndpointPolicyDefinitionInner> nextPage(String nextPageLink) {
            return listByResourceGroupNextSinglePageAsync(nextPageLink).toBlocking().single().body();
        }
    };
}
/**
 * Gets all service endpoint policy definitions in a service end point policy,
 * reporting each page through the supplied callback.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<ServiceEndpointPolicyDefinitionInner>> listByResourceGroupNextAsync(final String nextPageLink, final ServiceFuture<List<ServiceEndpointPolicyDefinitionInner>> serviceFuture, final ListOperationCallback<ServiceEndpointPolicyDefinitionInner> serviceCallback) {
    // Callback used by the paging machinery to follow the service-supplied next links.
    Func1<String, Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>>> fetchNextPage =
        new Func1<String, Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>> call(String nextPageLink) {
                return listByResourceGroupNextSinglePageAsync(nextPageLink);
            }
        };
    return AzureServiceFuture.fromPageResponse(
        listByResourceGroupNextSinglePageAsync(nextPageLink),
        fetchNextPage,
        serviceCallback);
}
/**
 * Gets all service endpoint policy definitions in a service end point policy.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList<ServiceEndpointPolicyDefinitionInner> object
 */
public Observable<Page<ServiceEndpointPolicyDefinitionInner>> listByResourceGroupNextAsync(final String nextPageLink) {
    // Strip the ServiceResponse envelope so subscribers see only the page payloads.
    Func1<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>, Page<ServiceEndpointPolicyDefinitionInner>> extractPage =
        new Func1<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>, Page<ServiceEndpointPolicyDefinitionInner>>() {
            @Override
            public Page<ServiceEndpointPolicyDefinitionInner> call(ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>> serviceResponse) {
                return serviceResponse.body();
            }
        };
    return listByResourceGroupNextWithServiceResponseAsync(nextPageLink)
        .map(extractPage);
}
/**
 * Gets all service endpoint policy definitions in a service end point policy.
 * Emits the given page, then keeps concatenating follow-up pages while a next link exists.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList<ServiceEndpointPolicyDefinitionInner> object
 */
public Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>> listByResourceGroupNextWithServiceResponseAsync(final String nextPageLink) {
    return listByResourceGroupNextSinglePageAsync(nextPageLink)
        .concatMap(new Func1<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>, Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>> call(ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                if (nextPageLink != null) {
                    // More pages remain: emit this page followed by the rest of the chain.
                    return Observable.just(page).concatWith(listByResourceGroupNextWithServiceResponseAsync(nextPageLink));
                }
                return Observable.just(page);
            }
        });
}
/**
 * Gets all service endpoint policy definitions in a service end point policy.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList<ServiceEndpointPolicyDefinitionInner> object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>> listByResourceGroupNextSinglePageAsync(final String nextPageLink) {
    if (nextPageLink == null) {
        throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
    }
    // The next link returned by the service is already the complete URL for the next page;
    // the previous String.format("%s", nextPageLink) was an identity operation and has been removed.
    return service.listByResourceGroupNext(nextPageLink, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize the raw response into a page, then re-wrap as the public Page type.
                    ServiceResponse<PageImpl<ServiceEndpointPolicyDefinitionInner>> result = listByResourceGroupNextDelegate(response);
                    return Observable.just(new ServiceResponse<Page<ServiceEndpointPolicyDefinitionInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
/**
 * Deserializes the raw list-next HTTP response.
 * Only 200 is a success; anything else is mapped to CloudException.
 *
 * @param response the raw Retrofit response to decode
 * @return the typed page response
 * @throws CloudException if the service rejected the request
 * @throws IOException if the response body cannot be read
 * @throws IllegalArgumentException if the response is malformed
 */
private ServiceResponse<PageImpl<ServiceEndpointPolicyDefinitionInner>> listByResourceGroupNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PageImpl<ServiceEndpointPolicyDefinitionInner>, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<PageImpl<ServiceEndpointPolicyDefinitionInner>>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}
}
| |
/*******************************************************************************
Copyright (c) 2014,2015, Oracle and/or its affiliates. All rights reserved.
$revision_history$
06-feb-2013 Steven Davelaar
1.0 initial creation
******************************************************************************/
package oracle.ateam.sample.mobile.dt.controller.parser;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import oracle.adf.model.adapter.dataformat.AccessorDef;
import oracle.adf.model.adapter.dataformat.MethodDef;
import oracle.adf.model.adapter.dataformat.MethodReturnDef;
import oracle.adfdt.model.objects.DataControl;
import oracle.ateam.sample.mobile.dt.model.DCMethod;
import oracle.ateam.sample.mobile.dt.model.DataObjectInfo;
import oracle.binding.meta.AccessorDefinition;
import oracle.binding.meta.DefinitionContainer;
import oracle.binding.meta.OperationDefinition;
import oracle.binding.meta.OperationReturnDefinition;
import oracle.binding.meta.StructureDefinition;
/**
 * Parses an ADF design-time {@link DataControl} and discovers the data objects
 * (bean collections) it exposes, both through accessor definitions and through
 * method (operation) return types, as well as its candidate CRUD methods.
 * Discovery is recursive and guards against cycles in the bean graph.
 */
public class DataControlDataObjectParser
{
  // The design-time data control being parsed.
  private DataControl dataControl;
  // Root structure definition of the data control; starting point for all discovery.
  private StructureDefinition dataControlBean;
  // Data objects discovered via accessor definitions.
  private List<DataObjectInfo> accessorBeans = new ArrayList<DataObjectInfo>();
  // Data objects discovered via method (operation) return types.
  private List<DataObjectInfo> methodAccessorBeans = new ArrayList<DataObjectInfo>();
  // Lazily-built name -> CRUD method map; populated on first call to getCRUDMethods().
  private Map<String, DCMethod> crudMethods;

  /**
   * Creates a parser for the given data control and resolves its root structure definition.
   *
   * @param dataControl the design-time data control to parse
   */
  public DataControlDataObjectParser(DataControl dataControl)
  {
    super();
    this.dataControl = dataControl;
    dataControlBean = dataControl.getRealDataControl().getDataControlDefinition().getStructure();
  }

  /**
   * Starts bean-collection discovery from the root data control bean with an
   * empty "already processed" list and no parent data object.
   */
  public void discoverBeanCollections()
  {
    discoverBeanCollections(dataControlBean, new ArrayList<StructureDefinition>(),null);
  }

  /**
   * Returns true when {@code currentBean} is already present in {@code beansProcessed},
   * compared by full name rather than object identity (different definition instances
   * may represent the same bean).
   */
  private boolean listContains(List<StructureDefinition> beansProcessed, StructureDefinition currentBean)
  {
    boolean found = false;
    for (StructureDefinition sd : beansProcessed)
    {
      if (sd.getFullName().equals(currentBean.getFullName()))
      {
        found = true;
        break;
      }
    }
    return found;
  }

  /**
   * Recursively discovers data objects reachable from {@code currentBean}.
   * Accessor-based beans are added to {@link #accessorBeans}; beans reached through
   * operation return types are added to {@link #methodAccessorBeans}. A bean that has
   * no attributes is treated as a "container" and is traversed but not registered.
   *
   * @param currentBean    structure definition currently being inspected
   * @param beansProcessed beans already visited (cycle guard, compared by full name)
   * @param parent         data object that owns the beans discovered at this level, or null at the root
   */
  public void discoverBeanCollections(StructureDefinition currentBean, List<StructureDefinition> beansProcessed,
                                      DataObjectInfo parent)
  {
    if (listContains(beansProcessed,currentBean))
    {
      // can happen with recursive calls
      return;
    }
    beansProcessed.add(currentBean);
    Iterator accessors = currentBean.getAccessorDefinitions().iterator();
    while (accessors.hasNext())
    {
      Object aap = accessors.next();
      AccessorDefinition accessor = (AccessorDefinition) aap;
      // if it is a top-level accessor, it must be a collection, otherwise we ignore it,
      // with this check we skip the ADF BC SDO accessors suffixed with _parameters
      // HMMM, but this also skips tableArray-item from AuroPlayer WS ...
      //      if (level> 1 || accessor.isCollection())
      //      {
      StructureDefinition accBean = accessor.getStructure();
      // only add the bean itself when it has attributes, otherwise it is
      // a "container" element
      boolean hasAttrs = accBean.getAttributeDefinitions().iterator().hasNext();
      boolean createDoi = hasAttrs;
      DataObjectInfo newParent = parent;
      if (createDoi)
      {
        DataObjectInfo doi = new DataObjectInfo(accessor,accBean,accessor.isCollection(),parent);
        newParent = doi;
        accessorBeans.add(doi);
      }
      // recursive call
      discoverBeanCollections(accBean,beansProcessed, newParent);
      //      }
      //      else
      //      {
      //        System.err.println(accessor.getName()+" is skipped: Top-level accessor that is NOT A COLLECTION!!");
      //      }
    }
    DefinitionContainer methodAccessors = (DefinitionContainer) currentBean.getOperationDefinitions();
    // although this is a typed list, we get a CCE on some entries , very weird
    //    for (MethodDef method : methodAccessors)
    //    for (int i = 0; i < methodAccessors.size(); i++)
    Iterator methods = methodAccessors.iterator();
    while (methods.hasNext())
    {
      //      Object entry = methodAccessors.get(i);
      Object entry = methods.next();
      if (entry instanceof OperationDefinition)
      {
        OperationDefinition method = (OperationDefinition) entry;
        OperationReturnDefinition returnType = (OperationReturnDefinition) method.getOperationReturnType();
        //        if (returnType!=null && returnType.isCollection() && !returnType.isScalarCollection() && !usages.containsKey(method.getName()) ) //&& accessor.isCollectionType())
        //        if (returnType!=null && returnType.isAccessor() ) //&& accessor.isCollectionType())
        if (returnType!=null ) // && returnType.isAccessor() ) //&& accessor.isCollectionType())
        {
          //          StructureDefinition accBean = (StructureDefinition) returnType.getDefinitionParent();
          MethodDef methodDef = (MethodDef) returnType.getDefinitionParent();
          MethodReturnDef methodreturnDef = (MethodReturnDef) methodDef.getOperationReturnType();
          //          if (methodreturnDef!=null && methodreturnDef.isCollection() && !methodreturnDef.isScalarCollection() ) //&& accessor.isCollectionType())
          if (methodreturnDef!=null ) // && methodreturnDef.isCollection() && !methodreturnDef.isScalarCollection() ) //&& accessor.isCollectionType())
          {
            StructureDefinition accBean = methodreturnDef.getStructure();
            if (accBean==null)
            {
              // scalar return type: nothing to discover for this operation
              continue;
            }
            if (!accBean.getAttributeDefinitions().iterator().hasNext() && accBean.getAccessorDefinitions().iterator().hasNext())
            {
              // bean has no attrs
              // Return type is wrapped in "Result" accessor, then get the bean of the accessor
              AccessorDefinition accessor = (AccessorDefinition) accBean.getAccessorDefinitions().iterator().next();
              accBean = accessor.getStructure();
            }
            boolean hasAttrs = accBean.getAttributeDefinitions().iterator().hasNext();
            boolean createDoi = hasAttrs;
            DataObjectInfo newParent = parent;
            if (createDoi)
            {
              boolean isCollection = methodreturnDef.isCollection();
              // above statement no longer works in 12.1.3, we need to get the accessor def from the struct to get
              // true returned if it is really a collectiond.
              // update 06-jun-14: In latest 12.1.3 build, the above code works again as expected!
              //              if ( methodreturnDef.getStructure()!=null && methodreturnDef.getStructure().getAccessorDefinitions()!=null)
              //              {
              //                Iterator it = methodreturnDef.getStructure().getAccessorDefinitions().iterator();
              //                if (it.hasNext())
              //                {
              //                  AccessorDef accdef = (AccessorDef) it.next();
              //                  isCollection = accdef.isCollection();
              //                }
              //              }
              DataObjectInfo doi = new DataObjectInfo(methodDef,accBean,isCollection,parent);
              newParent = doi;
              methodAccessorBeans.add(doi);
              // recursive call
            }
            // only increase level when current bean is added as DataObject
            discoverBeanCollections(accBean,beansProcessed,newParent);
          }
        }
      }
    }
  }

  /**
   * Returns the CRUD method map, building it on first access (lazy cache).
   *
   * @return map of method/accessor name to its {@link DCMethod} wrapper
   */
  public Map<String, DCMethod> getCRUDMethods()
  {
    if (crudMethods==null)
    {
      crudMethods = discoverCRUDMethods();
    }
    return crudMethods;
  }

  /**
   * Collects candidate CRUD methods from the root data control bean: all operation
   * definitions plus all accessor definitions, skipping the generated entries whose
   * names end with "_parameters" (ADF BC SDO artifacts).
   *
   * @return freshly-built map of name to {@link DCMethod}
   */
  public Map<String, DCMethod> discoverCRUDMethods()
  {
    Map<String, DCMethod> methods = new HashMap<String, DCMethod>();
    DefinitionContainer methodAccessors = (DefinitionContainer) dataControlBean.getOperationDefinitions();
    Iterator methodsIter = methodAccessors.iterator();
    while (methodsIter.hasNext())
    {
      Object entry = methodsIter.next();
      if (entry instanceof OperationDefinition)
      {
        OperationDefinition method = (OperationDefinition) entry;
        if (!method.getName().endsWith("_parameters"))
        {
          methods.put(method.getName(),new DCMethod(method));
        }
      }
    }
    DefinitionContainer accessors = (DefinitionContainer) dataControlBean.getAccessorDefinitions();
    Iterator accessorIter = accessors.iterator();
    while (accessorIter.hasNext())
    {
      Object entry = accessorIter.next();
      if (entry instanceof AccessorDefinition)
      {
        AccessorDefinition accessor = (AccessorDefinition) entry;
        if (!accessor.getName().endsWith("_parameters"))
        {
          methods.put(accessor.getName(),new DCMethod(accessor));
        }
      }
    }
    return methods;
  }

  /** @return data objects discovered through accessor definitions */
  public List<DataObjectInfo> getAccessorBeans()
  {
    return accessorBeans;
  }

  /** @return data objects discovered through method (operation) return types */
  public List<DataObjectInfo> getMethodAccessorBeans()
  {
    return methodAccessorBeans;
  }

  /**
   * Returns the union of accessor-based and method-based data objects,
   * accessor-based entries first.
   */
  public List<DataObjectInfo> getAllAccessorBeans()
  {
    List<DataObjectInfo> allBeans = new ArrayList<DataObjectInfo>();
    allBeans.addAll(getAccessorBeans());
    allBeans.addAll(getMethodAccessorBeans());
    return allBeans;
  }

  /**
   * Returns only the collection-typed data objects whose entire parent chain
   * also consists of collections.
   */
  public List<DataObjectInfo> getAllCollectionAccessorBeans()
  {
    List<DataObjectInfo> colBeans = new ArrayList<DataObjectInfo>();
    for (DataObjectInfo bean : getAllAccessorBeans())
    {
      boolean add = bean.isCollection();
      if (add)
      {
        // only add if all parents are also collections
        DataObjectInfo parent = bean.getParent();
        while (parent!=null)
        {
          if (!parent.isCollection())
          {
            add = false;
            break;
          }
          parent = parent.getParent();
        }
      }
      if (add)
      {
        colBeans.add(bean);
      }
    }
    return colBeans;
  }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.streamMigration;
import com.intellij.codeInsight.ExceptionUtil;
import com.intellij.codeInsight.PsiEquivalenceUtil;
import com.intellij.codeInsight.daemon.GroupNames;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightControlFlowUtil;
import com.intellij.codeInsight.intention.impl.StreamRefactoringUtil;
import com.intellij.codeInspection.AbstractBaseJavaLocalInspectionTool;
import com.intellij.codeInspection.LambdaCanBeMethodReferenceInspection;
import com.intellij.codeInspection.ProblemHighlightType;
import com.intellij.codeInspection.ProblemsHolder;
import com.intellij.codeInspection.ui.MultipleCheckboxOptionsPanel;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.FileIndexFacade;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.profile.codeInspection.InspectionProjectProfileManager;
import com.intellij.psi.*;
import com.intellij.psi.controlFlow.*;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.util.ArrayUtil;
import com.siyeh.ig.callMatcher.CallMatcher;
import com.siyeh.ig.psiutils.*;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import static com.intellij.codeInspection.streamMigration.OperationReductionMigration.SUM_OPERATION;
import static com.intellij.util.ObjectUtils.tryCast;
import static com.siyeh.ig.psiutils.ControlFlowUtils.InitializerUsageStatus.UNKNOWN;
public class StreamApiMigrationInspection extends AbstractBaseJavaLocalInspectionTool {
private static final Logger LOG = Logger.getInstance(StreamApiMigrationInspection.class);
@SuppressWarnings("PublicField")
public boolean REPLACE_TRIVIAL_FOREACH;
@SuppressWarnings("PublicField")
public boolean SUGGEST_FOREACH;
private static final String SHORT_NAME = "Convert2streamapi";
@Nullable
@Override
public JComponent createOptionsPanel() {
  // Two independent toggles, bound by name to the public settings fields above.
  final MultipleCheckboxOptionsPanel optionsPanel = new MultipleCheckboxOptionsPanel(this);
  optionsPanel.addCheckbox("Warn if only 'forEach' replacement is available", "SUGGEST_FOREACH");
  optionsPanel.addCheckbox("Warn if the loop is trivial", "REPLACE_TRIVIAL_FOREACH");
  return optionsPanel;
}
@Nls
@NotNull
@Override
public String getGroupDisplayName() {
  // The inspection lives under the language-level-specific group in the settings tree.
  return GroupNames.LANGUAGE_LEVEL_SPECIFIC_GROUP_NAME;
}
@Nls
@NotNull
@Override
public String getDisplayName() {
  // Human-readable name shown in the inspections settings UI.
  return "foreach loop can be collapsed with Stream API";
}
@Override
public boolean isEnabledByDefault() {
  // The inspection is on out of the box.
  return true;
}
@NotNull
@Override
public String getShortName() {
  // Stable identifier used in profiles and suppression comments.
  return SHORT_NAME;
}
@NotNull
@Override
public PsiElementVisitor buildVisitor(@NotNull final ProblemsHolder holder, boolean isOnTheFly) {
  final PsiFile file = holder.getFile();
  // Streams require Java 8+; skip entirely for older language levels.
  if (!PsiUtil.isLanguageLevel8OrHigher(file)) {
    return PsiElementVisitor.EMPTY_VISITOR;
  }
  // Only inspect physical files that belong to the project's source content.
  final VirtualFile virtualFile = file.getVirtualFile();
  if (virtualFile == null || !FileIndexFacade.getInstance(holder.getProject()).isInSourceContent(virtualFile)) {
    return PsiElementVisitor.EMPTY_VISITOR;
  }
  return new StreamApiMigrationVisitor(holder, isOnTheFly);
}
@Contract("null, null -> true; null, !null -> false")
private static boolean sameReference(PsiExpression expr1, PsiExpression expr2) {
  // Two absent expressions count as the same reference (e.g. two unqualified names).
  if (expr1 == null && expr2 == null) return true;
  if (expr1 instanceof PsiReferenceExpression && expr2 instanceof PsiReferenceExpression) {
    PsiReferenceExpression first = (PsiReferenceExpression)expr1;
    PsiReferenceExpression second = (PsiReferenceExpression)expr2;
    // Same simple name and, recursively, the same qualifier chain.
    return Objects.equals(first.getReferenceName(), second.getReferenceName())
           && sameReference(first.getQualifierExpression(), second.getQualifierExpression());
  }
  return false;
}
/**
 * Extracts an addend from an assignment expression of the form {@code x += addend}
 * or the desugared {@code x = x + addend} / {@code x = addend + x}.
 *
 * @param assignment assignment expression to extract an addend from
 * @return extracted addend expression, or null if the assignment is not an addition
 */
@Nullable
static PsiExpression extractAddend(PsiAssignmentExpression assignment) {
  // Addition is just the generic compound-operand extraction specialized to '+='.
  return extractOperand(assignment, JavaTokenType.PLUSEQ);
}
/**
 * Extracts the non-accumulator operand of a (possibly desugared) compound assignment:
 * {@code x OP= operand}, {@code x = x OP operand}, or {@code x = operand OP x}.
 *
 * @param assignment           the assignment expression to inspect
 * @param compoundAssignmentOp the compound-assignment token type (e.g. {@link JavaTokenType#PLUSEQ})
 * @return the operand expression, or null if the assignment does not match the expected shape
 */
@Nullable
static PsiExpression extractOperand(PsiAssignmentExpression assignment, IElementType compoundAssignmentOp) {
  IElementType tokenType = assignment.getOperationTokenType();
  if (compoundAssignmentOp.equals(tokenType)) {
    // Direct compound form: x OP= operand.
    return assignment.getRExpression();
  }
  if (JavaTokenType.EQ.equals(tokenType)) {
    // Desugared form: x = <binary expression> — use tryCast for consistency with extractAccumulator.
    PsiBinaryExpression binOp = tryCast(assignment.getRExpression(), PsiBinaryExpression.class);
    if (binOp == null) return null;
    IElementType op = TypeConversionUtil.convertEQtoOperation(compoundAssignmentOp);
    // convertEQtoOperation may return null for tokens without a binary counterpart;
    // the original op.equals(...) call would have thrown NPE in that case.
    if (op == null || !op.equals(binOp.getOperationTokenType())) return null;
    if (sameReference(binOp.getLOperand(), assignment.getLExpression())) {
      return binOp.getROperand();
    }
    if (sameReference(binOp.getROperand(), assignment.getLExpression())) {
      return binOp.getLOperand();
    }
  }
  return null;
}
@Nullable
// Convenience wrapper: extracts the accumulator of an additive accumulation ("sum" pattern).
static PsiVariable extractSumAccumulator(PsiAssignmentExpression assignment) {
return extractAccumulator(assignment, JavaTokenType.PLUSEQ);
}
/**
 * Extracts the accumulator variable from an accumulating assignment:
 * {@code acc op= expr}, {@code acc = acc op expr} or {@code acc = expr op acc}.
 *
 * @param assignment          assignment expression to analyze
 * @param compoundAssignmentOp compound assignment token (e.g. {@code +=}) defining the operation
 * @return the accumulator variable, or null if the assignment does not match the pattern
 */
@Nullable
static PsiVariable extractAccumulator(PsiAssignmentExpression assignment, IElementType compoundAssignmentOp) {
PsiReferenceExpression lExpr = tryCast(assignment.getLExpression(), PsiReferenceExpression.class);
if (lExpr == null) return null;
PsiVariable var = tryCast(lExpr.resolve(), PsiVariable.class);
if (var == null) return null;
IElementType tokenType = assignment.getOperationTokenType();
if (compoundAssignmentOp.equals(tokenType)) {
return var;
}
if (JavaTokenType.EQ.equals(tokenType) && assignment.getRExpression() instanceof PsiBinaryExpression) {
PsiBinaryExpression binOp = (PsiBinaryExpression)assignment.getRExpression();
IElementType op = TypeConversionUtil.convertEQtoOperation(compoundAssignmentOp);
// convertEQtoOperation may return null for tokens without a binary counterpart;
// guard so an unexpected caller token cannot trigger an NPE here.
if (op != null && op.equals(binOp.getOperationTokenType())) {
// The accumulator may appear on either side of the binary expression.
if (sameReference(binOp.getLOperand(), lExpr) || sameReference(binOp.getROperand(), lExpr)) {
return var;
}
}
}
return null;
}
/**
 * Extract incremented value from expression which looks like {@code x++}, {@code ++x},
 * {@code x = x + 1} or {@code x += 1}.
 *
 * @param expression expression to extract the incremented value
 * @return an extracted incremented value or null if increment pattern is not detected in the supplied expression
 */
@Contract("null -> null")
static PsiExpression extractIncrementedLValue(PsiExpression expression) {
if (expression instanceof PsiUnaryExpression) {
PsiUnaryExpression unary = (PsiUnaryExpression)expression;
// Only "++" (pre- or post-) counts as an increment here.
return JavaTokenType.PLUSPLUS.equals(unary.getOperationTokenType()) ? unary.getOperand() : null;
}
if (expression instanceof PsiAssignmentExpression) {
PsiAssignmentExpression assignment = (PsiAssignmentExpression)expression;
// "x += 1" / "x = x + 1" forms: the addend must be the literal 1.
return ExpressionUtils.isLiteral(extractAddend(assignment), 1) ? assignment.getLExpression() : null;
}
return null;
}
/**
 * Resolves the local variable incremented by the supplied expression, provided it is the
 * single non-final variable of the loop and is not touched by intermediate operations.
 */
@Nullable
private static PsiLocalVariable getIncrementedVariable(PsiExpression expression, TerminalBlock tb, List<PsiVariable> variables) {
// Exactly one non-final variable is expected.
if (variables.size() != 1) return null;
// The expression must be ++x, x++, x += 1 or x = x + 1 over a plain reference.
PsiReferenceExpression lValue = tryCast(extractIncrementedLValue(expression), PsiReferenceExpression.class);
if (lValue == null) return null;
PsiLocalVariable variable = tryCast(lValue.resolve(), PsiLocalVariable.class);
if (variable == null) return null;
// The incremented variable must be that non-final variable and must not leak into intermediate ops.
if (!variables.contains(variable) || tb.isReferencedInOperations(variable)) return null;
return variable;
}
/**
 * Detects a reduction pattern (e.g. sum, product) in the terminal block: a single
 * accumulating assignment over the only non-final loop variable.
 *
 * @return the accumulator variable, or null if the block does not match the reduction pattern
 */
@Nullable
private static PsiVariable getAccumulatedVariable(TerminalBlock tb,
List<PsiVariable> variables,
OperationReductionMigration.ReductionOperation operation) {
IElementType compoundAssignmentOp = operation.getCompoundAssignmentOp();
// have only one non-final variable
if (variables.size() != 1) return null;
PsiAssignmentExpression assignment = tb.getSingleExpression(PsiAssignmentExpression.class);
if (assignment == null) return null;
PsiVariable var = extractAccumulator(assignment, compoundAssignmentOp);
// the referred variable is the same as non-final variable
if (var == null || !variables.contains(var)) return null;
if (!operation.getAccumulatorRestriction().test(var)) return null;
// the referred variable is not used in intermediate operations
if (tb.isReferencedInOperations(var)) return null;
PsiExpression operand = extractOperand(assignment, compoundAssignmentOp);
// extractAccumulator succeeded, so the matching operand must exist
LOG.assertTrue(operand != null);
// self-referencing operand (e.g. sum += sum + x) cannot be expressed as a reduction
if (VariableAccessUtils.variableIsUsed(var, operand)) return null;
return var;
}
/**
 * Checks whether the terminal block is a plain {@code collection.add(element)} call that could
 * be expressed as {@code addAll}/{@code Collections.addAll} (loop variable passed through unchanged).
 * Primitive loop variables are excluded because {@code addAll} would change boxing behavior.
 */
static boolean isAddAllCall(TerminalBlock tb) {
PsiMethodCallExpression call = tb.getSingleMethodCall();
if (call == null || tb.getVariable().getType() instanceof PsiPrimitiveType) return false;
// Check the method name before touching the arguments: the single call may be any method.
if (!"add".equals(call.getMethodExpression().getReferenceName())) return false;
PsiExpression[] args = call.getArgumentList().getExpressions();
// Guard the index: the original code read args[0] unconditionally and would throw
// ArrayIndexOutOfBoundsException for a zero-argument call.
if (args.length == 0 || !ExpressionUtils.isReferenceTo(args[0], tb.getVariable())) return false;
PsiExpression qualifierExpression = call.getMethodExpression().getQualifierExpression();
if (qualifierExpression == null || qualifierExpression instanceof PsiThisExpression) {
// Avoid suggesting addAll inside an addAll implementation itself (infinite recursion risk).
PsiMethod method = PsiTreeUtil.getParentOfType(call, PsiMethod.class);
return method == null || !method.getName().equals("addAll");
}
// A method-call qualifier may have side effects and could be re-evaluated; don't suggest addAll.
return !(qualifierExpression instanceof PsiMethodCallExpression);
}
/**
 * Checks whether the supplied call invokes one of the given methods declared in (or overridden
 * from) the class with the given qualified name, with a matching argument count.
 */
@Contract("null, _, _ -> false")
static boolean isCallOf(PsiMethodCallExpression call, String className, String... methodNames) {
if (call == null) return false;
String referenceName = call.getMethodExpression().getReferenceName();
if (!ArrayUtil.contains(referenceName, methodNames)) return false;
PsiMethod resolved = call.resolveMethod();
// Argument count must match the resolved method's parameter count (rules out vararg mismatch).
if (resolved == null || resolved.getParameterList().getParametersCount() != call.getArgumentList().getExpressionCount()) {
return false;
}
PsiClass containingClass = resolved.getContainingClass();
if (containingClass == null) return false;
if (className.equals(containingClass.getQualifiedName())) return true;
// Otherwise accept an override whose deepest super method lives in the requested class.
for (PsiMethod superMethod : resolved.findDeepestSuperMethods()) {
PsiClass superClass = superMethod.getContainingClass();
if (superClass != null && className.equals(superClass.getQualifiedName())) return true;
}
return false;
}
/**
 * Detects the "count" pattern: the loop body only increments a single zero-initialized
 * local counter (optionally with a limit check like {@code if(++count == limit) break}).
 */
private static boolean isCountOperation(List<PsiVariable> nonFinalVariables, TerminalBlock tb) {
PsiLocalVariable variable = getIncrementedVariable(tb.getSingleExpression(PsiExpression.class), tb, nonFinalVariables);
PsiExpression counter = tb.getCountExpression();
if (counter == null) {
return variable != null;
}
if (tb.isEmpty()) {
// like "if(++count == limit) break"
variable = getIncrementedVariable(counter, tb, nonFinalVariables);
}
else if (!ExpressionUtils.isReferenceTo(counter, variable)) {
return false;
}
// Counter must start at 0 and its initializer must be trackable to express the loop as count()
return variable != null &&
ExpressionUtils.isZero(variable.getInitializer()) &&
ControlFlowUtils.getInitializerUsageStatus(variable, tb.getStreamSourceStatement()) != UNKNOWN;
}
/**
 * A terminal block is considered "trivial" when its single statement cannot be expressed as
 * a resolvable method-reference call — i.e. the resulting forEach would add no real value.
 */
private static boolean isTrivial(TerminalBlock tb) {
PsiVariable variable = tb.getVariable();
final PsiExpression candidate = LambdaCanBeMethodReferenceInspection
.canBeMethodReferenceProblem(tb.getSingleStatement(),
new PsiVariable[]{variable}, createDefaultConsumerType(variable.getProject(), variable), null);
// No call candidate at all => trivial; a call that does not resolve is also treated as trivial.
if (!(candidate instanceof PsiCallExpression)) return true;
final PsiMethod method = ((PsiCallExpression)candidate).resolveMethod();
return method == null;
}
/**
 * Builds the {@code java.util.function.Consumer<T>} type parameterized with the variable's type,
 * or returns null if the Consumer class is not available in the variable's resolve scope.
 */
@Nullable
private static PsiClassType createDefaultConsumerType(Project project, PsiVariable variable) {
JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
PsiClass consumerClass = facade.findClass(CommonClassNames.JAVA_UTIL_FUNCTION_CONSUMER, variable.getResolveScope());
if (consumerClass == null) return null;
return facade.getElementFactory().createType(consumerClass, variable.getType());
}
/**
 * A variable can participate in a stream conversion if every write to it happens in a place
 * where some stream operation explicitly allows it, or if it is effectively final anyway.
 */
static boolean isVariableSuitableForStream(PsiVariable variable, PsiStatement statement, TerminalBlock tb) {
boolean allWritesAllowed = ReferencesSearch.search(variable).forEach(ref -> {
PsiExpression expression = tryCast(ref.getElement(), PsiExpression.class);
if (expression == null || !PsiUtil.isAccessedForWriting(expression)) return true;
// A write is tolerable only when one of the pipeline operations sanctions it.
return tb.operations().anyMatch(op -> op.isWriteAllowed(variable, expression));
});
return allWritesAllowed || HighlightControlFlowUtil.isEffectivelyFinal(variable, statement, null);
}
/**
 * Produces a mapToInt/mapToLong/mapToDouble step (as source text) that unboxes the stream
 * when the variable has a primitive type; returns an empty string for non-primitive types.
 */
static String tryUnbox(PsiVariable variable) {
PsiType type = variable.getType();
String mapOp;
if (type.equals(PsiType.INT)) {
mapOp = "mapToInt";
}
else if (type.equals(PsiType.LONG)) {
mapOp = "mapToLong";
}
else if (type.equals(PsiType.DOUBLE)) {
mapOp = "mapToDouble";
}
else {
// Not a stream-primitive type: no unboxing step is needed.
return "";
}
String name = variable.getName();
// Identity lambda performs the actual unboxing, e.g. ".mapToInt(x -> x)".
return "." + mapOp + "(" + name + " -> " + name + ")";
}
/**
 * Heuristically determines whether the loop condition may observe the collection being
 * updated through the given qualifier. If the qualifier resolves to a variable, a direct
 * usage check is performed; otherwise any unqualified/this-qualified call (which could touch
 * the same enclosing object's state) is conservatively treated as a dependency.
 */
static boolean isExpressionDependsOnUpdatedCollections(PsiExpression condition,
PsiExpression qualifierExpression) {
final PsiElement collection = qualifierExpression instanceof PsiReferenceExpression
? ((PsiReferenceExpression)qualifierExpression).resolve()
: null;
if (collection != null) {
// Known collection variable: depend only if the condition references that variable.
return collection instanceof PsiVariable && VariableAccessUtils.variableIsUsed((PsiVariable)collection, condition);
}
final boolean[] dependsOnCollection = {false};
condition.accept(new JavaRecursiveElementWalkingVisitor() {
@Override
public void visitMethodCallExpression(PsiMethodCallExpression expression) {
super.visitMethodCallExpression(expression);
final PsiExpression callQualifier = expression.getMethodExpression().getQualifierExpression();
// Unqualified, this-qualified or super-qualified calls may read the same object's state.
if (callQualifier == null ||
callQualifier instanceof PsiThisExpression && ((PsiThisExpression)callQualifier).getQualifier() == null ||
callQualifier instanceof PsiSuperExpression && ((PsiSuperExpression)callQualifier).getQualifier() == null) {
dependsOnCollection[0] = true;
}
}
@Override
public void visitThisExpression(PsiThisExpression expression) {
super.visitThisExpression(expression);
// "this" passed as an argument may leak the enclosing object into the condition.
if (expression.getQualifier() == null && expression.getParent() instanceof PsiExpressionList) {
dependsOnCollection[0] = true;
}
}
// Nested classes and lambdas have their own "this"; don't descend into them.
@Override
public void visitClass(PsiClass aClass) {}
@Override
public void visitLambdaExpression(PsiLambdaExpression expression) {}
});
return dependsOnCollection[0];
}
/**
 * Visitor that inspects foreach/while/for loops and registers a problem when the loop can be
 * converted to a Stream API call chain.
 */
private class StreamApiMigrationVisitor extends JavaElementVisitor {
private final ProblemsHolder myHolder;
private final boolean myIsOnTheFly;
public StreamApiMigrationVisitor(ProblemsHolder holder, boolean isOnTheFly) {
myHolder = holder;
myIsOnTheFly = isOnTheFly;
}
@Override
public void visitForeachStatement(PsiForeachStatement statement) {
super.visitForeachStatement(statement);
processLoop(statement);
}
@Override
public void visitWhileStatement(PsiWhileStatement statement) {
super.visitWhileStatement(statement);
processLoop(statement);
}
@Override
public void visitForStatement(PsiForStatement statement) {
super.visitForStatement(statement);
processLoop(statement);
}
/**
 * Tries to recognize a stream-convertible loop and, if found, registers the problem with
 * the appropriate fixes and highlight level.
 */
void processLoop(PsiLoopStatement statement) {
final PsiStatement body = statement.getBody();
if (body == null) return;
StreamSource source = StreamSource.tryCreate(statement);
if (source == null) return;
// Checked exceptions cannot be thrown from stream lambdas; bail out if the body throws any.
if (!ExceptionUtil.getThrownCheckedExceptions(body).isEmpty()) return;
TerminalBlock tb = TerminalBlock.from(source, body);
BaseStreamApiMigration migration = findMigration(statement, body, tb, SUGGEST_FOREACH, REPLACE_TRIVIAL_FOREACH);
// In batch mode only warning-level migrations are reported.
if (migration == null || (!myIsOnTheFly && !migration.isShouldWarn())) return;
MigrateToStreamFix[] fixes = {new MigrateToStreamFix(migration)};
if (migration instanceof ForEachMigration && !(tb.getLastOperation() instanceof CollectionStream)) { //for .stream()
// Non-collection sources may reorder elements; offer forEachOrdered as an alternative.
fixes = ArrayUtil.append(fixes, new MigrateToStreamFix(new ForEachMigration(migration.isShouldWarn(), "forEachOrdered")));
}
ProblemHighlightType highlightType =
migration.isShouldWarn() ? ProblemHighlightType.GENERIC_ERROR_OR_WARNING : ProblemHighlightType.INFORMATION;
String message = "Can be replaced with '" + migration.getReplacement() + "' call";
TextRange range = getRange(migration.isShouldWarn(), statement, myIsOnTheFly);
// registerProblem expects a range relative to the reported element.
myHolder.registerProblem(statement, message, highlightType, range.shiftRight(-statement.getTextOffset()), fixes);
}
}
/**
 * Central dispatch: analyzes the loop's control flow and terminal block and picks the most
 * specific applicable migration (count, collect, joining, toArray, sum, extremum, reduction,
 * forEach, match/findFirst). The order of checks matters — more specific patterns are tried
 * before the generic forEach fallback.
 *
 * @return the chosen migration, or null when the loop cannot (or should not) be converted
 */
@Nullable
static BaseStreamApiMigration findMigration(PsiStatement loop,
PsiElement body,
TerminalBlock tb,
boolean suggestForeach,
boolean replaceTrivialForEach) {
final ControlFlow controlFlow;
try {
controlFlow = ControlFlowFactory.getInstance(loop.getProject())
.getControlFlow(body, LocalsOrMyInstanceFieldsControlFlowPolicy.getInstance());
}
catch (AnalysisCanceledException ignored) {
// Control flow could not be built (e.g. broken code) — no migration is possible.
return null;
}
int startOffset = controlFlow.getStartOffset(body);
int endOffset = controlFlow.getEndOffset(body);
if (startOffset < 0 || endOffset < 0) return null;
// Collect variables written in the body that are declared in the same lambda/class scope
// as the loop and whose writes are not otherwise sanctioned by the pipeline.
PsiElement surrounder = PsiTreeUtil.getParentOfType(loop, PsiLambdaExpression.class, PsiClass.class);
final List<PsiVariable> nonFinalVariables = StreamEx.of(ControlFlowUtil.getUsedVariables(controlFlow, startOffset, endOffset))
.remove(variable -> variable instanceof PsiField)
.remove(variable -> PsiTreeUtil.getParentOfType(variable, PsiLambdaExpression.class, PsiClass.class) != surrounder)
.remove(variable -> isVariableSuitableForStream(variable, loop, tb)).toList();
if (isCountOperation(nonFinalVariables, tb)) {
return new CountMigration(true);
}
CollectMigration.CollectTerminal terminal = CollectMigration.extractCollectTerminal(tb, nonFinalVariables);
if (terminal != null) {
boolean addAll = loop instanceof PsiForeachStatement && !tb.hasOperations() && isAddAllCall(tb);
// Don't suggest to convert the loop which can be trivially replaced via addAll:
// this is covered by UseBulkOperationInspection and ManualArrayToCollectionCopyInspection
if (addAll) return null;
boolean shouldWarn = replaceTrivialForEach ||
tb.hasOperations() ||
tb.getLastOperation() instanceof BufferedReaderLines ||
!terminal.isTrivial();
return new CollectMigration(shouldWarn, terminal.getMethodName());
}
if (JoiningMigration.extractTerminal(tb, nonFinalVariables) != null) {
return new JoiningMigration(true);
}
// Remaining patterns cannot express a limit counter or an empty body.
if (tb.getCountExpression() != null || tb.isEmpty()) return null;
if (nonFinalVariables.isEmpty() && extractArray(tb) != null) {
return new ToArrayMigration(true);
}
if (getAccumulatedVariable(tb, nonFinalVariables, SUM_OPERATION) != null) {
return new SumMigration(true);
}
FindExtremumMigration.ExtremumTerminal extremumTerminal = FindExtremumMigration.extract(tb, nonFinalVariables);
if (extremumTerminal != null) {
return new FindExtremumMigration(true, FindExtremumMigration.getOperation(extremumTerminal.isMax()) + "()");
}
for (OperationReductionMigration.ReductionOperation reductionOperation : OperationReductionMigration.OPERATIONS) {
if (getAccumulatedVariable(tb, nonFinalVariables, reductionOperation) != null) {
return new OperationReductionMigration(true, reductionOperation);
}
}
Collection<PsiStatement> exitPoints = tb.findExitPoints(controlFlow);
if (exitPoints == null) return null;
// A loop whose only early exits are plain "continue" statements maps cleanly onto forEach.
boolean onlyNonLabeledContinue = StreamEx.of(exitPoints)
.allMatch(statement -> statement instanceof PsiContinueStatement && ((PsiContinueStatement)statement).getLabelIdentifier() == null);
if (onlyNonLabeledContinue && nonFinalVariables.isEmpty()) {
boolean shouldWarn = suggestForeach &&
(replaceTrivialForEach ||
tb.hasOperations() ||
ForEachMigration.tryExtractMapExpression(tb) != null ||
!isTrivial(tb));
return new ForEachMigration(shouldWarn, "forEach");
}
if (nonFinalVariables.isEmpty() && tb.getSingleStatement() instanceof PsiReturnStatement) {
return findMigrationForReturn(loop, tb, replaceTrivialForEach);
}
// Source and intermediate ops should not refer to non-final variables
if (tb.intermediateAndSourceExpressions()
.flatCollection(expr -> PsiTreeUtil.collectElementsOfType(expr, PsiReferenceExpression.class))
.map(PsiReferenceExpression::resolve).select(PsiVariable.class).anyMatch(nonFinalVariables::contains)) {
return null;
}
PsiStatement[] statements = tb.getStatements();
if (statements.length == 2) {
PsiStatement breakStatement = statements[1];
// "<action>; break;" with the break being the sole exit point => match/findFirst pattern.
if (loop instanceof PsiLoopStatement && ControlFlowUtils.statementBreaksLoop(breakStatement, (PsiLoopStatement)loop) &&
exitPoints.size() == 1 &&
exitPoints.contains(breakStatement)) {
return findMigrationForBreak(tb, nonFinalVariables, statements[0], replaceTrivialForEach);
}
}
return null;
}
/**
 * Chooses a migration for the "&lt;statement&gt;; break;" loop shape: anyMatch when the loop
 * variable is unused in the statement, otherwise findFirst when the statement is either a
 * plain expression (no non-final vars) or a simple assignment to the single non-final variable.
 */
@Nullable
private static BaseStreamApiMigration findMigrationForBreak(TerminalBlock tb,
List<PsiVariable> nonFinalVariables,
PsiStatement statement,
boolean replaceTrivialForEach) {
boolean shouldWarn = replaceTrivialForEach || tb.hasOperations();
// Loop variable not referenced => the statement is just a side effect of "found something".
if (ReferencesSearch.search(tb.getVariable(), new LocalSearchScope(statement)).findFirst() == null) {
return new MatchMigration(shouldWarn, "anyMatch");
}
if (nonFinalVariables.isEmpty() && statement instanceof PsiExpressionStatement) {
return new FindFirstMigration(shouldWarn);
}
if (nonFinalVariables.size() == 1) {
PsiAssignmentExpression assignment = ExpressionUtils.getAssignment(statement);
if (assignment == null) return null;
PsiReferenceExpression lValue = tryCast(assignment.getLExpression(), PsiReferenceExpression.class);
if (lValue == null) return null;
PsiVariable var = tryCast(lValue.resolve(), PsiVariable.class);
if (var == null || !nonFinalVariables.contains(var)) return null;
PsiExpression rValue = assignment.getRExpression();
// Self-referencing assignment can't be rewritten as findFirst().ifPresent(...)
if (rValue == null || VariableAccessUtils.variableIsUsed(var, rValue)) return null;
// Primitive loop variable must be assigned as-is; otherwise boxing semantics would change.
if (tb.getVariable().getType() instanceof PsiPrimitiveType && !ExpressionUtils.isReferenceTo(rValue, tb.getVariable())) return null;
return new FindFirstMigration(shouldWarn);
}
return null;
}
/**
 * Chooses a migration for a loop whose body is a single {@code return}: anyMatch/noneMatch/
 * allMatch for boolean-literal returns (possibly paired with a following return of the
 * opposite literal), otherwise findFirst when the loop returns the element (or a value
 * independent of it) and a safely-recomputable fallback return follows the loop.
 */
@Nullable
private static BaseStreamApiMigration findMigrationForReturn(PsiStatement statement, TerminalBlock tb, boolean replaceTrivialForEach) {
boolean shouldWarn = replaceTrivialForEach || tb.hasOperations();
PsiReturnStatement returnStatement = (PsiReturnStatement)tb.getSingleStatement();
PsiExpression value = returnStatement.getReturnValue();
PsiReturnStatement nextReturnStatement = ControlFlowUtils.getNextReturnStatement(statement);
if (nextReturnStatement != null &&
(ExpressionUtils.isLiteral(value, Boolean.TRUE) || ExpressionUtils.isLiteral(value, Boolean.FALSE))) {
// Safe cast: the literal check above guarantees a boolean literal here.
boolean foundResult = (boolean)((PsiLiteralExpression)value).getValue();
String methodName;
if (foundResult) {
methodName = "anyMatch";
}
else {
methodName = "noneMatch";
// A negated trailing filter flips noneMatch into the clearer allMatch form.
FilterOp lastFilter = tb.getLastOperation(FilterOp.class);
if (lastFilter != null && (lastFilter.isNegated() ^ BoolUtils.isNegation(lastFilter.getExpression()))) {
methodName = "allMatch";
}
}
if (nextReturnStatement.getParent() == statement.getParent() ||
ExpressionUtils.isLiteral(nextReturnStatement.getReturnValue(), !foundResult)) {
return new MatchMigration(shouldWarn, methodName);
}
}
if (!VariableAccessUtils.variableIsUsed(tb.getVariable(), value)) {
// Suppress the suggestion for trivial loops or a lone filter step unless explicitly requested.
if (!replaceTrivialForEach && !tb.hasOperations() ||
(tb.getLastOperation() instanceof FilterOp && tb.operations().count() == 2)) {
return null;
}
return new MatchMigration(shouldWarn, "anyMatch");
}
if (nextReturnStatement != null && ExpressionUtils.isSafelyRecomputableExpression(nextReturnStatement.getReturnValue())
&& (!(tb.getVariable().getType() instanceof PsiPrimitiveType) || ExpressionUtils.isReferenceTo(value, tb.getVariable()))) {
return new FindFirstMigration(shouldWarn);
}
return null;
}
/**
 * Computes the text range to highlight for the reported loop. In on-the-fly mode with a
 * non-warning level (or a suppressed warning) the whole loop header is highlighted;
 * otherwise only the loop's "source" part (iterated value / initialization / keyword).
 *
 * @throws IllegalStateException for statement types other than foreach/for/while
 */
@NotNull
private static TextRange getRange(boolean shouldWarn, PsiStatement statement, boolean isOnTheFly) {
boolean wholeStatement =
isOnTheFly && (!shouldWarn || InspectionProjectProfileManager.isInformationLevel(SHORT_NAME, statement));
if (statement instanceof PsiForeachStatement) {
TextRange headerRange = getHeaderRange(wholeStatement, statement, ((PsiForeachStatement)statement).getRParenth());
if (headerRange != null) return headerRange;
PsiExpression iteratedValue = ((PsiForeachStatement)statement).getIteratedValue();
LOG.assertTrue(iteratedValue != null);
return iteratedValue.getTextRange();
}
else if (statement instanceof PsiForStatement) {
TextRange headerRange = getHeaderRange(wholeStatement, statement, ((PsiForStatement)statement).getRParenth());
if (headerRange != null) return headerRange;
PsiStatement initialization = ((PsiForStatement)statement).getInitialization();
LOG.assertTrue(initialization != null);
return initialization.getTextRange();
}
else if (statement instanceof PsiWhileStatement) {
TextRange headerRange = getHeaderRange(wholeStatement, statement, ((PsiWhileStatement)statement).getRParenth());
if (headerRange != null) return headerRange;
// Fall back to the "while" keyword itself.
return statement.getFirstChild().getTextRange();
}
else {
throw new IllegalStateException("Unexpected statement type: " + statement);
}
}
/**
 * Shared helper for the three loop kinds: returns the range from the statement start through
 * the closing parenthesis when whole-header highlighting is requested and possible, else null.
 */
@Nullable
private static TextRange getHeaderRange(boolean wholeStatement, PsiStatement statement, PsiJavaToken rParenth) {
if (wholeStatement && rParenth != null) {
return new TextRange(statement.getTextOffset(), rParenth.getTextOffset() + 1);
}
return null;
}
/**
 * Detects the "fill a freshly allocated array by index" pattern over a counting loop:
 * {@code T[] arr = new T[n]; for(int i=0;i<n;i++) arr[i] = ...;} — convertible to toArray().
 *
 * @return the array variable, or null if the pattern does not match
 */
@Nullable
static PsiLocalVariable extractArray(TerminalBlock tb) {
CountingLoopSource loop = tb.getLastOperation(CountingLoopSource.class);
// Inclusive bounds (i <= n) would write one element past the stream size; reject them.
if (loop == null || loop.myIncluding) return null;
PsiAssignmentExpression assignment = tb.getSingleExpression(PsiAssignmentExpression.class);
if (assignment == null || !assignment.getOperationTokenType().equals(JavaTokenType.EQ)) return null;
PsiArrayAccessExpression arrayAccess = tryCast(assignment.getLExpression(), PsiArrayAccessExpression.class);
if (arrayAccess == null) return null;
// The index must be exactly the loop counter.
if (!ExpressionUtils.isReferenceTo(arrayAccess.getIndexExpression(), loop.getVariable())) return null;
PsiReferenceExpression arrayReference = tryCast(arrayAccess.getArrayExpression(), PsiReferenceExpression.class);
if (arrayReference == null) return null;
PsiLocalVariable arrayVariable = tryCast(arrayReference.resolve(), PsiLocalVariable.class);
// The array's initializer must be trackable so the declaration can be rewritten.
if (arrayVariable == null || ControlFlowUtils.getInitializerUsageStatus(arrayVariable, tb.getStreamSourceStatement()) == UNKNOWN) {
return null;
}
PsiNewExpression initializer = tryCast(arrayVariable.getInitializer(), PsiNewExpression.class);
if (initializer == null) return null;
PsiArrayType arrayType = tryCast(initializer.getType(), PsiArrayType.class);
if (arrayType == null || !StreamApiUtil.isSupportedStreamElement(arrayType.getComponentType())) return null;
PsiExpression dimension = ArrayUtil.getFirstElement(initializer.getArrayDimensions());
if (dimension == null) return null;
PsiExpression bound = loop.myBound;
// The allocated size must match the loop bound, otherwise toArray() changes the array length.
if (!isArrayLength(arrayVariable, dimension, bound)) return null;
// The assigned value must not read the array itself.
if (VariableAccessUtils.variableIsUsed(arrayVariable, assignment.getRExpression())) return null;
return arrayVariable;
}
/**
 * Checks that the array allocation dimension and the loop bound denote the same length:
 * the bound is {@code array.length}, the two expressions are structurally equivalent,
 * or one of them is a size() call over a collection/map whose size the other denotes.
 */
private static boolean isArrayLength(PsiLocalVariable arrayVariable, PsiExpression dimension, PsiExpression bound) {
if (ExpressionUtils.isReferenceTo(ExpressionUtils.getArrayFromLengthExpression(bound), arrayVariable)) return true;
if (PsiEquivalenceUtil.areElementsEquivalent(dimension, bound)) return true;
// The size()-call check is symmetric: try (bound, dimension) and (dimension, bound).
PsiExpression[][] candidatePairs = {{bound, dimension}, {dimension, bound}};
for (PsiExpression[] pair : candidatePairs) {
if (pair[0] instanceof PsiMethodCallExpression) {
PsiExpression qualifier = ((PsiMethodCallExpression)pair[0]).getMethodExpression().getQualifierExpression();
if (qualifier != null && CollectionUtils.isCollectionOrMapSize(pair[1], qualifier)) return true;
}
}
return false;
}
/**
 * Intermediate stream operation representation
 */
static abstract class Operation {
// Expression this operation is built from (may be null, e.g. for distinct()).
final PsiExpression myExpression;
// Stream element variable visible to this operation.
final PsiVariable myVariable;
protected Operation(PsiExpression expression, PsiVariable variable) {
myExpression = expression;
myVariable = variable;
}
// Hook for removing now-obsolete source elements after migration; default does nothing.
void cleanUp() {}
public PsiVariable getVariable() {
return myVariable;
}
PsiExpression getExpression() {
return myExpression;
}
// All PSI expressions this operation depends on (used for reference analysis).
StreamEx<PsiExpression> expressions() {
return StreamEx.ofNullable(myExpression);
}
/** Produces the replacement source text for this operation, e.g. ".filter(x -> ...)". */
abstract String createReplacement(CommentTracker ct);
// Whether a write to {@code variable} at {@code reference} is acceptable for this operation.
boolean isWriteAllowed(PsiVariable variable, PsiExpression reference) {
return false;
}
// Whether the stream variable may be reassigned inside the pipeline built from this operation.
boolean canReassignVariable(PsiVariable variable) {
return true;
}
}
/**
 * A ".filter(...)" intermediate operation built from a loop condition;
 * the condition may be negated (e.g. originates from "if(!cond) continue;").
 */
static class FilterOp extends Operation {
private final boolean myNegated;
FilterOp(PsiExpression condition, PsiVariable variable, boolean negated) {
super(condition, variable);
myNegated = negated;
}
public boolean isNegated() {
return myNegated;
}
@Override
public String createReplacement(CommentTracker ct) {
PsiElementFactory factory = JavaPsiFacade.getElementFactory(myExpression.getProject());
PsiExpression intermediate = makeIntermediateExpression(ct, factory);
// Negate textually when needed; BoolUtils keeps the negation minimal (e.g. flips operators).
PsiExpression expression =
myNegated ? factory.createExpressionFromText(BoolUtils.getNegatedExpressionText(intermediate, ct), myExpression) : intermediate;
return "." + getOpName() + "(" + LambdaUtil.createLambda(myVariable, expression) + ")";
}
// Overridden by subclasses that render under a different stream method name.
@NotNull
String getOpName() {
return "filter";
}
// Overridden by subclasses that must wrap the condition (e.g. CompoundFilterOp's anyMatch).
PsiExpression makeIntermediateExpression(CommentTracker ct, PsiElementFactory factory) {
return ct.markUnchanged(myExpression);
}
}
/**
 * A ".takeWhile(...)" operation (Java 9+): identical to FilterOp except for the method name.
 */
static class TakeWhileOp extends FilterOp {
TakeWhileOp(PsiExpression condition, PsiVariable variable, boolean negated) {
super(condition, variable, negated);
}
@NotNull
@Override
String getOpName() {
return "takeWhile";
}
}
/**
 * A filter whose condition is itself a nested-loop search: renders as
 * ".filter(x -> innerSource.anyMatch(y -> condition))".
 */
static class CompoundFilterOp extends FilterOp {
// Source of the nested stream used inside the anyMatch.
private final StreamSource mySource;
// Variable of the inner (matching) loop; the outer variable comes from the ctor's matchVariable.
private final PsiVariable myMatchVariable;
protected CompoundFilterOp(StreamSource source, PsiVariable matchVariable, FilterOp sourceFilter) {
super(sourceFilter.getExpression(), matchVariable, sourceFilter.isNegated());
mySource = source;
myMatchVariable = sourceFilter.getVariable();
}
@Override
PsiExpression makeIntermediateExpression(CommentTracker ct, PsiElementFactory factory) {
return factory.createExpressionFromText(mySource.createReplacement(ct) + ".anyMatch(" +
ct.lambdaText(myMatchVariable, myExpression) + ")", myExpression);
}
@Override
boolean isWriteAllowed(PsiVariable variable, PsiExpression reference) {
// Delegate: writes allowed by the inner source (e.g. its loop counter) remain allowed.
return mySource.isWriteAllowed(variable, reference);
}
@Override
StreamEx<PsiExpression> expressions() {
// Both the filter condition and the inner source expression participate in analysis.
return StreamEx.of(myExpression, mySource.getExpression());
}
}
/**
 * A ".map(...)" (or mapToInt/mapToObj etc., chosen by StreamRefactoringUtil) operation
 * converting the stream element to {@code myType}.
 */
static class MapOp extends Operation {
// Target element type after mapping; null means "keep as object stream".
private final @Nullable PsiType myType;
MapOp(PsiExpression expression, PsiVariable variable, @Nullable PsiType targetType) {
super(expression, variable);
myType = targetType;
}
@Override
public String createReplacement(CommentTracker ct) {
return StreamRefactoringUtil.generateMapOperation(myVariable, myType, ct.markUnchanged(myExpression));
}
@Override
boolean isWriteAllowed(PsiVariable variable, PsiExpression reference) {
// Allow rewriting the mapped variable itself inside its own declaration/assignment site.
return variable == myVariable && reference.getParent() == myExpression.getParent();
}
}
/**
 * A ".flatMap(...)" operation built from a nested loop: each outer element is expanded into
 * the stream produced by the inner source. Picks flatMapToInt/Long/Double for primitive inner
 * streams, and inserts a mapToObj+identity bridge when the outer stream is primitive but the
 * inner is not of the same primitive type.
 */
static class FlatMapOp extends Operation {
// Inner loop's stream source whose elements replace each outer element.
private final StreamSource mySource;
FlatMapOp(StreamSource source, PsiVariable variable) {
super(source.getExpression(), variable);
mySource = source;
}
@Override
public String createReplacement(CommentTracker ct) {
String operation = "flatMap";
PsiType inType = myVariable.getType();
PsiType outType = mySource.getVariable().getType();
String lambda = myVariable.getName() + " -> " + getStreamExpression(ct);
// Inner stream of a primitive type needs the specialized flatMapToXxx variant.
if (outType instanceof PsiPrimitiveType && !outType.equals(inType)) {
if (outType.equals(PsiType.INT)) {
operation = "flatMapToInt";
}
else if (outType.equals(PsiType.LONG)) {
operation = "flatMapToLong";
}
else if (outType.equals(PsiType.DOUBLE)) {
operation = "flatMapToDouble";
}
}
// Primitive outer stream has no flatMapToXxx; bridge via mapToObj + Function.identity().
if (inType instanceof PsiPrimitiveType && !outType.equals(inType)) {
return ".mapToObj(" + lambda + ")." + operation + "(" + CommonClassNames.JAVA_UTIL_FUNCTION_FUNCTION + ".identity())";
}
return "." + operation + "(" + lambda + ")";
}
public StreamSource getSource() {
return mySource;
}
@NotNull
String getStreamExpression(CommentTracker ct) {
return mySource.createReplacement(ct);
}
@Override
boolean isWriteAllowed(PsiVariable variable, PsiExpression reference) {
return mySource.isWriteAllowed(variable, reference);
}
@Override
boolean canReassignVariable(PsiVariable variable) {
return mySource.canReassignVariable(variable);
}
// True when the given break statement exits the inner (flat-mapped) loop rather than the outer one.
boolean breaksMe(PsiBreakStatement statement) {
return statement.findExitedStatement() == mySource.getMainStatement();
}
}
/**
 * A ".limit(n)" operation derived from a counter-guarded loop
 * (e.g. "if(count++ == max) break"). {@code myDelta} compensates for pre/post increment
 * differences between the counter expression and the actual number of consumed elements.
 */
static class LimitOp extends Operation {
// The counting expression guarding the loop (e.g. "count++").
private final PsiExpression myCounter;
// The counter variable declaration; deleted on cleanUp() since limit() replaces it.
private final PsiLocalVariable myCounterVariable;
// Non-negative adjustment added to the limit expression.
private final int myDelta;
LimitOp(PsiVariable variable,
PsiExpression countExpression,
PsiExpression limitExpression,
PsiLocalVariable counterVariable,
int delta) {
super(limitExpression, variable);
LOG.assertTrue(delta >= 0);
myDelta = delta;
myCounter = countExpression;
myCounterVariable = counterVariable;
}
@Override
String createReplacement(CommentTracker ct) {
return ".limit(" + getLimitExpression(ct) + ")";
}
PsiLocalVariable getCounterVariable() {
return myCounterVariable;
}
PsiExpression getCountExpression() {
return myCounter;
}
@Override
void cleanUp() {
// The explicit counter becomes dead once limit() is in place.
if (myCounterVariable != null) {
new CommentTracker().deleteAndRestoreComments(myCounterVariable);
}
}
@Override
boolean isWriteAllowed(PsiVariable variable, PsiExpression reference) {
// The counter may legitimately be written inside its own counting expression.
return variable == myCounterVariable && PsiTreeUtil.isAncestor(myCounter, reference, false);
}
private String getLimitExpression(CommentTracker ct) {
if (myDelta == 0) {
return ct.text(myExpression);
}
// Fold the delta into an int/long literal limit to keep the generated code readable.
if (myExpression instanceof PsiLiteralExpression) {
Object value = ((PsiLiteralExpression)myExpression).getValue();
if (value instanceof Integer || value instanceof Long) {
return String.valueOf(((Number)value).longValue() + myDelta);
}
}
// Otherwise emit "expr+delta", parenthesizing expr if needed for additive precedence.
return ct.text(myExpression, ParenthesesUtils.ADDITIVE_PRECEDENCE) + "+" + myDelta;
}
}
/**
 * A ".distinct()" operation; carries no expression of its own.
 */
static class DistinctOp extends Operation {
protected DistinctOp(PsiVariable variable) {
super(null, variable);
}
@Override
String createReplacement(CommentTracker ct) {
return ".distinct()";
}
}
/**
 * The head of the pipeline: an Operation that produces the stream from the original loop
 * (collection, array, counting loop, BufferedReader lines, or iterate source).
 */
abstract static class StreamSource extends Operation {
// The loop statement this source was extracted from.
private final PsiStatement myMainStatement;
protected StreamSource(PsiStatement mainStatement, PsiVariable variable, PsiExpression expression) {
super(expression, variable);
myMainStatement = mainStatement;
}
PsiStatement getMainStatement() {
return myMainStatement;
}
/**
 * Factory: tries each known source kind in priority order
 * (reader lines, counting for-loop, iterate for-loop, array foreach, collection foreach).
 */
@Contract("null -> null")
static StreamSource tryCreate(PsiLoopStatement statement) {
if (statement == null) return null;
BufferedReaderLines readerSource = BufferedReaderLines.from(statement);
if (readerSource != null) return readerSource;
if (statement instanceof PsiForStatement) {
CountingLoopSource countingLoopSource = CountingLoopSource.from((PsiForStatement)statement);
if (countingLoopSource != null) return countingLoopSource;
return IterateStreamSource.from((PsiForStatement)statement);
}
if (statement instanceof PsiForeachStatement) {
ArrayStream source = ArrayStream.from((PsiForeachStatement)statement);
return source == null ? CollectionStream.from((PsiForeachStatement)statement) : source;
}
return null;
}
}
static class BufferedReaderLines extends StreamSource {
private static final CallMatcher BUFFERED_READER_READ_LINE =
CallMatcher.instanceCall("java.io.BufferedReader", "readLine").parameterCount(0);
private boolean myDeleteVariable;
private BufferedReaderLines(PsiLoopStatement loop, PsiVariable variable, PsiExpression expression, boolean deleteVariable) {
super(loop, variable, expression);
myDeleteVariable = deleteVariable;
}
@Override
// Replaces the read-line loop source with "reader.lines()".
String createReplacement(CommentTracker ct) {
return ct.text(myExpression) + ".lines()";
}
@Override
// Deletes the now-unused line variable when this source owns its declaration.
void cleanUp() {
if (myDeleteVariable) {
new CommentTracker().deleteAndRestoreComments(myVariable);
}
}
@Override
// Allows the writes that are inherent to the pattern itself: the "(line = reader.readLine())"
// assignment, and (for the for-loop form) writes inside the same for-loop's condition/update.
boolean isWriteAllowed(PsiVariable variable, PsiExpression reference) {
if (myVariable == variable) {
if (reference.getParent() == PsiTreeUtil.getParentOfType(myExpression, PsiAssignmentExpression.class)) return true;
PsiForStatement forStatement = PsiTreeUtil.getParentOfType(variable, PsiForStatement.class);
// NOTE(review): compares the for-statement enclosing the variable with the one enclosing
// the reader expression — presumably to restrict this to the same loop; confirm intent.
if (forStatement != null && forStatement == PsiTreeUtil.getParentOfType(myVariable, PsiForStatement.class)) {
return PsiTreeUtil.isAncestor(forStatement.getUpdate(), reference, false) ||
PsiTreeUtil.isAncestor(forStatement.getCondition(), reference, false);
}
}
return false;
}
@Nullable
// Factory: tries the three recognized read-line loop shapes in order
// (simple while, simple for, for with the read in the condition).
public static BufferedReaderLines from(PsiLoopStatement loopStatement) {
BufferedReaderLines whileSimple = extractWhileSimple(loopStatement);
if (whileSimple != null) return whileSimple;
BufferedReaderLines forSimple = extractForSimple(loopStatement);
if (forSimple != null) return forSimple;
return extractForReadInCondition(loopStatement);
}
/**
* Extracts BufferedReaderSource from condition (for update or while condition), but additional checks may be required
* Condition must look like: (line = reader.readLine()) != null
*/
@Nullable
private static BufferedReaderLines extractReaderFromCondition(@Nullable PsiExpression condition,
@NotNull PsiLoopStatement loopStatement) {
PsiBinaryExpression binOp = tryCast(PsiUtil.skipParenthesizedExprDown(condition), PsiBinaryExpression.class);
if (binOp == null) return null;
if (!JavaTokenType.NE.equals(binOp.getOperationTokenType())) return null;
PsiExpression operand = ExpressionUtils.getValueComparedWithNull(binOp);
if (operand == null) return null;
PsiAssignmentExpression assignment = ExpressionUtils.getAssignment(PsiUtil.skipParenthesizedExprDown(operand));
if (assignment == null) return null;
PsiMethodCallExpression readerCall = tryCast(assignment.getRExpression(), PsiMethodCallExpression.class);
if (!BUFFERED_READER_READ_LINE.test(readerCall)) return null;
PsiExpression reader = readerCall.getMethodExpression().getQualifierExpression();
PsiLocalVariable lineVar = ExpressionUtils.resolveLocalVariable(assignment.getLExpression());
if (lineVar == null) return null;
if (!ReferencesSearch.search(lineVar).forEach(ref -> {
return PsiTreeUtil.isAncestor(loopStatement, ref.getElement(), true);
})) {
return null;
}
return new BufferedReaderLines(loopStatement, lineVar, reader, false);
}
// for (String line; (line = reader.readLine()) != null; )
@Nullable
private static BufferedReaderLines extractForReadInCondition(PsiLoopStatement loopStatement) {
PsiForStatement forLoop = tryCast(loopStatement, PsiForStatement.class);
if (forLoop == null || forLoop.getUpdate() != null) return null;
BufferedReaderLines reader = extractReaderFromCondition(forLoop.getCondition(), loopStatement);
if (reader == null) return null;
PsiDeclarationStatement declaration = tryCast(forLoop.getInitialization(), PsiDeclarationStatement.class);
if (declaration == null) return null;
PsiElement[] declaredElements = declaration.getDeclaredElements();
if (declaredElements.length != 1) return null;
PsiVariable lineVar = reader.getVariable();
if (declaredElements[0] != lineVar) return null;
return reader;
}
// for (String line = reader.readLine(); line != null; line = reader.readLine()) ...
@Nullable
private static BufferedReaderLines extractForSimple(PsiLoopStatement loopStatement) {
PsiForStatement forLoop = tryCast(loopStatement, PsiForStatement.class);
if (forLoop == null) return null;
PsiDeclarationStatement declarationStatement = tryCast(forLoop.getInitialization(), PsiDeclarationStatement.class);
if (declarationStatement == null) return null;
PsiElement[] declarations = declarationStatement.getDeclaredElements();
if (declarations.length != 1) return null;
PsiLocalVariable lineVar = tryCast(declarations[0], PsiLocalVariable.class);
if (lineVar == null) return null;
if (!ReferencesSearch.search(lineVar).forEach(ref -> {
return PsiTreeUtil.isAncestor(forLoop, ref.getElement(), true);
})) {
return null;
}
PsiMethodCallExpression maybeReadLines = tryCast(lineVar.getInitializer(), PsiMethodCallExpression.class);
if (!BUFFERED_READER_READ_LINE.test(maybeReadLines)) return null;
PsiExpression reader = maybeReadLines.getMethodExpression().getQualifierExpression();
PsiReferenceExpression readerRef = tryCast(reader, PsiReferenceExpression.class);
if (readerRef == null) return null;
PsiVariable readerVar = tryCast(readerRef.resolve(), PsiVariable.class);
if (readerVar == null) return null;
PsiBinaryExpression binOp = tryCast(PsiUtil.skipParenthesizedExprDown(forLoop.getCondition()), PsiBinaryExpression.class);
if (binOp == null) return null;
if (!JavaTokenType.NE.equals(binOp.getOperationTokenType())) return null;
PsiExpression lineExpr = ExpressionUtils.getValueComparedWithNull(binOp);
if (!ExpressionUtils.isReferenceTo(lineExpr, lineVar)) return null;
PsiExpressionStatement updateStmt = tryCast(forLoop.getUpdate(), PsiExpressionStatement.class);
if (updateStmt == null) return null;
PsiExpression readNewLineExpr = ExpressionUtils.getAssignmentTo(updateStmt.getExpression(), lineVar);
PsiMethodCallExpression readNewLineCall = tryCast(readNewLineExpr, PsiMethodCallExpression.class);
if (!BUFFERED_READER_READ_LINE.test(readNewLineCall)) return null;
if (!ExpressionUtils.isReferenceTo(readNewLineCall.getMethodExpression().getQualifierExpression(), readerVar)) return null;
return new BufferedReaderLines(forLoop, lineVar, reader, false);
}
// while ((line = br.readLine()) != null)
@Nullable
private static BufferedReaderLines extractWhileSimple(PsiLoopStatement loopStatement) {
PsiWhileStatement whileLoop = tryCast(loopStatement, PsiWhileStatement.class);
if (whileLoop == null) return null;
BufferedReaderLines reader = extractReaderFromCondition(whileLoop.getCondition(), loopStatement);
if (reader == null) return null;
reader.myDeleteVariable = true;
return reader;
}
}
/**
 * Stream source for a foreach loop over an array; replaceable with
 * {@code Arrays.stream(array)} or, for inline array initializers, with the
 * corresponding {@code Stream.of(...)}/{@code IntStream.of(...)} call.
 */
static class ArrayStream extends StreamSource {
  private ArrayStream(PsiLoopStatement loop, PsiVariable variable, PsiExpression expression) {
    super(loop, variable, expression);
  }

  @Override
  String createReplacement(CommentTracker ct) {
    if (myExpression instanceof PsiNewExpression) {
      PsiArrayInitializerExpression initializer = ((PsiNewExpression)myExpression).getArrayInitializer();
      if (initializer != null) {
        PsiElement[] children = initializer.getChildren();
        // length > 2 means the initializer has content beyond the enclosing braces;
        // the 1..length-1 slice strips those braces and keeps element expressions
        // together with their separating commas
        if (children.length > 2) {
          String initializerText = StreamEx.of(children, 1, children.length - 1).map(ct::text).joining();
          PsiType type = myExpression.getType();
          if (type instanceof PsiArrayType) {
            PsiType componentType = ((PsiArrayType)type).getComponentType();
            // pick the matching primitive stream; fall back to Stream<T> for reference types
            if (componentType.equals(PsiType.INT)) {
              return CommonClassNames.JAVA_UTIL_STREAM_INT_STREAM + ".of(" + initializerText + ")";
            }
            else if (componentType.equals(PsiType.LONG)) {
              return CommonClassNames.JAVA_UTIL_STREAM_LONG_STREAM + ".of(" + initializerText + ")";
            }
            else if (componentType.equals(PsiType.DOUBLE)) {
              return CommonClassNames.JAVA_UTIL_STREAM_DOUBLE_STREAM + ".of(" + initializerText + ")";
            }
            else if (componentType instanceof PsiClassType) {
              // explicit type witness keeps the inferred element type identical to the array's
              return CommonClassNames.JAVA_UTIL_STREAM_STREAM + ".<" + componentType.getCanonicalText() + ">of(" + initializerText + ")";
            }
          }
        }
      }
    }
    // general case: stream over the existing array expression
    return CommonClassNames.JAVA_UTIL_ARRAYS + ".stream(" + ct.text(myExpression) + ")";
  }

  /**
   * @return a source when the foreach iterates an array whose component type is stream-supported
   * and the loop parameter's type matches (no implicit primitive widening), otherwise null
   */
  @Nullable
  public static ArrayStream from(PsiForeachStatement statement) {
    PsiExpression iteratedValue = statement.getIteratedValue();
    if (iteratedValue == null) return null;
    PsiArrayType iteratedValueType = tryCast(iteratedValue.getType(), PsiArrayType.class);
    PsiParameter parameter = statement.getIterationParameter();
    if (iteratedValueType != null && StreamApiUtil.isSupportedStreamElement(iteratedValueType.getComponentType()) &&
        (!(parameter.getType() instanceof PsiPrimitiveType) || parameter.getType().equals(iteratedValueType.getComponentType()))) {
      return new ArrayStream(statement, parameter, iteratedValue);
    }
    return null;
  }
}
/**
 * Stream source for a foreach loop over a {@code java.util.Collection};
 * replaceable with {@code collection.stream()} (plus unboxing when needed).
 */
static class CollectionStream extends StreamSource {
  private CollectionStream(PsiLoopStatement loop, PsiVariable variable, PsiExpression expression) {
    super(loop, variable, expression);
  }

  @Override
  String createReplacement(CommentTracker ct) {
    // parenthesize the qualifier when needed, then append .stream() and a possible
    // primitive conversion produced by tryUnbox for a primitive loop variable
    return ct.text(myExpression, ParenthesesUtils.METHOD_CALL_PRECEDENCE) + ".stream()" + tryUnbox(myVariable);
  }

  /**
   * @return true when the iterated value's type uses the raw form of the collection class,
   * in which case the element type would be lost after conversion
   */
  @Contract("null, _ -> false")
  static boolean isRawSubstitution(PsiType iteratedValueType, PsiClass collectionClass) {
    return iteratedValueType instanceof PsiClassType &&
      PsiUtil.isRawSubstitutor(collectionClass,
                               TypeConversionUtil.getSuperClassSubstitutor(collectionClass, (PsiClassType)iteratedValueType));
  }

  /**
   * @return a source when the foreach iterates a non-raw {@code Collection} whose element
   * type is stream-supported, otherwise null
   */
  @Nullable
  public static CollectionStream from(PsiForeachStatement statement) {
    PsiExpression iteratedValue = statement.getIteratedValue();
    if (iteratedValue == null) return null;
    PsiType iteratedValueType = iteratedValue.getType();
    PsiClass collectionClass =
      JavaPsiFacade.getInstance(statement.getProject()).findClass(CommonClassNames.JAVA_UTIL_COLLECTION, statement.getResolveScope());
    PsiClass iteratorClass = PsiUtil.resolveClassInClassTypeOnly(iteratedValueType);
    if (collectionClass == null ||
        !InheritanceUtil.isInheritorOrSelf(iteratorClass, collectionClass, true) ||
        isRawSubstitution(iteratedValueType, collectionClass) ||
        !StreamApiUtil.isSupportedStreamElement(statement.getIterationParameter().getType())) {
      return null;
    }
    return new CollectionStream(statement, statement.getIterationParameter(), iteratedValue);
  }
}
/**
 * Stream source for a classic counting for-loop; replaceable with
 * {@code IntStream.range/rangeClosed} (or the {@code LongStream} equivalents).
 */
static class CountingLoopSource extends StreamSource {
  // loop upper bound expression; myExpression (inherited) holds the initial value
  final PsiExpression myBound;
  // true when the bound is inclusive (<=), selecting rangeClosed over range
  final boolean myIncluding;

  private CountingLoopSource(PsiStatement loop,
                             PsiVariable counter,
                             PsiExpression initializer,
                             PsiExpression bound,
                             boolean including) {
    super(loop, counter, initializer);
    myBound = bound;
    myIncluding = including;
  }

  @Override
  StreamEx<PsiExpression> expressions() {
    return StreamEx.of(myExpression, myBound);
  }

  @Override
  public String createReplacement(CommentTracker ct) {
    // long counter -> LongStream, otherwise IntStream
    String className = myVariable.getType().equals(PsiType.LONG) ? "java.util.stream.LongStream" : "java.util.stream.IntStream";
    String methodName = myIncluding ? "rangeClosed" : "range";
    return className + "." + methodName + "(" + ct.text(myExpression) + ", " + ct.text(myBound) + ")";
  }

  /** @return a copy of this source with the upper bound replaced */
  CountingLoopSource withBound(PsiExpression bound) {
    return new CountingLoopSource(getMainStatement(), getVariable(), getExpression(), bound, myIncluding);
  }

  /** @return a copy of this source with the initial value replaced */
  CountingLoopSource withInitializer(PsiExpression expression) {
    return new CountingLoopSource(getMainStatement(), getVariable(), expression, myBound, myIncluding);
  }

  @Override
  boolean isWriteAllowed(PsiVariable variable, PsiExpression reference) {
    // only the counter increment in the for-loop's update section is an allowed write
    if (variable == myVariable) {
      PsiForStatement forStatement = PsiTreeUtil.getParentOfType(variable, PsiForStatement.class);
      if (forStatement != null) {
        return PsiTreeUtil.isAncestor(forStatement.getUpdate(), reference, false);
      }
    }
    return false;
  }

  @Override
  boolean canReassignVariable(PsiVariable variable) {
    // the counter itself must not be reassigned by the converted body
    return variable != myVariable;
  }

  /** Delegates pattern recognition to {@link CountingLoop}. */
  @Nullable
  public static CountingLoopSource from(PsiForStatement forStatement) {
    CountingLoop loop = CountingLoop.from(forStatement);
    if (loop == null) return null;
    return new CountingLoopSource(forStatement, loop.getCounter(), loop.getInitializer(), loop.getBound(), loop.isIncluding());
  }
}
/**
 * Stream source for a for-loop whose update step derives the next value from the
 * current one, e.g. {@code for (int i = 0;; i = i + 1)} — here {@code i + 1} is the
 * update expression; replaceable with {@code Stream.iterate}/{@code IntStream.iterate}.
 */
static class IterateStreamSource extends StreamSource {
  // the loop variable's initial value
  private final PsiExpression myInitializer;
  // optional loop condition; when present, the three-argument iterate overload is emitted
  private @Nullable final PsiExpression myCondition;
  // when not null, the update is equivalent to: variable <op>= expression
  private @Nullable final IElementType myOpType;
  // set when the update is a unary ++/--; the step operand is then the literal 1
  private @Nullable final PsiUnaryExpression myUnaryExpression;

  /**
   * @param condition loop condition, or null for an unbounded loop
   * @param type if not null, equivalent form of update is: variable type= expression;
   * @param unaryExpression the ++/-- update expression when the loop uses one, else null
   */
  protected IterateStreamSource(
    @NotNull PsiLoopStatement loop,
    @NotNull PsiVariable variable,
    @Nullable PsiExpression expression,
    @NotNull PsiExpression initializer,
    @Nullable PsiExpression condition,
    @Nullable IElementType type,
    @Nullable PsiUnaryExpression unaryExpression) {
    super(loop, variable, expression);
    myInitializer = initializer;
    myCondition = condition;
    myOpType = type;
    myUnaryExpression = unaryExpression;
  }

  /**
   * Maps a binary-operator token type to its source text.
   *
   * @return the operator sign, or null for unsupported operators
   */
  @Nullable
  @Contract(pure = true)
  private static String getOperationSign(IElementType op) {
    if (op == JavaTokenType.AND) {
      return "&";
    }
    else if (op == JavaTokenType.ASTERISK) {
      return "*";
    }
    else if (op == JavaTokenType.DIV) {
      return "/";
    }
    else if (op == JavaTokenType.GTGT) {
      return ">>";
    }
    else if (op == JavaTokenType.GTGTGT) {
      return ">>>";
    }
    else if (op == JavaTokenType.LTLT) {
      return "<<";
    }
    else if (op == JavaTokenType.MINUS) {
      return "-";
    }
    else if (op == JavaTokenType.OR) {
      return "|";
    }
    else if (op == JavaTokenType.PERC) {
      return "%";
    }
    else if (op == JavaTokenType.PLUS) {
      return "+";
    }
    else if (op == JavaTokenType.XOR) {
      return "^";
    }
    return null;
  }

  @Override
  String createReplacement(CommentTracker ct) {
    String lambda;
    if (myOpType != null) {
      PsiElementFactory factory = JavaPsiFacade.getElementFactory(myVariable.getProject());
      // a unary ++/-- update steps by the literal 1; otherwise use the recorded operand
      PsiExpression expression = myUnaryExpression == null ? myExpression : factory.createExpressionFromText("1", null);
      String expressionText = ParenthesesUtils.getText(ct.markUnchanged(expression), ParenthesesUtils.getPrecedenceForOperator(myOpType));
      String lambdaBody = myVariable.getName() + getOperationSign(myOpType) + expressionText;
      if (!myVariable.getType().equals(expression.getType())) {
        // cast back to the variable's type when the operation would widen the result
        lambdaBody = ("(" + myVariable.getType().getCanonicalText() + ")") + "(" + lambdaBody + ")";
      }
      lambda = myVariable.getName() + "->" + lambdaBody;
    }
    else {
      lambda = ct.lambdaText(myVariable, myExpression);
    }
    // with a condition, emit the three-argument iterate(seed, hasNext, next) overload
    String maybeCondition = myCondition != null ? ct.lambdaText(myVariable, myCondition) + "," : "";
    return getStreamClass(myVariable.getType()) + ".iterate(" + ct.text(myInitializer) + "," + maybeCondition + lambda + ")";
  }

  /**
   * @return the stream class matching the element type (IntStream/LongStream/DoubleStream
   * for those primitives, Stream for reference types), or null for unsupported primitives
   */
  @Contract(value = "null -> null", pure = true)
  private static String getStreamClass(@Nullable PsiType type) {
    if (type == null) return null;
    if (ClassUtils.isPrimitive(type)) {
      if (type.equals(PsiType.INT)) {
        return CommonClassNames.JAVA_UTIL_STREAM_INT_STREAM;
      }
      else if (type.equals(PsiType.DOUBLE)) {
        return CommonClassNames.JAVA_UTIL_STREAM_DOUBLE_STREAM;
      }
      else if (type.equals(PsiType.LONG)) {
        return CommonClassNames.JAVA_UTIL_STREAM_LONG_STREAM;
      }
      return null;
    }
    return CommonClassNames.JAVA_UTIL_STREAM_STREAM;
  }

  @Override
  StreamEx<PsiExpression> expressions() {
    return StreamEx.of(myInitializer, myExpression);
  }

  @Override
  boolean isWriteAllowed(PsiVariable variable, PsiExpression reference) {
    // only the write inside the for-loop's update section is part of the pattern
    if (variable == myVariable) {
      PsiForStatement forStatement = PsiTreeUtil.getParentOfType(variable, PsiForStatement.class);
      if (forStatement != null) {
        return PsiTreeUtil.isAncestor(forStatement.getUpdate(), reference, false);
      }
    }
    return false;
  }

  @Override
  boolean canReassignVariable(PsiVariable variable) {
    return variable != myVariable;
  }

  /**
   * Recognizes an iterate-style for-loop: exactly one declared variable of a stream-supported
   * type, a non-null initializer, and an update of the form {@code v op= expr} or {@code v++/v--}.
   *
   * @return the source, or null when the loop does not match
   */
  @Nullable
  static IterateStreamSource from(@NotNull PsiForStatement forStatement) {
    PsiExpression condition = forStatement.getCondition();
    // a loop condition maps to the three-argument iterate overload, which requires Java 9+
    if (!PsiUtil.isLanguageLevel9OrHigher(forStatement) && condition != null) return null;
    PsiStatement initialization = forStatement.getInitialization();
    PsiDeclarationStatement initStmt = tryCast(initialization, PsiDeclarationStatement.class);
    if (initStmt == null || initStmt.getDeclaredElements().length != 1) return null;
    PsiLocalVariable variable = tryCast(initStmt.getDeclaredElements()[0], PsiLocalVariable.class);
    if (variable == null) return null;
    if (getStreamClass(variable.getType()) == null) return null;
    PsiExpression initializer = variable.getInitializer();
    if (initializer == null) return null;
    PsiStatement update = forStatement.getUpdate();
    if (update == null) return null;
    PsiExpressionStatement exprStmt = tryCast(update, PsiExpressionStatement.class);
    if (exprStmt == null) return null;
    PsiExpression expression = exprStmt.getExpression();
    PsiExpression updateExpr = null;
    IElementType op;
    PsiUnaryExpression unaryExpression = null;
    if (expression instanceof PsiAssignmentExpression) {
      // v op= expr (or plain v = ... via convertEQtoOperation returning null op)
      PsiAssignmentExpression assignment = (PsiAssignmentExpression)expression;
      op = TypeConversionUtil.convertEQtoOperation(assignment.getOperationTokenType());
      updateExpr = assignment.getRExpression();
      if (!ExpressionUtils.isReferenceTo(assignment.getLExpression(), variable)) return null;
      if (updateExpr == null) return null;
    }
    else if (expression instanceof PsiUnaryExpression) {
      // v++ / v-- mapped to + / -
      unaryExpression = (PsiUnaryExpression)expression;
      IElementType tokenType = unaryExpression.getOperationTokenType();
      op = getOperation(tokenType);
      if (op == null) return null;
    }
    else {
      return null;
    }
    // expressions moved into lambdas must not throw checked exceptions
    if (updateExpr != null && !ExceptionUtil.getThrownCheckedExceptions(updateExpr).isEmpty()) return null;
    if (condition != null && !ExceptionUtil.getThrownCheckedExceptions(condition).isEmpty()) return null;
    // the update must actually involve the loop variable
    if (!VariableAccessUtils.variableIsUsed(variable, update)) return null;
    return new IterateStreamSource(forStatement, variable, updateExpr, initializer, condition, op, unaryExpression);
  }

  /** Maps ++/-- token types to the corresponding binary step operator. */
  @Nullable
  private static IElementType getOperation(IElementType tokenType) {
    if (tokenType == JavaTokenType.PLUSPLUS) {
      return JavaTokenType.PLUS;
    }
    else if (tokenType == JavaTokenType.MINUSMINUS) {
      return JavaTokenType.MINUS;
    }
    else {
      return null;
    }
  }
}
}
| |
/*
* Copyright 2016 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config;
import com.thoughtworks.go.config.exceptions.NoSuchEnvironmentException;
import com.thoughtworks.go.domain.BaseCollection;
import com.thoughtworks.go.domain.ConfigErrors;
import com.thoughtworks.go.domain.EnvironmentPipelineMatcher;
import com.thoughtworks.go.domain.EnvironmentPipelineMatchers;
import com.thoughtworks.go.util.comparator.AlphaAsciiComparator;
import java.util.*;
/**
 * @understands the current persistent information related to multiple logical groupings of machines
 */
@ConfigTag("environments")
@ConfigCollection(BasicEnvironmentConfig.class)
public class EnvironmentsConfig extends BaseCollection<EnvironmentConfig> implements Validatable {
    private final ConfigErrors configErrors = new ConfigErrors();

    public EnvironmentsConfig() { }

    /**
     * Validates that environment names are unique, that every referenced pipeline exists,
     * and that no pipeline is assigned to more than one environment. Errors are attached
     * to the offending environment config.
     */
    public void validate(ValidationContext validationContext) {
        List<CaseInsensitiveString> knownPipelines = validationContext.getCruiseConfig().getAllPipelineNames();
        List<CaseInsensitiveString> seenEnvironmentNames = new ArrayList<>();
        Map<CaseInsensitiveString, CaseInsensitiveString> environmentByPipeline = new HashMap<>();
        for (EnvironmentConfig env : this) {
            if (seenEnvironmentNames.contains(env.name())) {
                env.addError("name", String.format("Environment with name '%s' already exists.", env.name()));
            } else {
                seenEnvironmentNames.add(env.name());
            }
            for (EnvironmentPipelineConfig pipelineRef : env.getPipelines()) {
                if (!knownPipelines.contains(pipelineRef.getName())) {
                    env.addError("pipeline", String.format("Environment '%s' refers to an unknown pipeline '%s'.", env.name(), pipelineRef.getName()));
                }
                if (environmentByPipeline.containsKey(pipelineRef.getName())) {
                    env.addError("pipeline", "Associating pipeline(s) which is already part of " + environmentByPipeline.get(pipelineRef.getName()) + " environment");
                } else {
                    environmentByPipeline.put(pipelineRef.getName(), env.name());
                }
            }
        }
    }

    public ConfigErrors errors() {
        return configErrors;
    }

    public void addError(String fieldName, String message) {
        configErrors.add(fieldName, message);
    }

    /** @return true only when every environment references agents solely from the given uuid set */
    public boolean validateContainOnlyUuids(Set<String> uuids) {
        boolean allValid = true;
        for (EnvironmentConfig env : this) {
            // keep validating remaining environments after a failure so all errors get reported
            allValid = env.validateContainsOnlyUuids(uuids) && allValid;
        }
        return allValid;
    }

    /** Adds the given agent uuids to the named environment, creating it if absent. */
    public void addAgentsToEnvironment(String environmentName, String... uuids) {
        EnvironmentConfig target = getOrCreateEnvironment(environmentName);
        for (String uuid : uuids) {
            target.addAgent(uuid);
        }
    }

    private EnvironmentConfig getOrCreateEnvironment(String environmentName) {
        CaseInsensitiveString name = new CaseInsensitiveString(environmentName);
        for (EnvironmentConfig existing : this) {
            if (existing.hasName(name)) {
                return existing;
            }
        }
        BasicEnvironmentConfig created = new BasicEnvironmentConfig(name);
        add(created);
        return created;
    }

    /** @return the matcher of the first environment containing the pipeline, or null */
    public EnvironmentPipelineMatcher matchersForPipeline(String pipelineName) {
        CaseInsensitiveString name = new CaseInsensitiveString(pipelineName);
        for (EnvironmentConfig env : this) {
            if (env.containsPipeline(name)) {
                return env.createMatcher();
            }
        }
        return null;
    }

    @Override
    public boolean add(EnvironmentConfig environment) {
        return super.add(environment);
    }

    /** Adds the given pipelines to the named environment, creating it if absent. */
    public void addPipelinesToEnvironment(String environmentName, String ... pipelineNames) {
        EnvironmentConfig target = getOrCreateEnvironment(environmentName);
        for (String pipelineName : pipelineNames) {
            target.addPipeline(new CaseInsensitiveString(pipelineName));
        }
    }

    public EnvironmentPipelineMatchers matchers() {
        EnvironmentPipelineMatchers result = new EnvironmentPipelineMatchers();
        for (EnvironmentConfig env : this) {
            result.add(env.createMatcher());
        }
        return result;
    }

    /** @return the name of the environment containing the pipeline, or null when unassociated */
    public CaseInsensitiveString findEnvironmentNameForPipeline(final CaseInsensitiveString pipelineName) {
        EnvironmentConfig env = findEnvironmentForPipeline(pipelineName);
        if (env == null) {
            return null;
        }
        return env.name();
    }

    public EnvironmentConfig findEnvironmentForPipeline(final CaseInsensitiveString pipelineName) {
        for (EnvironmentConfig env : this) {
            if (env.containsPipeline(pipelineName)) {
                return env;
            }
        }
        return null;
    }

    public boolean isPipelineAssociatedWithAnyEnvironment(final CaseInsensitiveString pipelineName) {
        return findEnvironmentForPipeline(pipelineName) != null;
    }

    public boolean isPipelineAssociatedWithRemoteEnvironment(final CaseInsensitiveString pipelineName) {
        for (EnvironmentConfig env : this) {
            if (env.containsPipelineRemotely(pipelineName)) {
                return true;
            }
        }
        return false;
    }

    public boolean isAgentUnderEnvironment(String agentUuid) {
        for (EnvironmentConfig env : this) {
            if (env.hasAgent(agentUuid)) {
                return true;
            }
        }
        return false;
    }

    /**
     * @return the environment with the given name
     * @throws NoSuchEnvironmentException when no such environment exists
     */
    public EnvironmentConfig named(final CaseInsensitiveString envName) throws NoSuchEnvironmentException {
        EnvironmentConfig found = find(envName);
        if (found == null) {
            throw new NoSuchEnvironmentException(envName);
        }
        return found;
    }

    public EnvironmentConfig find(CaseInsensitiveString envName) {
        for (EnvironmentConfig env : this) {
            if (env.name().equals(envName)) {
                return env;
            }
        }
        return null;
    }

    public List<CaseInsensitiveString> names() {
        List<CaseInsensitiveString> result = new ArrayList<>();
        for (EnvironmentConfig env : this) {
            result.add(env.name());
        }
        return result;
    }

    /** @return names of all environments the agent belongs to, sorted per AlphaAsciiComparator */
    public TreeSet<String> environmentsForAgent(String agentUuid) {
        TreeSet<String> result = new TreeSet<>(new AlphaAsciiComparator());
        for (EnvironmentConfig env : this) {
            if (env.hasAgent(agentUuid)) {
                result.add(CaseInsensitiveString.str(env.name()));
            }
        }
        return result;
    }

    public boolean hasEnvironmentNamed(CaseInsensitiveString environmentName) {
        return find(environmentName) != null;
    }

    public void removeAgentFromAllEnvironments(String uuid) {
        for (EnvironmentConfig env : this) {
            env.removeAgent(uuid);
        }
    }

    /** @return a new collection holding only the locally-defined part of each environment */
    public EnvironmentsConfig getLocal() {
        EnvironmentsConfig locals = new EnvironmentsConfig();
        for (EnvironmentConfig env : this) {
            EnvironmentConfig local = env.getLocal();
            if (local != null) {
                locals.add(local);
            }
        }
        return locals;
    }
}
| |
/*
* Phone.com API
* This is a Phone.com api Swagger definition
*
* OpenAPI spec version: 1.0.0
* Contact: apisupport@phone.com
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package io.swagger.client.api;
import io.swagger.client.ApiCallback;
import io.swagger.client.ApiClient;
import io.swagger.client.ApiException;
import io.swagger.client.ApiResponse;
import io.swagger.client.Configuration;
import io.swagger.client.Pair;
import io.swagger.client.ProgressRequestBody;
import io.swagger.client.ProgressResponseBody;
import com.google.gson.reflect.TypeToken;
import java.io.IOException;
import io.swagger.client.model.CreateQueueParams;
import io.swagger.client.model.DeleteQueue;
import io.swagger.client.model.ListQueues;
import io.swagger.client.model.QueueFull;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class QueuesApi {
private ApiClient apiClient;
/** Creates an API instance backed by the globally configured default {@code ApiClient}. */
public QueuesApi() {
    this(Configuration.getDefaultApiClient());
}
/** Creates an API instance that issues its requests through the supplied {@code ApiClient}. */
public QueuesApi(ApiClient apiClient) {
    this.apiClient = apiClient;
}
/** @return the client currently used for all requests made by this API */
public ApiClient getApiClient() {
    return this.apiClient;
}
/** Replaces the client used for subsequent requests made by this API. */
public void setApiClient(ApiClient apiClient) {
    this.apiClient = apiClient;
}
/**
 * Builds the HTTP call for createAccountQueue: POST /accounts/{account_id}/queues
 * with the queue parameters as the JSON request body. Does not execute the call.
 */
private com.squareup.okhttp.Call createAccountQueueCall(Integer accountId, CreateQueueParams data, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
    Object localVarPostBody = data;

    // create path and map variables
    String localVarPath = "/accounts/{account_id}/queues".replaceAll("\\{format\\}","json")
        .replaceAll("\\{" + "account_id" + "\\}", apiClient.escapeString(accountId.toString()));

    List<Pair> localVarQueryParams = new ArrayList<Pair>();

    Map<String, String> localVarHeaderParams = new HashMap<String, String>();

    Map<String, Object> localVarFormParams = new HashMap<String, Object>();

    // negotiate Accept and Content-Type headers (both JSON-only for this endpoint)
    final String[] localVarAccepts = {
        "application/json"
    };
    final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
    if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

    final String[] localVarContentTypes = {
        "application/json"
    };
    final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
    localVarHeaderParams.put("Content-Type", localVarContentType);

    if(progressListener != null) {
        // NOTE(review): this interceptor is added to the shared OkHttp client and never
        // removed — repeated async calls appear to accumulate interceptors; confirm intended
        apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
            @Override
            public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                // wrap the response body so download progress is reported to the listener
                return originalResponse.newBuilder()
                    .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                    .build();
            }
        });
    }

    String[] localVarAuthNames = new String[] { "apiKey" };
    return apiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
}
@SuppressWarnings("rawtypes")
private com.squareup.okhttp.Call createAccountQueueValidateBeforeCall(Integer accountId, CreateQueueParams data, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
// verify the required parameter 'accountId' is set
if (accountId == null) {
throw new ApiException("Missing the required parameter 'accountId' when calling createAccountQueue(Async)");
}
com.squareup.okhttp.Call call = createAccountQueueCall(accountId, data, progressListener, progressRequestListener);
return call;
}
/**
 * Create a queue
 * For more on the input fields, see Account Queues.
 * @param accountId Account ID (required)
 * @param data Queue data (optional)
 * @return QueueFull
 * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
 */
public QueueFull createAccountQueue(Integer accountId, CreateQueueParams data) throws ApiException {
    return createAccountQueueWithHttpInfo(accountId, data).getData();
}
/**
 * Create a queue
 * For more on the input fields, see Account Queues.
 * @param accountId Account ID (required)
 * @param data Queue data (optional)
 * @return ApiResponse&lt;QueueFull&gt; including HTTP status and response headers
 * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
 */
public ApiResponse<QueueFull> createAccountQueueWithHttpInfo(Integer accountId, CreateQueueParams data) throws ApiException {
    com.squareup.okhttp.Call validatedCall = createAccountQueueValidateBeforeCall(accountId, data, null, null);
    Type returnType = new TypeToken<QueueFull>(){}.getType();
    return apiClient.execute(validatedCall, returnType);
}
/**
 * Create a queue (asynchronously)
 * For more on the input fields, see Account Queues.
 * @param accountId Account ID (required)
 * @param data Queue data (optional)
 * @param callback The callback to be executed when the API call finishes
 * @return The request call
 * @throws ApiException If fail to process the API call, e.g. serializing the request body object
 */
public com.squareup.okhttp.Call createAccountQueueAsync(Integer accountId, CreateQueueParams data, final ApiCallback<QueueFull> callback) throws ApiException {

    ProgressResponseBody.ProgressListener progressListener = null;
    ProgressRequestBody.ProgressRequestListener progressRequestListener = null;

    // only wire up progress reporting when the caller actually supplied a callback
    if (callback != null) {
        progressListener = new ProgressResponseBody.ProgressListener() {
            @Override
            public void update(long bytesRead, long contentLength, boolean done) {
                // forward download progress to the caller
                callback.onDownloadProgress(bytesRead, contentLength, done);
            }
        };

        progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
            @Override
            public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                // forward upload progress to the caller
                callback.onUploadProgress(bytesWritten, contentLength, done);
            }
        };
    }

    com.squareup.okhttp.Call call = createAccountQueueValidateBeforeCall(accountId, data, progressListener, progressRequestListener);
    Type localVarReturnType = new TypeToken<QueueFull>(){}.getType();
    apiClient.executeAsync(call, localVarReturnType, callback);
    return call;
}
/**
 * Builds the HTTP call for deleteAccountQueue: DELETE /accounts/{account_id}/queues/{queue_id}
 * with no request body. Does not execute the call.
 */
private com.squareup.okhttp.Call deleteAccountQueueCall(Integer accountId, Integer queueId, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
    Object localVarPostBody = null;

    // create path and map variables
    String localVarPath = "/accounts/{account_id}/queues/{queue_id}".replaceAll("\\{format\\}","json")
        .replaceAll("\\{" + "account_id" + "\\}", apiClient.escapeString(accountId.toString()))
        .replaceAll("\\{" + "queue_id" + "\\}", apiClient.escapeString(queueId.toString()));

    List<Pair> localVarQueryParams = new ArrayList<Pair>();

    Map<String, String> localVarHeaderParams = new HashMap<String, String>();

    Map<String, Object> localVarFormParams = new HashMap<String, Object>();

    // negotiate Accept and Content-Type headers (both JSON-only for this endpoint)
    final String[] localVarAccepts = {
        "application/json"
    };
    final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
    if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

    final String[] localVarContentTypes = {
        "application/json"
    };
    final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
    localVarHeaderParams.put("Content-Type", localVarContentType);

    if(progressListener != null) {
        // NOTE(review): this interceptor is added to the shared OkHttp client and never
        // removed — repeated async calls appear to accumulate interceptors; confirm intended
        apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
            @Override
            public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                // wrap the response body so download progress is reported to the listener
                return originalResponse.newBuilder()
                    .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                    .build();
            }
        });
    }

    String[] localVarAuthNames = new String[] { "apiKey" };
    return apiClient.buildCall(localVarPath, "DELETE", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
}
@SuppressWarnings("rawtypes")
/** Validates required parameters, then delegates to the call builder. */
private com.squareup.okhttp.Call deleteAccountQueueValidateBeforeCall(Integer accountId, Integer queueId, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
    // both path parameters are required
    if (accountId == null) {
        throw new ApiException("Missing the required parameter 'accountId' when calling deleteAccountQueue(Async)");
    }

    if (queueId == null) {
        throw new ApiException("Missing the required parameter 'queueId' when calling deleteAccountQueue(Async)");
    }

    return deleteAccountQueueCall(accountId, queueId, progressListener, progressRequestListener);
}
/**
 * Delete a queue
 * Deletes a queue from the account. For more information on queue properties, see Account Queues.
 * @param accountId Account ID (required)
 * @param queueId Queue ID (required)
 * @return DeleteQueue
 * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
 */
public DeleteQueue deleteAccountQueue(Integer accountId, Integer queueId) throws ApiException {
    // Delegate to the WithHttpInfo variant and unwrap the payload.
    return deleteAccountQueueWithHttpInfo(accountId, queueId).getData();
}
/**
 * Delete a queue
 * Deletes a queue from the account and returns the full HTTP response wrapper.
 * @param accountId Account ID (required)
 * @param queueId Queue ID (required)
 * @return ApiResponse&lt;DeleteQueue&gt;
 * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
 */
public ApiResponse<DeleteQueue> deleteAccountQueueWithHttpInfo(Integer accountId, Integer queueId) throws ApiException {
    Type returnType = new TypeToken<DeleteQueue>(){}.getType();
    com.squareup.okhttp.Call call = deleteAccountQueueValidateBeforeCall(accountId, queueId, null, null);
    return apiClient.execute(call, returnType);
}
/**
 * Delete a queue (asynchronously)
 * Deletes a queue from the account without blocking the calling thread.
 * @param accountId Account ID (required)
 * @param queueId Queue ID (required)
 * @param callback The callback to be executed when the API call finishes
 * @return The request call
 * @throws ApiException If fail to process the API call, e.g. serializing the request body object
 */
public com.squareup.okhttp.Call deleteAccountQueueAsync(Integer accountId, Integer queueId, final ApiCallback<DeleteQueue> callback) throws ApiException {
    ProgressResponseBody.ProgressListener downloadProgress = null;
    ProgressRequestBody.ProgressRequestListener uploadProgress = null;
    // Wire progress events through to the caller's callback, when one was given.
    if (callback != null) {
        downloadProgress = new ProgressResponseBody.ProgressListener() {
            @Override
            public void update(long bytesRead, long contentLength, boolean done) {
                callback.onDownloadProgress(bytesRead, contentLength, done);
            }
        };
        uploadProgress = new ProgressRequestBody.ProgressRequestListener() {
            @Override
            public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                callback.onUploadProgress(bytesWritten, contentLength, done);
            }
        };
    }
    com.squareup.okhttp.Call call = deleteAccountQueueValidateBeforeCall(accountId, queueId, downloadProgress, uploadProgress);
    Type returnType = new TypeToken<DeleteQueue>(){}.getType();
    apiClient.executeAsync(call, returnType, callback);
    return call;
}
/**
 * Builds the okhttp call for GET /accounts/{account_id}/queues/{queue_id}.
 *
 * @param accountId Account ID substituted into the path (caller must ensure non-null)
 * @param queueId Queue ID substituted into the path (caller must ensure non-null)
 * @param progressListener optional download-progress listener; when non-null a network
 *        interceptor is registered on the shared http client
 * @param progressRequestListener optional upload-progress listener forwarded to buildCall
 * @return the prepared, not-yet-executed call
 * @throws ApiException if the request cannot be built
 */
private com.squareup.okhttp.Call getAccountQueueCall(Integer accountId, Integer queueId, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
// GET carries no request body.
Object localVarPostBody = null;
// create path and map variables
String localVarPath = "/accounts/{account_id}/queues/{queue_id}".replaceAll("\\{format\\}","json")
.replaceAll("\\{" + "account_id" + "\\}", apiClient.escapeString(accountId.toString()))
.replaceAll("\\{" + "queue_id" + "\\}", apiClient.escapeString(queueId.toString()));
// This endpoint takes no query parameters; the list stays empty.
List<Pair> localVarQueryParams = new ArrayList<Pair>();
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
final String[] localVarAccepts = {
"application/json"
};
final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);
final String[] localVarContentTypes = {
"application/json"
};
final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
localVarHeaderParams.put("Content-Type", localVarContentType);
// NOTE(review): interceptors registered here accumulate on the shared client (never
// removed) — known swagger-codegen pattern; confirm acceptable for this client's usage.
if(progressListener != null) {
apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
@Override
public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
return originalResponse.newBuilder()
.body(new ProgressResponseBody(originalResponse.body(), progressListener))
.build();
}
});
}
// Endpoint is authenticated with the "apiKey" scheme.
String[] localVarAuthNames = new String[] { "apiKey" };
return apiClient.buildCall(localVarPath, "GET", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
}
@SuppressWarnings("rawtypes")
private com.squareup.okhttp.Call getAccountQueueValidateBeforeCall(Integer accountId, Integer queueId, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
    // Fail fast when either required path parameter is absent.
    if (accountId == null) {
        throw new ApiException("Missing the required parameter 'accountId' when calling getAccountQueue(Async)");
    }
    if (queueId == null) {
        throw new ApiException("Missing the required parameter 'queueId' when calling getAccountQueue(Async)");
    }
    return getAccountQueueCall(accountId, queueId, progressListener, progressRequestListener);
}
/**
 * Show details of an individual queue
 * This service shows the details of an individual queue. For more on the input fields, see Account Queues.
 * @param accountId Account ID (required)
 * @param queueId Queue ID (required)
 * @return QueueFull
 * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
 */
public QueueFull getAccountQueue(Integer accountId, Integer queueId) throws ApiException {
    // Delegate to the WithHttpInfo variant and unwrap the payload.
    return getAccountQueueWithHttpInfo(accountId, queueId).getData();
}
/**
 * Show details of an individual queue
 * Returns the queue details together with the full HTTP response wrapper.
 * @param accountId Account ID (required)
 * @param queueId Queue ID (required)
 * @return ApiResponse&lt;QueueFull&gt;
 * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
 */
public ApiResponse<QueueFull> getAccountQueueWithHttpInfo(Integer accountId, Integer queueId) throws ApiException {
    Type returnType = new TypeToken<QueueFull>(){}.getType();
    com.squareup.okhttp.Call call = getAccountQueueValidateBeforeCall(accountId, queueId, null, null);
    return apiClient.execute(call, returnType);
}
/**
 * Show details of an individual queue (asynchronously)
 * Fetches the queue details without blocking the calling thread.
 * @param accountId Account ID (required)
 * @param queueId Queue ID (required)
 * @param callback The callback to be executed when the API call finishes
 * @return The request call
 * @throws ApiException If fail to process the API call, e.g. serializing the request body object
 */
public com.squareup.okhttp.Call getAccountQueueAsync(Integer accountId, Integer queueId, final ApiCallback<QueueFull> callback) throws ApiException {
    ProgressResponseBody.ProgressListener downloadProgress = null;
    ProgressRequestBody.ProgressRequestListener uploadProgress = null;
    // Wire progress events through to the caller's callback, when one was given.
    if (callback != null) {
        downloadProgress = new ProgressResponseBody.ProgressListener() {
            @Override
            public void update(long bytesRead, long contentLength, boolean done) {
                callback.onDownloadProgress(bytesRead, contentLength, done);
            }
        };
        uploadProgress = new ProgressRequestBody.ProgressRequestListener() {
            @Override
            public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                callback.onUploadProgress(bytesWritten, contentLength, done);
            }
        };
    }
    com.squareup.okhttp.Call call = getAccountQueueValidateBeforeCall(accountId, queueId, downloadProgress, uploadProgress);
    Type returnType = new TypeToken<QueueFull>(){}.getType();
    apiClient.executeAsync(call, returnType, callback);
    return call;
}
/**
 * Builds the okhttp call for GET /accounts/{account_id}/queues.
 *
 * @param accountId Account ID substituted into the path (caller must ensure non-null)
 * @param filtersId optional ID filter values, sent as repeated "filters[id]" query params
 * @param filtersName optional name filter values, sent as repeated "filters[name]" query params
 * @param sortId optional ID sort direction
 * @param sortName optional name sort direction
 * @param limit optional maximum number of results
 * @param offset optional number of results to skip
 * @param fields optional field-set selector
 * @param progressListener optional download-progress listener; when non-null a network
 *        interceptor is registered on the shared http client
 * @param progressRequestListener optional upload-progress listener forwarded to buildCall
 * @return the prepared, not-yet-executed call
 * @throws ApiException if the request cannot be built
 */
private com.squareup.okhttp.Call listAccountQueuesCall(Integer accountId, List<String> filtersId, List<String> filtersName, String sortId, String sortName, Integer limit, Integer offset, String fields, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
// GET carries no request body.
Object localVarPostBody = null;
// create path and map variables
String localVarPath = "/accounts/{account_id}/queues".replaceAll("\\{format\\}","json")
.replaceAll("\\{" + "account_id" + "\\}", apiClient.escapeString(accountId.toString()));
// Only non-null optional parameters are serialized into the query string.
List<Pair> localVarQueryParams = new ArrayList<Pair>();
if (filtersId != null)
localVarQueryParams.addAll(apiClient.parameterToPairs("multi", "filters[id]", filtersId));
if (filtersName != null)
localVarQueryParams.addAll(apiClient.parameterToPairs("multi", "filters[name]", filtersName));
if (sortId != null)
localVarQueryParams.addAll(apiClient.parameterToPairs("", "sort[id]", sortId));
if (sortName != null)
localVarQueryParams.addAll(apiClient.parameterToPairs("", "sort[name]", sortName));
if (limit != null)
localVarQueryParams.addAll(apiClient.parameterToPairs("", "limit", limit));
if (offset != null)
localVarQueryParams.addAll(apiClient.parameterToPairs("", "offset", offset));
if (fields != null)
localVarQueryParams.addAll(apiClient.parameterToPairs("", "fields", fields));
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
final String[] localVarAccepts = {
"application/json"
};
final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);
final String[] localVarContentTypes = {
"application/json"
};
final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
localVarHeaderParams.put("Content-Type", localVarContentType);
// NOTE(review): interceptors registered here accumulate on the shared client (never
// removed) — known swagger-codegen pattern; confirm acceptable for this client's usage.
if(progressListener != null) {
apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
@Override
public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
return originalResponse.newBuilder()
.body(new ProgressResponseBody(originalResponse.body(), progressListener))
.build();
}
});
}
// Endpoint is authenticated with the "apiKey" scheme.
String[] localVarAuthNames = new String[] { "apiKey" };
return apiClient.buildCall(localVarPath, "GET", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
}
@SuppressWarnings("rawtypes")
private com.squareup.okhttp.Call listAccountQueuesValidateBeforeCall(Integer accountId, List<String> filtersId, List<String> filtersName, String sortId, String sortName, Integer limit, Integer offset, String fields, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
    // Fail fast when the only required parameter is absent.
    if (accountId == null) {
        throw new ApiException("Missing the required parameter 'accountId' when calling listAccountQueues(Async)");
    }
    return listAccountQueuesCall(accountId, filtersId, filtersName, sortId, sortName, limit, offset, fields, progressListener, progressRequestListener);
}
/**
 * Get a list of queues for an account
 * Lists all the queues belonging to the account. See Account Queues for more info on the properties.
 * @param accountId Account ID (required)
 * @param filtersId ID filter (optional)
 * @param filtersName Name filter (optional)
 * @param sortId ID sorting (optional)
 * @param sortName Name sorting (optional)
 * @param limit Max results (optional)
 * @param offset Results to skip (optional)
 * @param fields Field set (optional)
 * @return ListQueues
 * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
 */
public ListQueues listAccountQueues(Integer accountId, List<String> filtersId, List<String> filtersName, String sortId, String sortName, Integer limit, Integer offset, String fields) throws ApiException {
    // Delegate to the WithHttpInfo variant and unwrap the payload.
    return listAccountQueuesWithHttpInfo(accountId, filtersId, filtersName, sortId, sortName, limit, offset, fields).getData();
}
/**
 * Get a list of queues for an account
 * Lists the account's queues and returns the full HTTP response wrapper.
 * @param accountId Account ID (required)
 * @param filtersId ID filter (optional)
 * @param filtersName Name filter (optional)
 * @param sortId ID sorting (optional)
 * @param sortName Name sorting (optional)
 * @param limit Max results (optional)
 * @param offset Results to skip (optional)
 * @param fields Field set (optional)
 * @return ApiResponse&lt;ListQueues&gt;
 * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
 */
public ApiResponse<ListQueues> listAccountQueuesWithHttpInfo(Integer accountId, List<String> filtersId, List<String> filtersName, String sortId, String sortName, Integer limit, Integer offset, String fields) throws ApiException {
    Type returnType = new TypeToken<ListQueues>(){}.getType();
    com.squareup.okhttp.Call call = listAccountQueuesValidateBeforeCall(accountId, filtersId, filtersName, sortId, sortName, limit, offset, fields, null, null);
    return apiClient.execute(call, returnType);
}
/**
 * Get a list of queues for an account (asynchronously)
 * Lists the account's queues without blocking the calling thread.
 * @param accountId Account ID (required)
 * @param filtersId ID filter (optional)
 * @param filtersName Name filter (optional)
 * @param sortId ID sorting (optional)
 * @param sortName Name sorting (optional)
 * @param limit Max results (optional)
 * @param offset Results to skip (optional)
 * @param fields Field set (optional)
 * @param callback The callback to be executed when the API call finishes
 * @return The request call
 * @throws ApiException If fail to process the API call, e.g. serializing the request body object
 */
public com.squareup.okhttp.Call listAccountQueuesAsync(Integer accountId, List<String> filtersId, List<String> filtersName, String sortId, String sortName, Integer limit, Integer offset, String fields, final ApiCallback<ListQueues> callback) throws ApiException {
    ProgressResponseBody.ProgressListener downloadProgress = null;
    ProgressRequestBody.ProgressRequestListener uploadProgress = null;
    // Wire progress events through to the caller's callback, when one was given.
    if (callback != null) {
        downloadProgress = new ProgressResponseBody.ProgressListener() {
            @Override
            public void update(long bytesRead, long contentLength, boolean done) {
                callback.onDownloadProgress(bytesRead, contentLength, done);
            }
        };
        uploadProgress = new ProgressRequestBody.ProgressRequestListener() {
            @Override
            public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                callback.onUploadProgress(bytesWritten, contentLength, done);
            }
        };
    }
    com.squareup.okhttp.Call call = listAccountQueuesValidateBeforeCall(accountId, filtersId, filtersName, sortId, sortName, limit, offset, fields, downloadProgress, uploadProgress);
    Type returnType = new TypeToken<ListQueues>(){}.getType();
    apiClient.executeAsync(call, returnType, callback);
    return call;
}
/**
 * Builds the okhttp call for PUT /accounts/{account_id}/queues/{queue_id}.
 *
 * @param accountId Account ID substituted into the path (caller must ensure non-null)
 * @param queueId Queue ID substituted into the path (caller must ensure non-null)
 * @param data replacement queue parameters, serialized as the JSON request body (may be null)
 * @param progressListener optional download-progress listener; when non-null a network
 *        interceptor is registered on the shared http client
 * @param progressRequestListener optional upload-progress listener forwarded to buildCall
 * @return the prepared, not-yet-executed call
 * @throws ApiException if the request cannot be built
 */
private com.squareup.okhttp.Call replaceAccountQueueCall(Integer accountId, Integer queueId, CreateQueueParams data, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
// The queue parameters become the PUT request body.
Object localVarPostBody = data;
// create path and map variables
String localVarPath = "/accounts/{account_id}/queues/{queue_id}".replaceAll("\\{format\\}","json")
.replaceAll("\\{" + "account_id" + "\\}", apiClient.escapeString(accountId.toString()))
.replaceAll("\\{" + "queue_id" + "\\}", apiClient.escapeString(queueId.toString()));
// This endpoint takes no query parameters; the list stays empty.
List<Pair> localVarQueryParams = new ArrayList<Pair>();
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
final String[] localVarAccepts = {
"application/json"
};
final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);
final String[] localVarContentTypes = {
"application/json"
};
final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
localVarHeaderParams.put("Content-Type", localVarContentType);
// NOTE(review): interceptors registered here accumulate on the shared client (never
// removed) — known swagger-codegen pattern; confirm acceptable for this client's usage.
if(progressListener != null) {
apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
@Override
public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
return originalResponse.newBuilder()
.body(new ProgressResponseBody(originalResponse.body(), progressListener))
.build();
}
});
}
// Endpoint is authenticated with the "apiKey" scheme.
String[] localVarAuthNames = new String[] { "apiKey" };
return apiClient.buildCall(localVarPath, "PUT", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
}
@SuppressWarnings("rawtypes")
private com.squareup.okhttp.Call replaceAccountQueueValidateBeforeCall(Integer accountId, Integer queueId, CreateQueueParams data, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
    // Fail fast when either required path parameter is absent (data is optional).
    if (accountId == null) {
        throw new ApiException("Missing the required parameter 'accountId' when calling replaceAccountQueue(Async)");
    }
    if (queueId == null) {
        throw new ApiException("Missing the required parameter 'queueId' when calling replaceAccountQueue(Async)");
    }
    return replaceAccountQueueCall(accountId, queueId, data, progressListener, progressRequestListener);
}
/**
 * Replace a queue
 * The Replace Queue service replaces the parameters of a queue. For more on the input fields, see Account Queues.
 * @param accountId Account ID (required)
 * @param queueId Queue ID (required)
 * @param data Queue data (optional)
 * @return QueueFull
 * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
 */
public QueueFull replaceAccountQueue(Integer accountId, Integer queueId, CreateQueueParams data) throws ApiException {
    // Delegate to the WithHttpInfo variant and unwrap the payload.
    return replaceAccountQueueWithHttpInfo(accountId, queueId, data).getData();
}
/**
 * Replace a queue
 * Replaces the queue's parameters and returns the full HTTP response wrapper.
 * @param accountId Account ID (required)
 * @param queueId Queue ID (required)
 * @param data Queue data (optional)
 * @return ApiResponse&lt;QueueFull&gt;
 * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
 */
public ApiResponse<QueueFull> replaceAccountQueueWithHttpInfo(Integer accountId, Integer queueId, CreateQueueParams data) throws ApiException {
    Type returnType = new TypeToken<QueueFull>(){}.getType();
    com.squareup.okhttp.Call call = replaceAccountQueueValidateBeforeCall(accountId, queueId, data, null, null);
    return apiClient.execute(call, returnType);
}
/**
 * Replace a queue (asynchronously)
 * Replaces the queue's parameters without blocking the calling thread.
 * @param accountId Account ID (required)
 * @param queueId Queue ID (required)
 * @param data Queue data (optional)
 * @param callback The callback to be executed when the API call finishes
 * @return The request call
 * @throws ApiException If fail to process the API call, e.g. serializing the request body object
 */
public com.squareup.okhttp.Call replaceAccountQueueAsync(Integer accountId, Integer queueId, CreateQueueParams data, final ApiCallback<QueueFull> callback) throws ApiException {
    ProgressResponseBody.ProgressListener downloadProgress = null;
    ProgressRequestBody.ProgressRequestListener uploadProgress = null;
    // Wire progress events through to the caller's callback, when one was given.
    if (callback != null) {
        downloadProgress = new ProgressResponseBody.ProgressListener() {
            @Override
            public void update(long bytesRead, long contentLength, boolean done) {
                callback.onDownloadProgress(bytesRead, contentLength, done);
            }
        };
        uploadProgress = new ProgressRequestBody.ProgressRequestListener() {
            @Override
            public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                callback.onUploadProgress(bytesWritten, contentLength, done);
            }
        };
    }
    com.squareup.okhttp.Call call = replaceAccountQueueValidateBeforeCall(accountId, queueId, data, downloadProgress, uploadProgress);
    Type returnType = new TypeToken<QueueFull>(){}.getType();
    apiClient.executeAsync(call, returnType, callback);
    return call;
}
}
| |
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
 * Fingerprints .wav audio files: extracts a per-second peak-frequency
 * signature from the FFT spectrum and compares signatures for
 * approximate matches.
 */
public class FingerPrintWav {
/** ASCII bytes of the RIFF "data" chunk identifier. */
private final static byte[] DATA_CHUNK_BYTE = {0x64, 0x61, 0x74,
0x61};
/** ASCII bytes of the RIFF "fmt " chunk identifier (note the trailing space, 0x20). */
private final static byte[] FMT_CHUNK_BYTE = {0x66, 0x6d, 0x74,
0x20};
/** Byte offset of the channel-count field, relative to the start of the "fmt " chunk id. */
private final static int CHANNEL_OFFSET = 10;
/** Byte offset of the byte-rate (bytes/second) field, relative to the "fmt " chunk id. */
private final static int BYTES_PER_SEC_OFFSET = 16;
/** Byte offset of the bits-per-sample field, relative to the "fmt " chunk id. */
private final static int BITS_PER_SAMP_OFFSET = 22;
/** Lowest FFT bin index examined when extracting per-band peaks. */
private final static int START_FREQ = 30;
/**
 * Distance threshold (sic: "THRESHHOLD") used to confirm a match
 * between the short file's full fingerprint and a same-length
 * subset of the long file's fingerprint (the "slow" check).
 */
private static final double THRESHHOLD_LOW = 0.32;
/**
 * Distance threshold used for the "fast" pre-check: comparing the
 * short file's first-second fingerprint against each one-second
 * fingerprint of the long file.
 */
private static final double THRESHHOLD_HIGH = 0.4;
/** Inclusive upper bounds of the frequency bands used for peak extraction. */
private static final int[] FREQRANGE = new int[] {2000, 2700, 2800,
2900, 3000, 3100, 3200, 3300, 3400, 3500, 3600, 3700, 3800,
3900, 4000, 4200, 4400, 4600, 4800, 5000, 5200, 5600, 6000,
7000, 8000, 9000, 10000};
/**
 * Extracts either the sample data (everything after the "data" chunk
 * header) or the header (everything before the "data" chunk id) of a
 * wav file.
 *
 * NOTE(review): the body length is taken as "rest of file" rather than
 * the data chunk's declared 4-byte size field — fine for files whose
 * data chunk is last; confirm no trailing chunks are expected.
 *
 * @param file the complete wav file bytes
 * @param choice Constants.BODY for the sample data, Constants.HEADER
 *        for the header (compared case-insensitively)
 * @return the requested section, or an empty array when the "data"
 *         chunk is absent or the choice is unrecognized
 */
public byte[] getWavBodyHeader(byte[] file, String choice) {
    // Locate the start of the "data" chunk identifier.
    int datastart = indexOfPattern(file, DATA_CHUNK_BYTE);
    if (datastart == -1) {
        return new byte[0];
    }
    // Allocate only the array we actually return (the original always
    // allocated both the body and header arrays up front).
    if (choice.equalsIgnoreCase(Constants.BODY)) {
        // Samples begin after the 4-byte "data" id plus the 4-byte chunk size.
        byte[] body = new byte[file.length - (datastart + 8)];
        System.arraycopy(file, datastart + 8, body, 0, body.length);
        return body;
    }
    if (choice.equalsIgnoreCase(Constants.HEADER)) {
        byte[] header = new byte[datastart];
        System.arraycopy(file, 0, header, 0, datastart);
        return header;
    }
    return new byte[0];
}
/**
 * Reads a numeric format property out of a wav header.
 *
 * @param header the header bytes (the part of the file before the "data" chunk)
 * @param property one of Constants.CHANNELS, Constants.BYTES_PER_SEC or
 *        Constants.BITS_PER_SAMPLE (compared case-insensitively)
 * @return the property value, or -1 when the "fmt " chunk is missing or
 *         the property name is unrecognized
 */
public int getWavProperty(byte[] header, String property) {
    int fmtstart = indexOfPattern(header, FMT_CHUNK_BYTE);
    if (fmtstart == -1) {
        return -1;
    }
    // Wav headers are little-endian; ByteBuffer handles the byte order.
    if (property.equalsIgnoreCase(Constants.CHANNELS)) {
        return ByteBuffer.wrap(header, fmtstart + CHANNEL_OFFSET, 2)
            .order(ByteOrder.LITTLE_ENDIAN).getShort();
    }
    if (property.equalsIgnoreCase(Constants.BYTES_PER_SEC)) {
        return ByteBuffer.wrap(header, fmtstart + BYTES_PER_SEC_OFFSET, 4)
            .order(ByteOrder.LITTLE_ENDIAN).getInt();
    }
    if (property.equalsIgnoreCase(Constants.BITS_PER_SAMPLE)) {
        return ByteBuffer.wrap(header, fmtstart + BITS_PER_SAMP_OFFSET, 2)
            .order(ByteOrder.LITTLE_ENDIAN).getShort();
    }
    return -1;
}
/**
 * Computes the fingerprint of a wav file.
 *
 * @param file the complete wav file bytes
 * @param header the header bytes of the same file
 * @param channels the number of audio channels
 * @param isLeft true to fingerprint the left channel, false for the right
 * @return the fingerprint matrix (per-second peak frequencies), or an
 *         empty matrix when the required header properties cannot be read
 */
public int[][] fingerPrint(byte[] file, byte[] header,
int channels, boolean isLeft) {
    byte[] body = getWavBodyHeader(file, Constants.BODY);
    // Bail out as soon as a required header property is missing.
    int bytesPerSec = getWavProperty(header, Constants.BYTES_PER_SEC);
    if (bytesPerSec == -1) {
        return new int[0][0];
    }
    int bitsPerSamp = getWavProperty(header, Constants.BITS_PER_SAMPLE);
    if (bitsPerSamp == -1) {
        return new int[0][0];
    }
    // Spectrum first, then reduce it to the per-band peak fingerprint.
    Complex[][] spectrum = calculateSpectrum(body, bytesPerSec, bitsPerSamp, channels, isLeft);
    return getFingerPrint(spectrum);
}
/**
 * Computes a per-second FFT spectrum of the sample data.
 *
 * For each whole second of audio, one sample per frame is taken from
 * the selected channel, converted to a Complex value (real part =
 * sample, imaginary part = 0), zero-padded and run through the FFT.
 * Any trailing partial second is dropped.
 *
 * @param data the sample bytes (wav body only, no header)
 * @param bytesPerSec the byte rate from the wav header
 * @param bitsPerSamp bits per sample; 16 triggers two-byte
 *        little-endian decoding, anything else is read as one
 *        signed byte per sample
 * @param channels the number of interleaved channels
 * @param isLeft true for the left (first) channel, false for the right;
 *        forced to true for mono input
 * @return one FFT spectrum per whole second of audio
 */
private Complex[][] calculateSpectrum(byte[] data, int bytesPerSec,
int bitsPerSamp, int channels, boolean isLeft) {
// If mono file, isLeft should always be true
if (channels == 1 && !isLeft) isLeft = true;
// input data only contains the body, not include
// header info
int signalLength = data.length;
// Integer division: any trailing partial second is ignored.
int totalSeconds = signalLength / bytesPerSec;
Complex[][] spectrum = new Complex[totalSeconds][];
// for every second, get the sample chunk into a complex
// array
Complex[] sampleChunk;
for (int second = 0; second < totalSeconds; second++) {
int bytesPerSample = bitsPerSamp / 8;
// compute the step as channels * bytesPerSample
// we'll take one sample in every "step" bytes
// (i.e. one sample per interleaved frame)
int step = channels * bytesPerSample;
// offset within the frame selecting the left (0) or right
// (one sample width) channel
int channelStep = ((isLeft) ? 0 : 1) * bytesPerSample;
// initialize the sample chunk array
sampleChunk = new Complex[bytesPerSec / step];
// create a corresponding complex number for each
// sample,
// put the real part as the sample (byte to double),
// put the imaginary part as 0,
for (int i = 0; i < bytesPerSec; i = i + step) {
// compute the realPart
double realPart;
if (bitsPerSamp == 16)
realPart =
bytesToDouble(data[(second * bytesPerSec) + i
+ channelStep], data[(second * bytesPerSec) + i + 1
+ channelStep]);
else
realPart =
(double) data[(second * bytesPerSec) + i + channelStep];
// each sample has been converted to a complex
// number
sampleChunk[i / step] = new Complex(realPart, 0);
}
// Perform FFT analysis on the chunk:
// padding() presumably zero-pads to a power of two — confirm in FFT.
spectrum[second] = FFT.fft(FFT.padding(sampleChunk));
}
return spectrum;
}
/**
 * Reduces a spectrum to a fingerprint: for every second, the FFT bin
 * with the highest log-magnitude inside each frequency band.
 *
 * @param spectrum per-second FFT spectra
 * @return matrix [second][band] of peak bin indices; empty when the
 *         spectrum (or its first row) is empty
 */
private int[][] getFingerPrint(Complex[][] spectrum) {
    // Guard against an empty or degenerate spectrum.
    if (spectrum.length == 0 || spectrum[0].length == 0) {
        return new int[0][0];
    }
    int totalSeconds = spectrum.length;
    // Length of each FFT row (the original called this "bytesPerSecond").
    int fftSize = spectrum[0].length;
    // Per second and per band: the best magnitude seen and the bin it came from.
    double[][] peakMagnitude = new double[totalSeconds][FREQRANGE.length];
    int[][] peakFrequency = new int[totalSeconds][FREQRANGE.length];
    for (int second = 0; second < totalSeconds; second++) {
        // Only bins below the Nyquist limit (fftSize / 2) carry information.
        for (int bin = START_FREQ; bin < fftSize / 2; bin++) {
            double magnitude = Math.log(spectrum[second][bin].absolute() + 1);
            int band = getRangeIndex(bin);
            // Bins above the last band bound fall outside every band; skip them.
            if (band < FREQRANGE.length && magnitude > peakMagnitude[second][band]) {
                peakMagnitude[second][band] = magnitude;
                peakFrequency[second][band] = bin;
            }
        }
    }
    return peakFrequency;
}
/**
 * Returns the index of the frequency band containing {@code freq}:
 * the index of the first FREQRANGE entry that is &gt;= freq, or
 * FREQRANGE.length when freq lies above every band bound.
 *
 * @param freq the frequency (FFT bin index)
 * @return the band index, in [0, FREQRANGE.length]
 */
private int getRangeIndex(int freq) {
    // Linear scan for the first band bound not below freq. The
    // original wrapped this in a try/catch returning
    // FREQRANGE.length + 1, but nothing here can throw (i is bounded
    // by FREQRANGE.length), so that handler was dead code.
    int i = 0;
    while (i < FREQRANGE.length && FREQRANGE[i] < freq) {
        i++;
    }
    return i;
}
/**
 * Finds the index in {@code data} where {@code pattern} first occurs,
 * using the Knuth-Morris-Pratt string search algorithm. REF:
 * http://en.wikipedia.org/wiki/Knuth%E2%80%93Morris%E2%80%93Pratt_algorithm
 *
 * @param data the bytes to search in
 * @param pattern the byte pattern to search for
 * @return the index of the first occurrence, or -1 when not found or
 *         when data is empty
 */
private int indexOfPattern(byte[] data, byte[] pattern) {
// Compute failure function
int[] failureFunction = computeFailureFunction(pattern);
int patternIndex = 0;
if (data.length == 0) return -1;
for (int dataIndex = 0; dataIndex < data.length; dataIndex++) {
// On mismatch, fall back along the failure function instead of
// rescanning data from scratch.
while (patternIndex > 0
&& pattern[patternIndex] != data[dataIndex]) {
patternIndex = failureFunction[patternIndex - 1];
}
// when there is a match increase the pattern index by
// one
if (pattern[patternIndex] == data[dataIndex]) {
patternIndex++;
}
// in case pattern completely searched return the
// pattern matching index
if (patternIndex == pattern.length) {
return dataIndex - pattern.length + 1;
}
}
return -1;
}
/**
 * Builds the KMP failure (longest proper prefix-suffix) table by
 * matching the pattern against itself.
 *
 * @param pattern the search pattern
 * @return table where entry k is the length of the longest proper
 *         prefix of pattern[0..k] that is also a suffix of it
 */
private int[] computeFailureFunction(byte[] pattern) {
    int[] table = new int[pattern.length];
    int prefixLen = 0;
    for (int pos = 1; pos < pattern.length; pos++) {
        // Shrink the candidate prefix until it can be extended or is empty.
        while (prefixLen > 0 && pattern[prefixLen] != pattern[pos]) {
            prefixLen = table[prefixLen - 1];
        }
        if (pattern[prefixLen] == pattern[pos]) {
            prefixLen++;
        }
        table[pos] = prefixLen;
    }
    return table;
}
/**
 * Decodes two little-endian bytes as a signed 16-bit sample and widens
 * it to double.
 *
 * @param byte1 the low-order byte
 * @param byte2 the high-order byte
 * @return the decoded signed sample value
 */
private double bytesToDouble(byte byte1, byte byte2) {
    ByteBuffer buffer = ByteBuffer.wrap(new byte[] {byte1, byte2});
    return buffer.order(ByteOrder.LITTLE_ENDIAN).getShort();
}
/**
 * Matches two audio fingerprints (per-second FFT band vectors) and reports
 * whether the shorter one occurs inside the longer one.
 *
 * Two-phase search: a fast pass compares only the first one-second vector
 * of the shorter print against each position of the longer print; when that
 * distance is below THRESHHOLD_HIGH, a slow pass compares the full
 * flattened window to confirm. Prints "MATCH: <offset> <files>" to stdout
 * on success.
 *
 * @param fingerPrintOne : FFT values of file one
 * @param fingerPrintTwo : FFT values of file two
 * @param file1Name the file1 name
 * @param file2Name the file2 name
 * @return true, if match found, otherwise false
 */
public boolean matchTwoFingerPrints(int[][] fingerPrintOne,
int[][] fingerPrintTwo, String file1Name, String file2Name) {
// Guard: either fingerprint empty (no rows, or empty rows) => no match.
// NOTE(review): only row 0 is checked for emptiness; assumes all rows
// have the same length — TODO confirm against the FFT producer.
if (fingerPrintOne.length == 0 || fingerPrintTwo.length == 0)
return false;
if (fingerPrintOne[0].length == 0
|| fingerPrintTwo[0].length == 0) return false;
// get the filenames for printing, longer file name
// followed by shorter file name
String fileNames =
(fingerPrintOne.length >= fingerPrintTwo.length) ? (file1Name
+ Constants.SPACE + file2Name) : (file2Name
+ Constants.SPACE + file1Name);
int vectorSize = fingerPrintOne[0].length;
// array to hold the longer fingerprint
int[][] longer;
// array to hold the shorter fingerprint
int[][] shorter;
if (fingerPrintOne.length >= fingerPrintTwo.length) {
shorter = fingerPrintTwo;
longer = fingerPrintOne;
} else {
shorter = fingerPrintOne;
longer = fingerPrintTwo;
}
// First one-second vector of the short print, used by the fast pass.
int[] first_vector_short = shorter[0];
double distance = 0.0;
// bestMatch starts at 1.0 as a sentinel meaning "no match found yet".
double bestMatch = 1.0;
// Offset (in seconds / rows) of the best confirmed match.
double time = 0.0;
int[] vector_long = new int[vectorSize];
// Scratch window: a shorter-print-sized slice of the longer print.
int[] subset_long = new int[shorter.length * vectorSize];
int[] flattened_short = flattenMatrix(shorter);
int count = 0;
// FAST ALGO : Check the similarity between first second
// fingerprints of short file with every 1 second finger
// prints of the long file.
for (int i = 0; i <= longer.length - shorter.length; i++) {
vector_long = longer[i];
distance =
Math.abs(getDistance(vector_long, first_vector_short));
// If distance between one second fingerprints is
// smaller than
// the THRESHHOLD_HIGH: switch to SLOW ALGO to confirm
// the match
if (distance <= THRESHHOLD_HIGH) {
// Flatten the candidate window of the longer print into
// subset_long so it can be compared element-wise.
count = 0;
for (int row = i; row < (i + shorter.length); row++) {
for (int col = 0; col < vectorSize; col++) {
subset_long[count++] = longer[row][col];
}
}
// SLOW ALGO : Check if the pattern of shorter
// fingerprints exists in the subset of longer
// fingerprint starting at this second.
distance =
Math.abs(getDistance(subset_long, flattened_short));
// if distance smaller than THRESHHOLD_LOW, replace
// the bestMatch
if (distance <= THRESHHOLD_LOW && distance < bestMatch) {
bestMatch = distance;
time = i;
}
}
}
// NOTE(review): assumes THRESHHOLD_LOW < 1.0, so any confirmed match
// moves bestMatch away from the 1.0 sentinel — verify the constant.
if (bestMatch != 1.0) {
// Match Found
System.out.println("MATCH: " + time + " " + fileNames);
return true;
}
// No match found
return false;
}
/**
 * Flattens a 2D fingerprint matrix into a single 1D array, row by row.
 * (The previous javadoc said {@code double[]}; the method has always
 * produced an {@code int[]}.)
 *
 * @param matrix int[][] representing computed FFT values of an audio file
 *        (.wav for prototype)
 * @return int[] containing all values of the matrix laid out linearly;
 *         empty when the matrix has no rows or no columns
 */
private int[] flattenMatrix(int[][] matrix) {
    int rowCount = matrix.length;
    if (rowCount == 0) {
        return new int[0];
    }
    int colCount = matrix[0].length;
    if (colCount == 0) {
        return new int[0];
    }
    int[] flattened = new int[rowCount * colCount];
    for (int row = 0; row < rowCount; row++) {
        // Copy each row wholesale instead of element by element.
        System.arraycopy(matrix[row], 0, flattened, row * colCount, colCount);
    }
    return flattened;
}
/**
 * Computes a root-mean-square distance between two flattened fingerprint
 * vectors, normalising each element's difference by the width of the
 * frequency band it belongs to (bands cycle through FREQRANGE).
 *
 * @param fingerPrintOne the fingerprint of the first file
 * @param fingerPrintTwo the fingerprint of the second file (its length is
 *        also the RMS divisor)
 * @return the distance
 */
private double getDistance(int[] fingerPrintOne,
        int[] fingerPrintTwo) {
    final int bandCount = FREQRANGE.length;
    double squaredErrorSum = 0.0;
    for (int i = 0; i < fingerPrintOne.length; i++) {
        // Difference of magnitudes (not magnitude of difference).
        double magnitudeDelta =
            Math.abs(fingerPrintOne[i]) - Math.abs(fingerPrintTwo[i]);
        int band = i % bandCount;
        // Band 0 is normalised by its upper edge, later bands by their
        // width relative to the previous edge.
        double bandWidth;
        if (band == 0) {
            bandWidth = FREQRANGE[0];
        } else {
            bandWidth = FREQRANGE[band] - FREQRANGE[band - 1];
        }
        double normalisedError = magnitudeDelta / bandWidth;
        squaredErrorSum += Math.pow(normalisedError, 2);
    }
    return Math.sqrt(squaredErrorSum / fingerPrintTwo.length);
}
}
| |
package gsvm_project;
import static gsvm_project.MatrixOperations.*;
import java.awt.Color;
import java.util.ArrayList;
import java.util.HashMap;
/**
 * A 3D graphical object: vertices keyed 1..vertices_count, surfaces given
 * as lists of vertex keys, per-surface base colours and lit shades, plus
 * transform (rotate/translate/scale) and flat-shading operations.
 *
 * NOTE(review): the class name {@code Object} shadows
 * {@code java.lang.Object}. Renaming would break callers, so it is kept;
 * any use of the real {@code java.lang.Object} in this package must be
 * fully qualified.
 *
 * @author JakubNvk
 */
public class Object {

    // Surfaces as lists of 1-based keys into the vertices map.
    ArrayList<ArrayList<Integer>> surfaces;
    // 1-based vertex index -> vertex.
    HashMap<Integer, Vertex> vertices;
    // Base colour of each surface (parallel to surfaces).
    ArrayList<Color> colours;
    // Lit (shaded) colour of each surface (parallel to surfaces).
    ArrayList<Color> shades;
    // Object origin, maintained by the translate methods.
    float x, y, z;
    int vertices_count;
    // Current drawing colour; renderLight also reuses this field as
    // scratch for the last computed shade (preserved original behaviour).
    Color colour;
    Vertex light_vertex;
    boolean isLit;
    boolean isHidden;

    /**
     * Create graphical object with default values.
     */
    Object() {
        x = 0;
        y = 0;
        z = 0;
        isLit = false;
        isHidden = false;
        vertices_count = 0;
        // The duplicate light_vertex assignment that used to follow the
        // collection initialisation has been removed.
        light_vertex = new Vertex(0, 0, 0);
        colour = new Color((Color.white).getRGB());
        colours = new ArrayList<>();
        shades = new ArrayList<>();
        surfaces = new ArrayList<>();
        vertices = new HashMap<>();
    }

    /**
     * Add vertex to object.
     *
     * @param vertex vertex to be added
     */
    void addVertex(Vertex vertex) {
        vertices_count++;
        vertices.put(vertices_count, vertex);
    }

    /**
     * Get vertex from object at specified position in vertex list.
     *
     * @param key the key whose associated vertex is to be returned
     * @return the vertex to which the specified key is mapped, or null if
     *         this map contains no mapping for the key
     */
    Vertex getVertex(int key) {
        return vertices.get(key);
    }

    /**
     * Add surface to object. The current object colour is recorded as both
     * the base colour and the initial shade of the new surface.
     *
     * @param surface list of 1-based vertex keys describing the surface
     */
    @SuppressWarnings("unchecked")
    void addSurface(ArrayList surface) {
        // Raw parameter type kept for caller compatibility; elements are
        // expected to be Integers (see getSurface).
        surfaces.add(surface);
        shades.add(colour);
        colours.add(colour);
    }

    /**
     * Get surface from object at specified position in surface list.
     *
     * @param index index of the surface to return
     * @return the surface at the specified position in this list
     */
    ArrayList<Integer> getSurface(int index) {
        return surfaces.get(index);
    }

    /**
     * Multiplies every vertex of this object by the given 4x4 matrix.
     * Shared by all rotate/translate/scale operations, which previously
     * each carried an identical copy of this loop (including a redundant
     * remove-before-put that this helper drops: put() already replaces).
     *
     * @param matrix transformation matrix to apply
     */
    private void transformVertices(float[][] matrix) {
        for (int i = 1; i < vertices_count + 1; i++) {
            Vertex vertex = vertices.get(i);
            vertex.multiplyVertex(matrix);
            vertices.put(i, vertex);
        }
    }

    /**
     * Rotates object over the X axis around its own origin.
     *
     * @param angle the angle through which the object should be rotated
     */
    void rotateX(double angle) {
        float[][] rotate_x_matrix = new float[4][4];
        float previous_x = x;
        float previous_y = y;
        float previous_z = z;
        // Move to the world origin so rotation happens about the object
        // centre, then move back afterwards.
        translateX(-x);
        translateY(-y);
        translateZ(-z);
        initMatrix(rotate_x_matrix);
        setMatrixValue(rotate_x_matrix, 0, 0, 1);
        setMatrixValue(rotate_x_matrix, 1, 1, (float) Math.cos(angle));
        setMatrixValue(rotate_x_matrix, 1, 2, (float) Math.sin(angle));
        setMatrixValue(rotate_x_matrix, 2, 1, (float) -Math.sin(angle));
        setMatrixValue(rotate_x_matrix, 2, 2, (float) Math.cos(angle));
        setMatrixValue(rotate_x_matrix, 3, 3, 1);
        transformVertices(rotate_x_matrix);
        translateX(previous_x);
        translateY(previous_y);
        translateZ(previous_z);
        if (isLit) {
            renderLight();
        }
    }

    /**
     * Rotates object over the Y axis around its own origin.
     *
     * @param angle the angle through which the object should be rotated
     */
    void rotateY(double angle) {
        float[][] rotate_y_matrix = new float[4][4];
        float previous_x = x;
        float previous_y = y;
        float previous_z = z;
        translateX(-x);
        translateY(-y);
        translateZ(-z);
        initMatrix(rotate_y_matrix);
        setMatrixValue(rotate_y_matrix, 0, 0, (float) Math.cos(angle));
        setMatrixValue(rotate_y_matrix, 0, 2, (float) Math.sin(angle));
        setMatrixValue(rotate_y_matrix, 1, 1, 1);
        setMatrixValue(rotate_y_matrix, 2, 0, (float) -Math.sin(angle));
        setMatrixValue(rotate_y_matrix, 2, 2, (float) Math.cos(angle));
        setMatrixValue(rotate_y_matrix, 3, 3, 1);
        transformVertices(rotate_y_matrix);
        translateX(previous_x);
        translateY(previous_y);
        translateZ(previous_z);
        if (isLit) {
            renderLight();
        }
    }

    /**
     * Rotates object over the Z axis around its own origin.
     *
     * @param angle the angle through which the object should be rotated
     */
    void rotateZ(double angle) {
        float[][] rotate_z_matrix = new float[4][4];
        float previous_x = x;
        float previous_y = y;
        float previous_z = z;
        translateX(-x);
        translateY(-y);
        translateZ(-z);
        initMatrix(rotate_z_matrix);
        setMatrixValue(rotate_z_matrix, 0, 0, (float) Math.cos(angle));
        setMatrixValue(rotate_z_matrix, 0, 1, (float) Math.sin(angle));
        setMatrixValue(rotate_z_matrix, 1, 0, (float) -Math.sin(angle));
        setMatrixValue(rotate_z_matrix, 1, 1, (float) Math.cos(angle));
        setMatrixValue(rotate_z_matrix, 2, 2, 1);
        setMatrixValue(rotate_z_matrix, 3, 3, 1);
        transformVertices(rotate_z_matrix);
        translateX(previous_x);
        translateY(previous_y);
        translateZ(previous_z);
        if (isLit) {
            renderLight();
        }
    }

    /**
     * Translates object over X axis.
     *
     * @param range length by which the object is to be moved
     */
    void translateX(float range) {
        float[][] translate_x_matrix = new float[4][4];
        initMatrix(translate_x_matrix, 1, 1, 1, 1);
        setMatrixValue(translate_x_matrix, 0, 3, range);
        x += range;
        if (vertices_count > 0) {
            transformVertices(translate_x_matrix);
            // Re-light only when vertices exist (original behaviour).
            if (isLit) {
                renderLight();
            }
        }
    }

    /**
     * Translates object over Y axis.
     *
     * @param range length by which the object is to be moved
     */
    void translateY(float range) {
        float[][] translate_y_matrix = new float[4][4];
        initMatrix(translate_y_matrix, 1, 1, 1, 1);
        setMatrixValue(translate_y_matrix, 1, 3, range);
        y += range;
        if (vertices_count > 0) {
            transformVertices(translate_y_matrix);
            if (isLit) {
                renderLight();
            }
        }
    }

    /**
     * Translates object over Z axis.
     *
     * @param range length by which the object is to be moved
     */
    void translateZ(float range) {
        float[][] translate_z_matrix = new float[4][4];
        initMatrix(translate_z_matrix, 1, 1, 1, 1);
        setMatrixValue(translate_z_matrix, 2, 3, range);
        z += range;
        if (vertices_count > 0) {
            transformVertices(translate_z_matrix);
            if (isLit) {
                renderLight();
            }
        }
    }

    /**
     * Scales the object up by 10%.
     */
    void scaleUp() {
        float upscale = 1.1f;
        float[][] upscale_matrix = new float[4][4];
        initMatrix(upscale_matrix, upscale, upscale, upscale, 1);
        transformVertices(upscale_matrix);
    }

    /**
     * Scales the object down by 10%.
     */
    void scaleDown() {
        float downscale = 0.9f;
        float[][] downscale_matrix = new float[4][4];
        initMatrix(downscale_matrix, downscale, downscale, downscale, 1);
        transformVertices(downscale_matrix);
    }

    /**
     * Hides invisible faces of the object by repainting all faces white.
     */
    void hideInvisibleFaces() {
        for (int i = 0; i < colours.size(); i++) {
            colours.set(i, Color.white);
        }
        isHidden = true;
    }

    /**
     * Shows invisible faces of the object and turns lighting off.
     */
    void unhideInvisibleFaces() {
        isHidden = false;
        isLit = false;
    }

    /**
     * Set vertex of light which is shining on object.
     *
     * @param light_vertex light vertex to be set
     */
    void setLight(Vertex light_vertex) {
        this.light_vertex = light_vertex;
    }

    /**
     * Renders the object with the configured light using flat shading: for
     * each surface a unit normal and a unit light direction are computed
     * and the surface's base colour is scaled by the resulting intensity.
     */
    void renderLight() {
        if (!isHidden) {
            hideInvisibleFaces();
        }
        int index = 0;
        for (ArrayList<Integer> surface : surfaces) {
            ArrayList<Vertex> surface_vertices = new ArrayList<>();
            for (Integer surface_point : surface) {
                surface_vertices.add(getVertex(surface_point));
            }
            // Surface centroid.
            float average_x = 0;
            float average_y = 0;
            float average_z = 0;
            for (Vertex vertex : surface_vertices) {
                average_x += vertex.x;
                average_y += vertex.y;
                average_z += vertex.z;
            }
            int surface_size = surface_vertices.size();
            average_x = average_x / surface_size;
            average_y = average_y / surface_size;
            average_z = average_z / surface_size;
            // Surface normal from the cross product of two edge vectors.
            // NOTE(review): assumes every surface has at least 3 vertices.
            Vertex v0 = surface_vertices.get(0);
            Vertex v1 = surface_vertices.get(1);
            Vertex v2 = surface_vertices.get(2);
            float[] vector_a = {v1.x - v0.x, v1.y - v0.y, v1.z - v0.z};
            float[] vector_b = {v2.x - v1.x, v2.y - v1.y, v2.z - v1.z};
            float[] vector_n = {
                vector_a[1] * vector_b[2] - vector_a[2] * vector_b[1],
                vector_a[2] * vector_b[0] - vector_a[0] * vector_b[2],
                vector_a[0] * vector_b[1] - vector_a[1] * vector_b[0]};
            // NOTE(review): a degenerate surface gives a zero-length
            // normal and NaN shading here — unchanged from the original.
            float normalize = (float) Math.sqrt(Math.pow(vector_n[0], 2)
                + Math.pow(vector_n[1], 2) + Math.pow(vector_n[2], 2));
            vector_n[0] = vector_n[0] / normalize;
            vector_n[1] = vector_n[1] / normalize;
            vector_n[2] = vector_n[2] / normalize;
            // Vector from the light source to the surface centroid.
            float[] vector_l = new float[3];
            vector_l[0] = average_x - light_vertex.x;
            vector_l[1] = average_y - light_vertex.y;
            vector_l[2] = average_z - light_vertex.z;
            normalize = (float) Math.sqrt(Math.pow(vector_l[0], 2)
                + Math.pow(vector_l[1], 2) + Math.pow(vector_l[2], 2));
            // BUG FIX: the original wrote
            //   vector_l[0] = average_x - light_vertex.x / normalize;
            // which, by operator precedence, divided only the light
            // coordinate instead of normalising the component. Divide the
            // already-computed components instead.
            vector_l[0] = vector_l[0] / normalize;
            vector_l[1] = vector_l[1] / normalize;
            vector_l[2] = vector_l[2] / normalize;
            // NOTE(review): cos() of a dot product is unusual — for unit
            // vectors the dot product already is cos(theta). Preserved,
            // since changing it would alter every rendered shade.
            float intensity = (float) Math.cos(vector_n[0] * vector_l[0]
                + vector_n[1] * vector_l[1] + vector_n[2] * vector_l[2]);
            Color base = colours.get(index);
            colour = new Color(
                scaleChannel(base.getRed(), intensity),
                scaleChannel(base.getGreen(), intensity),
                scaleChannel(base.getBlue(), intensity));
            shades.set(index, colour);
            index++;
        }
        // The flag used to be set once per iteration; setting it once
        // after a non-empty pass is equivalent.
        if (!surfaces.isEmpty()) {
            isLit = true;
        }
    }

    /**
     * Scales one colour channel by the light intensity, clamping the
     * result to [0, channel]. Replaces three identical if/else chains.
     */
    private int scaleChannel(int channel, float intensity) {
        float scaled = channel * intensity;
        if (scaled < 0) {
            return 0;
        }
        if (scaled > channel) {
            return channel;
        }
        return (int) scaled;
    }

    /**
     * Set object colour.
     *
     * @param colour new colour of object
     */
    void setColour(Color colour) {
        this.colour = colour;
    }

    /**
     * Get object colour.
     *
     * @param index index of surface
     * @return base colour if the object is not lit, shade colour if the
     *         object is lit
     */
    Color getColour(int index) {
        if (!isLit) {
            return colours.get(index);
        } else {
            return shades.get(index);
        }
    }

    /**
     * Render object with set colour (marks faces hidden, as before).
     */
    void renderColour() {
        isHidden = true;
        for (int i = 0; i < surfaces.size(); i++) {
            colours.set(i, colour);
        }
    }

    /**
     * Reset object data to the defaults used by the constructor.
     */
    void clear() {
        x = 0;
        y = 0;
        z = 0;
        isLit = false;
        isHidden = false;
        light_vertex = new Vertex(0, 0, 0);
        colour = new Color((Color.white).getRGB());
        vertices_count = 0;
        vertices.clear();
        surfaces.clear();
        colours.clear();
        shades.clear();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_FAILOVER_PROXY_PROVIDER_KEY_PREFIX;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_NAMENODES_KEY_PREFIX;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_INTERNAL_NAMESERVICES_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_BACKUP_ADDRESS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMESERVICES;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMESERVICE_ID;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYPASSWORD_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY;
import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Shell;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
public class TestDFSUtil {
/**
 * Reset to default UGI settings since some tests change them.
 */
@Before
public void resetUGI() {
    // A brand-new Configuration restores the default security settings.
    Configuration freshConf = new Configuration();
    UserGroupInformation.setConfiguration(freshConf);
}
/**
 * Test conversion of LocatedBlock to BlockLocation: one healthy and one
 * corrupt block must yield exactly two locations, exactly one corrupt.
 */
@Test
public void testLocatedBlocks2Locations() {
    DatanodeInfo d = DFSTestUtil.getLocalDatanodeInfo();
    DatanodeInfo[] ds = new DatanodeInfo[1];
    ds[0] = d;
    // ok
    ExtendedBlock b1 = new ExtendedBlock("bpid", 1, 1, 1);
    LocatedBlock l1 = new LocatedBlock(b1, ds, 0, false);
    // corrupt
    ExtendedBlock b2 = new ExtendedBlock("bpid", 2, 1, 1);
    LocatedBlock l2 = new LocatedBlock(b2, ds, 0, true);
    List<LocatedBlock> ls = Arrays.asList(l1, l2);
    LocatedBlocks lbs = new LocatedBlocks(10, false, ls, l2, true, null);
    BlockLocation[] bs = DFSUtil.locatedBlocks2Locations(lbs);
    // assertEquals reports expected and actual on failure; the previous
    // assertTrue(msg + length, length == 2) built the message by hand.
    assertEquals("unexpected block count", 2, bs.length);
    int corruptCount = 0;
    for (BlockLocation b : bs) {
        if (b.isCorrupt()) {
            corruptCount++;
        }
    }
    assertEquals("unexpected corrupt block count", 1, corruptCount);
    // test an empty location
    bs = DFSUtil.locatedBlocks2Locations(new LocatedBlocks());
    assertEquals(0, bs.length);
}
/**
 * Test constructing LocatedBlock with null cachedLocs: the accessor must
 * return an empty array rather than null.
 */
@Test
public void testLocatedBlockConstructorWithNullCachedLocs() {
    DatanodeInfo d = DFSTestUtil.getLocalDatanodeInfo();
    DatanodeInfo[] ds = new DatanodeInfo[1];
    ds[0] = d;
    ExtendedBlock b1 = new ExtendedBlock("bpid", 1, 1, 1);
    LocatedBlock l1 = new LocatedBlock(b1, ds, null, null, 0, false, null);
    final DatanodeInfo[] cachedLocs = l1.getCachedLocations();
    // assertEquals gives a clearer failure than assertTrue(length == 0).
    assertEquals(0, cachedLocs.length);
}
/**
 * Builds a configuration declaring a single nameservice "nn1" whose
 * address under the given key is localhost:9000.
 *
 * @param key configuration key to suffix with the nameservice id
 * @return the prepared configuration
 */
private Configuration setupAddress(String key) {
    HdfsConfiguration conf = new HdfsConfiguration();
    String suffixedKey = DFSUtil.addKeySuffixes(key, "nn1");
    conf.set(DFS_NAMESERVICES, "nn1");
    conf.set(suffixedKey, "localhost:9000");
    return conf;
}
/**
 * Test {@link DFSUtil#getNamenodeNameServiceId(Configuration)} to ensure
 * the nameserviceId set in the configuration is returned.
 */
@Test
public void getNameServiceId() {
    HdfsConfiguration configuration = new HdfsConfiguration();
    configuration.set(DFS_NAMESERVICE_ID, "nn1");
    String resolved = DFSUtil.getNamenodeNameServiceId(configuration);
    assertEquals("nn1", resolved);
}
/**
 * Test {@link DFSUtil#getNamenodeNameServiceId(Configuration)} to ensure
 * the nameserviceId for a namenode is determined by matching the
 * configured RPC address against the local node's address.
 */
@Test
public void getNameNodeNameServiceId() {
    Configuration configuration = setupAddress(DFS_NAMENODE_RPC_ADDRESS_KEY);
    String resolved = DFSUtil.getNamenodeNameServiceId(configuration);
    assertEquals("nn1", resolved);
}
/**
 * Test {@link DFSUtil#getBackupNameServiceId(Configuration)} to ensure
 * the nameserviceId for a backup node is determined by matching the
 * configured backup address against the local node's address.
 */
@Test
public void getBackupNameServiceId() {
    Configuration configuration = setupAddress(DFS_NAMENODE_BACKUP_ADDRESS_KEY);
    String resolved = DFSUtil.getBackupNameServiceId(configuration);
    assertEquals("nn1", resolved);
}
/**
 * Test {@link DFSUtil#getSecondaryNameServiceId(Configuration)} to ensure
 * the nameserviceId for a secondary namenode is determined by matching the
 * configured secondary HTTP address against the local node's address.
 */
@Test
public void getSecondaryNameServiceId() {
    Configuration configuration =
        setupAddress(DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY);
    String resolved = DFSUtil.getSecondaryNameServiceId(configuration);
    assertEquals("nn1", resolved);
}
/**
 * Test {@link DFSUtil#getNamenodeNameServiceId(Configuration)} to ensure
 * an exception is thrown when multiple rpc addresses match the local
 * node's address.
 */
@Test(expected = HadoopIllegalArgumentException.class)
public void testGetNameServiceIdException() {
    HdfsConfiguration conf = new HdfsConfiguration();
    conf.set(DFS_NAMESERVICES, "nn1,nn2");
    conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_RPC_ADDRESS_KEY, "nn1"),
        "localhost:9000");
    conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_RPC_ADDRESS_KEY, "nn2"),
        "localhost:9001");
    // Both configured RPC addresses resolve to the local host, so this
    // call must throw. The fail() that used to follow was redundant:
    // @Test(expected=...) already fails the test when no
    // HadoopIllegalArgumentException is raised.
    DFSUtil.getNamenodeNameServiceId(conf);
}
/**
 * Test {@link DFSUtil#getNameServiceIds(Configuration)} returns the
 * configured nameservice ids in declaration order.
 */
@Test
public void testGetNameServiceIds() {
    HdfsConfiguration conf = new HdfsConfiguration();
    conf.set(DFS_NAMESERVICES, "nn1,nn2");
    Collection<String> nameserviceIds = DFSUtil.getNameServiceIds(conf);
    assertEquals(2, nameserviceIds.size());
    Iterator<String> it = nameserviceIds.iterator();
    // The elements are already Strings — the redundant toString() calls
    // on each element were removed.
    assertEquals("nn1", it.next());
    assertEquals("nn2", it.next());
}
/**
 * {@link DFSUtil#getOnlyNameServiceIdOrNull(Configuration)} must return
 * the nameservice id only when exactly one is configured, and null when
 * none or several are.
 */
@Test
public void testGetOnlyNameServiceIdOrNull() {
HdfsConfiguration conf = new HdfsConfiguration();
// Two nameservices configured: ambiguous, expect null.
conf.set(DFS_NAMESERVICES, "ns1,ns2");
assertNull(DFSUtil.getOnlyNameServiceIdOrNull(conf));
// Empty nameservice list: expect null.
conf.set(DFS_NAMESERVICES, "");
assertNull(DFSUtil.getOnlyNameServiceIdOrNull(conf));
// Exactly one nameservice: expect its id back.
conf.set(DFS_NAMESERVICES, "ns1");
assertEquals("ns1", DFSUtil.getOnlyNameServiceIdOrNull(conf));
}
/**
 * Test for {@link DFSUtil#getNNServiceRpcAddresses(Configuration)} and
 * {@link DFSUtil#getNameServiceIdFromAddress(Configuration, InetSocketAddress, String...)}:
 * a federated (two-nameservice, non-HA) configuration must return both
 * configured namenode addresses, support reverse lookup of a nameservice
 * id from an address, and report HA as disabled.
 */
@Test
public void testMultipleNamenodes() throws IOException {
HdfsConfiguration conf = new HdfsConfiguration();
conf.set(DFS_NAMESERVICES, "nn1,nn2");
// Test - configured list of namenodes are returned
final String NN1_ADDRESS = "localhost:9000";
final String NN2_ADDRESS = "localhost:9001";
// NN3_ADDRESS is deliberately NOT configured; it is used below to
// check that an unknown address maps to a null nameservice id.
final String NN3_ADDRESS = "localhost:9002";
conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_RPC_ADDRESS_KEY, "nn1"),
NN1_ADDRESS);
conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_RPC_ADDRESS_KEY, "nn2"),
NN2_ADDRESS);
// Outer map key: nameservice id; inner map key: namenode id (null in
// a non-HA setup).
Map<String, Map<String, InetSocketAddress>> nnMap = DFSUtil
.getNNServiceRpcAddresses(conf);
assertEquals(2, nnMap.size());
Map<String, InetSocketAddress> nn1Map = nnMap.get("nn1");
assertEquals(1, nn1Map.size());
InetSocketAddress addr = nn1Map.get(null);
assertEquals("localhost", addr.getHostName());
assertEquals(9000, addr.getPort());
Map<String, InetSocketAddress> nn2Map = nnMap.get("nn2");
assertEquals(1, nn2Map.size());
addr = nn2Map.get(null);
assertEquals("localhost", addr.getHostName());
assertEquals(9001, addr.getPort());
// Test - can look up nameservice ID from service address
checkNameServiceId(conf, NN1_ADDRESS, "nn1");
checkNameServiceId(conf, NN2_ADDRESS, "nn2");
checkNameServiceId(conf, NN3_ADDRESS, null);
// HA is not enabled in a purely federated config
assertFalse(HAUtil.isHAEnabled(conf, "nn1"));
assertFalse(HAUtil.isHAEnabled(conf, "nn2"));
}
/**
 * Asserts that the nameservice id resolved from the given address matches
 * the expectation.
 *
 * @param conf configuration holding the nameservice address keys
 * @param addr host:port string to resolve
 * @param expectedNameServiceId expected id, or null when no nameservice
 *        should match
 */
public void checkNameServiceId(Configuration conf, String addr,
    String expectedNameServiceId) {
    InetSocketAddress socketAddress = NetUtils.createSocketAddr(addr);
    String resolvedId = DFSUtil.getNameServiceIdFromAddress(conf,
        socketAddress, DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
        DFS_NAMENODE_RPC_ADDRESS_KEY);
    assertEquals(expectedNameServiceId, resolvedId);
}
/** Tests to ensure the default namenode is used as a fallback. */
@Test
public void testDefaultNamenode() throws IOException {
    final String defaultFs = "hdfs://localhost:9999/";
    HdfsConfiguration configuration = new HdfsConfiguration();
    configuration.set(FS_DEFAULT_NAME_KEY, defaultFs);
    // With no nameservices configured, the default filesystem address
    // must come back as the single (null-keyed) entry.
    Map<String, Map<String, InetSocketAddress>> addrMap =
        DFSUtil.getNNServiceRpcAddresses(configuration);
    assertEquals(1, addrMap.size());
    Map<String, InetSocketAddress> defaultNsMap = addrMap.get(null);
    assertEquals(1, defaultNsMap.size());
    assertEquals(9999, defaultNsMap.get(null).getPort());
}
/**
 * Test to ensure nameservice specific keys in the configuration are
 * copied to generic keys when the namenode starts (federation, no HA).
 */
@Test
public void testConfModificationFederationOnly() {
    final HdfsConfiguration conf = new HdfsConfiguration();
    final String nsId = "ns1";
    conf.set(DFS_NAMESERVICES, nsId);
    conf.set(DFS_NAMESERVICE_ID, nsId);
    // Set every nameservice-specific key suffixed with the nameservice
    // id; each value is the key itself so the copy can be verified.
    for (String specificKey : NameNode.NAMENODE_SPECIFIC_KEYS) {
        conf.set(DFSUtil.addKeySuffixes(specificKey, nsId), specificKey);
    }
    // Initialize generic keys from specific keys.
    NameNode.initializeGenericKeys(conf, nsId, null);
    // The un-suffixed (generic) keys must now carry the copied values.
    for (String genericKey : NameNode.NAMENODE_SPECIFIC_KEYS) {
        assertEquals(genericKey, conf.get(genericKey));
    }
}
/**
 * Test to ensure nameservice specific keys in the configuration are
 * copied to generic keys when the namenode starts (federation with HA).
 */
@Test
public void testConfModificationFederationAndHa() {
    final HdfsConfiguration conf = new HdfsConfiguration();
    final String nsId = "ns1";
    final String nnId = "nn1";
    conf.set(DFS_NAMESERVICES, nsId);
    conf.set(DFS_NAMESERVICE_ID, nsId);
    conf.set(DFS_HA_NAMENODES_KEY_PREFIX + "." + nsId, nnId);
    // Set every nameservice-specific key suffixed with both nameservice
    // and namenode ids; each value is the key itself for verification.
    for (String specificKey : NameNode.NAMENODE_SPECIFIC_KEYS) {
        conf.set(DFSUtil.addKeySuffixes(specificKey, nsId, nnId), specificKey);
    }
    // Initialize generic keys from specific keys.
    NameNode.initializeGenericKeys(conf, nsId, nnId);
    // The un-suffixed (generic) keys must now carry the copied values.
    for (String genericKey : NameNode.NAMENODE_SPECIFIC_KEYS) {
        assertEquals(genericKey, conf.get(genericKey));
    }
}
/**
 * Ensure that fs.defaultFS is set in the configuration even if neither HA
 * nor Federation is enabled.
 *
 * Regression test for HDFS-3351.
 */
@Test
public void testConfModificationNoFederationOrHa() {
    final HdfsConfiguration conf = new HdfsConfiguration();
    conf.set(DFS_NAMENODE_RPC_ADDRESS_KEY, "localhost:1234");
    // Sanity check: the default FS is not yet the RPC address.
    assertFalse("hdfs://localhost:1234".equals(conf.get(FS_DEFAULT_NAME_KEY)));
    // Neither a nameservice id nor a namenode id is supplied.
    NameNode.initializeGenericKeys(conf, null, null);
    assertEquals("hdfs://localhost:1234", conf.get(FS_DEFAULT_NAME_KEY));
}
/**
 * Regression test for HDFS-2934: key resolution precedence when the same
 * key is set globally, per-nameservice, and per-namenode. The most
 * specific matching suffix must win.
 */
@Test
public void testSomeConfsNNSpecificSomeNSSpecific() {
final HdfsConfiguration conf = new HdfsConfiguration();
String key = DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY;
// Three levels of specificity for the same key.
conf.set(key, "global-default");
conf.set(key + ".ns1", "ns1-override");
conf.set(key + ".ns1.nn1", "nn1-override");
// A namenode in another nameservice should get the global default.
Configuration newConf = new Configuration(conf);
NameNode.initializeGenericKeys(newConf, "ns2", "nn1");
assertEquals("global-default", newConf.get(key));
// A namenode in another non-HA nameservice should get global default.
newConf = new Configuration(conf);
NameNode.initializeGenericKeys(newConf, "ns2", null);
assertEquals("global-default", newConf.get(key));
// A namenode in the same nameservice should get the ns setting
newConf = new Configuration(conf);
NameNode.initializeGenericKeys(newConf, "ns1", "nn2");
assertEquals("ns1-override", newConf.get(key));
// The nn with the nn-specific setting should get its own override
newConf = new Configuration(conf);
NameNode.initializeGenericKeys(newConf, "ns1", "nn1");
assertEquals("nn1-override", newConf.get(key));
}
/**
 * Tests that, for an empty configuration, an IOException is thrown from
 * {@link DFSUtil#getNNServiceRpcAddresses(Configuration)},
 * {@link DFSUtil#getBackupNodeAddresses(Configuration)} and
 * {@link DFSUtil#getSecondaryNameNodeAddresses(Configuration)}.
 */
@Test
public void testEmptyConf() {
// false: do not load the default resources, so no address is set.
HdfsConfiguration conf = new HdfsConfiguration(false);
try {
Map<String, Map<String, InetSocketAddress>> map =
DFSUtil.getNNServiceRpcAddresses(conf);
fail("Expected IOException is not thrown, result was: " +
DFSUtil.addressMapToString(map));
} catch (IOException expected) {
/** Expected */
}
try {
Map<String, Map<String, InetSocketAddress>> map =
DFSUtil.getBackupNodeAddresses(conf);
fail("Expected IOException is not thrown, result was: " +
DFSUtil.addressMapToString(map));
} catch (IOException expected) {
/** Expected */
}
try {
Map<String, Map<String, InetSocketAddress>> map =
DFSUtil.getSecondaryNameNodeAddresses(conf);
fail("Expected IOException is not thrown, result was: " +
DFSUtil.addressMapToString(map));
} catch (IOException expected) {
/** Expected */
}
}
/**
 * {@link DFSUtil#getInfoServer} must fall back to the default wildcard
 * host and default HTTP/HTTPS ports when no address is configured, and
 * must reuse the host of an explicitly supplied namenode address.
 */
@Test
public void testGetInfoServer() throws IOException, URISyntaxException {
HdfsConfiguration conf = new HdfsConfiguration();
// No configured address: expect 0.0.0.0 with the default HTTPS port.
URI httpsport = DFSUtil.getInfoServer(null, conf, "https");
assertEquals(new URI("https", null, "0.0.0.0",
DFS_NAMENODE_HTTPS_PORT_DEFAULT, null, null, null), httpsport);
// Same fallback for the HTTP scheme.
URI httpport = DFSUtil.getInfoServer(null, conf, "http");
assertEquals(new URI("http", null, "0.0.0.0",
DFS_NAMENODE_HTTP_PORT_DEFAULT, null, null, null), httpport);
// An explicit namenode address contributes its host; the port is
// still the default HTTP port.
URI httpAddress = DFSUtil.getInfoServer(new InetSocketAddress(
"localhost", 8020), conf, "http");
assertEquals(
URI.create("http://localhost:" + DFS_NAMENODE_HTTP_PORT_DEFAULT),
httpAddress);
}
/**
 * Federated HA setup (two nameservices, two namenodes each): the HA RPC
 * address map, the HA-enabled flags, the per-namenode service addresses
 * and the logical nameservice URIs must all resolve correctly, and
 * lookups without a nameservice id must stay ambiguous (null).
 */
@Test
public void testHANameNodesWithFederation() throws URISyntaxException {
HdfsConfiguration conf = new HdfsConfiguration();
final String NS1_NN1_HOST = "ns1-nn1.example.com:8020";
final String NS1_NN2_HOST = "ns1-nn2.example.com:8020";
final String NS2_NN1_HOST = "ns2-nn1.example.com:8020";
final String NS2_NN2_HOST = "ns2-nn2.example.com:8020";
conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "hdfs://ns1");
// Two nameservices, each with two NNs.
conf.set(DFS_NAMESERVICES, "ns1,ns2");
conf.set(DFSUtil.addKeySuffixes(DFS_HA_NAMENODES_KEY_PREFIX, "ns1"),
"ns1-nn1,ns1-nn2");
conf.set(DFSUtil.addKeySuffixes(DFS_HA_NAMENODES_KEY_PREFIX, "ns2"),
"ns2-nn1,ns2-nn2");
conf.set(DFSUtil.addKeySuffixes(
DFS_NAMENODE_RPC_ADDRESS_KEY, "ns1", "ns1-nn1"),
NS1_NN1_HOST);
conf.set(DFSUtil.addKeySuffixes(
DFS_NAMENODE_RPC_ADDRESS_KEY, "ns1", "ns1-nn2"),
NS1_NN2_HOST);
conf.set(DFSUtil.addKeySuffixes(
DFS_NAMENODE_RPC_ADDRESS_KEY, "ns2", "ns2-nn1"),
NS2_NN1_HOST);
conf.set(DFSUtil.addKeySuffixes(
DFS_NAMENODE_RPC_ADDRESS_KEY, "ns2", "ns2-nn2"),
NS2_NN2_HOST);
// Outer key: nameservice id; inner key: namenode id.
Map<String, Map<String, InetSocketAddress>> map =
DFSUtil.getHaNnRpcAddresses(conf);
assertTrue(HAUtil.isHAEnabled(conf, "ns1"));
assertTrue(HAUtil.isHAEnabled(conf, "ns2"));
// ns3 was never configured, so HA must not be reported for it.
assertFalse(HAUtil.isHAEnabled(conf, "ns3"));
assertEquals(NS1_NN1_HOST, map.get("ns1").get("ns1-nn1").toString());
assertEquals(NS1_NN2_HOST, map.get("ns1").get("ns1-nn2").toString());
assertEquals(NS2_NN1_HOST, map.get("ns2").get("ns2-nn1").toString());
assertEquals(NS2_NN2_HOST, map.get("ns2").get("ns2-nn2").toString());
assertEquals(NS1_NN1_HOST,
DFSUtil.getNamenodeServiceAddr(conf, "ns1", "ns1-nn1"));
assertEquals(NS1_NN2_HOST,
DFSUtil.getNamenodeServiceAddr(conf, "ns1", "ns1-nn2"));
assertEquals(NS2_NN1_HOST,
DFSUtil.getNamenodeServiceAddr(conf, "ns2", "ns2-nn1"));
// No nameservice was given and we can't determine which service addr
// to use as two nameservices could share a namenode ID.
assertEquals(null, DFSUtil.getNamenodeServiceAddr(conf, null, "ns1-nn1"));
// Ditto for nameservice IDs, if multiple are defined
assertEquals(null, DFSUtil.getNamenodeNameServiceId(conf));
assertEquals(null, DFSUtil.getSecondaryNameServiceId(conf));
// Each nameservice contributes exactly one logical URI.
Collection<URI> uris = DFSUtil.getNameServiceUris(conf, DFS_NAMENODE_RPC_ADDRESS_KEY);
assertEquals(2, uris.size());
assertTrue(uris.contains(new URI("hdfs://ns1")));
assertTrue(uris.contains(new URI("hdfs://ns2")));
}
@Test
public void getNameNodeServiceAddr() throws IOException {
HdfsConfiguration conf = new HdfsConfiguration();
// One nameservice with two NNs
final String NS1_NN1_HOST = "ns1-nn1.example.com:8020";
final String NS1_NN1_HOST_SVC = "ns1-nn2.example.com:8021";
final String NS1_NN2_HOST = "ns1-nn1.example.com:8020";
final String NS1_NN2_HOST_SVC = "ns1-nn2.example.com:8021";
conf.set(DFS_NAMESERVICES, "ns1");
conf.set(DFSUtil.addKeySuffixes(DFS_HA_NAMENODES_KEY_PREFIX, "ns1"),"nn1,nn2");
conf.set(DFSUtil.addKeySuffixes(
DFS_NAMENODE_RPC_ADDRESS_KEY, "ns1", "nn1"), NS1_NN1_HOST);
conf.set(DFSUtil.addKeySuffixes(
DFS_NAMENODE_RPC_ADDRESS_KEY, "ns1", "nn2"), NS1_NN2_HOST);
// The rpc address is used if no service address is defined
assertEquals(NS1_NN1_HOST, DFSUtil.getNamenodeServiceAddr(conf, null, "nn1"));
assertEquals(NS1_NN2_HOST, DFSUtil.getNamenodeServiceAddr(conf, null, "nn2"));
// A nameservice is specified explicitly
assertEquals(NS1_NN1_HOST, DFSUtil.getNamenodeServiceAddr(conf, "ns1", "nn1"));
assertEquals(null, DFSUtil.getNamenodeServiceAddr(conf, "invalid", "nn1"));
// The service addrs are used when they are defined
conf.set(DFSUtil.addKeySuffixes(
DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, "ns1", "nn1"), NS1_NN1_HOST_SVC);
conf.set(DFSUtil.addKeySuffixes(
DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, "ns1", "nn2"), NS1_NN2_HOST_SVC);
assertEquals(NS1_NN1_HOST_SVC, DFSUtil.getNamenodeServiceAddr(conf, null, "nn1"));
assertEquals(NS1_NN2_HOST_SVC, DFSUtil.getNamenodeServiceAddr(conf, null, "nn2"));
// We can determine the nameservice ID, there's only one listed
assertEquals("ns1", DFSUtil.getNamenodeNameServiceId(conf));
assertEquals("ns1", DFSUtil.getSecondaryNameServiceId(conf));
}
@Test
public void testGetHaNnHttpAddresses() throws IOException {
final String LOGICAL_HOST_NAME = "ns1";
final String NS1_NN1_ADDR = "ns1-nn1.example.com:8020";
final String NS1_NN2_ADDR = "ns1-nn2.example.com:8020";
Configuration conf = createWebHDFSHAConfiguration(LOGICAL_HOST_NAME, NS1_NN1_ADDR, NS1_NN2_ADDR);
Map<String, Map<String, InetSocketAddress>> map =
DFSUtil.getHaNnWebHdfsAddresses(conf, "webhdfs");
assertEquals(NS1_NN1_ADDR, map.get("ns1").get("nn1").toString());
assertEquals(NS1_NN2_ADDR, map.get("ns1").get("nn2").toString());
}
private static Configuration createWebHDFSHAConfiguration(String logicalHostName, String nnaddr1, String nnaddr2) {
HdfsConfiguration conf = new HdfsConfiguration();
conf.set(DFS_NAMESERVICES, "ns1");
conf.set(DFSUtil.addKeySuffixes(DFS_HA_NAMENODES_KEY_PREFIX, "ns1"),"nn1,nn2");
conf.set(DFSUtil.addKeySuffixes(
DFS_NAMENODE_HTTP_ADDRESS_KEY, "ns1", "nn1"), nnaddr1);
conf.set(DFSUtil.addKeySuffixes(
DFS_NAMENODE_HTTP_ADDRESS_KEY, "ns1", "nn2"), nnaddr2);
conf.set(DFS_CLIENT_FAILOVER_PROXY_PROVIDER_KEY_PREFIX + "." + logicalHostName,
ConfiguredFailoverProxyProvider.class.getName());
return conf;
}
@Test
public void testSubstituteForWildcardAddress() throws IOException {
assertEquals("foo:12345",
DFSUtil.substituteForWildcardAddress("0.0.0.0:12345", "foo"));
assertEquals("127.0.0.1:12345",
DFSUtil.substituteForWildcardAddress("127.0.0.1:12345", "foo"));
}
@Test
public void testGetNNUris() throws Exception {
HdfsConfiguration conf = new HdfsConfiguration();
final String NS1_NN1_ADDR = "ns1-nn1.example.com:8020";
final String NS1_NN2_ADDR = "ns1-nn2.example.com:8020";
final String NS2_NN_ADDR = "ns2-nn.example.com:8020";
final String NN1_ADDR = "nn.example.com:8020";
final String NN1_SRVC_ADDR = "nn.example.com:8021";
final String NN2_ADDR = "nn2.example.com:8020";
conf.set(DFS_NAMESERVICES, "ns1,ns2");
conf.set(DFSUtil.addKeySuffixes(DFS_HA_NAMENODES_KEY_PREFIX, "ns1"),"nn1,nn2");
conf.set(DFSUtil.addKeySuffixes(
DFS_NAMENODE_RPC_ADDRESS_KEY, "ns1", "nn1"), NS1_NN1_ADDR);
conf.set(DFSUtil.addKeySuffixes(
DFS_NAMENODE_RPC_ADDRESS_KEY, "ns1", "nn2"), NS1_NN2_ADDR);
conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, "ns2"),
NS2_NN_ADDR);
conf.set(DFS_NAMENODE_RPC_ADDRESS_KEY, "hdfs://" + NN1_ADDR);
conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "hdfs://" + NN2_ADDR);
Collection<URI> uris = DFSUtil.getNameServiceUris(conf,
DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, DFS_NAMENODE_RPC_ADDRESS_KEY);
assertEquals(4, uris.size());
assertTrue(uris.contains(new URI("hdfs://ns1")));
assertTrue(uris.contains(new URI("hdfs://" + NS2_NN_ADDR)));
assertTrue(uris.contains(new URI("hdfs://" + NN1_ADDR)));
assertTrue(uris.contains(new URI("hdfs://" + NN2_ADDR)));
// Make sure that non-HDFS URIs in fs.defaultFS don't get included.
conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY,
"viewfs://vfs-name.example.com");
uris = DFSUtil.getNameServiceUris(conf, DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
DFS_NAMENODE_RPC_ADDRESS_KEY);
assertEquals(3, uris.size());
assertTrue(uris.contains(new URI("hdfs://ns1")));
assertTrue(uris.contains(new URI("hdfs://" + NS2_NN_ADDR)));
assertTrue(uris.contains(new URI("hdfs://" + NN1_ADDR)));
// Make sure that an HA URI being the default URI doesn't result in multiple
// entries being returned.
conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "hdfs://ns1");
uris = DFSUtil.getNameServiceUris(conf, DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
DFS_NAMENODE_RPC_ADDRESS_KEY);
assertEquals(3, uris.size());
assertTrue(uris.contains(new URI("hdfs://ns1")));
assertTrue(uris.contains(new URI("hdfs://" + NS2_NN_ADDR)));
assertTrue(uris.contains(new URI("hdfs://" + NN1_ADDR)));
// Make sure that when a service RPC address is used that is distinct from
// the client RPC address, and that client RPC address is also used as the
// default URI, that the client URI does not end up in the set of URIs
// returned.
conf = new HdfsConfiguration();
conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "hdfs://" + NN1_ADDR);
conf.set(DFS_NAMENODE_RPC_ADDRESS_KEY, NN1_ADDR);
conf.set(DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, NN1_SRVC_ADDR);
uris = DFSUtil.getNameServiceUris(conf, DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
DFS_NAMENODE_RPC_ADDRESS_KEY);
assertEquals(1, uris.size());
assertTrue(uris.contains(new URI("hdfs://" + NN1_SRVC_ADDR)));
}
@Test (timeout=15000)
public void testLocalhostReverseLookup() {
// 127.0.0.1 -> localhost reverse resolution does not happen on Windows.
Assume.assumeTrue(!Shell.WINDOWS);
// Make sure when config FS_DEFAULT_NAME_KEY using IP address,
// it will automatically convert it to hostname
HdfsConfiguration conf = new HdfsConfiguration();
conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "hdfs://127.0.0.1:8020");
Collection<URI> uris = DFSUtil.getNameServiceUris(conf);
assertEquals(1, uris.size());
for (URI uri : uris) {
assertThat(uri.getHost(), not("127.0.0.1"));
}
}
@Test (timeout=15000)
public void testIsValidName() {
assertFalse(DFSUtil.isValidName("/foo/../bar"));
assertFalse(DFSUtil.isValidName("/foo/./bar"));
assertFalse(DFSUtil.isValidName("/foo//bar"));
assertTrue(DFSUtil.isValidName("/"));
assertTrue(DFSUtil.isValidName("/bar/"));
assertFalse(DFSUtil.isValidName("/foo/:/bar"));
assertFalse(DFSUtil.isValidName("/foo:bar"));
}
@Test(timeout=5000)
public void testGetSpnegoKeytabKey() {
HdfsConfiguration conf = new HdfsConfiguration();
String defaultKey = "default.spengo.key";
conf.unset(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY);
assertEquals("Test spnego key in config is null", defaultKey,
DFSUtil.getSpnegoKeytabKey(conf, defaultKey));
conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY, "");
assertEquals("Test spnego key is empty", defaultKey,
DFSUtil.getSpnegoKeytabKey(conf, defaultKey));
String spengoKey = "spengo.key";
conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
spengoKey);
assertEquals("Test spnego key is NOT null",
DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
DFSUtil.getSpnegoKeytabKey(conf, defaultKey));
}
@Test(timeout=1000)
public void testDurationToString() throws Exception {
assertEquals("000:00:00:00.000", DFSUtil.durationToString(0));
assertEquals("001:01:01:01.000",
DFSUtil.durationToString(((24*60*60)+(60*60)+(60)+1)*1000));
assertEquals("000:23:59:59.999",
DFSUtil.durationToString(((23*60*60)+(59*60)+(59))*1000+999));
assertEquals("-001:01:01:01.000",
DFSUtil.durationToString(-((24*60*60)+(60*60)+(60)+1)*1000));
assertEquals("-000:23:59:59.574",
DFSUtil.durationToString(-(((23*60*60)+(59*60)+(59))*1000+574)));
}
@Test(timeout=5000)
public void testRelativeTimeConversion() throws Exception {
try {
DFSUtil.parseRelativeTime("1");
} catch (IOException e) {
assertExceptionContains("too short", e);
}
try {
DFSUtil.parseRelativeTime("1z");
} catch (IOException e) {
assertExceptionContains("unknown time unit", e);
}
try {
DFSUtil.parseRelativeTime("yyz");
} catch (IOException e) {
assertExceptionContains("is not a number", e);
}
assertEquals(61*1000, DFSUtil.parseRelativeTime("61s"));
assertEquals(61*60*1000, DFSUtil.parseRelativeTime("61m"));
assertEquals(0, DFSUtil.parseRelativeTime("0s"));
assertEquals(25*60*60*1000, DFSUtil.parseRelativeTime("25h"));
assertEquals(4*24*60*60*1000l, DFSUtil.parseRelativeTime("4d"));
assertEquals(999*24*60*60*1000l, DFSUtil.parseRelativeTime("999d"));
}
@Test
public void testAssertAllResultsEqual() {
checkAllResults(new Long[]{}, true);
checkAllResults(new Long[]{1l}, true);
checkAllResults(new Long[]{1l, 1l}, true);
checkAllResults(new Long[]{1l, 1l, 1l}, true);
checkAllResults(new Long[]{new Long(1), new Long(1)}, true);
checkAllResults(new Long[]{null, null, null}, true);
checkAllResults(new Long[]{1l, 2l}, false);
checkAllResults(new Long[]{2l, 1l}, false);
checkAllResults(new Long[]{1l, 2l, 1l}, false);
checkAllResults(new Long[]{2l, 1l, 1l}, false);
checkAllResults(new Long[]{1l, 1l, 2l}, false);
checkAllResults(new Long[]{1l, null}, false);
checkAllResults(new Long[]{null, 1l}, false);
checkAllResults(new Long[]{1l, null, 1l}, false);
}
private static void checkAllResults(Long[] toCheck, boolean shouldSucceed) {
if (shouldSucceed) {
DFSUtil.assertAllResultsEqual(Arrays.asList(toCheck));
} else {
try {
DFSUtil.assertAllResultsEqual(Arrays.asList(toCheck));
fail("Should not have succeeded with input: " +
Arrays.toString(toCheck));
} catch (AssertionError ae) {
GenericTestUtils.assertExceptionContains("Not all elements match", ae);
}
}
}
@Test
public void testGetPassword() throws Exception {
File testDir = new File(System.getProperty("test.build.data",
"target/test-dir"));
Configuration conf = new Configuration();
final Path jksPath = new Path(testDir.toString(), "test.jks");
final String ourUrl =
JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
File file = new File(testDir, "test.jks");
file.delete();
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
char[] keypass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
char[] storepass = {'s', 't', 'o', 'r', 'e', 'p', 'a', 's', 's'};
char[] trustpass = {'t', 'r', 'u', 's', 't', 'p', 'a', 's', 's'};
// ensure that we get nulls when the key isn't there
assertEquals(null, provider.getCredentialEntry(
DFS_SERVER_HTTPS_KEYPASSWORD_KEY));
assertEquals(null, provider.getCredentialEntry(
DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY));
assertEquals(null, provider.getCredentialEntry(
DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY));
// create new aliases
try {
provider.createCredentialEntry(
DFS_SERVER_HTTPS_KEYPASSWORD_KEY, keypass);
provider.createCredentialEntry(
DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY, storepass);
provider.createCredentialEntry(
DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY, trustpass);
// write out so that it can be found in checks
provider.flush();
} catch (Exception e) {
e.printStackTrace();
throw e;
}
// make sure we get back the right key directly from api
assertArrayEquals(keypass, provider.getCredentialEntry(
DFS_SERVER_HTTPS_KEYPASSWORD_KEY).getCredential());
assertArrayEquals(storepass, provider.getCredentialEntry(
DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY).getCredential());
assertArrayEquals(trustpass, provider.getCredentialEntry(
DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY).getCredential());
// use WebAppUtils as would be used by loadSslConfiguration
Assert.assertEquals("keypass",
DFSUtil.getPassword(conf, DFS_SERVER_HTTPS_KEYPASSWORD_KEY));
Assert.assertEquals("storepass",
DFSUtil.getPassword(conf, DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY));
Assert.assertEquals("trustpass",
DFSUtil.getPassword(conf, DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY));
// let's make sure that a password that doesn't exist returns null
Assert.assertEquals(null, DFSUtil.getPassword(conf,"invalid-alias"));
}
@Test
public void testGetNNServiceRpcAddressesForNsIds() throws IOException {
Configuration conf = new HdfsConfiguration();
conf.set(DFS_NAMESERVICES, "nn1,nn2");
conf.set(DFS_INTERNAL_NAMESERVICES_KEY, "nn1");
// Test - configured list of namenodes are returned
final String NN1_ADDRESS = "localhost:9000";
final String NN2_ADDRESS = "localhost:9001";
conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_RPC_ADDRESS_KEY, "nn1"),
NN1_ADDRESS);
conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_RPC_ADDRESS_KEY, "nn2"),
NN2_ADDRESS);
Map<String, Map<String, InetSocketAddress>> nnMap = DFSUtil
.getNNServiceRpcAddressesForCluster(conf);
assertEquals(1, nnMap.size());
assertTrue(nnMap.containsKey("nn1"));
conf.set(DFS_INTERNAL_NAMESERVICES_KEY, "nn3");
try {
DFSUtil.getNNServiceRpcAddressesForCluster(conf);
fail("Should fail for misconfiguration");
} catch (IOException ignored) {
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans;
import java.util.List;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.Add;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.ConstantExpression;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.Divide;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.EqualToExpr;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.GTOrEqualToExpr;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.GreaterThanExpr;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.LTOrEqualToExpr;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.LessThanExpr;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.Mod;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.Multiply;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.NotEqualToExpr;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POAnd;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POBinCond;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POCast;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POIsNull;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POMapLookUp;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.PONegative;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.PONot;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POOr;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POProject;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.PORegexp;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POUserComparisonFunc;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POUserFunc;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.Subtract;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POCollectedGroup;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POCounter;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POCross;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.PODemux;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.PODistinct;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POFRJoin;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POFilter;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POForEach;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POGlobalRearrange;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLimit;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLoad;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLocalRearrange;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POMergeCogroup;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POMergeJoin;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.PONative;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POOptimizedForEach;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPackage;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPartialAgg;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPartitionRearrange;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPoissonSample;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPreCombinerLocalRearrange;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.PORank;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POReservoirSample;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POSkewedJoin;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POSort;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POSplit;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStore;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStream;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POUnion;
import org.apache.pig.impl.plan.PlanVisitor;
import org.apache.pig.impl.plan.PlanWalker;
import org.apache.pig.impl.plan.VisitorException;
/**
* The visitor class for the Physical Plan. To use this,
* create the visitor with the plan to be visited. Call
* the visit() method to traverse the plan in a depth first
* fashion.
*
* This class also visits the nested plans inside the operators.
* One has to extend this class to modify the nature of each visit
* and to maintain any relevant state information between the visits
* to two different operators.
*
*/
public class PhyPlanVisitor extends PlanVisitor<PhysicalOperator,PhysicalPlan> {

    public PhyPlanVisitor(PhysicalPlan plan, PlanWalker<PhysicalOperator, PhysicalPlan> walker) {
        super(plan, walker);
    }

    /**
     * Visits one nested physical plan with a freshly spawned child walker,
     * restoring the current walker afterwards. Extracted to remove the
     * push/visit/pop boilerplate previously duplicated across the
     * plan-carrying visit methods below.
     */
    private void visitNestedPlan(PhysicalPlan plan) throws VisitorException {
        pushWalker(mCurrentWalker.spawnChildWalker(plan));
        visit();
        popWalker();
    }

    /** Visits each nested plan of an operator, in order. */
    private void visitNestedPlans(List<PhysicalPlan> plans) throws VisitorException {
        for (PhysicalPlan plan : plans) {
            visitNestedPlan(plan);
        }
    }

    /** Default visit for a generic operator; subclasses override as needed. */
    public void visit(PhysicalOperator op) {
        // no-op
    }

    public void visitLoad(POLoad ld) throws VisitorException {
        // no-op
    }

    public void visitStore(POStore st) throws VisitorException {
        // no-op
    }

    public void visitNative(PONative nat) throws VisitorException {
        // no-op
    }

    /** Visits the filter's single nested comparison plan. */
    public void visitFilter(POFilter fl) throws VisitorException {
        visitNestedPlan(fl.getPlan());
    }

    public void visitCollectedGroup(POCollectedGroup mg) throws VisitorException {
        visitNestedPlans(mg.getPlans());
    }

    public void visitLocalRearrange(POLocalRearrange lr) throws VisitorException {
        visitNestedPlans(lr.getPlans());
    }

    public void visitGlobalRearrange(POGlobalRearrange gr) throws VisitorException {
        // no-op
    }

    public void visitPackage(POPackage pkg) throws VisitorException {
        // no-op
    }

    public void visitPOForEach(POForEach nfe) throws VisitorException {
        visitNestedPlans(nfe.getInputPlans());
    }

    public void visitUnion(POUnion un) throws VisitorException {
        // no-op
    }

    public void visitSplit(POSplit spl) throws VisitorException {
        visitNestedPlans(spl.getPlans());
    }

    public void visitDemux(PODemux demux) throws VisitorException {
        visitNestedPlans(demux.getPlans());
    }

    public void visitCounter(POCounter poCounter) throws VisitorException {
        // no-op
    }

    public void visitRank(PORank rank) throws VisitorException {
        // no-op
    }

    public void visitDistinct(PODistinct distinct) throws VisitorException {
        // no-op
    }

    public void visitSort(POSort sort) throws VisitorException {
        visitNestedPlans(sort.getSortPlans());
    }

    public void visitConstant(ConstantExpression cnst) throws VisitorException {
        // no-op
    }

    public void visitProject(POProject proj) throws VisitorException {
        // no-op
    }

    public void visitGreaterThan(GreaterThanExpr grt) throws VisitorException {
        // no-op
    }

    public void visitLessThan(LessThanExpr lt) throws VisitorException {
        // no-op
    }

    public void visitGTOrEqual(GTOrEqualToExpr gte) throws VisitorException {
        // no-op
    }

    public void visitLTOrEqual(LTOrEqualToExpr lte) throws VisitorException {
        // no-op
    }

    public void visitEqualTo(EqualToExpr eq) throws VisitorException {
        // no-op
    }

    public void visitNotEqualTo(NotEqualToExpr eq) throws VisitorException {
        // no-op
    }

    public void visitRegexp(PORegexp re) throws VisitorException {
        // no-op
    }

    public void visitIsNull(POIsNull isNull) throws VisitorException {
        // no-op
    }

    public void visitAdd(Add add) throws VisitorException {
        // no-op
    }

    public void visitSubtract(Subtract sub) throws VisitorException {
        // no-op
    }

    public void visitMultiply(Multiply mul) throws VisitorException {
        // no-op
    }

    public void visitDivide(Divide dv) throws VisitorException {
        // no-op
    }

    public void visitMod(Mod mod) throws VisitorException {
        // no-op
    }

    public void visitAnd(POAnd and) throws VisitorException {
        // no-op
    }

    public void visitOr(POOr or) throws VisitorException {
        // no-op
    }

    public void visitNot(PONot not) throws VisitorException {
        // no-op
    }

    public void visitBinCond(POBinCond binCond) {
        // no-op
    }

    public void visitNegative(PONegative negative) {
        // no-op
    }

    public void visitUserFunc(POUserFunc userFunc) throws VisitorException {
        // no-op
    }

    public void visitComparisonFunc(POUserComparisonFunc compFunc) throws VisitorException {
        // no-op
    }

    public void visitMapLookUp(POMapLookUp mapLookUp) {
        // no-op
    }

    public void visitCast(POCast cast) {
        // no-op
    }

    /** Visits the optional nested limit-expression plan, when present. */
    public void visitLimit(POLimit lim) throws VisitorException {
        PhysicalPlan limitPlan = lim.getLimitPlan();
        if (limitPlan != null) {
            visitNestedPlan(limitPlan);
        }
    }

    public void visitCross(POCross cross) throws VisitorException {
        // no-op
    }

    public void visitFRJoin(POFRJoin join) throws VisitorException {
        // no-op
    }

    public void visitMergeJoin(POMergeJoin join) throws VisitorException {
        // no-op
    }

    public void visitMergeCoGroup(POMergeCogroup mergeCoGrp) throws VisitorException {
        // no-op
    }

    public void visitStream(POStream stream) throws VisitorException {
        // no-op
    }

    public void visitSkewedJoin(POSkewedJoin sk) throws VisitorException {
        // no-op
    }

    public void visitPartitionRearrange(POPartitionRearrange pr) throws VisitorException {
        visitNestedPlans(pr.getPlans());
    }

    public void visitPOOptimizedForEach(POOptimizedForEach optimizedForEach) throws VisitorException {
        // no-op
    }

    public void visitPreCombinerLocalRearrange(
            POPreCombinerLocalRearrange preCombinerLocalRearrange) throws VisitorException {
        // no-op
    }

    public void visitPartialAgg(POPartialAgg poPartialAgg) throws VisitorException {
        // no-op
    }

    public void visitReservoirSample(POReservoirSample reservoirSample) throws VisitorException {
        // no-op
    }

    public void visitPoissonSample(POPoissonSample poissonSample) throws VisitorException {
        // no-op
    }
}
| |
package org.robolectric.shadows;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import android.content.Context;
import android.content.pm.ShortcutInfo;
import android.content.pm.ShortcutManager;
import android.os.Build;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.annotation.Config;
import org.robolectric.shadow.api.Shadow;
/** Unit tests for ShadowShortcutManager. */
@Config(minSdk = Build.VERSION_CODES.N_MR1)
@RunWith(AndroidJUnit4.class)
public final class ShadowShortcutManagerTest {
private ShortcutManager shortcutManager;
@Before
public void setUp() {
shortcutManager =
(ShortcutManager)
ApplicationProvider.getApplicationContext().getSystemService(Context.SHORTCUT_SERVICE);
}
@Test
public void testDynamicShortcuts_twoAdded() throws Exception {
shortcutManager.addDynamicShortcuts(
ImmutableList.of(createShortcut("id1"), createShortcut("id2")));
assertThat(shortcutManager.getDynamicShortcuts()).hasSize(2);
}
@Test
public void testDynamicShortcuts_duplicateGetsDeduped() throws Exception {
shortcutManager.addDynamicShortcuts(
ImmutableList.of(createShortcut("id1"), createShortcut("id1")));
assertThat(shortcutManager.getDynamicShortcuts()).hasSize(1);
}
@Test
public void testDynamicShortcuts_immutableShortcutDoesntGetUpdated() throws Exception {
ShortcutInfo shortcut1 = createShortcut("id1", true /* isImmutable */);
when(shortcut1.getLongLabel()).thenReturn("original");
ShortcutInfo shortcut2 = createShortcut("id1", true /* isImmutable */);
when(shortcut2.getLongLabel()).thenReturn("updated");
shortcutManager.addDynamicShortcuts(ImmutableList.of(shortcut1));
assertThat(shortcutManager.getDynamicShortcuts()).hasSize(1);
shortcutManager.addDynamicShortcuts(ImmutableList.of(shortcut2));
assertThat(shortcutManager.getDynamicShortcuts()).hasSize(1);
assertThat(shortcutManager.getDynamicShortcuts().get(0).getLongLabel()).isEqualTo("original");
}
@Test
public void testShortcutWithIdenticalIdGetsUpdated() throws Exception {
ShortcutInfo shortcut1 = createShortcut("id1");
when(shortcut1.getLongLabel()).thenReturn("original");
ShortcutInfo shortcut2 = createShortcut("id1");
when(shortcut2.getLongLabel()).thenReturn("updated");
shortcutManager.addDynamicShortcuts(ImmutableList.of(shortcut1));
assertThat(shortcutManager.getDynamicShortcuts()).hasSize(1);
shortcutManager.addDynamicShortcuts(ImmutableList.of(shortcut2));
assertThat(shortcutManager.getDynamicShortcuts()).hasSize(1);
assertThat(shortcutManager.getDynamicShortcuts().get(0).getLongLabel()).isEqualTo("updated");
}
@Test
public void testRemoveAllDynamicShortcuts() throws Exception {
shortcutManager.addDynamicShortcuts(
ImmutableList.of(createShortcut("id1"), createShortcut("id2")));
assertThat(shortcutManager.getDynamicShortcuts()).hasSize(2);
shortcutManager.removeAllDynamicShortcuts();
assertThat(shortcutManager.getDynamicShortcuts()).isEmpty();
}
@Test
public void testRemoveDynamicShortcuts() throws Exception {
ShortcutInfo shortcut1 = createShortcut("id1");
ShortcutInfo shortcut2 = createShortcut("id2");
shortcutManager.addDynamicShortcuts(
ImmutableList.of(shortcut1, shortcut2));
assertThat(shortcutManager.getDynamicShortcuts()).hasSize(2);
shortcutManager.removeDynamicShortcuts(ImmutableList.of("id1"));
assertThat(shortcutManager.getDynamicShortcuts()).containsExactly(shortcut2);
}
@Test
public void testSetDynamicShortcutsClearOutOldList() throws Exception {
ShortcutInfo shortcut1 = createShortcut("id1");
ShortcutInfo shortcut2 = createShortcut("id2");
ShortcutInfo shortcut3 = createShortcut("id3");
ShortcutInfo shortcut4 = createShortcut("id4");
shortcutManager.addDynamicShortcuts(ImmutableList.of(shortcut1, shortcut2));
assertThat(shortcutManager.getDynamicShortcuts()).containsExactly(shortcut1, shortcut2);
shortcutManager.setDynamicShortcuts(ImmutableList.of(shortcut3, shortcut4));
assertThat(shortcutManager.getDynamicShortcuts()).containsExactly(shortcut3, shortcut4);
}
// Updating an existing dynamic shortcut (same id) replaces it in the dynamic list.
@Test
public void testUpdateShortcut_dynamic() throws Exception {
// Two mocks share the id "id1" but report different long labels.
ShortcutInfo shortcut1 = createShortcut("id1");
when(shortcut1.getLongLabel()).thenReturn("original");
ShortcutInfo shortcutUpdated = createShortcut("id1");
when(shortcutUpdated.getLongLabel()).thenReturn("updated");
shortcutManager.addDynamicShortcuts(
ImmutableList.of(shortcut1));
assertThat(shortcutManager.getDynamicShortcuts()).containsExactly(shortcut1);
shortcutManager.updateShortcuts(ImmutableList.of(shortcutUpdated));
// The stored shortcut is now the updated mock instance.
assertThat(shortcutManager.getDynamicShortcuts()).containsExactly(shortcutUpdated);
}
// Updating an existing pinned shortcut (same id) replaces it in the pinned list.
// requestPinShortcut requires API O, hence the @Config gate.
@Test
@Config(minSdk = Build.VERSION_CODES.O)
public void testUpdateShortcut_pinned() throws Exception {
ShortcutInfo shortcut1 = createShortcut("id1");
when(shortcut1.getLongLabel()).thenReturn("original");
ShortcutInfo shortcutUpdated = createShortcut("id1");
when(shortcutUpdated.getLongLabel()).thenReturn("updated");
shortcutManager.requestPinShortcut(
shortcut1, null /* resultIntent */);
assertThat(shortcutManager.getPinnedShortcuts()).containsExactly(shortcut1);
shortcutManager.updateShortcuts(ImmutableList.of(shortcutUpdated));
assertThat(shortcutManager.getPinnedShortcuts()).containsExactly(shortcutUpdated);
}
// updateShortcuts must only touch shortcuts that already exist: "id2" is in the
// update batch but was never added, so it must not appear afterwards.
@Test
public void testUpdateShortcutsOnlyUpdatesExistingShortcuts() throws Exception {
ShortcutInfo shortcut1 = createShortcut("id1");
when(shortcut1.getLongLabel()).thenReturn("original");
ShortcutInfo shortcutUpdated = createShortcut("id1");
when(shortcutUpdated.getLongLabel()).thenReturn("updated");
ShortcutInfo shortcut2 = createShortcut("id2");
shortcutManager.addDynamicShortcuts(ImmutableList.of(shortcut1));
assertThat(shortcutManager.getDynamicShortcuts()).containsExactly(shortcut1);
shortcutManager.updateShortcuts(ImmutableList.of(shortcutUpdated, shortcut2));
assertThat(shortcutManager.getDynamicShortcuts()).containsExactly(shortcutUpdated);
assertThat(shortcutManager.getDynamicShortcuts().get(0).getLongLabel()).isEqualTo("updated");
}
// Pinning an already-dynamic shortcut: in this shadow implementation the shortcut
// moves from the dynamic list to the pinned list. NOTE(review): real ShortcutManager
// semantics around pinned+dynamic overlap differ per platform docs -- this pins the
// shadow's behavior only.
@Test
@Config(minSdk = Build.VERSION_CODES.O)
public void testPinningExistingDynamicShortcut() throws Exception {
ShortcutInfo shortcut1 = createShortcut("id1");
ShortcutInfo shortcut2 = createShortcut("id2");
shortcutManager.addDynamicShortcuts(ImmutableList.of(shortcut1, shortcut2));
assertThat(shortcutManager.getDynamicShortcuts()).hasSize(2);
shortcutManager.requestPinShortcut(shortcut1, null /* resultIntent */);
assertThat(shortcutManager.getDynamicShortcuts()).containsExactly(shortcut2);
assertThat(shortcutManager.getPinnedShortcuts()).containsExactly(shortcut1);
}
// Pinning a shortcut that was never added dynamically places it straight into the
// pinned list.
@Test
@Config(minSdk = Build.VERSION_CODES.O)
public void testPinningNewShortcut() throws Exception {
ShortcutInfo shortcut1 = createShortcut("id1");
shortcutManager.requestPinShortcut(shortcut1, null /* resultIntent */);
assertThat(shortcutManager.getPinnedShortcuts()).containsExactly(shortcut1);
}
// The shadow-only setter is reflected by the public getter on the real API surface.
@Test
@Config(minSdk = Build.VERSION_CODES.O)
public void testSetMaxShortcutCountPerActivity() {
ShadowShortcutManager shadowShortcutManager = Shadow.extract(shortcutManager);
shadowShortcutManager.setMaxShortcutCountPerActivity(42);
assertThat(shortcutManager.getMaxShortcutCountPerActivity()).isEqualTo(42);
}
// Manifest shortcuts injected via the shadow are returned verbatim by the manager.
@Test
@Config(minSdk = Build.VERSION_CODES.O)
public void testSetManifestShortcuts() {
ImmutableList<ShortcutInfo> manifestShortcuts = ImmutableList.of(createShortcut("id1"));
ShadowShortcutManager shadowShortcutManager = Shadow.extract(shortcutManager);
shadowShortcutManager.setManifestShortcuts(manifestShortcuts);
assertThat(shortcutManager.getManifestShortcuts()).isEqualTo(manifestShortcuts);
}
// Convenience overload: builds a mutable mock shortcut with the given id.
private static ShortcutInfo createShortcut(String id) {
return createShortcut(id, false /* isImmutable */);
}
// Builds a Mockito mock ShortcutInfo stubbed with only the two properties the
// tests in this class inspect: id and immutability.
private static ShortcutInfo createShortcut(String id, boolean isImmutable) {
ShortcutInfo shortcut = mock(ShortcutInfo.class);
when(shortcut.getId()).thenReturn(id);
when(shortcut.isImmutable()).thenReturn(isImmutable);
return shortcut;
}
}
| |
/*L
* Copyright RTI International
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/webgenome/LICENSE.txt for details.
*/
/*
$Revision: 1.1 $
$Date: 2007-04-09 22:19:50 $
*/
package org.rti.webgenome.graphics.widget;
import java.awt.Color;
import java.awt.Point;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;
import org.rti.webgenome.core.WebGenomeSystemException;
import org.rti.webgenome.graphics.RasterDrawingCanvas;
import org.rti.webgenome.graphics.widget.PlotPanel;
import org.rti.webgenome.util.UnitTestUtils;
/**
* Class to assist testing of graphic widgets and
* plots. This class enables widgets to be added
* like a normal <code>PlotPanel</code>. It contains
* additional methods for rendering the contents
* to a raster file (e.g. PNG, JPEG).
* @author dhall
*
*/
/**
 * Class to assist testing of graphic widgets and plots.  This class enables
 * widgets to be added like a normal <code>PlotPanel</code>.  It contains
 * additional methods for rendering the contents to a raster file
 * (e.g. PNG, JPEG).
 * @author dhall
 */
public final class RasterFileTestPlotPanel extends PlotPanel {

    // ============================
    //       Attributes
    // ============================

    /**
     * Absolute path to directory where output graphic files
     * will be written.
     */
    private String outputDirPath = null;

    /**
     * Width of border that may or may not be drawn around panel
     * when rendered.  This property also controls width of crosshairs,
     * which also may or may not be drawn.
     */
    private int borderWidth = 2;

    /**
     * Color of border that may or may not be drawn around panel
     * when rendered.  This property also controls color of crosshairs,
     * which also may or may not be drawn.
     */
    private Color borderColor = Color.BLACK;

    /** Draw border around panel when rendered? */
    private boolean drawBorder = false;

    /** Draw centered crosshairs in panel when rendered? */
    private boolean drawCrossHairs = false;

    // ============================
    //      Getters/setters
    // ============================

    /**
     * Get color of border that may or may not be drawn around panel
     * when rendered.  This property also controls color of crosshairs,
     * which also may or may not be drawn.
     * @return A color
     */
    public Color getBorderColor() {
        return borderColor;
    }

    /**
     * Set color of border that may or may not be drawn around panel
     * when rendered.  This property also controls color of crosshairs,
     * which also may or may not be drawn.
     * @param borderColor A color
     */
    public void setBorderColor(final Color borderColor) {
        this.borderColor = borderColor;
    }

    /**
     * Get width of border that may or may not be drawn around panel
     * when rendered.  This property also controls width of crosshairs,
     * which also may or may not be drawn.
     * @return Border width in pixels
     */
    public int getBorderWidth() {
        return borderWidth;
    }

    /**
     * Set width of border that may or may not be drawn around panel
     * when rendered.  This property also controls width of crosshairs,
     * which also may or may not be drawn.
     * @param borderWidth Width of border in pixels
     */
    public void setBorderWidth(final int borderWidth) {
        this.borderWidth = borderWidth;
    }

    /**
     * Should rendered panel include a border around it?
     * @return T/F
     */
    public boolean isDrawBorder() {
        return drawBorder;
    }

    /**
     * Should rendered panel include a border around it?
     * @param drawBorder T/F
     */
    public void setDrawBorder(final boolean drawBorder) {
        this.drawBorder = drawBorder;
    }

    /**
     * Should rendered panel include centered crosshairs?
     * @return T/F
     */
    public boolean isDrawCrossHairs() {
        return drawCrossHairs;
    }

    /**
     * Should rendered panel include centered crosshairs?
     * @param drawCrossHairs T/F
     */
    public void setDrawCrossHairs(final boolean drawCrossHairs) {
        this.drawCrossHairs = drawCrossHairs;
    }

    /**
     * Get absolute path to directory where output graphic files
     * will be written.
     * @return Absolute path to directory where output graphic files
     * will be written
     */
    public String getOutputDirPath() {
        return outputDirPath;
    }

    /**
     * Set absolute path to directory where output graphic files
     * will be written.
     * @param outputDirPath Absolute path to directory where
     * output graphic files will be written.
     */
    public void setOutputDirPath(final String outputDirPath) {
        this.outputDirPath = outputDirPath;
    }

    // ===============================
    //       Constructors
    // ===============================

    /**
     * Constructor.  Reads the output directory from the unit test
     * property 'temp.dir' (configured in 'unit_test.properties').
     */
    public RasterFileTestPlotPanel() {
        super(new RasterDrawingCanvas());
        this.outputDirPath = UnitTestUtils.getUnitTestProperty("temp.dir");
        if (this.outputDirPath == null) {
            throw new WebGenomeSystemException(
                    "Unit test property 'temp.dir' is not defined");
        }
    }

    /**
     * Constructor.
     * @param outputDirPath Absolute path to directory where
     * output graphic files will be written
     */
    public RasterFileTestPlotPanel(final String outputDirPath) {
        super(new RasterDrawingCanvas());
        this.outputDirPath = outputDirPath;
    }

    /**
     * Constructor.
     * @param outputDir Directory where
     * output graphic files will be written
     */
    public RasterFileTestPlotPanel(final File outputDir) {
        this(outputDir.getAbsolutePath());
    }

    // =====================================
    //       Business methods
    // =====================================

    /**
     * Render this panel as a PNG graphic in the
     * given file.  The file will be placed
     * in the directory given by the
     * <code>outputDirPath</code> property.
     * @param fileName Name of graphic file (may contain subdirectories
     * relative to <code>outputDirPath</code>)
     */
    public void toPngFile(final String fileName) {
        File file = new File(this.outputDirPath + "/" + fileName);
        File dir = file.getParentFile();
        if (!dir.exists()) {
            // BUG FIX: was mkdir(), which creates only a single directory
            // level and silently fails when fileName contains subdirectories
            // or when parents of the output directory are missing.
            dir.mkdirs();
        }
        // NOTE(review): painting before sizing the canvas mirrors the
        // original call order; RasterDrawingCanvas appears to rasterize
        // lazily in toBufferedImage() -- confirm before reordering.
        this.paint(this.getDrawingCanvas());
        this.getDrawingCanvas().setWidth(this.width());
        this.getDrawingCanvas().setHeight(this.height());
        ((RasterDrawingCanvas) this.getDrawingCanvas()).
            setOrigin(new Point(this.minX(), this.minY()));
        BufferedImage img = ((RasterDrawingCanvas)
                this.getDrawingCanvas()).toBufferedImage();
        try {
            ImageIO.write(img, "png", file);
        } catch (IOException e) {
            throw new WebGenomeSystemException("Error writing image to file", e);
        }
    }
}
| |
/*******************************************************************************
* Copyright 2012 EMBL-EBI, Hinxton outstation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package uk.ac.ebi.embl.api.validation.fixer.entry;
import org.junit.Before;
import org.junit.Test;
import uk.ac.ebi.embl.api.entry.AgpRow;
import uk.ac.ebi.embl.api.entry.Entry;
import uk.ac.ebi.embl.api.entry.EntryFactory;
import uk.ac.ebi.embl.api.entry.feature.Feature;
import uk.ac.ebi.embl.api.entry.feature.FeatureFactory;
import uk.ac.ebi.embl.api.entry.location.Location;
import uk.ac.ebi.embl.api.entry.location.LocationFactory;
import uk.ac.ebi.embl.api.entry.location.Order;
import uk.ac.ebi.embl.api.entry.qualifier.Qualifier;
import uk.ac.ebi.embl.api.entry.sequence.SequenceFactory;
import uk.ac.ebi.embl.api.validation.ValidationEngineException;
import uk.ac.ebi.embl.api.validation.ValidationMessageManager;
import uk.ac.ebi.embl.api.validation.dao.EntryDAOUtils;
import uk.ac.ebi.embl.api.validation.plan.EmblEntryValidationPlanProperty;
import java.nio.ByteBuffer;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import static org.easymock.EasyMock.createMock;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Unit tests for {@link NonContoAGPFix}, which converts a non-CON entry
 * containing assembly_gap features into the equivalent set of AGP rows
 * (alternating component ("O") and gap ("N") rows).
 */
public class NonContoAGPFixTest {

    private Entry entry;
    private NonContoAGPFix check;
    public EntryFactory entryFactory;
    public LocationFactory locationFactory;
    public SequenceFactory sequenceFactory;
    public FeatureFactory featureFactory;
    public Feature assemblyGapFeature, assemblyGapFeature1;
    public EntryDAOUtils entryDAOUtils;
    public EmblEntryValidationPlanProperty planProperty;

    @Before
    public void setUp() {
        ValidationMessageManager
                .addBundle(ValidationMessageManager.STANDARD_FIXER_BUNDLE);
        entryFactory = new EntryFactory();
        featureFactory = new FeatureFactory();
        locationFactory = new LocationFactory();
        // BUG FIX: the original declared a shadowing local
        // "SequenceFactory sequenceFactory", which left this public field null.
        sequenceFactory = new SequenceFactory();
        check = new NonContoAGPFix();
        entryDAOUtils = createMock(EntryDAOUtils.class);
        planProperty = new EmblEntryValidationPlanProperty();

        // First gap feature spans bases 12..21 (the run of 10 'n's below).
        assemblyGapFeature = featureFactory.createFeature(Feature.ASSEMBLY_GAP_FEATURE_NAME);
        Order<Location> order = new Order<Location>();
        order.addLocation(locationFactory.createLocalRange(12L, 21L));
        assemblyGapFeature.setLocations(order);
        assemblyGapFeature.addQualifier(Qualifier.ESTIMATED_LENGTH_QUALIFIER_NAME, "10");
        assemblyGapFeature.addQualifier(Qualifier.GAP_TYPE_QUALIFIER_NAME, "within scaffold");
        assemblyGapFeature.addQualifier(Qualifier.LINKAGE_EVIDENCE_QUALIFIER_NAME, "paired-ends");

        // Second gap feature spans bases 29..35 (used by the multi-gap test,
        // which installs a longer sequence).
        assemblyGapFeature1 = featureFactory.createFeature(Feature.ASSEMBLY_GAP_FEATURE_NAME);
        Order<Location> order1 = new Order<Location>();
        order1.addLocation(locationFactory.createLocalRange(29L, 35L));
        assemblyGapFeature1.setLocations(order1);
        assemblyGapFeature1.addQualifier(Qualifier.ESTIMATED_LENGTH_QUALIFIER_NAME, "5");
        assemblyGapFeature1.addQualifier(Qualifier.GAP_TYPE_QUALIFIER_NAME, "within scaffold");
        assemblyGapFeature1.addQualifier(Qualifier.LINKAGE_EVIDENCE_QUALIFIER_NAME, "paired-ends");

        // Entry under test: 29 bp sequence = 11 'a's, 10 'n's, 8 'a's.
        // (The original created the sequence twice; once is sufficient.)
        entry = entryFactory.createEntry();
        entry.setSequence(sequenceFactory.createSequence());
        entry.getSequence().setSequence(ByteBuffer.wrap("aaaaaaaaaaannnnnnnnnnaaaaaaaa".getBytes()));
        entry.setSubmitterAccession("nonconentry");
        entry.setPrimaryAccession("AC0001");
        entry.getSequence().setVersion(1);
    }

    /** A null entry is accepted and yields a valid (no-op) result. */
    @Test
    public void testCheck_NoEntry() throws ValidationEngineException {
        assertTrue(check.check(null).isValid());
    }

    /** An entry without assembly_gap features produces no AGP rows. */
    @Test
    public void testCheck_NoAssemblyGapfeature() throws ValidationEngineException {
        check.check(entry);
        assertEquals(0, entry.getSequence().getAgpRows().size());
    }

    /**
     * One gap feature splits the 29 bp sequence into three AGP rows:
     * component 1..11, gap 12..21, component 22..29.
     */
    @Test
    public void testCheck_withoneAssemblyGapFeature() throws ValidationEngineException, SQLException
    {
        entry.addFeature(assemblyGapFeature);
        check.check(entry);
        assertEquals(3, entry.getSequence().getAgpRows().size());
        AgpRow row1 = entry.getSequence().getAgpRows().get(0);
        AgpRow row2 = entry.getSequence().getAgpRows().get(1);
        AgpRow row3 = entry.getSequence().getAgpRows().get(2);
        // First row: component covering bases 1..11.
        assertTrue(!row1.isGap());
        assertEquals("nonconentry", row1.getObject());
        assertEquals("AC0001.1", row1.getObject_acc());
        assertEquals(Long.valueOf(1L), row1.getObject_beg());
        assertEquals(Long.valueOf(11L), row1.getObject_end());
        assertEquals(Long.valueOf(1L), row1.getComponent_beg());
        assertEquals(Long.valueOf(11L), row1.getComponent_end());
        assertEquals("AC0001.1", row1.getComponent_acc());
        assertEquals("nonconentry", row1.getComponent_id());
        assertEquals("O", row1.getComponent_type_id());
        assertEquals(null, row1.getGap_length());
        assertEquals(null, row1.getGap_type());
        assertEquals(null, row1.getLinkageevidence());
        assertEquals(false, row1.isGap());
        assertEquals("+", row1.getOrientation());
        assertEquals(Integer.valueOf(1), row1.getPart_number());
        List<String> linkage_evidences = new ArrayList<String>();
        linkage_evidences.add("paired-ends");
        // Second row: gap covering bases 12..21; "within scaffold" is
        // normalized to gap type "scaffold".
        assertEquals("nonconentry", row2.getObject());
        assertEquals("AC0001.1", row2.getObject_acc());
        assertEquals(Long.valueOf(12L), row2.getObject_beg());
        assertEquals(Long.valueOf(21L), row2.getObject_end());
        assertEquals(null, row2.getComponent_beg());
        assertEquals(null, row2.getComponent_end());
        assertEquals(null, row2.getComponent_acc());
        assertEquals(null, row2.getComponent_id());
        assertEquals("N", row2.getComponent_type_id());
        assertEquals(Long.valueOf(10L), row2.getGap_length());
        assertEquals("scaffold", row2.getGap_type());
        assertEquals(linkage_evidences, row2.getLinkageevidence());
        assertEquals(true, row2.isGap());
        assertEquals(null, row2.getOrientation());
        assertEquals(Integer.valueOf(2), row2.getPart_number());
        assertTrue(row2.isGap());
        // Third row: trailing component covering bases 22..29.
        assertEquals("nonconentry", row3.getObject());
        assertEquals("AC0001.1", row3.getObject_acc());
        assertEquals(Long.valueOf(22L), row3.getObject_beg());
        assertEquals(Long.valueOf(29L), row3.getObject_end());
        assertEquals(Long.valueOf(22L), row3.getComponent_beg());
        assertEquals(Long.valueOf(29L), row3.getComponent_end());
        assertEquals("AC0001.1", row3.getComponent_acc());
        assertEquals("nonconentry", row3.getComponent_id());
        assertEquals("O", row3.getComponent_type_id());
        assertEquals(null, row3.getGap_length());
        assertEquals(null, row3.getGap_type());
        assertEquals(null, row3.getLinkageevidence());
        assertEquals(false, row3.isGap());
        assertEquals("+", row3.getOrientation());
        assertEquals(Integer.valueOf(3), row3.getPart_number());
        assertTrue(!row3.isGap());
    }

    /**
     * Two gap features over a 39 bp sequence yield five AGP rows:
     * component / gap / component / gap / component.
     */
    @Test
    public void testCheck_withMultipleAssemblyGapFeature() throws ValidationEngineException, SQLException
    {
        entry.addFeature(assemblyGapFeature);
        entry.addFeature(assemblyGapFeature1);
        // 39 bp sequence: 11 'a', 10 'n', 8 'a', 5 'n', 5 'a'.
        entry.getSequence().setSequence(ByteBuffer.wrap("aaaaaaaaaaannnnnnnnnnaaaaaaaannnnnaaaaa".getBytes()));
        check.check(entry);
        assertEquals(5, entry.getSequence().getAgpRows().size());
        AgpRow row1 = entry.getSequence().getAgpRows().get(0);
        AgpRow row2 = entry.getSequence().getAgpRows().get(1);
        AgpRow row3 = entry.getSequence().getAgpRows().get(2);
        AgpRow row4 = entry.getSequence().getAgpRows().get(3);
        AgpRow row5 = entry.getSequence().getAgpRows().get(4);
        // First row: component covering bases 1..11.
        assertTrue(!row1.isGap());
        assertEquals("nonconentry", row1.getObject());
        assertEquals("AC0001.1", row1.getObject_acc());
        assertEquals(Long.valueOf(1L), row1.getObject_beg());
        assertEquals(Long.valueOf(11L), row1.getObject_end());
        assertEquals(Long.valueOf(1L), row1.getComponent_beg());
        assertEquals(Long.valueOf(11L), row1.getComponent_end());
        assertEquals("AC0001.1", row1.getComponent_acc());
        assertEquals("nonconentry", row1.getComponent_id());
        assertEquals("O", row1.getComponent_type_id());
        assertEquals(null, row1.getGap_length());
        assertEquals(null, row1.getGap_type());
        assertEquals(null, row1.getLinkageevidence());
        assertEquals(false, row1.isGap());
        assertEquals("+", row1.getOrientation());
        assertEquals(Integer.valueOf(1), row1.getPart_number());
        // Second row: first gap, bases 12..21.
        assertEquals("nonconentry", row2.getObject());
        assertEquals("AC0001.1", row2.getObject_acc());
        assertEquals(Long.valueOf(12L), row2.getObject_beg());
        assertEquals(Long.valueOf(21L), row2.getObject_end());
        assertEquals(null, row2.getComponent_beg());
        assertEquals(null, row2.getComponent_end());
        assertEquals(null, row2.getComponent_acc());
        assertEquals(null, row2.getComponent_id());
        assertEquals("N", row2.getComponent_type_id());
        assertEquals(Long.valueOf(10L), row2.getGap_length());
        assertEquals("scaffold", row2.getGap_type());
        List<String> linkage_evidences = new ArrayList<String>();
        linkage_evidences.add("paired-ends");
        assertEquals(linkage_evidences, row2.getLinkageevidence());
        assertEquals(true, row2.isGap());
        assertEquals(null, row2.getOrientation());
        assertEquals(Integer.valueOf(2), row2.getPart_number());
        assertTrue(row2.isGap());
        // Third row: component between the gaps, bases 22..28.
        assertEquals("nonconentry", row3.getObject());
        assertEquals("AC0001.1", row3.getObject_acc());
        assertEquals(Long.valueOf(22L), row3.getObject_beg());
        assertEquals(Long.valueOf(28L), row3.getObject_end());
        assertEquals(Long.valueOf(22L), row3.getComponent_beg());
        assertEquals(Long.valueOf(28L), row3.getComponent_end());
        assertEquals("AC0001.1", row3.getComponent_acc());
        assertEquals("nonconentry", row3.getComponent_id());
        assertEquals("O", row3.getComponent_type_id());
        assertEquals(null, row3.getGap_length());
        assertEquals(null, row3.getGap_type());
        assertEquals(null, row3.getLinkageevidence());
        assertEquals(false, row3.isGap());
        assertEquals("+", row3.getOrientation());
        assertEquals(Integer.valueOf(3), row3.getPart_number());
        assertTrue(!row3.isGap());
        // Fourth row: second gap, bases 29..35 (length 7 taken from the
        // feature span, not the estimated_length qualifier of 5).
        assertEquals("nonconentry", row4.getObject());
        assertEquals("AC0001.1", row4.getObject_acc());
        assertEquals(Long.valueOf(29L), row4.getObject_beg());
        assertEquals(Long.valueOf(35L), row4.getObject_end());
        assertEquals(null, row4.getComponent_beg());
        assertEquals(null, row4.getComponent_end());
        assertEquals(null, row4.getComponent_acc());
        assertEquals(null, row4.getComponent_id());
        assertEquals("N", row4.getComponent_type_id());
        assertEquals(Long.valueOf(7L), row4.getGap_length());
        assertEquals("scaffold", row4.getGap_type());
        assertEquals(linkage_evidences, row4.getLinkageevidence());
        assertEquals(null, row4.getOrientation());
        assertEquals(Integer.valueOf(4), row4.getPart_number());
        assertTrue(row4.isGap());
        // Fifth row: trailing component, bases 36..39.
        assertEquals("nonconentry", row5.getObject());
        assertEquals("AC0001.1", row5.getObject_acc());
        assertEquals(Long.valueOf(36L), row5.getObject_beg());
        assertEquals(Long.valueOf(39L), row5.getObject_end());
        assertEquals(Long.valueOf(36L), row5.getComponent_beg());
        assertEquals(Long.valueOf(39L), row5.getComponent_end());
        assertEquals("AC0001.1", row5.getComponent_acc());
        assertEquals("nonconentry", row5.getComponent_id());
        assertEquals("O", row5.getComponent_type_id());
        assertEquals(null, row5.getGap_length());
        assertEquals(null, row5.getGap_type());
        assertEquals(null, row5.getLinkageevidence());
        assertEquals(false, row5.isGap());
        assertEquals("+", row5.getOrientation());
        assertEquals(Integer.valueOf(5), row5.getPart_number());
        assertTrue(!row5.isGap());
    }
}
| |
/**
* Copyright (C) 2009-2012 enStratus Networks Inc
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud.nimbula.compute;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Locale;
import org.apache.log4j.Logger;
import org.dasein.cloud.AsynchronousTask;
import org.dasein.cloud.CloudException;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.OperationNotSupportedException;
import org.dasein.cloud.ProviderContext;
import org.dasein.cloud.Requirement;
import org.dasein.cloud.ResourceStatus;
import org.dasein.cloud.Tag;
import org.dasein.cloud.compute.Architecture;
import org.dasein.cloud.compute.ImageClass;
import org.dasein.cloud.compute.ImageCreateOptions;
import org.dasein.cloud.compute.MachineImage;
import org.dasein.cloud.compute.MachineImageFormat;
import org.dasein.cloud.compute.MachineImageState;
import org.dasein.cloud.compute.MachineImageSupport;
import org.dasein.cloud.compute.MachineImageType;
import org.dasein.cloud.compute.Platform;
import org.dasein.cloud.identity.ServiceAction;
import org.dasein.cloud.nimbula.NimbulaDirector;
import org.dasein.cloud.nimbula.NimbulaMethod;
import org.dasein.util.Jiterator;
import org.dasein.util.JiteratorPopulator;
import org.dasein.util.PopulatorThread;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
 * {@link MachineImageSupport} implementation for the Nimbula cloud.
 * All operations are issued through the owning {@link NimbulaDirector};
 * sharing, bundling, and image capture are not supported by this provider.
 */
public class Image implements MachineImageSupport {
/** Logger shared by all instances, obtained via the Nimbula driver's helper. */
static private final Logger logger = NimbulaDirector.getLogger(Image.class);
/** Nimbula REST resource name for image lists. */
static public final String IMAGELIST = "imagelist";
/** Nimbula REST resource name for machine images. */
static public final String MACHINEIMAGE = "machineimage";
/** Driver/context through which all Nimbula API calls are made. */
private NimbulaDirector cloud;
Image(@Nonnull NimbulaDirector cloud) { this.cloud = cloud; }
// Image sharing is not implemented for Nimbula; reject the request.
@Override
public void addImageShare(@Nonnull String providerImageId, @Nonnull String accountNumber) throws CloudException, InternalException {
throw new OperationNotSupportedException("Sharing not supported");
}
// Public sharing is not implemented for Nimbula; reject the request.
@Override
public void addPublicShare(@Nonnull String providerImageId) throws CloudException, InternalException {
throw new OperationNotSupportedException("Sharing not supported");
}
// Bundling a VM into an image is not implemented for Nimbula.
@Override
public @Nonnull String bundleVirtualMachine(@Nonnull String virtualMachineId, @Nonnull MachineImageFormat format, @Nonnull String bucket, @Nonnull String name) throws CloudException, InternalException {
throw new OperationNotSupportedException("Bundling not supported");
}
// Asynchronous bundling is likewise unsupported.
@Override
public void bundleVirtualMachineAsync(@Nonnull String virtualMachineId, @Nonnull MachineImageFormat format, @Nonnull String bucket, @Nonnull String name, @Nonnull AsynchronousTask<String> trackingTask) throws CloudException, InternalException {
throw new OperationNotSupportedException("Bundling not supported");
}
// Capturing a running VM as an image is not implemented for Nimbula.
@Override
public @Nonnull MachineImage captureImage(@Nonnull ImageCreateOptions options) throws CloudException, InternalException {
throw new OperationNotSupportedException("Image capture not supported");
}
// Asynchronous capture is likewise unsupported.
@Override
public void captureImageAsync(@Nonnull ImageCreateOptions options, @Nonnull AsynchronousTask<MachineImage> taskTracker) throws CloudException, InternalException {
throw new OperationNotSupportedException("Image capture not supported");
}
@Override
public MachineImage getImage(@Nonnull String providerImageId) throws CloudException, InternalException {
    // Fetch a single machine image record by provider ID; null when the
    // record is absent (404) or access is denied (401).
    NimbulaMethod request = new NimbulaMethod(cloud, MACHINEIMAGE);
    int httpStatus = request.get(providerImageId);
    boolean missingOrDenied = (httpStatus == 404) || (httpStatus == 401);
    if (missingOrDenied) {
        return null;
    }
    try {
        return toMachineImage(request.getResponseBody());
    }
    catch (JSONException e) {
        if (logger.isDebugEnabled()) {
            logger.error("Error parsing JSON: " + e.getMessage());
            e.printStackTrace();
        }
        throw new InternalException(e);
    }
}
// Deprecated alias retained for backward compatibility; delegates to getImage().
@Override
@Deprecated
public @Nullable MachineImage getMachineImage(@Nonnull String machineImageId) throws CloudException, InternalException {
return getImage(machineImageId);
}
/**
 * Looks up a machine image ID from a named Nimbula image list.
 * Scans the list's "entries" and returns a value from the first entry whose
 * "machineimages" array is large enough.
 * NOTE(review): entryNumber appears to be 1-based (it indexes entryNumber-1);
 * a value of 0 would index -1 and throw despite the @Nonnegative annotation --
 * confirm callers always pass >= 1.
 */
public @Nullable String getMachineImageId(@Nonnull String imagelist, @Nonnegative int entryNumber) throws CloudException, InternalException {
NimbulaMethod method = new NimbulaMethod(cloud, Image.IMAGELIST);
method.get(imagelist);
try {
JSONObject item = method.getResponseBody();
JSONArray entries = item.getJSONArray("entries");
for( int i=0; i<entries.length(); i++ ) {
JSONObject entry = entries.getJSONObject(i);
JSONArray images = entry.getJSONArray("machineimages");
if( images.length() >= entryNumber ) {
return images.getString(entryNumber-1);
}
}
// No entry contained enough machine images.
return null;
}
catch( JSONException e ) {
if( logger.isDebugEnabled() ) {
logger.error("Error parsing JSON: " + e.getMessage());
e.printStackTrace();
}
throw new InternalException(e);
}
}
// Provider vocabulary: every image class is simply called "image" in Nimbula.
@Override
public @Nonnull String getProviderTermForImage(@Nonnull Locale locale) {
return getProviderTermForImage(locale, ImageClass.MACHINE);
}
@Override
public @Nonnull String getProviderTermForImage(@Nonnull Locale locale, @Nonnull ImageClass cls) {
return "image";
}
@Override
public @Nonnull String getProviderTermForCustomImage(@Nonnull Locale locale, @Nonnull ImageClass cls) {
return getProviderTermForImage(locale, cls);
}
// A shared public image library is available in Nimbula.
@Override
public boolean hasPublicLibrary() {
return true;
}
// No local bundling step is required before registering images.
@Override
public @Nonnull Requirement identifyLocalBundlingRequirement() throws CloudException, InternalException {
return Requirement.NONE;
}
// Creating an image from a VM is not (yet) implemented.
@Override
public @Nonnull AsynchronousTask<String> imageVirtualMachine(@Nonnull String vmId, @Nonnull String name, @Nonnull String description) throws CloudException, InternalException {
throw new OperationNotSupportedException("Imaging not yet supported");
}
// Public visibility is encoded in the image path: anything under
// /nimbula/public is considered shared with all accounts.
@Override
public boolean isImageSharedWithPublic(@Nonnull String machineImageId) throws CloudException, InternalException {
return machineImageId.startsWith("/nimbula/public");
}
// Image services require no separate subscription in Nimbula.
@Override
public boolean isSubscribed() throws CloudException, InternalException {
return true;
}
@Override
public @Nonnull Iterable<ResourceStatus> listImageStatus(@Nonnull ImageClass cls) throws CloudException, InternalException {
    // Project each listed image down to its (id, state) status pair.
    ArrayList<ResourceStatus> statuses = new ArrayList<ResourceStatus>();
    for (MachineImage machineImage : listImages(cls)) {
        ResourceStatus entry = new ResourceStatus(
                machineImage.getProviderMachineImageId(),
                machineImage.getCurrentState());
        statuses.add(entry);
    }
    return statuses;
}
@Override
public @Nonnull Iterable<MachineImage> listImages(@Nonnull ImageClass cls) throws CloudException, InternalException {
    // List every machine image visible to this context.
    // NOTE: cls is not consulted -- Nimbula only exposes machine images here.
    NimbulaMethod request = new NimbulaMethod(cloud, MACHINEIMAGE);
    request.list();
    try {
        ArrayList<MachineImage> results = new ArrayList<MachineImage>();
        JSONArray records = request.getResponseBody().getJSONArray("result");
        int count = records.length();
        for (int idx = 0; idx < count; idx++) {
            MachineImage parsed = toMachineImage(records.getJSONObject(idx));
            if (parsed != null) {
                results.add(parsed);
            }
        }
        return results;
    }
    catch (JSONException e) {
        if (logger.isDebugEnabled()) {
            logger.error("Error parsing JSON: " + e.getMessage());
            e.printStackTrace();
        }
        throw new InternalException(e);
    }
}
@Override
public @Nonnull Iterable<MachineImage> listImages(@Nonnull ImageClass cls, @Nonnull String ownedBy) throws CloudException, InternalException {
    // Nimbula addresses owners by path, which must be terminated with '/'.
    if (!ownedBy.endsWith("/")) {
        ownedBy = ownedBy + "/";
    }
    NimbulaMethod request = new NimbulaMethod(cloud, MACHINEIMAGE);
    int httpStatus = request.get(ownedBy);
    if (httpStatus == 401) {
        // Not authorized to see this owner's images: report none rather than fail.
        return Collections.emptyList();
    }
    try {
        ArrayList<MachineImage> results = new ArrayList<MachineImage>();
        JSONArray records = request.getResponseBody().getJSONArray("result");
        int count = records.length();
        for (int idx = 0; idx < count; idx++) {
            MachineImage parsed = toMachineImage(records.getJSONObject(idx));
            if (parsed != null) {
                results.add(parsed);
            }
        }
        return results;
    }
    catch (JSONException e) {
        if (logger.isDebugEnabled()) {
            logger.error("Error parsing JSON: " + e.getMessage());
            e.printStackTrace();
        }
        throw new InternalException(e);
    }
}
// Deprecated alias retained for backward compatibility.
@Override
@Deprecated
public @Nonnull Iterable<MachineImage> listMachineImages() throws CloudException, InternalException {
return listImages(ImageClass.MACHINE);
}
// Deprecated alias; a null account defaults to the public image library.
@Override
@Deprecated
public @Nonnull Iterable<MachineImage> listMachineImagesOwnedBy(@Nullable String accountId) throws CloudException, InternalException {
if( accountId == null ) {
accountId = "/nimbula/public/";
}
return listImages(ImageClass.MACHINE, accountId);
}
// Sharing is unsupported, so no image ever has individual shares.
@Override
public @Nonnull Iterable<String> listShares(@Nonnull String forMachineImageId) throws CloudException, InternalException {
return Collections.emptyList();
}
// Nimbula supports machine images only.
@Override
public @Nonnull Iterable<ImageClass> listSupportedImageClasses() throws CloudException, InternalException {
return Collections.singletonList(ImageClass.MACHINE);
}
// All Nimbula images are volume-backed.
@Override
public @Nonnull Iterable<MachineImageType> listSupportedImageTypes() throws CloudException, InternalException {
return Collections.singletonList(MachineImageType.VOLUME);
}
// Registering pre-made bundles is not implemented for Nimbula.
@Override
public @Nonnull MachineImage registerImageBundle(@Nonnull ImageCreateOptions options) throws CloudException, InternalException {
throw new OperationNotSupportedException("Bundles not supported");
}
// The only supported image format is Nimbula's native format.
@Override
public @Nonnull Iterable<MachineImageFormat> listSupportedFormats() throws CloudException, InternalException {
return Collections.singletonList(MachineImageFormat.NIMBULA);
}
// Bundling is unsupported, so there are no bundling formats to enumerate.
@Override
public @Nonnull Iterable<MachineImageFormat> listSupportedFormatsForBundling() throws CloudException, InternalException {
throw new OperationNotSupportedException("Bundles not supported");
}
// No Dasein service actions map onto Nimbula image operations.
@Override
public @Nonnull String[] mapServiceAction(@Nonnull ServiceAction action) {
return new String[0];
}
/**
 * Filter predicate used by the search methods.  Null criteria mean
 * "match anything" for that dimension.
 * @param image        candidate image
 * @param keyword      case-insensitive substring matched against the image's
 *                     description, name, or provider ID (null to skip)
 * @param platform     required platform family/identity (null or UNKNOWN to skip)
 * @param architecture required architecture (null to skip)
 * @return true if the image satisfies every non-null criterion
 */
private boolean matches(MachineImage image, String keyword, Platform platform, Architecture architecture) {
    if (architecture != null && !architecture.equals(image.getArchitecture())) {
        return false;
    }
    if (platform != null && !platform.equals(Platform.UNKNOWN)) {
        Platform mine = image.getPlatform();
        // Reject when the requested family does not match the image's family.
        if (platform.isWindows() && !mine.isWindows()) {
            return false;
        }
        if (platform.isUnix() && !mine.isUnix()) {
            return false;
        }
        if (platform.isBsd() && !mine.isBsd()) {
            return false;
        }
        if (platform.isLinux() && !mine.isLinux()) {
            return false;
        }
        if (platform.equals(Platform.UNIX)) {
            // Generic UNIX accepts any UNIX variant.
            if (!mine.isUnix()) {
                return false;
            }
        }
        else if (!platform.equals(mine)) {
            // Any other platform value must match exactly.
            return false;
        }
    }
    if (keyword != null) {
        keyword = keyword.toLowerCase();
        // ROBUSTNESS FIX: guard against null description/name, which the
        // original dereferenced unconditionally (NPE risk).
        String description = image.getDescription();
        String name = image.getName();
        String imageId = image.getProviderMachineImageId();
        boolean hit =
                (description != null && description.toLowerCase().contains(keyword))
                || (name != null && name.toLowerCase().contains(keyword))
                || (imageId != null && imageId.toLowerCase().contains(keyword));
        if (!hit) {
            return false;
        }
    }
    return true;
}
@Override
public void remove(@Nonnull String machineImageId) throws CloudException, InternalException {
remove(machineImageId, false);
}
@Override
public void remove(@Nonnull String providerImageId, boolean checkState) throws CloudException, InternalException {
NimbulaMethod method = new NimbulaMethod(cloud, MACHINEIMAGE);
method.delete(providerImageId);
}
@Override
public void removeAllImageShares(@Nonnull String providerImageId) throws CloudException, InternalException {
    // Image sharing is not available in Nimbula, so there is nothing to remove.
    throw new OperationNotSupportedException("Sharing is not supported");
}
@Override
public void removeImageShare(@Nonnull String providerImageId, @Nonnull String accountNumber) throws CloudException, InternalException {
    // Image sharing is not available in Nimbula.
    throw new OperationNotSupportedException("Sharing is not supported");
}
@Override
public void removePublicShare(@Nonnull String providerImageId) throws CloudException, InternalException {
    // Public sharing is not available in Nimbula.
    throw new OperationNotSupportedException("Sharing is not supported");
}
@Override
public @Nonnull Iterable<MachineImage> searchMachineImages(@Nullable String keyword, @Nullable Platform platform, @Nullable Architecture architecture) throws CloudException, InternalException {
    // Convenience form: search only machine-class images across all accounts visible to the caller.
    return searchImages(null, keyword, platform, architecture, ImageClass.MACHINE);
}
/**
 * Asynchronously searches images, optionally restricted to an owning account,
 * matching them against the supplied keyword/platform/architecture filters.
 *
 * @param accountNumber account whose images to search, or null for the caller's visible images
 * @param keyword       case-insensitive substring filter, or null for any
 * @param platform      platform filter, or null for any
 * @param architecture  architecture filter, or null for any
 * @param imageClasses  image classes to search; null/empty means all classes
 * @return a lazily-populated iterable of matching images
 */
@Override
public @Nonnull Iterable<MachineImage> searchImages(final @Nullable String accountNumber, final @Nullable String keyword, final @Nullable Platform platform, final @Nullable Architecture architecture, final @Nullable ImageClass... imageClasses) throws CloudException, InternalException {
    PopulatorThread<MachineImage> populator;

    cloud.hold();
    populator = new PopulatorThread<MachineImage>(new JiteratorPopulator<MachineImage>() {
        @Override
        public void populate(@Nonnull Jiterator<MachineImage> iterator) throws Exception {
            ImageClass[] classes = ((imageClasses == null || imageClasses.length < 1) ? ImageClass.values() : imageClasses);

            // cloud.hold() was invoked exactly once before the populator started,
            // so release exactly once after ALL classes are processed. The previous
            // code had the finally inside the loop, over-releasing whenever more
            // than one image class was requested.
            try {
                for( ImageClass cls : classes ) {
                    Iterable<MachineImage> images = (accountNumber == null ? listImages(cls) : listImages(cls, accountNumber));

                    for( MachineImage image : images ) {
                        if( matches(image, keyword, platform, architecture) ) {
                            iterator.push(image);
                        }
                    }
                }
            }
            finally {
                cloud.release();
            }
        }
    });
    populator.populate();
    return populator.getResult();
}
/**
 * Asynchronously searches both the caller's images and Nimbula's shared public
 * image library for images matching the supplied filters.
 *
 * @param keyword      case-insensitive substring filter, or null for any
 * @param platform     platform filter, or null for any
 * @param architecture architecture filter, or null for any
 * @param imageClasses image classes to search; null/empty means all classes
 * @return a lazily-populated iterable of matching images
 */
@Override
public @Nonnull Iterable<MachineImage> searchPublicImages(final @Nullable String keyword, final @Nullable Platform platform, final @Nullable Architecture architecture, final @Nullable ImageClass... imageClasses) throws CloudException, InternalException {
    PopulatorThread<MachineImage> populator;

    cloud.hold();
    populator = new PopulatorThread<MachineImage>(new JiteratorPopulator<MachineImage>() {
        @Override
        public void populate(@Nonnull Jiterator<MachineImage> iterator) throws Exception {
            ImageClass[] classes = ((imageClasses == null || imageClasses.length < 1) ? ImageClass.values() : imageClasses);

            // Balance the single cloud.hold() above with a single release after all
            // work completes (previously released once per image class), and list
            // the shared /nimbula/public/ library exactly once rather than once per
            // class, which pushed duplicate public images into the result.
            try {
                for( ImageClass cls : classes ) {
                    for( MachineImage image : listImages(cls) ) {
                        if( matches(image, keyword, platform, architecture) ) {
                            iterator.push(image);
                        }
                    }
                }
                for( MachineImage image : listImages(ImageClass.MACHINE, "/nimbula/public/") ) {
                    if( matches(image, keyword, platform, architecture) ) {
                        iterator.push(image);
                    }
                }
            }
            finally {
                cloud.release();
            }
        }
    });
    populator.populate();
    return populator.getResult();
}
@Override
public void shareMachineImage(@Nonnull String machineImageId, @Nullable String withAccountId, boolean allow) throws CloudException, InternalException {
    // Neither account-level nor public sharing exists in Nimbula.
    throw new OperationNotSupportedException("Nimbula does not support image sharing of any kind.");
}
@Override
public boolean supportsCustomImages() {
    // Users cannot create their own images through this implementation.
    return false;
}
@Override
public boolean supportsDirectImageUpload() throws CloudException, InternalException {
    // Direct upload of image files is not available.
    return false;
}
@Override
public boolean supportsImageCapture(@Nonnull MachineImageType type) throws CloudException, InternalException {
    // Capturing a running VM into an image is not supported for any image type.
    return false;
}
@Override
public boolean supportsImageSharing() {
    // Consistent with shareMachineImage(), which always throws.
    return false;
}
@Override
public boolean supportsImageSharingWithPublic() {
    // Public sharing of customer images is not supported.
    return false;
}
@Override
public boolean supportsPublicLibrary(@Nonnull ImageClass cls) throws CloudException, InternalException {
    // A public library exists only for machine images (see searchPublicImages).
    return cls.equals(ImageClass.MACHINE);
}
@Override
public void updateTags(@Nonnull String imageId, @Nonnull Tag... tags) throws CloudException, InternalException {
    // NO-OP: tags are silently ignored — presumably Nimbula has no image tagging; confirm.
}
/**
 * Converts a raw Nimbula JSON image record into a Dasein {@link MachineImage}.
 *
 * @param ob the JSON object returned by the Nimbula API
 * @return the populated image, or null when the record carries no "name" field
 * @throws JSONException  on malformed JSON
 * @throws CloudException when no context or region has been established
 */
private @Nullable MachineImage toMachineImage(@Nonnull JSONObject ob) throws JSONException, CloudException {
    ProviderContext ctx = cloud.getContext();

    if( ctx == null ) {
        throw new CloudException("No context was set for this request");
    }
    String regionId = ctx.getRegionId();

    if( regionId == null ) {
        throw new CloudException("No region was set for this request");
    }
    if( !ob.has("name") ) {
        // Nameless records cannot be identified; skip them.
        return null;
    }
    MachineImage image = new MachineImage();
    String name = ob.getString("name");
    // assumes parseId() yields [owner, ..., simpleName] — idInfo[0] is the owning
    // account and idInfo[2] the short name; TODO confirm against NimbulaDirectory.parseId.
    String[] idInfo = cloud.parseId(name);

    image.setProviderOwnerId(idInfo[0]);
    image.setProviderMachineImageId(name);
    image.setName(idInfo[2]);
    image.setImageClass(ImageClass.MACHINE);

    Platform platform = Platform.UNKNOWN;

    try {
        // "attributes.type" (e.g. an OS string) drives both the platform guess and
        // the richer description; any failure falls back to the bare name below.
        JSONObject attrs = ob.getJSONObject("attributes");

        platform = Platform.guess(attrs.getString("type"));
        image.setDescription(idInfo[2] + " (" + attrs.getString("type") + ")");
    }
    catch( Throwable ignore ) {
        // Deliberately broad: missing/odd attributes must not break image listing.
        image.setDescription(idInfo[2]);
    }
    image.setPlatform(platform);
    // Architecture is hard-coded — Nimbula records apparently do not expose it.
    image.setArchitecture(Architecture.I64);
    image.setCurrentState(MachineImageState.ACTIVE);
    image.setProviderRegionId(regionId);
    image.setSoftware("");
    image.setType(MachineImageType.VOLUME);
    return image;
}
}
| |
package com.adonis.ui.menu;
import com.adonis.data.persons.Person;
import com.adonis.data.service.PersonService;
import com.adonis.data.service.VehicleService;
import com.adonis.data.vehicles.Vehicle;
import com.adonis.ui.persons.CardPopup;
import com.adonis.ui.persons.CreditCardUI;
import com.adonis.utils.FileReader;
import com.adonis.utils.VaadinUtils;
import com.vaadin.client.Focusable;
import com.vaadin.event.Action;
import com.vaadin.event.MouseEvents;
import com.vaadin.event.ShortcutAction;
import com.vaadin.event.ShortcutListener;
import com.vaadin.navigator.Navigator;
import com.vaadin.navigator.View;
import com.vaadin.server.*;
import com.vaadin.ui.*;
import com.vaadin.ui.Button;
import com.vaadin.ui.Button.ClickEvent;
import com.vaadin.ui.Button.ClickListener;
import com.vaadin.ui.Image;
import com.vaadin.ui.Label;
import com.vaadin.ui.themes.ValoTheme;
import lombok.NoArgsConstructor;
import org.vaadin.crudui.crud.CrudOperation;
import org.vaadin.crudui.crud.impl.GridBasedCrudComponent;
import org.vaadin.crudui.form.impl.GridLayoutCrudFormFactory;
import org.vaadin.crudui.layout.impl.HorizontalSplitCrudLayout;
import org.vaadin.easyuploads.UploadField;
import ua.edu.file.MyFiler;
import java.io.*;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import static com.adonis.utils.VaadinUtils.getInitialPath;
import static com.adonis.utils.VaadinUtils.getPage;
/**
 * Responsive navigation menu presenting a list of available views to the user.
 *
 * <p>Besides the per-view navigation buttons, this class owns and configures the
 * person and vehicle CRUD grids used by the views, and supports menu entries
 * whose icon can be replaced at runtime by clicking it and uploading a new
 * image file.</p>
 */
@NoArgsConstructor
public class Menu extends CssLayout {

    public static final String VALO_MENUITEMS = "valo-menuitems";
    private static final String VALO_MENU_TOGGLE = "valo-menu-toggle";
    private static final String VALO_MENU_VISIBLE = "valo-menu-visible";

    private Navigator navigator;
    // view name -> its navigation button; used by setActiveView() to highlight the current view
    private Map<String, Button> viewButtons = new HashMap<String, Button>();

    public Button showMenu;
    private CssLayout menuItemsLayout;
    private CssLayout menuPart;

    public final GridBasedCrudComponent<Vehicle> vehiclesCrud = new GridBasedCrudComponent<>(Vehicle.class, new HorizontalSplitCrudLayout());
    public final GridBasedCrudComponent<Person> personsCrud = new GridBasedCrudComponent<>(Person.class, new HorizontalSplitCrudLayout());
    //    public ImageData initialimage;
    // NOTE(review): this field is shadowed by locals of the same name in the
    // constructor and in createViewButtonWithEditableImage(); confirm which
    // instance the sizing calls in that method are meant to target.
    Image image = new Image();
    UploadField uploadFieldImage;

    /**
     * Builds the menu: wires both CRUD components to their services, then adds
     * the header (logo + title), the small-screen "Menu" toggle button, and the
     * container that addView() fills with navigation buttons.
     *
     * @param personService  backend for the persons CRUD grid
     * @param vehicleService backend for the vehicles CRUD grid
     * @param navigator      navigator that view buttons delegate to
     */
    public Menu(PersonService personService, VehicleService vehicleService, Navigator navigator) {
        this.navigator = navigator;
        setPrimaryStyleName(ValoTheme.MENU_ROOT);
        menuPart = new CssLayout();
        menuPart.addStyleName(ValoTheme.MENU_PART);
        setPersonsCrudProperties(personService);
        setVehiclesCrudProperties(vehicleService);

        // header of the menu
        final HorizontalLayout top = new HorizontalLayout();
        top.addStyleName(ValoTheme.MENU_TITLE);
        top.setSpacing(true);
        Label title = new Label("Vehicle manager");
        title.addStyleName(ValoTheme.LABEL_H1);
        title.setSizeUndefined();
        Image image = new Image(null, new ThemeResource("img/car.png"));
        image.setStyleName(ValoTheme.MENU_LOGO);
        top.addComponent(image);
        top.addComponent(title);
        menuPart.addComponent(top);

        // logout menu item
        //        HorizontalLayout logoutLayout = new HorizontalLayout();
        //        logoutLayout.addStyleName(ValoTheme.MENU_ITEM);
        //        logoutLayout.setSpacing(false);
        //
        //        MenuBar logoutMenu = new MenuBar();
        //        logoutMenu.setStyleName(VALO_MENUITEMS);
        //        logoutMenu.addItem("Logout", new MenuBar.Command() {
        //
        //            @Override
        //            public void menuSelected(MenuBar.MenuItem selectedItem) {
        //                VaadinSession.getCurrent().getSession().invalidate();
        //                Page.getCurrent().reload();
        //            }
        //        });
        //
        //        logoutMenu.addStyleName("user-menu");
        //        Image logout = new Image(null, new ThemeResource("img/logout.png"));
        //        logoutLayout.addComponent(logout, 0);
        //        logoutLayout.addComponent(logoutMenu, 1);
        //        menuPart.addComponent(logoutLayout);

        // button for toggling the visibility of the menu when on a small screen
        showMenu = new Button("Menu", new ClickListener() {
            @Override
            public void buttonClick(final ClickEvent event) {
                if (menuPart.getStyleName().contains(VALO_MENU_VISIBLE)) {
                    menuPart.removeStyleName(VALO_MENU_VISIBLE);
                } else {
                    menuPart.addStyleName(VALO_MENU_VISIBLE);
                }
            }
        });
        showMenu.addStyleName(ValoTheme.BUTTON_PRIMARY);
        showMenu.addStyleName(ValoTheme.BUTTON_SMALL);
        showMenu.addStyleName(VALO_MENU_TOGGLE);
        //        showMenu.setIcon(FontAwesome.NAVICON);
        menuPart.addComponent(showMenu);

        // container for the navigation buttons, which are added by addView()
        menuItemsLayout = new CssLayout();
        menuItemsLayout.setPrimaryStyleName(VALO_MENUITEMS);
        menuPart.addComponent(menuItemsLayout);

        addComponent(menuPart);
        addStyleName("backImage");
    }

    /**
     * Configures the vehicles CRUD grid: binds add/update/delete/find-all to the
     * service and limits the visible/editable properties.
     */
    public void setVehiclesCrudProperties(VehicleService vehicleService) {
        GridLayoutCrudFormFactory<Vehicle> formFactory = new GridLayoutCrudFormFactory<>(Vehicle.class, 1, 10);
        vehiclesCrud.setCrudFormFactory(formFactory);
        vehiclesCrud.setAddOperation(vehicle -> vehicleService.insert(vehicle));
        vehiclesCrud.setUpdateOperation(vehicle -> vehicleService.save(vehicle));
        vehiclesCrud.setDeleteOperation(vehicle -> vehicleService.delete(vehicle));
        vehiclesCrud.setFindAllOperation(() -> vehicleService.findAll());
        vehiclesCrud.getCrudFormFactory().setVisiblePropertyIds("vehicleNmbr", "licenseNmbr", "make", "model", "year", "status", "vehicleType", "active", "location", "vinNumber");
        // id/created/updated are managed by the persistence layer, never by the user
        vehiclesCrud.getCrudFormFactory().setDisabledPropertyIds(CrudOperation.UPDATE, "id", "created", "updated");
        vehiclesCrud.getCrudFormFactory().setDisabledPropertyIds(CrudOperation.ADD, "id", "created", "updated");
        vehiclesCrud.getCrudLayout().setWidth(90F, Unit.PERCENTAGE);
        vehiclesCrud.getGrid().setColumns("vehicleNmbr", "licenseNmbr", "make", "model", "year", "status", "vehicleType", "active", "location", "vinNumber");
    }

    /**
     * Configures the persons CRUD grid: binds CRUD operations to the service,
     * masks the password field, and limits the visible/editable properties.
     */
    public void setPersonsCrudProperties(PersonService personService) {
        personsCrud.setAddOperation(person -> personService.insert(person));
        personsCrud.setUpdateOperation(person -> personService.save(person));
        personsCrud.setDeleteOperation(person -> personService.delete(person));
        personsCrud.setFindAllOperation(() -> personService.findAll());
        GridLayoutCrudFormFactory<Person> formFactory = new GridLayoutCrudFormFactory<>(Person.class, 1, 10);
        formFactory.setVisiblePropertyIds("firstName", "lastName", "email", "login", "password", "birthDate", "picture", "notes");
        formFactory.setDisabledPropertyIds(CrudOperation.UPDATE, "id", "created", "updated");
        formFactory.setDisabledPropertyIds(CrudOperation.ADD, "id", "created", "updated");
        // Render the password as a masked input rather than plain text.
        formFactory.setFieldType("password", com.vaadin.v7.ui.PasswordField.class);
        //formFactory.setFieldType("birthDate",com.vaadin.v7.ui.DateField.class);
        //  formFactory.setFieldCreationListener("birthDate", field -> ((com.vaadin.v7.ui.DateField) field).setDateFormat("dd/mm/yy"));
        personsCrud.setCrudFormFactory(formFactory);
        personsCrud.getCrudLayout().setWidth(90F, Unit.PERCENTAGE);
        // The password column is intentionally excluded from the grid.
        personsCrud.getGrid().setColumns("firstName", "lastName", "email", "login", "birthDate", "picture", "notes");
    }

    /**
     * Register a pre-created view instance in the navigation menu and in the
     * {@link Navigator}.
     *
     * @param view    view instance to register
     * @param name    view name
     * @param caption view caption in the menu
     * @param icon    view icon in the menu
     * @see Navigator#addView(String, View)
     */
    public void addView(View view, final String name, String caption,
                        com.vaadin.server.Resource icon) {
        navigator.addView(name, view);
        createViewButton(name, caption, icon);
    }

    /**
     * Registers a view by class (instantiated lazily by the Navigator) and adds
     * a navigation button for it.
     *
     * @param viewClass view class to register
     * @param name      view name
     * @param caption   view caption in the menu
     * @param icon      view icon in the menu
     */
    public void addView(Class<? extends View> viewClass, final String name,
                        String caption, com.vaadin.server.Resource icon) {
        navigator.addView(name, viewClass);
        createViewButton(name, caption, icon);
    }

    /**
     * Registers a view whose menu entry shows an image that the user can
     * replace at runtime by clicking it and uploading a new file.
     *
     * @param view      view instance to register
     * @param name      view name
     * @param caption   view caption in the menu
     * @param nameImage file name of the icon image under the theme's img/ folder
     */
    public void addViewWithEditableIcon(View view, final String name, String caption, String nameImage) {
        navigator.addView(name, view);
        createViewButtonWithEditableImage(name, caption, nameImage);
    }

    // Creates a plain navigation button that navigates to the named view on click.
    private void createViewButton(final String name, String caption,
                                  com.vaadin.server.Resource icon) {
        Button button = new Button(caption, new ClickListener() {
            @Override
            public void buttonClick(ClickEvent event) {
                navigator.navigateTo(name);
            }
        });
        button.setPrimaryStyleName(ValoTheme.MENU_ITEM);
        button.setIcon(icon);

        menuItemsLayout.addComponent(button);
        viewButtons.put(name, button);
    }

    /**
     * Creates a navigation button paired with a clickable icon image. Clicking
     * the image attaches an UploadField; a successful upload replaces the icon
     * (see showUploadedImage), a failed upload just removes the upload widget.
     */
    private void createViewButtonWithEditableImage(final String name, String caption, String nameImage) {
        Button button = new Button(caption, new ClickListener() {
            @Override
            public void buttonClick(ClickEvent event) {
                navigator.navigateTo(name);
            }
        });
        button.setPrimaryStyleName(ValoTheme.BUTTON_FRIENDLY);
        //        button.setWidth(50, Unit.PERCENTAGE);
        // NOTE(review): these two calls size the Menu.image FIELD, which is then
        // shadowed by the local `image` declared just below and never shown here —
        // confirm whether they were meant for the local image instead.
        image.setWidth(90, Unit.PIXELS);
        image.setHeight(90, Unit.PIXELS);
        FileReader.createDirectoriesFromCurrent(getInitialPath());
        final Image image = new Image("", new ThemeResource("img/" + nameImage));
        try {
            // Copy the theme resource into the server directory so user uploads can overwrite it.
            FileReader.copyFile(VaadinUtils.getResourcePath(nameImage), VaadinUtils.getInitialPath() + File.separator + nameImage);
            image.setSource(new FileResource(new File(VaadinUtils.getInitialPath() + File.separator + nameImage)));
        } catch (IOException e) {
            e.printStackTrace();
            // Fall back to the bundled theme image when the copy fails.
            image.setSource(new ThemeResource("img/" + nameImage));
        }
        //        image.setWidth(50, Unit.PERCENTAGE);
        image.setWidth(90, Unit.PIXELS);
        image.setHeight(90, Unit.PIXELS);
        HorizontalLayout horizontalLayout = new HorizontalLayout();
        horizontalLayout.setPrimaryStyleName(ValoTheme.MENU_ITEM);
        horizontalLayout.addComponents(image, button);
        image.addClickListener(new MouseEvents.ClickListener() {
            @Override
            public void click(MouseEvents.ClickEvent event) {
                uploadFieldImage = new UploadField();
                uploadFieldImage.setAcceptFilter("image/*");
                // NOTE(review): Upload.addListener(...) is a deprecated Vaadin 7 API;
                // addFailedListener/addSucceededListener are the current equivalents.
                uploadFieldImage.getUpload().addListener(new com.vaadin.v7.ui.Upload.FailedListener() {
                    @Override
                    public void uploadFailed(com.vaadin.v7.ui.Upload.FailedEvent event) {
                        // On failure, simply tear the upload widget back down.
                        uploadFieldImage.clearDefaulLayout();
                        horizontalLayout.removeComponent(uploadFieldImage);
                    }

                    private static final long serialVersionUID = 1L;
                });
                horizontalLayout.addComponent(uploadFieldImage, 2);
                uploadFieldImage.getUpload().addListener(new com.vaadin.v7.ui.Upload.SucceededListener() {
                    @Override
                    public void uploadSucceeded(com.vaadin.v7.ui.Upload.SucceededEvent event) {
                        File file = (File) uploadFieldImage.getValue();
                        try {
                            // Swap the icon for the freshly uploaded file.
                            showUploadedImage(uploadFieldImage, image, file.getName(), nameImage);
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                        uploadFieldImage.clearDefaulLayout();
                        horizontalLayout.removeComponent(uploadFieldImage);
                    }
                });
                uploadFieldImage.setFieldType(UploadField.FieldType.FILE);
                horizontalLayout.markAsDirty();
                //                image.setWidth(50, Unit.PERCENTAGE);
                image.setWidth(90, Unit.PIXELS);
                image.setHeight(90, Unit.PIXELS);
                // Hide the current icon while the upload widget is showing.
                image.setVisible(false);
                image.markAsDirty();
                horizontalLayout.addComponent(image, 0);
            }
        });
        button.setVisible(true);
        image.setVisible(true);
        menuItemsLayout.addComponents(horizontalLayout);
        viewButtons.put(name, button);
    }

    /**
     * Persists an uploaded icon image (to the app resources and the server
     * directory) and streams it into the given Image component.
     *
     * @param upload      the upload field holding the received file
     * @param image       the menu icon component to update
     * @param fileName    name of the uploaded file (used for the stream resource)
     * @param newNameFile target file name the icon is stored under
     * @throws IOException when copying the uploaded file fails
     */
    private void showUploadedImage(UploadField upload, Image image, String fileName, String newNameFile) throws IOException {
        File value = (File) upload.getValue();
        //copy to resources
        FileReader.copyFile(value.getAbsolutePath().toString(), VaadinUtils.getResourcePath(newNameFile));
        //copy to server directory
        FileReader.createDirectoriesFromCurrent(getInitialPath());
        FileReader.copyFile(value.getAbsolutePath().toString(), VaadinUtils.getInitialPath() + File.separator + newNameFile);
        FileInputStream fileInputStream = new FileInputStream(value);
        long byteLength = value.length(); //bytecount of the file-content
        // NOTE(review): the stream is never closed and read() may return fewer
        // bytes than requested for large files — consider try-with-resources.
        byte[] filecontent = new byte[(int) byteLength];
        fileInputStream.read(filecontent, 0, (int) byteLength);
        final byte[] data = filecontent;
        StreamResource resource = new StreamResource(
                new StreamResource.StreamSource() {
                    @Override
                    public InputStream getStream() {
                        return new ByteArrayInputStream(data);
                    }
                }, fileName);
        image.setSource(resource);
        image.setVisible(true);
    }

    /**
     * Highlights a view navigation button as the currently active view in the
     * menu. This method does not perform the actual navigation.
     *
     * @param viewName the name of the view to show as active
     */
    public void setActiveView(String viewName) {
        for (Button button : viewButtons.values()) {
            button.removeStyleName("selected");
        }
        Button selected = viewButtons.get(viewName);
        if (selected != null) {
            selected.addStyleName("selected");
        }
        // Collapse the menu again on small screens after a selection.
        menuPart.removeStyleName(VALO_MENU_VISIBLE);
    }

    // Accessor for the small-screen menu toggle button.
    public Button getShowMenu() {
        return showMenu;
    }
}
| |
/**
* Copyright 2008 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.util.ArrayList;
import java.util.ConcurrentModificationException;
import java.util.HashSet;
import java.util.SortedMap;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.StringUtils;
/**
 * Thread that flushes cache on request
 *
 * NOTE: This class extends Thread rather than Chore because the sleep time
 * can be interrupted when there is something to do, rather than the Chore
 * sleep time which is invariant.
 *
 * @see FlushRequester
 */
class MemStoreFlusher extends Thread implements FlushRequester {
  static final Log LOG = LogFactory.getLog(MemStoreFlusher.class);
  // Regions awaiting flush, in request order. regionsInQueue mirrors the queue's
  // membership so that request() can reject duplicates in O(1). Both structures
  // are kept in sync under synchronized (regionsInQueue).
  private final BlockingQueue<HRegion> flushQueue =
    new LinkedBlockingQueue<HRegion>();
  private final HashSet<HRegion> regionsInQueue = new HashSet<HRegion>();
  private final long threadWakeFrequency;
  private final HRegionServer server;
  // Serializes actual flush work between the run() loop and flushSomeRegions().
  private final ReentrantLock lock = new ReentrantLock();

  // Upper bound of total memstore heap; crossing it triggers forced flushing,
  // which continues until usage drops below the low-water mark.
  protected final long globalMemStoreLimit;
  protected final long globalMemStoreLimitLowMark;

  private static final float DEFAULT_UPPER = 0.4f;
  private static final float DEFAULT_LOWER = 0.25f;
  private static final String UPPER_KEY =
    "hbase.regionserver.global.memstore.upperLimit";
  private static final String LOWER_KEY =
    "hbase.regionserver.global.memstore.lowerLimit";
  private long blockingStoreFilesNumber;
  private long blockingWaitTime;

  /**
   * @param conf
   * @param server
   */
  public MemStoreFlusher(final HBaseConfiguration conf,
      final HRegionServer server) {
    super();
    this.server = server;
    this.threadWakeFrequency =
      conf.getLong(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);
    // Derive both memstore limits as fractions of the max JVM heap.
    long max = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax();
    this.globalMemStoreLimit = globalMemStoreLimit(max, DEFAULT_UPPER,
      UPPER_KEY, conf);
    long lower = globalMemStoreLimit(max, DEFAULT_LOWER, LOWER_KEY, conf);
    if (lower > this.globalMemStoreLimit) {
      lower = this.globalMemStoreLimit;
      LOG.info("Setting globalMemStoreLimitLowMark == globalMemStoreLimit " +
        "because supplied " + LOWER_KEY + " was > " + UPPER_KEY);
    }
    this.globalMemStoreLimitLowMark = lower;
    this.blockingStoreFilesNumber =
      conf.getInt("hbase.hstore.blockingStoreFiles", -1);
    if (this.blockingStoreFilesNumber == -1) {
      // Unconfigured: block one file beyond the compaction threshold.
      this.blockingStoreFilesNumber = 1 +
        conf.getInt("hbase.hstore.compactionThreshold", 3);
    }
    this.blockingWaitTime = conf.getInt("hbase.hstore.blockingWaitTime",
      90000); // default of 90 seconds
    LOG.info("globalMemStoreLimit=" +
      StringUtils.humanReadableInt(this.globalMemStoreLimit) +
      ", globalMemStoreLimitLowMark=" +
      StringUtils.humanReadableInt(this.globalMemStoreLimitLowMark) +
      ", maxHeap=" + StringUtils.humanReadableInt(max));
  }

  /**
   * Calculate size using passed <code>key</code> for configured
   * percentage of <code>max</code>.
   * @param max
   * @param defaultLimit
   * @param key
   * @param c
   * @return Limit.
   */
  static long globalMemStoreLimit(final long max,
    final float defaultLimit, final String key, final HBaseConfiguration c) {
    float limit = c.getFloat(key, defaultLimit);
    return getMemStoreLimit(max, limit, defaultLimit);
  }

  static long getMemStoreLimit(final long max, final float limit,
      final float defaultLimit) {
    // NOTE(review): this only WARNS about an out-of-range limit but still applies
    // it below (the default is not substituted) — confirm whether that is intended.
    if (limit >= 0.9f || limit < 0.1f) {
      LOG.warn("Setting global memstore limit to default of " + defaultLimit +
        " because supplied value outside allowed range of 0.1 -> 0.9");
    }
    return (long)(max * limit);
  }

  @Override
  public void run() {
    // Idle until the server leaves safe mode (or a stop is requested).
    while (!this.server.isStopRequested() && this.server.isInSafeMode()) {
      try {
        Thread.sleep(threadWakeFrequency);
      } catch (InterruptedException ex) {
        continue;
      }
    }
    // Main loop: take one region at a time off the flush queue and flush it.
    while (!server.isStopRequested()) {
      HRegion r = null;
      try {
        // Bounded poll so the stop flag is re-checked periodically.
        r = flushQueue.poll(threadWakeFrequency, TimeUnit.MILLISECONDS);
        if (r == null) {
          continue;
        }
        if (!flushRegion(r, false)) {
          // flushRegion returned false: server is aborting; exit the loop.
          break;
        }
      } catch (InterruptedException ex) {
        continue;
      } catch (ConcurrentModificationException ex) {
        continue;
      } catch (Exception ex) {
        LOG.error("Cache flush failed" +
          (r != null ? (" for region " + Bytes.toString(r.getRegionName())) : ""),
          ex);
        if (!server.checkFileSystem()) {
          break;
        }
      }
    }
    regionsInQueue.clear();
    flushQueue.clear();
    LOG.info(getName() + " exiting");
  }

  // FlushRequester API: enqueue a region for flushing, ignoring duplicates.
  public void request(HRegion r) {
    synchronized (regionsInQueue) {
      if (!regionsInQueue.contains(r)) {
        regionsInQueue.add(r);
        flushQueue.add(r);
      }
    }
  }

  /**
   * Only interrupt once it's done with a run through the work loop.
   */
  void interruptIfNecessary() {
    // Acquiring the flush lock guarantees no flush is mid-flight when the
    // interrupt lands.
    lock.lock();
    try {
      this.interrupt();
    } finally {
      lock.unlock();
    }
  }

  /*
   * Flush a region.
   *
   * @param region the region to be flushed
   * @param removeFromQueue True if the region needs to be removed from the
   * flush queue. False if called from the main flusher run loop and true if
   * called from flushSomeRegions to relieve memory pressure from the region
   * server. If <code>true</code>, we are in a state of emergency; we are not
   * taking on updates regionserver-wide, not until memory is flushed. In this
   * case, do not let a compaction run inline with blocked updates. Compactions
   * can take a long time. Stopping compactions, there is a danger that number
   * of flushes will overwhelm compaction on a busy server; we'll have to see.
   * That compactions do not run when called out of flushSomeRegions means that
   * compactions can be reported by the historian without danger of deadlock
   * (HBASE-670).
   *
   * <p>In the main run loop, regions have already been removed from the flush
   * queue, and if this method is called for the relief of memory pressure,
   * this may not be necessarily true. We want to avoid trying to remove
   * region from the queue because if it has already been removed, it requires a
   * sequential scan of the queue to determine that it is not in the queue.
   *
   * <p>If called from flushSomeRegions, the region may be in the queue but
   * it may have been determined that the region had a significant amount of
   * memory in use and needed to be flushed to relieve memory pressure. In this
   * case, its flush may preempt the pending request in the queue, and if so,
   * it needs to be removed from the queue to avoid flushing the region
   * multiple times.
   *
   * @return true if the region was successfully flushed, false otherwise. If
   * false, there will be accompanying log messages explaining why the log was
   * not flushed.
   */
  private boolean flushRegion(HRegion region, boolean removeFromQueue) {
    // Wait until it is safe to flush
    int count = 0;
    boolean triggered = false;
    while (count++ < (blockingWaitTime / 500)) {
      for (Store hstore: region.stores.values()) {
        if (hstore.getStorefilesCount() > this.blockingStoreFilesNumber) {
          // always request a compaction
          server.compactSplitThread.compactionRequested(region, getName());
          // only log once
          if (!triggered) {
            LOG.info("Too many store files for region " + region + ": " +
              hstore.getStorefilesCount() + ", waiting");
            triggered = true;
          }
          try {
            Thread.sleep(500);
          } catch (InterruptedException e) {
            // ignore
          }
          // NOTE(review): this continue advances to the NEXT store rather than
          // re-checking the current one or restarting the outer wait loop —
          // confirm that this is the intended back-off behavior.
          continue;
        }
      }
      if (triggered) {
        LOG.info("Compaction completed on region " + region +
          ", proceeding");
      }
      break;
    }
    synchronized (regionsInQueue) {
      // See comment above for removeFromQueue on why we do not
      // take the region out of the set. If removeFromQueue is true, remove it
      // from the queue too if it is there. This didn't used to be a
      // constraint, but now that HBASE-512 is in play, we need to try and
      // limit double-flushing of regions.
      if (regionsInQueue.remove(region) && removeFromQueue) {
        flushQueue.remove(region);
      }
      // The flush lock is deliberately acquired while still holding the
      // regionsInQueue monitor; it is released in the finally below.
      lock.lock();
    }
    try {
      // See comment above for removeFromQueue on why we do not
      // compact if removeFromQueue is true. Note that region.flushCache()
      // only returns true if a flush is done and if a compaction is needed.
      if (region.flushcache() && !removeFromQueue) {
        server.compactSplitThread.compactionRequested(region, getName());
      }
    } catch (DroppedSnapshotException ex) {
      // Cache flush can fail in a few places. If it fails in a critical
      // section, we get a DroppedSnapshotException and a replay of hlog
      // is required. Currently the only way to do this is a restart of
      // the server. Abort because hdfs is probably bad (HBASE-644 is a case
      // where hdfs was bad but passed the hdfs check).
      LOG.fatal("Replay of hlog required. Forcing server shutdown", ex);
      server.abort();
      return false;
    } catch (IOException ex) {
      LOG.error("Cache flush failed"
        + (region != null ? (" for region " + Bytes.toString(region.getRegionName())) : ""),
        RemoteExceptionHandler.checkIOException(ex));
      if (!server.checkFileSystem()) {
        return false;
      }
    } finally {
      lock.unlock();
    }
    return true;
  }

  /**
   * Check if the regionserver's memstore memory usage is greater than the
   * limit. If so, flush regions with the biggest memstores until we're down
   * to the lower limit. This method blocks callers until we're down to a safe
   * amount of memstore consumption.
   */
  public synchronized void reclaimMemStoreMemory() {
    if (server.getGlobalMemStoreSize() >= globalMemStoreLimit) {
      flushSomeRegions();
    }
  }

  /*
   * Emergency! Need to flush memory.
   */
  private synchronized void flushSomeRegions() {
    // keep flushing until we hit the low water mark
    long globalMemStoreSize = -1;
    ArrayList<HRegion> regionsToCompact = new ArrayList<HRegion>();
    for (SortedMap<Long, HRegion> m =
        this.server.getCopyOfOnlineRegionsSortedBySize();
      (globalMemStoreSize = server.getGlobalMemStoreSize()) >=
        this.globalMemStoreLimitLowMark;) {
      // flush the region with the biggest memstore
      if (m.size() <= 0) {
        LOG.info("No online regions to flush though we've been asked flush " +
          "some; globalMemStoreSize=" +
          StringUtils.humanReadableInt(globalMemStoreSize) +
          ", globalMemStoreLimitLowMark=" +
          StringUtils.humanReadableInt(this.globalMemStoreLimitLowMark));
        break;
      }
      // The map is sorted by size; firstKey() is the biggest memstore.
      HRegion biggestMemStoreRegion = m.remove(m.firstKey());
      LOG.info("Forced flushing of " + biggestMemStoreRegion.toString() +
        " because global memstore limit of " +
        StringUtils.humanReadableInt(this.globalMemStoreLimit) +
        " exceeded; currently " +
        StringUtils.humanReadableInt(globalMemStoreSize) + " and flushing till " +
        StringUtils.humanReadableInt(this.globalMemStoreLimitLowMark));
      // removeFromQueue=true: we are under memory pressure, so also dequeue any
      // pending request for this region and defer compactions until afterwards.
      if (!flushRegion(biggestMemStoreRegion, true)) {
        LOG.warn("Flush failed");
        break;
      }
      regionsToCompact.add(biggestMemStoreRegion);
    }
    // Compactions were suppressed during the emergency flushes; request them now.
    for (HRegion region : regionsToCompact) {
      server.compactSplitThread.compactionRequested(region, getName());
    }
  }
}
| |
/*
* Copyright (C) 2011, FuseSource Corp. All rights reserved.
*
* http://fusesource.com
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of FuseSource Corp. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.fusesource.leveldbjni.test;
import junit.framework.TestCase;
import org.fusesource.leveldbjni.JniDBFactory;
import org.fusesource.leveldbjni.internal.JniDB;
import org.iq80.leveldb.*;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import static org.fusesource.leveldbjni.JniDBFactory.asString;
import static org.fusesource.leveldbjni.JniDBFactory.bytes;
/**
* A Unit test for the DB class implementation.
*
* @author <a href="http://hiramchirino.com">Hiram Chirino</a>
*/
public class DBTest extends TestCase {
DBFactory factory = JniDBFactory.factory;
/**
 * Returns a fresh, empty test directory under ./test-data for the given test
 * name, first destroying any database a previous run may have left behind.
 */
File getTestDirectory(String name) throws IOException {
    File dir = new File(new File("test-data"), name);
    factory.destroy(dir, new Options().createIfMissing(true));
    dir.mkdirs();
    return dir;
}
@Test
public void testOpen() throws IOException {
    // Creating a missing database must succeed...
    File path = getTestDirectory(getName());
    DB db = factory.open(path, new Options().createIfMissing(true));
    db.close();

    // ...and re-opening it with errorIfExists must be rejected.
    boolean failed = false;
    try {
        factory.open(path, new Options().errorIfExists(true));
    } catch (IOException expected) {
        failed = true;
    }
    if (!failed) {
        fail("Expected exception.");
    }
}
@Test
public void testRepair() throws IOException, DBException {
    // Populate a database via the CRUD test, then verify repair() accepts it.
    testCRUD();
    factory.repair(new File(new File("test-data"), getName()), new Options());
}
/**
 * Exercises put/get/delete round-trips with explicit read/write options.
 * NOTE(review): assertEquals on byte arrays presumably resolves to a
 * byte[]-aware overload defined elsewhere in this class — JUnit's
 * Object-based assertEquals would compare arrays by reference; verify.
 */
@Test
public void testCRUD() throws IOException, DBException {
    Options options = new Options().createIfMissing(true);

    File path = getTestDirectory(getName());
    DB db = factory.open(path, options);

    WriteOptions wo = new WriteOptions().sync(false);
    ReadOptions ro = new ReadOptions().fillCache(true).verifyChecksums(true);

    db.put(bytes("Tampa"), bytes("green"));
    db.put(bytes("London"), bytes("red"));
    db.put(bytes("New York"), bytes("blue"));

    assertEquals(db.get(bytes("Tampa"), ro), bytes("green"));
    assertEquals(db.get(bytes("London"), ro), bytes("red"));
    assertEquals(db.get(bytes("New York"), ro), bytes("blue"));

    db.delete(bytes("New York"), wo);
    assertNull(db.get(bytes("New York"), ro));

    // leveldb does not consider deleting something that does not exist an error.
    db.delete(bytes("New York"), wo);

    db.close();
}
@Test
public void testIterator() throws IOException, DBException {
Options options = new Options().createIfMissing(true);
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
db.put(bytes("Tampa"), bytes("green"));
db.put(bytes("London"), bytes("red"));
db.put(bytes("New York"), bytes("blue"));
ArrayList<String> expecting = new ArrayList<String>();
expecting.add("London");
expecting.add("New York");
expecting.add("Tampa");
ArrayList<String> actual = new ArrayList<String>();
DBIterator iterator = db.iterator();
for (iterator.seekToFirst(); iterator.hasNext(); iterator.next()) {
actual.add(asString(iterator.peekNext().getKey()));
}
iterator.close();
assertEquals(expecting, actual);
db.close();
}
@Test
public void testSnapshot() throws IOException, DBException {
Options options = new Options().createIfMissing(true);
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
db.put(bytes("Tampa"), bytes("green"));
db.put(bytes("London"), bytes("red"));
db.delete(bytes("New York"));
ReadOptions ro = new ReadOptions().snapshot(db.getSnapshot());
db.put(bytes("New York"), bytes("blue"));
assertEquals(db.get(bytes("Tampa"), ro), bytes("green"));
assertEquals(db.get(bytes("London"), ro), bytes("red"));
// Should not be able to get "New York" since it was added
// after the snapshot
assertNull(db.get(bytes("New York"), ro));
ro.snapshot().close();
// Now try again without the snapshot..
ro.snapshot(null);
assertEquals(db.get(bytes("New York"), ro), bytes("blue"));
db.close();
}
@Test
public void testWriteBatch() throws IOException, DBException {
Options options = new Options().createIfMissing(true);
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
db.put(bytes("NA"), bytes("Na"));
WriteBatch batch = db.createWriteBatch();
batch.delete(bytes("NA"));
batch.put(bytes("Tampa"), bytes("green"));
batch.put(bytes("London"), bytes("red"));
batch.put(bytes("New York"), bytes("blue"));
db.write(batch);
batch.close();
ArrayList<String> expecting = new ArrayList<String>();
expecting.add("London");
expecting.add("New York");
expecting.add("Tampa");
ArrayList<String> actual = new ArrayList<String>();
DBIterator iterator = db.iterator();
for (iterator.seekToFirst(); iterator.hasNext(); iterator.next()) {
actual.add(asString(iterator.peekNext().getKey()));
}
iterator.close();
assertEquals(expecting, actual);
db.close();
}
@Test
public void testApproximateSizes() throws IOException, DBException {
Options options = new Options().createIfMissing(true);
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
Random r = new Random(0);
String data="";
for(int i=0; i < 1024; i++) {
data+= 'a'+r.nextInt(26);
}
for(int i=0; i < 5*1024; i++) {
db.put(bytes("row"+i), bytes(data));
}
long[] approximateSizes = db.getApproximateSizes(new Range(bytes("row"), bytes("s")));
assertNotNull(approximateSizes);
assertEquals(1, approximateSizes.length);
assertTrue("Wrong size", approximateSizes[0] > 0);
db.close();
}
@Test
public void testGetProperty() throws IOException, DBException {
Options options = new Options().createIfMissing(true);
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
Random r = new Random(0);
String data="";
for(int i=0; i < 1024; i++) {
data+= 'a'+r.nextInt(26);
}
for(int i=0; i < 5*1024; i++) {
db.put(bytes("row"+i), bytes(data));
}
String stats = db.getProperty("leveldb.stats");
assertNotNull(stats);
assertTrue(stats.contains("Compactions"));
db.close();
}
@Test
public void testCustomComparator1() throws IOException, DBException {
Options options = new Options().createIfMissing(true);
options.comparator(new DBComparator() {
public int compare(byte[] key1, byte[] key2) {
return new String(key1).compareTo(new String(key2));
}
public String name() {
return getName();
}
public byte[] findShortestSeparator(byte[] start, byte[] limit) {
return start;
}
public byte[] findShortSuccessor(byte[] key) {
return key;
}
});
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
ArrayList<String> expecting = new ArrayList<String>();
for(int i=0; i < 26; i++) {
String t = ""+ ((char) ('a' + i));
expecting.add(t);
db.put(bytes(t), bytes(t));
}
ArrayList<String> actual = new ArrayList<String>();
DBIterator iterator = db.iterator();
for (iterator.seekToFirst(); iterator.hasNext(); iterator.next()) {
actual.add(asString(iterator.peekNext().getKey()));
}
iterator.close();
assertEquals(expecting, actual);
db.close();
}
@Test
public void testCustomComparator2() throws IOException, DBException {
Options options = new Options().createIfMissing(true);
options.comparator(new DBComparator() {
public int compare(byte[] key1, byte[] key2) {
return new String(key1).compareTo(new String(key2)) * -1;
}
public String name() {
return getName();
}
public byte[] findShortestSeparator(byte[] start, byte[] limit) {
return start;
}
public byte[] findShortSuccessor(byte[] key) {
return key;
}
});
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
ArrayList<String> expecting = new ArrayList<String>();
for(int i=0; i < 26; i++) {
String t = ""+ ((char) ('a' + i));
expecting.add(t);
db.put(bytes(t), bytes(t));
}
Collections.reverse(expecting);
ArrayList<String> actual = new ArrayList<String>();
DBIterator iterator = db.iterator();
for (iterator.seekToFirst(); iterator.hasNext(); iterator.next()) {
actual.add(asString(iterator.peekNext().getKey()));
}
iterator.close();
assertEquals(expecting, actual);
db.close();
}
@Test
public void testLogger() throws IOException, InterruptedException, DBException {
final List<String> messages = Collections.synchronizedList(new ArrayList<String>());
Options options = new Options().createIfMissing(true);
options.logger(new Logger() {
public void log(String message) {
messages.add(message);
}
});
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
for( int j=0; j < 5; j++) {
Random r = new Random(0);
String data="";
for(int i=0; i < 1024; i++) {
data+= 'a'+r.nextInt(26);
}
for(int i=0; i < 5*1024; i++) {
db.put(bytes("row"+i), bytes(data));
}
Thread.sleep(100);
}
db.close();
assertFalse(messages.isEmpty());
}
@Test
public void testCompactRanges() throws IOException, InterruptedException, DBException {
Options options = new Options().createIfMissing(true);
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
if( db instanceof JniDB) {
Random r = new Random(0);
String data="";
for(int i=0; i < 1024; i++) {
data+= 'a'+r.nextInt(26);
}
for(int i=0; i < 5*1024; i++) {
db.put(bytes("row"+i), bytes(data));
}
for(int i=0; i < 5*1024; i++) {
db.delete(bytes("row" + i));
}
String stats = db.getProperty("leveldb.stats");
System.out.println(stats);
// Compactions
// Level Files Size(MB) Time(sec) Read(MB) Write(MB)
// --------------------------------------------------
assertFalse(stats.contains("1 0 0 0"));
assertFalse(stats.contains("2 0 0 0"));
// After the compaction, level 1 and 2 should not have any files in it..
((JniDB) db).compactRange(null, null);
stats = db.getProperty("leveldb.stats");
System.out.println(stats);
assertTrue(stats.contains("1 0 0 0"));
assertTrue(stats.contains("2 0 0 0"));
}
db.close();
}
@Test
public void testSuspendAndResumeCompactions() throws Exception {
Options options = new Options().createIfMissing(true);
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
db.suspendCompactions();
db.resumeCompactions();
db.close();
}
public void assertEquals(byte[] arg1, byte[] arg2) {
assertTrue(Arrays.equals(arg1, arg2));
}
@Test
public void testIssue26() throws IOException {
JniDBFactory.pushMemoryPool(1024 * 512);
try {
Options options = new Options();
options.createIfMissing(true);
DB db = factory.open(getTestDirectory(getName()), options);
for (int i = 0; i < 1024 * 1024; i++) {
byte[] key = ByteBuffer.allocate(4).putInt(i).array();
byte[] value = ByteBuffer.allocate(4).putInt(-i).array();
db.put(key, value);
assertTrue(Arrays.equals(db.get(key), value));
}
db.close();
} finally {
JniDBFactory.popMemoryPool();
}
}
@Test
public void testIssue27() throws IOException {
Options options = new Options();
options.createIfMissing(true);
DB db = factory.open(getTestDirectory(getName()), options);
db.close();
try {
db.iterator();
fail("Expected a DBException");
} catch(DBException e) {
}
}
@Test
public void testIssue40_1() throws IOException {
// incorrect behaviour.., but it shouldn't crash JVM:
// test: seekToLast() -> next() -> prev()
Options options = new Options().createIfMissing(true);
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
DBIterator it = db.iterator();
it.seekToLast();
try {
it.next();
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
try {
it.prev(); // was SIGSEV
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
it.close();
db.close();
}
@Test
public void testIssue40_2() throws IOException {
// incorrect behaviour.., but it shouldn't crash JVM
// test: seekToLast() -> next() -> peekPrev()
Options options = new Options().createIfMissing(true);
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
DBIterator it = db.iterator();
it.seekToLast();
try {
it.next();
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
try {
it.peekPrev(); // was SIGSEV
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
it.close();
db.close();
}
DBComparator byteComparator = new DBComparator() {
public int compare(byte[] key1, byte[] key2) {
return key1[0] - key2[0];
}
public String name() {
return "ByteComparator";
}
public byte[] findShortestSeparator(byte[] start, byte[] limit) {
return start;
}
public byte[] findShortSuccessor(byte[] key) {
return key;
}
};
public byte[] newKey(byte value) {
final byte[] result = new byte[1];
result[0] = value;
return result;
}
public byte[] getData() {
final byte[] result = new byte[10];
for (int i = 0 ; i<10; i++) {
result[i] = (byte) i;
}
return result;
}
@Test
public void testIssue40_3() throws IOException {
// test seek(after last record) -> peekPrev()/prev()
Options options = new Options().createIfMissing(true);
options.comparator(byteComparator);
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
byte[] key = newKey((byte) 10);
byte[] big_key = newKey((byte) 20);
byte[] data = getData();
db.put(key, data);
DBIterator it = db.iterator();
it.seek(big_key);
try {
data = it.peekPrev().getValue();
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
try {
data = it.prev().getValue();
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
it.close();
db.close();
}
@Test
public void testSeekAndIterator() throws IOException {
final byte[] key_001 = newKey((byte) 1);
final byte[] key_025 = newKey((byte) 25);
final byte[] key_050 = newKey((byte) 50);
final byte[] key_075 = newKey((byte) 75);
final byte[] key_100 = newKey((byte) 100);
final byte[] value_025 = bytes("25");
final byte[] value_050 = bytes("50");
final byte[] value_075 = bytes("75");
Map.Entry<byte[], byte[]> entry;
Options options = new Options().createIfMissing(true);
options.comparator(byteComparator);
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
db.put(key_025, value_025);
db.put(key_050, value_050);
db.put(key_075, value_075);
DBIterator it = db.iterator();
//
// check hasNext:
//
it.seek(key_001);
assertTrue(it.hasNext());
it.seek(key_025);
assertTrue(it.hasNext());
it.seek(key_050);
assertTrue(it.hasNext());
it.seek(key_075);
assertTrue(it.hasNext());
it.seek(key_100);
assertFalse(it.hasNext());
//
// check next:
//
it.seek(key_001);
entry = it.next();
assertEquals(key_025, entry.getKey());
assertEquals(value_025, entry.getValue());
it.seek(key_025);
entry = it.next();
assertEquals(key_025, entry.getKey());
assertEquals(value_025, entry.getValue());
it.seek(key_050);
entry = it.next();
assertEquals(key_050, entry.getKey());
assertEquals(value_050, entry.getValue());
it.seek(key_075);
entry = it.next();
assertEquals(key_075, entry.getKey());
assertEquals(value_075, entry.getValue());
it.seek(key_100);
try {
it.next();
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
//
// check peekNext:
//
it.seek(key_001);
entry = it.peekNext();
assertEquals(key_025, entry.getKey());
assertEquals(value_025, entry.getValue());
it.seek(key_025);
entry = it.peekNext();
assertEquals(key_025, entry.getKey());
assertEquals(value_025, entry.getValue());
it.seek(key_050);
entry = it.peekNext();
assertEquals(key_050, entry.getKey());
assertEquals(value_050, entry.getValue());
it.seek(key_075);
entry = it.peekNext();
assertEquals(key_075, entry.getKey());
assertEquals(value_075, entry.getValue());
it.seek(key_100);
try {
it.peekNext();
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
//
// check hasPrev
//
it.seek(key_001);
assertFalse(it.hasPrev());
it.seek(key_025);
assertFalse(it.hasPrev());
it.seek(key_050);
assertTrue(it.hasPrev());
it.seek(key_075);
assertTrue(it.hasPrev());
it.seek(key_100);
assertFalse(it.hasPrev()); // TODO: Expected result?
//
// check prev:
//
it.seek(key_001);
try {
it.prev();
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
it.seek(key_025);
try {
it.prev();
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
it.seek(key_050);
entry = it.prev();
assertEquals(key_025, entry.getKey());
assertEquals(value_025, entry.getValue());
it.seek(key_075);
entry = it.prev();
assertEquals(key_050, entry.getKey());
assertEquals(value_050, entry.getValue());
it.seek(key_100);
try {
it.prev(); // TODO: Expected result?
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
//
// check peekPrev:
//
it.seek(key_001);
try {
it.peekPrev();
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
it.seek(key_025);
try {
it.peekPrev();
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
it.seek(key_050);
entry = it.peekPrev();
assertEquals(key_025, entry.getKey());
assertEquals(value_025, entry.getValue());
it.seek(key_075);
entry = it.peekPrev();
assertEquals(key_050, entry.getKey());
assertEquals(value_050, entry.getValue());
it.seek(key_100);
try {
it.peekPrev(); // TODO: Expected result?
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
it.close();
db.close();
}
@Test
public void testIteratorNegative() throws IOException {
final byte[] key_001 = newKey((byte) 1);
final byte[] key_025 = newKey((byte) 25);
final byte[] key_050 = newKey((byte) 50);
final byte[] key_075 = newKey((byte) 75);
final byte[] key_100 = newKey((byte) 100);
final byte[] value_025 = bytes("25");
final byte[] value_050 = bytes("50");
final byte[] value_075 = bytes("75");
Map.Entry<byte[], byte[]> entry;
Options options = new Options().createIfMissing(true);
options.comparator(byteComparator);
File path = getTestDirectory(getName());
DB db = factory.open(path, options);
db.put(key_025, value_025);
db.put(key_050, value_050);
db.put(key_075, value_075);
DBIterator it = db.iterator();
//
// check next:
//
it.seekToFirst();
entry = it.next();
assertEquals(key_025, entry.getKey());
assertEquals(value_025, entry.getValue());
entry = it.next();
assertEquals(key_050, entry.getKey());
assertEquals(value_050, entry.getValue());
entry = it.next();
assertEquals(key_075, entry.getKey());
assertEquals(value_075, entry.getValue());
try {
it.next();
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
//
// check prev:
//
it.seekToLast();
entry = it.prev();
assertEquals(key_050, entry.getKey());
assertEquals(value_050, entry.getValue());
entry = it.prev();
assertEquals(key_025, entry.getKey());
assertEquals(value_025, entry.getValue());
try {
it.prev();
fail("NoSuchElementException is expected");
} catch (NoSuchElementException ex) {
}
it.close();
db.close();
}
}
| |
package org.obolibrary.robot;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import com.fasterxml.jackson.dataformat.yaml.YAMLGenerator;
import com.google.gson.*;
import java.io.*;
import java.util.*;
import org.obolibrary.robot.export.Cell;
import org.obolibrary.robot.export.Column;
import org.obolibrary.robot.export.Row;
import org.obolibrary.robot.export.Table;
import org.obolibrary.robot.metrics.MeasureResult;
import org.obolibrary.robot.metrics.OntologyMetrics;
import org.obolibrary.robot.providers.CURIEShortFormProvider;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.reasoner.OWLReasoner;
import org.semanticweb.owlapi.reasoner.OWLReasonerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Compute metrics for the ontology.
*
* @author <a href="mailto:nicolas.matentzoglu@gmail.com">Nicolas Matentzoglu</a>
*/
public class MeasureOperation {

  /** Logger. */
  private static final Logger LOGGER = LoggerFactory.getLogger(MeasureOperation.class);

  /** Namespace for error messages. */
  private static final String NS = "measure#";

  /** Error message when metric type is illegal. Expects: metric type. */
  private static final String METRICS_TYPE_ERROR =
      NS + "METRICS TYPE ERROR unknown metrics type: %s";

  /** Error message when format type is illegal. Expects: format. */
  private static final String METRICS_FORMAT_ERROR =
      NS + "METRICS FORMAT ERROR unknown metrics format: %s";

  /**
   * If a result set has results, write to the output stream and return true. Otherwise return
   * false.
   *
   * @param result the results to write
   * @param format the name of the file format to write the results to
   * @param output the file to write to
   * @return true if there were results, false otherwise
   * @throws IOException if writing file failed
   */
  public static boolean maybeWriteResult(MeasureResult result, String format, File output)
      throws IOException {
    if (!result.isEmpty()) {
      writeResult(result, format, output);
      return true;
    } else {
      return false;
    }
  }

  /**
   * Running the measure command
   *
   * @param ontology input ontology
   * @param rf reasoner factory to be used for reasoning metrics
   * @param metricsType The type of metrics that should be generated, like 'essential', 'extended'
   *     or all
   * @param format the name of the file format to write the results to
   * @param output the file to write to
   * @param prefixes prefix map to be used for computing metrics
   * @throws IOException if writing file failed
   */
  public static void measure(
      OWLOntology ontology,
      OWLReasonerFactory rf,
      String metricsType,
      String format,
      File output,
      Map<String, String> prefixes)
      throws IOException {
    MeasureResult metrics = new MeasureResult();
    CURIEShortFormProvider curieShortFormProvider = new CURIEShortFormProvider(prefixes);
    // "reasoner" metric types (e.g. "all-reasoner") need a reasoner instance; plain
    // types ("essential", "extended", "all") are computed without one.
    if (metricsType.contains("reasoner")) {
      metrics.importMetrics(getMetrics(ontology, rf, metricsType, curieShortFormProvider));
    } else {
      metrics.importMetrics(getMetrics(ontology, metricsType, curieShortFormProvider));
    }
    boolean wroteData = MeasureOperation.maybeWriteResult(metrics, format, output);
    if (!wroteData) {
      LOGGER.info("No metrics written.");
    }
  }

  /**
   * Compute metrics for a given ontology.
   *
   * @param ontology Ontology to run metrics
   * @param metricsType what kind of metrics to harvest ("essential", "extended" or "all")
   * @param curieShortFormProvider Shortformprovider to be used for computation of CURIEs
   * @return Metrics, if successful
   * @throws IllegalArgumentException if the metrics type is unknown
   */
  public static MeasureResult getMetrics(
      OWLOntology ontology, String metricsType, CURIEShortFormProvider curieShortFormProvider) {
    OntologyMetrics ontologyMetrics = new OntologyMetrics(ontology, curieShortFormProvider);
    MeasureResult metrics;
    switch (metricsType) {
      case "essential":
        metrics = ontologyMetrics.getEssentialMetrics();
        break;
      case "extended":
        metrics = ontologyMetrics.getExtendedMetrics();
        break;
      case "all":
        metrics = ontologyMetrics.getAllMetrics();
        break;
      default:
        throw new IllegalArgumentException(String.format(METRICS_TYPE_ERROR, metricsType));
    }
    return metrics;
  }

  /**
   * Run the metrics command using the reasoner factory. Note: when the reasoner factory is passed,
   * it is assumed that reasoner metrics should be harvested. For example: both reasoner-all, and
   * all will collect the same metrics: all metrics, plus the (simple) reasoner metrics.
   *
   * @param ontology Ontology to run metrics
   * @param rf reasoner factory, in case reasoner metrics should be collected
   * @param metricsType what kind of metrics to harvest
   * @param curieShortFormProvider short form provider
   * @return Metrics, if successful
   * @throws IllegalArgumentException if the metrics type is unknown
   */
  public static MeasureResult getMetrics(
      OWLOntology ontology,
      OWLReasonerFactory rf,
      String metricsType,
      CURIEShortFormProvider curieShortFormProvider) {
    OntologyMetrics ontologyMetrics = new OntologyMetrics(ontology);
    MeasureResult metrics = new MeasureResult();
    OWLReasoner r = rf.createReasoner(ontology);
    try {
      metrics.importMetrics(ontologyMetrics.getSimpleReasonerMetrics(r));
    } finally {
      // Release the reasoner's resources; the original code leaked it.
      r.dispose();
    }
    if (metricsType.contains("reasoner")) {
      // Strip the "-reasoner" suffix semantics: harvest the matching base metrics too.
      switch (metricsType) {
        case "essential-reasoner":
          metrics.importMetrics(getMetrics(ontology, "essential", curieShortFormProvider));
          break;
        case "extended-reasoner":
          metrics.importMetrics(getMetrics(ontology, "extended", curieShortFormProvider));
          break;
        case "all-reasoner":
          metrics.importMetrics(getMetrics(ontology, "all", curieShortFormProvider));
          break;
        default:
          throw new IllegalArgumentException(String.format(METRICS_TYPE_ERROR, metricsType));
      }
    } else {
      metrics.importMetrics(getMetrics(ontology, metricsType, curieShortFormProvider));
    }
    return metrics;
  }

  /**
   * Write the measure results to a file.
   *
   * @param result results of the metrics operation
   * @param format the output format: one of "tsv", "csv", "yaml", "json", "html"
   * @param output the file to write to
   * @throws IOException on problem writing to output
   * @throws IllegalArgumentException if the format is null or unknown
   */
  public static void writeResult(MeasureResult result, String format, File output)
      throws IOException {
    // The switch below would NPE on a null format; fail with the standard error instead.
    if (format == null) {
      throw new IllegalArgumentException(String.format(METRICS_FORMAT_ERROR, "null"));
    }
    switch (format) {
      case "tsv":
        writeTable(result, output, "tsv");
        break;
      case "csv":
        writeTable(result, output, "csv");
        break;
      case "yaml":
        writeYAML(result, output);
        break;
      case "json":
        writeJSON(result, output);
        break;
      case "html":
        writeHTML(result, output);
        break;
      default:
        throw new IllegalArgumentException(String.format(METRICS_FORMAT_ERROR, format));
    }
  }

  /**
   * Convert the measure results into a JSON tree of the shape {"metrics": {...}}, with
   * scalar, list and map values emitted under sorted keys.
   */
  private static JsonElement resultsToJson(MeasureResult result) {
    JsonObject root = new JsonObject();
    Gson gson = new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create();
    JsonObject metrics = new JsonObject();
    root.add("metrics", metrics);

    // Scalar values: keep native JSON types where possible, else fall back to strings.
    Map<String, Object> data = result.getData();
    List<String> keys = new ArrayList<>(data.keySet());
    Collections.sort(keys);
    for (String key : keys) {
      Object value = data.get(key);
      if (value instanceof Double) {
        metrics.addProperty(key, (Double) value);
      } else if (value instanceof Float) {
        metrics.addProperty(key, (Float) value);
      } else if (value instanceof Long) {
        metrics.addProperty(key, (Long) value);
      } else if (value instanceof Integer) {
        metrics.addProperty(key, (Integer) value);
      } else if (value instanceof Boolean) {
        metrics.addProperty(key, (Boolean) value);
      } else {
        metrics.addProperty(key, String.valueOf(value));
      }
    }

    // List-valued metrics become JSON arrays of strings.
    Map<String, List<Object>> dataList = result.getListData();
    List<String> keysList = new ArrayList<>(dataList.keySet());
    Collections.sort(keysList);
    for (String key : keysList) {
      List<String> stringList = new ArrayList<>();
      dataList.get(key).forEach(s -> stringList.add(s.toString()));
      JsonElement element = gson.toJsonTree(stringList).getAsJsonArray();
      metrics.add(key, element);
    }

    // Map-valued metrics become nested JSON objects.
    Map<String, Map<String, Object>> dataMap = result.getMapData();
    List<String> keysMap = new ArrayList<>(dataMap.keySet());
    Collections.sort(keysMap);
    for (String key : keysMap) {
      JsonElement element = gson.toJsonTree(dataMap.get(key)).getAsJsonObject();
      metrics.add(key, element);
    }
    return root;
  }

  /** Serialize the results as pretty-printed JSON into the output file. */
  private static void writeJSON(MeasureResult result, File output) throws IOException {
    JsonElement root = resultsToJson(result);
    Gson gson = new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create();
    writeStringToFile(gson.toJson(root), output);
  }

  /** Write a string to a file using a buffered writer. */
  private static void writeStringToFile(String output, File outputPath) throws IOException {
    try (FileWriter fw = new FileWriter(outputPath);
        BufferedWriter bw = new BufferedWriter(fw)) {
      LOGGER.debug("Writing metrics to: " + outputPath);
      bw.write(output);
    }
  }

  /** Serialize the results as YAML (via the JSON tree) into the output file. */
  private static void writeYAML(MeasureResult result, File outputPath) throws IOException {
    JsonElement root = resultsToJson(result);
    Gson gson = new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create();
    String output = asYaml(gson.toJson(root));
    writeStringToFile(output, outputPath);
  }

  /** Re-serialize a JSON string as YAML, without the leading "---" document marker. */
  private static String asYaml(String jsonString) throws IOException {
    JsonNode jsonNodeTree = new ObjectMapper().readTree(jsonString);
    ObjectMapper mapper =
        new ObjectMapper(new YAMLFactory().disable(YAMLGenerator.Feature.WRITE_DOC_START_MARKER));
    return mapper.writeValueAsString(jsonNodeTree);
  }

  /** Replace tabs with spaces so values cannot break TSV columns. */
  private static String escapeTSV(String s) {
    // char-based replace: same result as replaceAll("\t", " ") without compiling a regex.
    return s.replace('\t', ' ');
  }

  private static final Column COLUMN_METRIC = new Column("metric");
  private static final Column COLUMN_METRIC_VALUE = new Column("metric_value");
  private static final Column COLUMN_METRIC_TYPE = new Column("metric_type");

  /** Append one (metric, value, type) row to the table. */
  private static void addRowToTable(
      Table table, String metric, String metricValue, String metricType) {
    Row row = new Row();
    row.add(new Cell(COLUMN_METRIC, metric));
    row.add(new Cell(COLUMN_METRIC_VALUE, metricValue));
    row.add(new Cell(COLUMN_METRIC_TYPE, metricType));
    table.addRow(row);
  }

  /**
   * Flatten the scalar, list and map results into a three-column table.
   *
   * <p>NOTE(review): this mutates the sort state of the shared static Column instances,
   * so it is not safe for concurrent use — confirm callers are single-threaded.
   */
  private static Table resultsToTable(MeasureResult result, String format) {
    Table table = new Table(format);
    COLUMN_METRIC.setSort(2);
    COLUMN_METRIC_TYPE.setSort(1);
    COLUMN_METRIC_VALUE.setSort(0);
    table.addColumn(COLUMN_METRIC);
    table.addColumn(COLUMN_METRIC_VALUE);
    table.addColumn(COLUMN_METRIC_TYPE);
    table.setSortColumns();
    for (Map.Entry<String, Object> entry : result.getData().entrySet()) {
      String key = escapeTSV(entry.getKey());
      String value = escapeTSV(entry.getValue().toString());
      addRowToTable(table, key, value, "single_value");
    }
    for (Map.Entry<String, List<Object>> entry : result.getListData().entrySet()) {
      String key = escapeTSV(entry.getKey());
      for (Object v : entry.getValue()) {
        String value = escapeTSV(v.toString());
        addRowToTable(table, key, value, "list_value");
      }
    }
    for (Map.Entry<String, Map<String, Object>> entry : result.getMapData().entrySet()) {
      String key = escapeTSV(entry.getKey());
      Map<String, Object> v = entry.getValue();
      for (Map.Entry<String, Object> entryMap : v.entrySet()) {
        String key_inner = entryMap.getKey();
        String value_inner = escapeTSV(entryMap.getValue() + "");
        addRowToTable(table, key, key_inner + " " + value_inner, "map_value");
      }
    }
    table.sortRows();
    return table;
  }

  /** Write the results as a delimited table ("tsv" or "csv") to the output file. */
  private static void writeTable(MeasureResult result, File output, String format)
      throws IOException {
    Table table = resultsToTable(result, format);
    table.write(output.getPath(), "");
  }

  /** Write the results as an HTML table to the output file. */
  private static void writeHTML(MeasureResult result, File output) throws IOException {
    Table table = resultsToTable(result, "tsv");
    writeStringToFile(table.toHTML(""), output);
  }
}
| |
/*
* Copyright 2016 Matthew Tamlin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.matthewtamlin.java_utilities.concurrent;
import com.matthewtamlin.java_utilities.concurrent.CallbackExecutor.OnExecutionCompleteListener;
import org.junit.Before;
import org.junit.Test;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Mockito.*;
/**
* Tests for the {@link CallbackExecutor} class. These tests are not unit tests, instead they test
* to make sure that: <ul> <li>The correct exceptions are thrown when incorrect arguments are passed
* </li> <li>Queued tasks are not executed before {@link CallbackExecutor#execute()} is called.</li>
* <li>All queued tasks are executed.</li> <li>Callbacks are delivered.</li> </ul>
*/
public class TestCallbackExecutor {
/**
* The length of time each task should run for, measured in milliseconds.
*/
private static final int TASK_EXECUTION_TIME_MS = 2000;
/**
* A task for use in testing. The task runs for approximately 2000 ms.
*/
private Runnable task1;
/**
* A task for use in testing. The task runs for approximately 2000 ms.
*/
private Runnable task2;
/**
* A task for use in testing. The task runs for approximately 2000 ms.
*/
private Runnable task3;
/**
* A mock of the OnExecutionCompleteListener interface.
*/
private OnExecutionCompleteListener mockCallback1;
/**
* A mock of the OnExecutionCompleteListener interface.
*/
private OnExecutionCompleteListener mockCallback2;
/**
* Flag to indicate whether or not task 1 has started. This flag must be set by task 1 when
* it starts.
*/
private AtomicBoolean task1Started = new AtomicBoolean();
/**
* Flag to indicate whether or not task 2 has started. This flag must be set by task 2 when
* it starts.
*/
private AtomicBoolean task2Started = new AtomicBoolean();
/**
* Flag to indicate whether or not task 3 has started. This flag must be set by task 3 when
* it starts.
*/
private AtomicBoolean task3Started = new AtomicBoolean();
/**
* Flag to indicate whether or not task 1 has finished. This flag must be set by task 1 when
* it finishes.
*/
private AtomicBoolean task1Finished = new AtomicBoolean();
/**
* Flag to indicate whether or not task 2 has finished. This flag must be set by task 2 when
* it finishes.
*/
private AtomicBoolean task2Finished = new AtomicBoolean();
/**
* Flag to indicate whether or not task 3 has finished. This flag must be set by task 3 when
* it finishes.
*/
private AtomicBoolean task3Finished = new AtomicBoolean();
/**
* Blocks the calling thread for the supplied amount of time.
*
* @param waitTime
* the length of the pause, measured in milliseconds
*/
/**
 * Blocks the calling thread for the supplied amount of time.
 *
 * @param waitTime
 *          the length of the pause, measured in milliseconds
 */
private static void pause(int waitTime) {
    try {
        Thread.sleep(waitTime);
    } catch (InterruptedException e) {
        // Restore the interrupt status so callers can still observe the interruption;
        // the original handler swallowed it after printing the trace.
        Thread.currentThread().interrupt();
        e.printStackTrace();
    }
}
/**
* Initialises the testing environment.
*/
@Before
public void setup() {
    // Reset all flags so every test starts with no tasks started or finished
    task1Started.set(false);
    task2Started.set(false);
    task3Started.set(false);
    task1Finished.set(false);
    task2Finished.set(false);
    task3Finished.set(false);

    // The three tasks are identical except for the flags they report to,
    // so build them from a single factory instead of three copy-pasted classes.
    task1 = newTask(task1Started, task1Finished);
    task2 = newTask(task2Started, task2Finished);
    task3 = newTask(task3Started, task3Finished);

    mockCallback1 = mock(OnExecutionCompleteListener.class);
    mockCallback2 = mock(OnExecutionCompleteListener.class);
}

/**
 * Creates a task which sets {@code started}, runs for approximately
 * {@link #TASK_EXECUTION_TIME_MS} milliseconds, then sets {@code finished}.
 *
 * @param started
 *          flag set when the task begins executing
 * @param finished
 *          flag set when the task completes
 * @return the new task, not null
 */
private Runnable newTask(final AtomicBoolean started, final AtomicBoolean finished) {
    return new Runnable() {
        @Override
        public void run() {
            started.set(true);
            pause(TASK_EXECUTION_TIME_MS);
            finished.set(true);
        }
    };
}
/**
 * Verifies that {@link CallbackExecutor#addToQueue(Runnable)} rejects a null {@code task}
 * argument by throwing an {@link IllegalArgumentException}.
 */
@Test(expected = IllegalArgumentException.class)
public void ensureCorrectExceptionIsThrown() {
    CallbackExecutor.usingSingleThreadExecutor().addToQueue(null);
}
/**
 * Test to verify that the CallbackExecutor class functions correctly when using a single
 * thread executor. The test will only pass if: <ul> <li>Queued tasks do not execute until the
 * {@link CallbackExecutor#execute()} method is called.</li> <li>All queued tasks are
 * executed.</li> <li>Callbacks are delivered after all tasks finish.</li> </ul>
 */
@Test
public void testCallbackExecutor_usingSingleThreadExecutor() {
    // Create a new executor and register callbacks
    final CallbackExecutor executorUnderTest = CallbackExecutor.usingSingleThreadExecutor();
    executorUnderTest.registerOnExecutionCompleteListener(mockCallback1);
    executorUnderTest.registerOnExecutionCompleteListener(mockCallback2);
    // Queue all tasks but don't start execution
    executorUnderTest.addToQueue(task1);
    executorUnderTest.addToQueue(task2);
    executorUnderTest.addToQueue(task3);
    // Verify that no tasks started
    assertThat("task 1 started before execute()", task1Started.get(), is(false));
    assertThat("task 2 started before execute()", task2Started.get(), is(false));
    assertThat("task 3 started before execute()", task3Started.get(), is(false));
    // Trigger
    executorUnderTest.execute();
    // Allow enough time for all tasks to complete. A single thread runs the three tasks
    // sequentially (~3 x TASK_EXECUTION_TIME_MS), so four task lengths leaves some slack.
    System.out.println("Waiting for concurrent tasks to complete...");
    pause(TASK_EXECUTION_TIME_MS * 4);
    // Verify that all tasks finished
    assertThat("task 1 did not finish", task1Finished.get(), is(true));
    assertThat("task 2 did not finish", task2Finished.get(), is(true));
    assertThat("task 3 did not finish", task3Finished.get(), is(true));
    // Verify that the callbacks were delivered exactly once each
    verify(mockCallback1, times(1)).onExecutionComplete(executorUnderTest);
    verify(mockCallback2, times(1)).onExecutionComplete(executorUnderTest);
}
/**
 * Test to verify that the CallbackExecutor class functions correctly when using a cached
 * thread pool. The test will only pass if: <ul> <li>Queued tasks do not execute until the
 * {@link CallbackExecutor#execute()} method is called.</li> <li>All queued tasks are
 * executed.</li> <li>Callbacks are delivered after all tasks finish.</li> </ul>
 */
@Test
public void testCallbackExecutor_usingCachedThreadPool() {
    final CallbackExecutor executorUnderTest = CallbackExecutor.usingCachedThreadPool();
    // Create and register callbacks
    final OnExecutionCompleteListener callback1 = mock(OnExecutionCompleteListener.class);
    final OnExecutionCompleteListener callback2 = mock(OnExecutionCompleteListener.class);
    executorUnderTest.registerOnExecutionCompleteListener(callback1);
    executorUnderTest.registerOnExecutionCompleteListener(callback2);
    // Queue all tasks but don't start execution
    executorUnderTest.addToQueue(task1);
    executorUnderTest.addToQueue(task2);
    executorUnderTest.addToQueue(task3);
    // Verify that no tasks started
    assertThat("task 1 started before execute()", task1Started.get(), is(false));
    assertThat("task 2 started before execute()", task2Started.get(), is(false));
    assertThat("task 3 started before execute()", task3Started.get(), is(false));
    // Trigger
    executorUnderTest.execute();
    // Allow enough time for all tasks to complete. The cached pool runs the tasks
    // concurrently, so ~1 x TASK_EXECUTION_TIME_MS is expected; two lengths is slack.
    System.out.println("Waiting for concurrent tasks to complete...");
    pause(TASK_EXECUTION_TIME_MS * 2);
    // Verify that all tasks finished
    assertThat("task 1 did not finish", task1Finished.get(), is(true));
    assertThat("task 2 did not finish", task2Finished.get(), is(true));
    assertThat("task 3 did not finish", task3Finished.get(), is(true));
    // Verify that the callbacks were delivered exactly once each
    verify(callback1, times(1)).onExecutionComplete(executorUnderTest);
    verify(callback2, times(1)).onExecutionComplete(executorUnderTest);
}
/**
 * Test to verify that the CallbackExecutor class functions correctly when using a fixed thread
 * pool. The test will only pass if: <ul> <li>Queued tasks do not execute until the
 * {@link CallbackExecutor#execute()} method is called.</li> <li>All queued tasks are
 * executed.</li> <li>Callbacks are delivered after all tasks finish.</li> </ul>
 */
@Test
public void testCallbackExecutor_usingFixedThreadPool() {
    // Pool size matches the number of queued tasks, so all three can run concurrently.
    final CallbackExecutor executorUnderTest = CallbackExecutor.usingFixedThreadPool(3);
    // Create and register callbacks
    final OnExecutionCompleteListener callback1 = mock(OnExecutionCompleteListener.class);
    final OnExecutionCompleteListener callback2 = mock(OnExecutionCompleteListener.class);
    executorUnderTest.registerOnExecutionCompleteListener(callback1);
    executorUnderTest.registerOnExecutionCompleteListener(callback2);
    // Queue all tasks but don't start execution
    executorUnderTest.addToQueue(task1);
    executorUnderTest.addToQueue(task2);
    executorUnderTest.addToQueue(task3);
    // Verify that no tasks started
    assertThat("task 1 started before execute()", task1Started.get(), is(false));
    assertThat("task 2 started before execute()", task2Started.get(), is(false));
    assertThat("task 3 started before execute()", task3Started.get(), is(false));
    // Trigger
    executorUnderTest.execute();
    // Allow enough time for all tasks to complete (tasks run concurrently; two
    // task lengths leaves some slack).
    System.out.println("Waiting for concurrent tasks to complete...");
    pause(TASK_EXECUTION_TIME_MS * 2);
    // Verify that all tasks finished
    assertThat("task 1 did not finish", task1Finished.get(), is(true));
    assertThat("task 2 did not finish", task2Finished.get(), is(true));
    assertThat("task 3 did not finish", task3Finished.get(), is(true));
    // Verify that the callbacks were delivered exactly once each
    verify(callback1, times(1)).onExecutionComplete(executorUnderTest);
    verify(callback2, times(1)).onExecutionComplete(executorUnderTest);
}
}
| |
/*
* Copyright 2016 The gRPC Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.grpc.examples.routeguide;
import static junit.framework.TestCase.fail;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;
import io.grpc.ManagedChannel;
import io.grpc.inprocess.InProcessChannelBuilder;
import io.grpc.inprocess.InProcessServerBuilder;
import io.grpc.stub.StreamObserver;
import io.grpc.testing.GrpcCleanupRule;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.ArgumentCaptor;
/**
* Unit tests for {@link RouteGuideServer}.
* For demonstrating how to write gRPC unit test only.
* Not intended to provide a high code coverage or to test every major usecase.
*
* <p>For basic unit test examples see {@link io.grpc.examples.helloworld.HelloWorldClientTest} and
* {@link io.grpc.examples.helloworld.HelloWorldServerTest}.
*/
@RunWith(JUnit4.class)
public class RouteGuideServerTest {
    /**
     * This rule manages automatic graceful shutdown for the registered channel at the end of test.
     */
    @Rule
    public final GrpcCleanupRule grpcCleanup = new GrpcCleanupRule();

    // Server under test; started fresh for each test in setUp() and stopped in tearDown().
    private RouteGuideServer server;
    // Client channel connected to the in-process server; shut down by grpcCleanup.
    private ManagedChannel inProcessChannel;
    // Backing feature store shared with the server; tests mutate it directly.
    private Collection<Feature> features;

    @Before
    public void setUp() throws Exception {
        // Generate a unique in-process server name.
        String serverName = InProcessServerBuilder.generateName();
        features = new ArrayList<Feature>();
        // Using directExecutor for both InProcessServerBuilder and InProcessChannelBuilder reduces
        // the need for timeouts and latches in tests. But we still add timeouts and latches where
        // they would be needed if no directExecutor were used, just for demo purposes.
        server = new RouteGuideServer(
            InProcessServerBuilder.forName(serverName).directExecutor(), 0, features);
        server.start();
        // Create a client channel and register for automatic graceful shutdown.
        inProcessChannel = grpcCleanup.register(
            InProcessChannelBuilder.forName(serverName).directExecutor().build());
    }

    @After
    public void tearDown() {
        server.stop();
    }

    @Test
    public void getFeature() {
        Point point = Point.newBuilder().setLongitude(1).setLatitude(1).build();
        Feature unnamedFeature = Feature.newBuilder()
            .setName("").setLocation(point).build();
        RouteGuideGrpc.RouteGuideBlockingStub stub = RouteGuideGrpc.newBlockingStub(inProcessChannel);

        // feature not found in the server: the service replies with an unnamed feature
        Feature feature = stub.getFeature(point);
        assertEquals(unnamedFeature, feature);

        // feature found in the server
        Feature namedFeature = Feature.newBuilder()
            .setName("name").setLocation(point).build();
        features.add(namedFeature);
        feature = stub.getFeature(point);
        assertEquals(namedFeature, feature);
    }

    @Test
    public void listFeatures() throws Exception {
        // setup
        Rectangle rect = Rectangle.newBuilder()
            .setLo(Point.newBuilder().setLongitude(0).setLatitude(0).build())
            .setHi(Point.newBuilder().setLongitude(10).setLatitude(10).build())
            .build();
        Feature f1 = Feature.newBuilder()
            .setLocation(Point.newBuilder().setLongitude(-1).setLatitude(-1).build())
            .setName("f1")
            .build(); // not inside rect
        Feature f2 = Feature.newBuilder()
            .setLocation(Point.newBuilder().setLongitude(2).setLatitude(2).build())
            .setName("f2")
            .build();
        Feature f3 = Feature.newBuilder()
            .setLocation(Point.newBuilder().setLongitude(3).setLatitude(3).build())
            .setName("f3")
            .build();
        Feature f4 = Feature.newBuilder()
            .setLocation(Point.newBuilder().setLongitude(4).setLatitude(4).build())
            .build(); // unnamed
        features.add(f1);
        features.add(f2);
        features.add(f3);
        features.add(f4);
        // Collects the streamed features; the latch is released by onCompleted so the
        // test can wait for the full server response.
        final Collection<Feature> result = new HashSet<Feature>();
        final CountDownLatch latch = new CountDownLatch(1);
        StreamObserver<Feature> responseObserver =
            new StreamObserver<Feature>() {
                @Override
                public void onNext(Feature value) {
                    result.add(value);
                }

                @Override
                public void onError(Throwable t) {
                    fail();
                }

                @Override
                public void onCompleted() {
                    latch.countDown();
                }
            };
        RouteGuideGrpc.RouteGuideStub stub = RouteGuideGrpc.newStub(inProcessChannel);

        // run
        stub.listFeatures(rect, responseObserver);
        assertTrue(latch.await(1, TimeUnit.SECONDS));

        // verify: only named features inside the rectangle are returned
        assertEquals(new HashSet<Feature>(Arrays.asList(f2, f3)), result);
    }

    @Test
    public void recordRoute() {
        Point p1 = Point.newBuilder().setLongitude(1000).setLatitude(1000).build();
        Point p2 = Point.newBuilder().setLongitude(2000).setLatitude(2000).build();
        Point p3 = Point.newBuilder().setLongitude(3000).setLatitude(3000).build();
        Point p4 = Point.newBuilder().setLongitude(4000).setLatitude(4000).build();
        Feature f1 = Feature.newBuilder().setLocation(p1).build(); // unnamed
        Feature f2 = Feature.newBuilder().setLocation(p2).setName("f2").build();
        Feature f3 = Feature.newBuilder().setLocation(p3).setName("f3").build();
        Feature f4 = Feature.newBuilder().setLocation(p4).build(); // unnamed
        features.add(f1);
        features.add(f2);
        features.add(f3);
        features.add(f4);
        @SuppressWarnings("unchecked")
        StreamObserver<RouteSummary> responseObserver =
            (StreamObserver<RouteSummary>) mock(StreamObserver.class);
        RouteGuideGrpc.RouteGuideStub stub = RouteGuideGrpc.newStub(inProcessChannel);
        ArgumentCaptor<RouteSummary> routeSummaryCaptor = ArgumentCaptor.forClass(RouteSummary.class);

        StreamObserver<Point> requestObserver = stub.recordRoute(responseObserver);
        requestObserver.onNext(p1);
        requestObserver.onNext(p2);
        requestObserver.onNext(p3);
        requestObserver.onNext(p4);
        // The summary must not be delivered until the client half-closes the stream.
        verify(responseObserver, never()).onNext(any(RouteSummary.class));

        requestObserver.onCompleted();

        // allow some ms to let client receive the response. Similar usage later on.
        verify(responseObserver, timeout(100)).onNext(routeSummaryCaptor.capture());
        RouteSummary summary = routeSummaryCaptor.getValue();
        assertEquals(45, summary.getDistance()); // 45 is the hard coded distance from p1 to p4.
        assertEquals(2, summary.getFeatureCount()); // only f2 and f3 are named features
        verify(responseObserver, timeout(100)).onCompleted();
        verify(responseObserver, never()).onError(any(Throwable.class));
    }

    @Test
    public void routeChat() {
        Point p1 = Point.newBuilder().setLongitude(1).setLatitude(1).build();
        Point p2 = Point.newBuilder().setLongitude(2).setLatitude(2).build();
        RouteNote n1 = RouteNote.newBuilder().setLocation(p1).setMessage("m1").build();
        RouteNote n2 = RouteNote.newBuilder().setLocation(p2).setMessage("m2").build();
        RouteNote n3 = RouteNote.newBuilder().setLocation(p1).setMessage("m3").build();
        RouteNote n4 = RouteNote.newBuilder().setLocation(p2).setMessage("m4").build();
        RouteNote n5 = RouteNote.newBuilder().setLocation(p1).setMessage("m5").build();
        RouteNote n6 = RouteNote.newBuilder().setLocation(p1).setMessage("m6").build();
        // Running count of onNext deliveries expected so far; each note echoes back every
        // earlier note previously sent at the same location.
        int timesOnNext = 0;

        @SuppressWarnings("unchecked")
        StreamObserver<RouteNote> responseObserver =
            (StreamObserver<RouteNote>) mock(StreamObserver.class);
        RouteGuideGrpc.RouteGuideStub stub = RouteGuideGrpc.newStub(inProcessChannel);

        StreamObserver<RouteNote> requestObserver = stub.routeChat(responseObserver);
        verify(responseObserver, never()).onNext(any(RouteNote.class));

        // n1 and n2 are the first notes at their respective locations: nothing echoed yet.
        requestObserver.onNext(n1);
        verify(responseObserver, never()).onNext(any(RouteNote.class));

        requestObserver.onNext(n2);
        verify(responseObserver, never()).onNext(any(RouteNote.class));

        // n3 is the second note at p1: the earlier n1 is echoed back.
        requestObserver.onNext(n3);
        ArgumentCaptor<RouteNote> routeNoteCaptor = ArgumentCaptor.forClass(RouteNote.class);
        verify(responseObserver, timeout(100).times(++timesOnNext)).onNext(routeNoteCaptor.capture());
        RouteNote result = routeNoteCaptor.getValue();
        assertEquals(p1, result.getLocation());
        assertEquals("m1", result.getMessage());

        // n4 is the second note at p2: n2 is echoed back.
        requestObserver.onNext(n4);
        routeNoteCaptor = ArgumentCaptor.forClass(RouteNote.class);
        verify(responseObserver, timeout(100).times(++timesOnNext)).onNext(routeNoteCaptor.capture());
        result = routeNoteCaptor.getAllValues().get(timesOnNext - 1);
        assertEquals(p2, result.getLocation());
        assertEquals("m2", result.getMessage());

        // n5 is the third note at p1: both n1 and n3 are echoed back (two new deliveries).
        requestObserver.onNext(n5);
        routeNoteCaptor = ArgumentCaptor.forClass(RouteNote.class);
        timesOnNext += 2;
        verify(responseObserver, timeout(100).times(timesOnNext)).onNext(routeNoteCaptor.capture());
        result = routeNoteCaptor.getAllValues().get(timesOnNext - 2);
        assertEquals(p1, result.getLocation());
        assertEquals("m1", result.getMessage());
        result = routeNoteCaptor.getAllValues().get(timesOnNext - 1);
        assertEquals(p1, result.getLocation());
        assertEquals("m3", result.getMessage());

        // n6 is the fourth note at p1: n1, n3 and n5 are echoed back (three new deliveries).
        requestObserver.onNext(n6);
        routeNoteCaptor = ArgumentCaptor.forClass(RouteNote.class);
        timesOnNext += 3;
        verify(responseObserver, timeout(100).times(timesOnNext)).onNext(routeNoteCaptor.capture());
        result = routeNoteCaptor.getAllValues().get(timesOnNext - 3);
        assertEquals(p1, result.getLocation());
        assertEquals("m1", result.getMessage());
        result = routeNoteCaptor.getAllValues().get(timesOnNext - 2);
        assertEquals(p1, result.getLocation());
        assertEquals("m3", result.getMessage());
        result = routeNoteCaptor.getAllValues().get(timesOnNext - 1);
        assertEquals(p1, result.getLocation());
        assertEquals("m5", result.getMessage());

        requestObserver.onCompleted();
        verify(responseObserver, timeout(100)).onCompleted();
        verify(responseObserver, never()).onError(any(Throwable.class));
    }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFMeterBandDropVer14 implements OFMeterBandDrop {
    private static final Logger logger = LoggerFactory.getLogger(OFMeterBandDropVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    final static int LENGTH = 16;

    private final static long DEFAULT_RATE = 0x0L;
    private final static long DEFAULT_BURST_SIZE = 0x0L;

    // OF message fields
    private final long rate;
    private final long burstSize;
    //

    // Immutable default instance
    final static OFMeterBandDropVer14 DEFAULT = new OFMeterBandDropVer14(
        DEFAULT_RATE, DEFAULT_BURST_SIZE
    );

    // package private constructor - used by readers, builders, and factory
    OFMeterBandDropVer14(long rate, long burstSize) {
        this.rate = rate;
        this.burstSize = burstSize;
    }

    // Accessors for OF message fields
    @Override
    public int getType() {
        return 0x1;
    }

    @Override
    public long getRate() {
        return rate;
    }

    @Override
    public long getBurstSize() {
        return burstSize;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    public OFMeterBandDrop.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    static class BuilderWithParent implements OFMeterBandDrop.Builder {
        final OFMeterBandDropVer14 parentMessage;

        // OF message fields
        private boolean rateSet;
        private long rate;
        private boolean burstSizeSet;
        private long burstSize;

        BuilderWithParent(OFMeterBandDropVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public int getType() {
            return 0x1;
        }

        @Override
        public long getRate() {
            return rate;
        }

        @Override
        public OFMeterBandDrop.Builder setRate(long rate) {
            this.rate = rate;
            this.rateSet = true;
            return this;
        }

        @Override
        public long getBurstSize() {
            return burstSize;
        }

        @Override
        public OFMeterBandDrop.Builder setBurstSize(long burstSize) {
            this.burstSize = burstSize;
            this.burstSizeSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFMeterBandDrop build() {
            // Fields not explicitly set fall back to the parent message's values.
            long rate = this.rateSet ? this.rate : parentMessage.rate;
            long burstSize = this.burstSizeSet ? this.burstSize : parentMessage.burstSize;
            //
            return new OFMeterBandDropVer14(
                rate,
                burstSize
            );
        }
    }

    static class Builder implements OFMeterBandDrop.Builder {
        // OF message fields
        private boolean rateSet;
        private long rate;
        private boolean burstSizeSet;
        private long burstSize;

        @Override
        public int getType() {
            return 0x1;
        }

        @Override
        public long getRate() {
            return rate;
        }

        @Override
        public OFMeterBandDrop.Builder setRate(long rate) {
            this.rate = rate;
            this.rateSet = true;
            return this;
        }

        @Override
        public long getBurstSize() {
            return burstSize;
        }

        @Override
        public OFMeterBandDrop.Builder setBurstSize(long burstSize) {
            this.burstSize = burstSize;
            this.burstSizeSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        //
        @Override
        public OFMeterBandDrop build() {
            // Fields not explicitly set fall back to the documented defaults.
            long rate = this.rateSet ? this.rate : DEFAULT_RATE;
            long burstSize = this.burstSizeSet ? this.burstSize : DEFAULT_BURST_SIZE;
            return new OFMeterBandDropVer14(
                rate,
                burstSize
            );
        }
    }

    final static Reader READER = new Reader();
    static class Reader implements OFMessageReader<OFMeterBandDrop> {
        @Override
        public OFMeterBandDrop readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 0x1
            short type = bb.readShort();
            if(type != (short) 0x1)
                throw new OFParseError("Wrong type: Expected=0x1(0x1), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 16)
                throw new OFParseError("Wrong length: Expected=16(16), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long rate = U32.f(bb.readInt());
            long burstSize = U32.f(bb.readInt());
            // pad: 4 bytes
            bb.skipBytes(4);

            OFMeterBandDropVer14 meterBandDropVer14 = new OFMeterBandDropVer14(
                rate,
                burstSize
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", meterBandDropVer14);
            return meterBandDropVer14;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFMeterBandDropVer14Funnel FUNNEL = new OFMeterBandDropVer14Funnel();
    static class OFMeterBandDropVer14Funnel implements Funnel<OFMeterBandDropVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFMeterBandDropVer14 message, PrimitiveSink sink) {
            // fixed value property type = 0x1
            sink.putShort((short) 0x1);
            // fixed value property length = 16
            sink.putShort((short) 0x10);
            sink.putLong(message.rate);
            sink.putLong(message.burstSize);
            // skip pad (4 bytes)
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();
    static class Writer implements OFMessageWriter<OFMeterBandDropVer14> {
        @Override
        public void write(ByteBuf bb, OFMeterBandDropVer14 message) {
            // fixed value property type = 0x1
            bb.writeShort((short) 0x1);
            // fixed value property length = 16
            bb.writeShort((short) 0x10);
            bb.writeInt(U32.t(message.rate));
            bb.writeInt(U32.t(message.burstSize));
            // pad: 4 bytes
            bb.writeZero(4);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFMeterBandDropVer14(");
        b.append("rate=").append(rate);
        b.append(", ");
        b.append("burstSize=").append(burstSize);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFMeterBandDropVer14 other = (OFMeterBandDropVer14) obj;

        if( rate != other.rate)
            return false;
        if( burstSize != other.burstSize)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // BUGFIX: accumulate into result (prime * result + term). The previous code
        // assigned "result = prime * term" twice, overwriting the first term so that
        // rate never contributed to the hash. NOTE: since this file is generated by
        // LoxiGen, the same fix should be applied to the generator template
        // (of_class.java) so regeneration does not reintroduce the defect.
        result = prime * result + (int) (rate ^ (rate >>> 32));
        result = prime * result + (int) (burstSize ^ (burstSize >>> 32));
        return result;
    }
}
| |
package dusan.stefanovic.trainingapp;
import java.util.List;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.ComponentName;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.os.IBinder;
import android.os.Vibrator;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentStatePagerAdapter;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.view.ViewPager;
import android.support.v4.view.ViewPager.SimpleOnPageChangeListener;
import android.support.v7.app.ActionBar;
import android.support.v7.app.ActionBar.Tab;
import android.support.v7.app.ActionBar.TabListener;
import android.support.v7.app.ActionBarActivity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import dusan.stefanovic.trainingapp.data.Procedure;
import dusan.stefanovic.trainingapp.data.Step;
import dusan.stefanovic.trainingapp.fragment.ProcedureListener;
import dusan.stefanovic.trainingapp.fragment.TrainingCurrentStepFragment;
import dusan.stefanovic.trainingapp.fragment.TrainingOverviewFragment;
import dusan.stefanovic.trainingapp.fragment.TrainingProgressFragment;
import dusan.stefanovic.trainingapp.fragment.TrainingResultsFragment;
import dusan.stefanovic.trainingapp.fragment.TrainingStepsFragment;
import dusan.stefanovic.trainingapp.service.TrainingService;
import dusan.stefanovic.trainingapp.service.TrainingService.TrainingServiceListener;
import dusan.stefanovic.trainingapp.service.WATCHiTServiceInterface;
import dusan.stefanovic.treningapp.R;
public class TrainingActivity extends ActionBarActivity implements TabListener, ProcedureListener, TrainingServiceListener {

    // Action bar hosting the navigation tabs set up in setUpTabsTabs().
    ActionBar mActionBar;
    // Adapter supplying one fragment per tab.
    TabPagerAdapter mTabPagerAdapter;
    // Pager kept in sync with the action-bar tab selection.
    ViewPager mViewPager;
    // Options menu captured in onCreateOptionsMenu().
    Menu mMenu;

    // Training control buttons wired up in onCreate().
    Button mStartButton;
    Button mResumeButton;
    Button mPauseButton;

    // Intent for starting service after it's already bound
    private Intent mDelayedStartServiceIntent;
    // True once bindService() has been called (so unbindService() is safe to call).
    private boolean mIsBindCalled;
    // True while the service connection is live.
    private boolean mIsBound;
    private int mDeviceConnectionState;
    // The bound TrainingService instance; null when not connected.
    private TrainingService mBoundService;

    // The procedure being trained; delivered via the launching intent's "procedure" extra.
    Procedure mProcedure;
    // NOTE(review): appears unused in this part of the class — confirm before removing.
    long mStartDelay = 5000;
// Connection to the TrainingService. Handles registration as a listener, procedure
// hand-off and the deferred startService() call on connect.
private ServiceConnection mConnection = new ServiceConnection() {

    public void onServiceConnected(ComponentName className, IBinder service) {
        // This is called when the connection with the service has been
        // established, giving us the service object we can use to
        // interact with the service. Because we have bound to an explicit
        // service that we know is running in our own process, we can
        // cast its IBinder to a concrete class and directly access it.
        mBoundService = ((TrainingService.LocalBinder) service).getService();
        mBoundService.registerTrainingServiceListener(TrainingActivity.this);
        mProcedure = mBoundService.setProcedure(mProcedure);
        doSynchronization();
        // start service with the intent after it's already bound
        if (mDelayedStartServiceIntent != null) {
            startService(mDelayedStartServiceIntent);
            mDelayedStartServiceIntent = null;
        }
        setIsBound(true);
    }

    public void onServiceDisconnected(ComponentName className) {
        // This is called when the connection with the service has been
        // unexpectedly disconnected -- that is, its process crashed.
        // Because it is running in our same process, we should never
        // see this happen.
        mBoundService = null;
        setIsBound(false);
    }
};
/**
 * Sets up the training screen: reads the procedure from the launching intent, binds and
 * starts the {@link TrainingService}, and wires up the tabbed pager and the
 * start/resume/pause buttons.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_training);
    mProcedure = getIntent().getParcelableExtra("procedure");
    if (mProcedure == null) {
        // No procedure to train on: abort. finish() only schedules the activity for
        // destruction -- it does not stop this method -- so return explicitly.
        // BUGFIX: without this return, execution continued and
        // mProcedure.getTitle() below threw a NullPointerException.
        finish();
        return;
    }
    Intent intent = new Intent(this, TrainingService.class);
    intent.putExtras(getIntent());
    doBindAndStartService(intent);
    // Create the adapter that will return a fragment for each of the three primary sections
    // of the app.
    mTabPagerAdapter = new TabPagerAdapter(getSupportFragmentManager());
    // Set up the action bar.
    mActionBar = getSupportActionBar();
    // Specify Home/Up button
    //actionBar.setHomeButtonEnabled(true);
    mActionBar.setDisplayHomeAsUpEnabled(true);
    mActionBar.setTitle(mProcedure.getTitle());
    // Specify that we will be displaying tabs in the action bar.
    mActionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
    // Set up the ViewPager, attaching the adapter and setting up a listener for when the
    // user swipes between sections.
    mViewPager = (ViewPager) findViewById(R.id.pager);
    //mViewPager.setOffscreenPageLimit(2);
    mViewPager.setAdapter(mTabPagerAdapter);
    mViewPager.setOnPageChangeListener(new SimpleOnPageChangeListener() {
        @Override
        public void onPageSelected(int position) {
            // When swiping between different app sections, select the corresponding tab.
            // We can also use ActionBar.Tab#select() to do this if we have a reference to the
            // Tab.
            mActionBar.setSelectedNavigationItem(position);
        }
    });
    setUpTabsTabs();
    // Each button forwards to the bound service; if the service reference is gone the
    // bound state is reset so the UI reflects the disconnection.
    mStartButton = (Button) findViewById(R.id.start_button);
    mStartButton.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View view) {
            if (mBoundService != null) {
                mBoundService.startTraining();
            } else {
                setIsBound(false);
            }
        }
    });
    mResumeButton = (Button) findViewById(R.id.resume_button);
    mResumeButton.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View view) {
            if (mBoundService != null) {
                mBoundService.resumeTraining();
            } else {
                setIsBound(false);
            }
        }
    });
    mPauseButton = (Button) findViewById(R.id.pause_button);
    mPauseButton.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View view) {
            if (mBoundService != null) {
                mBoundService.pauseTraining();
            } else {
                setIsBound(false);
            }
        }
    });
}
// NOTE(review): redundant override (only calls super) — kept as-is.
@Override
protected void onStart() {
    super.onStart();
}

// Re-sync UI state with the bound service whenever the activity returns to the foreground.
@Override
protected void onResume() {
    super.onResume();
    doSynchronization();
}

// Unbind from the training service so the connection is not leaked.
@Override
protected void onDestroy() {
    super.onDestroy();
    doUnbindService();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Keep a reference so other methods can toggle menu items later.
    mMenu = menu;
    getMenuInflater().inflate(R.menu.training, menu);
    return true;
}

/**
 * Dispatches options-menu selections: Up navigation, stop/reset training and the
 * WATCHiT settings screen.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
        case android.R.id.home:
            // Up button behaves like back: confirm before quitting a running training.
            tryToQuitTrainingActivity();
            return true;
        case R.id.action_stop_training:
            if (mBoundService != null) {
                mBoundService.stopTraining();
            } else {
                setIsBound(false);
            }
            return true;
        case R.id.action_reset_training:
            if (mBoundService != null) {
                mBoundService.resetTraining();
                changeTrainingView();
                updateProgress();
                updateTimer(0);
            } else {
                setIsBound(false);
            }
            showStartButton();
            return true;
        case R.id.action_settings:
            Intent intent = new Intent(WATCHiTServiceInterface.ACTION_START_WATCHiT_SETTINGS);
            startActivity(intent);
            return true;
        default:
            return super.onOptionsItemSelected(item);
    }
}

// Back behaves like Up: confirm before quitting a running training.
@Override
public void onBackPressed() {
    tryToQuitTrainingActivity();
}
// No-op: nothing to tear down when a tab is deselected.
@Override
public void onTabUnselected(Tab tab, FragmentTransaction fragmentTransaction) {
}

@Override
public void onTabSelected(Tab tab, FragmentTransaction fragmentTransaction) {
    // When the given tab is selected, switch to the corresponding page in the ViewPager.
    mViewPager.setCurrentItem(tab.getPosition());
}

// No-op: reselecting the current tab requires no action.
@Override
public void onTabReselected(Tab tab, FragmentTransaction fragmentTransaction) {
}

// ProcedureListener callback: hands the current procedure to requesting fragments.
@Override
public Procedure onProcedureRequested() {
    return mProcedure;
}
@Override
public void onTrainingStart() {
performCountDown();
mStartButton.setVisibility(View.GONE);
showPauseButton();
}
@Override
public void onTrainingStarted() {
updateSteps();
updateCurrentStep();
}
@Override
public void onTrainingResumed() {
updateSteps();
showPauseButton();
}
@Override
public void onTrainingPaused() {
updateSteps();
showResumeButton();
}
@Override
public void onTrainingStopped() {
changeTrainingView();
updateSteps();
showReflectionDialog();
showRestartOption();
}
@Override
public void onDeviceConnectionChanged(int connectionState) {
setDeviceConnectionState(connectionState);
}
@Override
public void onProgressUpdated() {
updateProgress();
updateSteps();
updateCurrentStep();
}
@Override
public void onTimerTicked(long milliseconds) {
    // Forward the service's elapsed-time tick to the progress fragment.
    updateTimer(milliseconds);
}
// Rebuilds the action-bar tabs from the adapter's titles and selects the middle tab.
// NOTE(review): the name ("TabsTabs") looks like a typo for setUpTabs; kept as-is
// because changeTrainingView() calls it by this name.
private void setUpTabsTabs() {
    mActionBar.removeAllTabs();
    for (int i = 0; i < mTabPagerAdapter.getCount(); i++) {
        mActionBar.addTab(mActionBar.newTab().setText(mTabPagerAdapter.getPageTitle(i)).setTabListener(this));
    }
    mActionBar.setSelectedNavigationItem(TabPagerAdapter.TAB_2);
}
// Quits the activity, asking for confirmation first when a training is in progress.
private void tryToQuitTrainingActivity() {
    if (!mProcedure.isStarted()) {
        // Nothing running: stop the background service and leave immediately.
        stopService(new Intent(this, TrainingService.class));
        finish();
        return;
    }
    // A running training needs user confirmation before tearing everything down.
    new QuitDialogFragment().show(getSupportFragmentManager(), "quit_dialog");
}
// Shows the post-training self-assessment dialog.
public void showReflectionDialog() {
    DialogFragment dialog = new ReflectionDialogFragment();
    dialog.show(getSupportFragmentManager(), "self_assessment_dialog");
    // temporary!!! (translated from "privremeno") -- service is stopped here for now
    stopService(new Intent(this, TrainingService.class));
}
// Shows the full-screen countdown overlay (see CountDownDialogFragment).
public void performCountDown() {
    CountDownDialogFragment dialog = new CountDownDialogFragment();
    dialog.show(getSupportFragmentManager(), "count_down_dialog");
}
// Binds to the training service; mIsBindCalled guards the matching unbind.
private void doBindService(Intent intent) {
    bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
    mIsBindCalled = true;
}
// Remembers the intent and binds; mDelayedStartServiceIntent is presumably
// consumed once the connection is established -- its use is not visible here.
private void doBindAndStartService(Intent intent) {
    mDelayedStartServiceIntent = intent;
    doBindService(intent);
}
// Detaches the listener and unbinds; safe to call repeatedly thanks to mIsBindCalled.
private void doUnbindService() {
    if (mIsBindCalled) {
        if (mBoundService != null) {
            mBoundService.unregisterTrainingServiceListener(this);
        }
        unbindService(mConnection);
        mIsBindCalled = false;
        setIsBound(false);
    }
}
// Pulls the complete training state (procedure, progress, timer, device
// connection) from the bound service into the UI. A null service means the
// binding is gone; reflect that via setIsBound(false).
private void doSynchronization() {
    if (mBoundService != null) {
        mProcedure = mBoundService.getProcedure();
        updateProgress();
        updateSteps();
        updateState();
        updateTimer(mBoundService.getElapsedTime());
        setDeviceConnectionState(mBoundService.getDeviceConnectionState());
        updateCurrentStep();
    } else {
        setIsBound(false);
    }
}
// Records the binding state; losing the binding also means the device state
// is unknown, so it is forced to DISCONNECTED.
private void setIsBound(boolean isBound) {
    mIsBound = isBound;
    if (!isBound) {
        setDeviceConnectionState(WATCHiTServiceInterface.DEVICE_DISCONNECTED);
    }
}
// Mirrors the device connection state in the action-bar icon and enables the
// start/resume controls only while the device is connected.
private void setDeviceConnectionState(int deviceConnectionState) {
    mDeviceConnectionState = deviceConnectionState;
    switch (deviceConnectionState) {
        case WATCHiTServiceInterface.DEVICE_DISCONNECTED:
            mActionBar.setIcon(R.drawable.ic_disconnected);
            break;
        case WATCHiTServiceInterface.DEVICE_CONNECTING:
            mActionBar.setIcon(R.drawable.ic_connecting);
            break;
        case WATCHiTServiceInterface.DEVICE_CONNECTED:
            mActionBar.setIcon(R.drawable.ic_connected);
            break;
    }
    final boolean connected = deviceConnectionState == WATCHiTServiceInterface.DEVICE_CONNECTED;
    if (!connected && mProcedure.isStarted()) {
        // TODO: notification here (translated from original placeholder "notifikacija ovde")
    }
    mStartButton.setEnabled(connected);
    mResumeButton.setEnabled(connected);
}
// Refreshes the progress tab, if it is currently attached.
private void updateProgress() {
    TrainingProgressFragment fragment = mTabPagerAdapter.getTrainingProgressFragment();
    if (fragment == null) {
        return; // tab not instantiated yet
    }
    fragment.update();
}
// Pushes the elapsed time into the progress tab, if it is currently attached.
private void updateTimer(long milliseconds) {
    TrainingProgressFragment fragment = mTabPagerAdapter.getTrainingProgressFragment();
    if (fragment == null) {
        return; // tab not instantiated yet
    }
    fragment.updateTimer(milliseconds);
}
// Refreshes the steps tab, if it is currently attached.
private void updateSteps() {
    TrainingStepsFragment fragment = mTabPagerAdapter.getTrainingStepsFragment();
    if (fragment == null) {
        return; // tab not instantiated yet
    }
    fragment.update();
}
// Refreshes the current-step tab, if it is currently attached.
private void updateCurrentStep() {
    TrainingCurrentStepFragment fragment = mTabPagerAdapter.getTrainingCurrentStepFragment();
    if (fragment == null) {
        return; // tab not instantiated yet
    }
    fragment.update();
}
// Chooses which control (restart/start/resume/pause) matches the current
// procedure state; order matters -- finished wins over the running checks.
private void updateState() {
    if (isTrainingFinished()) {
        showRestartOption();
    } else if (!mProcedure.isStarted()) {
        showStartButton();
    } else if (mProcedure.isPaused()) {
        showResumeButton();
    } else if (mProcedure.isRunning()) {
        showPauseButton();
    }
}
// Idle state: only the start button is shown; stop/reset menu items hidden.
private void showStartButton() {
    mStartButton.setVisibility(View.VISIBLE);
    setMeniOptionVisibility(R.id.action_stop_training, false);
    setMeniOptionVisibility(R.id.action_reset_training, false);
}
// Paused state: resume button visible, stop available in the menu.
private void showResumeButton() {
    mStartButton.setVisibility(View.GONE);
    mResumeButton.setVisibility(View.VISIBLE);
    mPauseButton.setVisibility(View.GONE);
    setMeniOptionVisibility(R.id.action_reset_training, false);
    setMeniOptionVisibility(R.id.action_stop_training, true);
}
// Running state: pause button visible, stop available in the menu.
private void showPauseButton() {
    mStartButton.setVisibility(View.GONE);
    mResumeButton.setVisibility(View.GONE);
    mPauseButton.setVisibility(View.VISIBLE);
    setMeniOptionVisibility(R.id.action_reset_training, false);
    setMeniOptionVisibility(R.id.action_stop_training, true);
}
// Finished state: hide resume/pause and expose only the reset menu action.
private void showRestartOption() {
    mResumeButton.setVisibility(View.GONE);
    mPauseButton.setVisibility(View.GONE);
    setMeniOptionVisibility(R.id.action_stop_training, false);
    setMeniOptionVisibility(R.id.action_reset_training, true);
}
/**
 * Shows or hides a single options-menu item. No-op until the menu has been
 * inflated (mMenu assigned) or when the id is not present in the menu.
 * NOTE(review): the name looks like a typo for setMenuOptionVisibility; kept
 * as-is because the show*Button methods call it by this name.
 */
private void setMeniOptionVisibility(int id, boolean isVisible) {
    if (mMenu != null) {
        MenuItem menuItem = mMenu.findItem(id);
        // Menu.findItem returns null for unknown ids -- guard against an NPE.
        if (menuItem != null) {
            menuItem.setVisible(isVisible);
        }
    }
}
// Stubbed to false for now; the commented-out line below is the intended check.
private boolean isTrainingFinished() {
    return false;
    //return !mProcedure.isStarted() && mProcedure.getStep(0).getStatus() != Step.STATUS_PENDING;
}
// Rebuilds the tabs and forces the pager to recreate its fragments
// (the adapter's getItemPosition returns POSITION_NONE).
private void changeTrainingView() {
    setUpTabsTabs();
    mTabPagerAdapter.notifyDataSetChanged();
}
/**
 * Adapter for the three training tabs. Tabs 1 and 2 swap their content on
 * isTrainingFinished(): current step / progress while running, results /
 * overview once finished.
 */
public class TabPagerAdapter extends FragmentStatePagerAdapter {
    static final int TAB_1 = 0;
    static final int TAB_2 = 1;
    static final int TAB_3 = 2;
    // NOTE(review): these two fields are never assigned or read within this
    // class; kept for compatibility in case outside code references them.
    TrainingResultsFragment mTrainingResultsFragment;
    TrainingOverviewFragment mTrainingOverviewFragment;

    public TabPagerAdapter(FragmentManager fragmentManager) {
        super(fragmentManager);
    }

    @Override
    public Fragment getItem(int position) {
        switch (position) {
            case TAB_1:
                if (isTrainingFinished()) {
                    return new TrainingResultsFragment();
                } else {
                    return new TrainingCurrentStepFragment();
                }
            case TAB_2:
                if (isTrainingFinished()) {
                    return new TrainingOverviewFragment();
                } else {
                    return new TrainingProgressFragment();
                }
            case TAB_3:
                return new TrainingStepsFragment();
        }
        return null;
    }

    @Override
    public int getCount() {
        return 3;
    }

    @Override
    public CharSequence getPageTitle(int position) {
        switch (position) {
            case TAB_1:
                if (isTrainingFinished()) {
                    return getText(R.string.training_activity_tab_results);
                } else {
                    return getText(R.string.training_activity_tab_current_step);
                }
            case TAB_2:
                if (isTrainingFinished()) {
                    return getText(R.string.training_activity_tab_overview);
                } else {
                    return getText(R.string.training_activity_tab_progress);
                }
            case TAB_3:
                return getText(R.string.training_activity_tab_steps);
        }
        return null;
    }

    @Override
    public int getItemPosition(Object object) {
        // Force recreation of all fragments on notifyDataSetChanged().
        return POSITION_NONE;
    }

    /**
     * Finds the first attached fragment of the given type, or null if none is
     * attached yet. Replaces five copy-pasted lookup loops.
     */
    private <T extends Fragment> T findFragment(Class<T> type) {
        List<Fragment> fragments = getSupportFragmentManager().getFragments();
        if (fragments != null) {
            for (Fragment fragment : fragments) {
                if (type.isInstance(fragment)) {
                    return type.cast(fragment);
                }
            }
        }
        return null;
    }

    public TrainingCurrentStepFragment getTrainingCurrentStepFragment() {
        return findFragment(TrainingCurrentStepFragment.class);
    }

    public TrainingProgressFragment getTrainingProgressFragment() {
        return findFragment(TrainingProgressFragment.class);
    }

    public TrainingStepsFragment getTrainingStepsFragment() {
        return findFragment(TrainingStepsFragment.class);
    }

    public TrainingResultsFragment getTrainingResultsFragment() {
        return findFragment(TrainingResultsFragment.class);
    }

    public TrainingOverviewFragment getTrainingOverviewFragment() {
        return findFragment(TrainingOverviewFragment.class);
    }
}
/**
 * Confirmation dialog shown when the user tries to leave while a training is
 * running. "Yes" stops the training service and finishes the activity; "No"
 * simply dismisses.
 */
public static class QuitDialogFragment extends DialogFragment {
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        builder.setTitle(getText(R.string.training_activity_quit_dialog_title));
        builder.setMessage(getText(R.string.training_activity_quit_dialog_message));
        builder.setPositiveButton(getText(R.string.button_yes), new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                getActivity().stopService(new Intent(getActivity(), TrainingService.class));
                getActivity().finish();
            }
        });
        builder.setNegativeButton(getText(R.string.button_no), new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                // No-op: dialog is dismissed automatically.
            }
        });
        return builder.create();
    }
}
/**
 * Non-cancelable dialog shown after a training stops; "OK" launches the
 * ReflectionActivity with the finished procedure attached as an extra.
 */
public static class ReflectionDialogFragment extends DialogFragment {
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        builder.setTitle(getText(R.string.training_activity_reflection_dialog_title));
        builder.setMessage(getText(R.string.training_activity_reflection_dialog_message));
        builder.setNeutralButton(getText(R.string.button_ok), new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                Intent intent = new Intent(getActivity(), ReflectionActivity.class);
                // assumes Procedure is Serializable or Parcelable -- TODO confirm
                intent.putExtra("procedure", ((TrainingActivity) getActivity()).mProcedure);
                startActivity(intent);
            }
        });
        setCancelable(false);
        return builder.create();
    }
}
/**
 * Full-screen translucent countdown overlay shown when a training starts.
 * Retained across configuration changes; the timer is started only on the
 * first view creation after show() (guarded by mShouldStart), so a rotation
 * does not restart the countdown.
 */
public static class CountDownDialogFragment extends DialogFragment {
    TextView mTextView;
    // Set by show(), consumed once in onCreateView.
    boolean mShouldStart;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setStyle(DialogFragment.STYLE_NO_FRAME, android.R.style.Theme_Translucent);
        setRetainInstance(true);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.dialog_fragment_countdown, container, false);
        mTextView = (TextView) rootView.findViewById(R.id.textView);
        if (mShouldStart) {
            // 4s total with 1s ticks: counts down the remaining seconds with a
            // short vibration per tick, then shows the "go" text, vibrates
            // longer, and dismisses itself.
            CountDownTimer countDownTimer = new CountDownTimer(4000, 1000) {
                Vibrator mVibrator = (Vibrator) getActivity().getSystemService(Context.VIBRATOR_SERVICE);

                @Override
                public void onTick(long millisUntilFinished) {
                    long secondsUntilFinished = (millisUntilFinished / 1000);
                    mTextView.setText(String.valueOf(secondsUntilFinished));
                    mVibrator.vibrate(300);
                }

                @Override
                public void onFinish() {
                    mTextView.setText(getText(R.string.training_activity_go));
                    mVibrator.vibrate(1500);
                    dismiss();
                }
            };
            countDownTimer.start();
            mShouldStart = false;
        }
        return rootView;
    }

    @Override
    public void onDestroyView() {
        // Workaround for the retained-DialogFragment issue where the dialog is
        // dismissed on configuration change despite setRetainInstance(true).
        if (getDialog() != null && getRetainInstance())
            getDialog().setDismissMessage(null);
        super.onDestroyView();
    }

    // @Override was missing: this overrides DialogFragment.show(FragmentManager, String).
    @Override
    public void show(FragmentManager manager, String tag) {
        super.show(manager, tag);
        mShouldStart = true;
    }
}
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.integration.routing;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.test.integration.AbstractNodesTests;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import static org.elasticsearch.cluster.metadata.AliasAction.newAddAliasAction;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
/**
*
*/
public class AliasRoutingTests extends AbstractNodesTests {
private Client client;
@BeforeClass
public void createNodes() throws Exception {
    // Two-node cluster shared by every test in this class.
    startNode("node1");
    startNode("node2");
    client = getClient();
}
@AfterClass
public void closeNodes() {
    // Release the client before tearing the cluster down.
    client.close();
    closeAllNodes();
}
// All requests in this class are issued through node1's client.
protected Client getClient() {
    return client("node1");
}
/**
 * CRUD (index / get / update / delete / delete-by-query) through an alias
 * carrying index routing "0". Every operation routed through the alias must
 * behave exactly as if routing "0" had been given explicitly, and unrouted
 * requests against the concrete index must miss the document.
 * Assertions repeat 5 times, presumably so requests round-robin across
 * nodes/shard copies -- confirm against AbstractNodesTests conventions.
 */
@Test
public void testAliasCrudRouting() throws Exception {
    try {
        client.admin().indices().prepareDelete("test").execute().actionGet();
    } catch (Exception e) {
        // ignore -- index may not exist yet
    }
    client.admin().indices().prepareCreate("test").execute().actionGet();
    client.admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
    client.admin().indices().prepareAliases().addAliasAction(newAddAliasAction("test", "alias0").routing("0")).execute().actionGet();

    logger.info("--> indexing with id [1], and routing [0] using alias");
    client.prepareIndex("alias0", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet();
    logger.info("--> verifying get with no routing, should not find anything");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false));
    }
    logger.info("--> verifying get with routing, should find");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true));
    }
    logger.info("--> verifying get with routing alias, should find");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("alias0", "type1", "1").execute().actionGet().isExists(), equalTo(true));
    }

    // The alias routing must also apply to the update API.
    logger.info("--> updating with id [1] and routing through alias");
    client.prepareUpdate("alias0", "type1", "1")
            .setUpsertRequest(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject())
            .setScript("ctx._source.field = 'value2'")
            .execute().actionGet();
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("alias0", "type1", "1").execute().actionGet().isExists(), equalTo(true));
        assertThat(client.prepareGet("alias0", "type1", "1").execute().actionGet().getSourceAsMap().get("field").toString(), equalTo("value2"));
    }

    // An unrouted delete hashes on the id alone and must miss the routed doc.
    logger.info("--> deleting with no routing, should not delete anything");
    client.prepareDelete("test", "type1", "1").setRefresh(true).execute().actionGet();
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false));
        assertThat(client.prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true));
        assertThat(client.prepareGet("alias0", "type1", "1").execute().actionGet().isExists(), equalTo(true));
    }

    logger.info("--> deleting with routing alias, should delete");
    client.prepareDelete("alias0", "type1", "1").setRefresh(true).execute().actionGet();
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false));
        assertThat(client.prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(false));
        assertThat(client.prepareGet("alias0", "type1", "1").execute().actionGet().isExists(), equalTo(false));
    }

    // Re-index so delete-by-query behavior can be checked on the same doc.
    logger.info("--> indexing with id [1], and routing [0] using alias");
    client.prepareIndex("alias0", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet();
    logger.info("--> verifying get with no routing, should not find anything");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false));
    }
    logger.info("--> verifying get with routing, should find");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true));
        assertThat(client.prepareGet("alias0", "type1", "1").execute().actionGet().isExists(), equalTo(true));
    }

    logger.info("--> deleting_by_query with 1 as routing, should not delete anything");
    client.prepareDeleteByQuery().setQuery(matchAllQuery()).setRouting("1").execute().actionGet();
    client.admin().indices().prepareRefresh().execute().actionGet();
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false));
        assertThat(client.prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true));
        assertThat(client.prepareGet("alias0", "type1", "1").execute().actionGet().isExists(), equalTo(true));
    }

    logger.info("--> deleting_by_query with alias0, should delete");
    client.prepareDeleteByQuery("alias0").setQuery(matchAllQuery()).execute().actionGet();
    client.admin().indices().prepareRefresh().execute().actionGet();
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false));
        assertThat(client.prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(false));
        assertThat(client.prepareGet("alias0", "type1", "1").execute().actionGet().isExists(), equalTo(false));
    }
}
/**
 * Search/count through aliases with index routing ("alias0" -> 0,
 * "alias1" -> 1), a search-routing alias ("alias01" -> "0,1"), and an
 * unrouted alias: results must match the equivalent explicit setRouting
 * calls, and combining routed aliases must union their routing values.
 * (Fixes two log-message typos: "should fine" -> "should find".)
 */
@Test
public void testAliasSearchRouting() throws Exception {
    try {
        client.admin().indices().prepareDelete("test").execute().actionGet();
    } catch (Exception e) {
        // ignore -- index may not exist yet
    }
    client.admin().indices().prepareCreate("test").execute().actionGet();
    client.admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
    client.admin().indices().prepareAliases()
            .addAliasAction(newAddAliasAction("test", "alias"))
            .addAliasAction(newAddAliasAction("test", "alias0").routing("0"))
            .addAliasAction(newAddAliasAction("test", "alias1").routing("1"))
            .addAliasAction(newAddAliasAction("test", "alias01").searchRouting("0,1"))
            .execute().actionGet();

    logger.info("--> indexing with id [1], and routing [0] using alias");
    client.prepareIndex("alias0", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet();
    logger.info("--> verifying get with no routing, should not find anything");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false));
    }
    logger.info("--> verifying get with routing, should find");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("alias0", "type1", "1").execute().actionGet().isExists(), equalTo(true));
    }

    logger.info("--> search with no routing, should find one");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
    }
    logger.info("--> search with wrong routing, should not find");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0l));
        assertThat(client.prepareCount().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(0l));
        assertThat(client.prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0l));
        assertThat(client.prepareCount("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(0l));
    }
    logger.info("--> search with correct routing, should find");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
        assertThat(client.prepareCount().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(1l));
        assertThat(client.prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
        assertThat(client.prepareCount("alias0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(1l));
    }

    logger.info("--> indexing with id [2], and routing [1] using alias");
    client.prepareIndex("alias1", "type1", "2").setSource("field", "value1").setRefresh(true).execute().actionGet();

    logger.info("--> search with no routing, should find two");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
        assertThat(client.prepareCount().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(2l));
    }
    logger.info("--> search with 0 routing, should find one");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
        assertThat(client.prepareCount().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(1l));
        assertThat(client.prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
        assertThat(client.prepareCount("alias0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(1l));
    }
    logger.info("--> search with 1 routing, should find one");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
        assertThat(client.prepareCount().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(1l));
        assertThat(client.prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
        assertThat(client.prepareCount("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(1l));
    }
    logger.info("--> search with 0,1 routings , should find two");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch().setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
        assertThat(client.prepareCount().setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(2l));
        assertThat(client.prepareSearch("alias01").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
        assertThat(client.prepareCount("alias01").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(2l));
    }
    logger.info("--> search with two routing aliases , should find two");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch("alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
        assertThat(client.prepareCount("alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(2l));
    }
    logger.info("--> search with alias0, alias1 and alias01, should find two");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch("alias0", "alias1", "alias01").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
        assertThat(client.prepareCount("alias0", "alias1", "alias01").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(2l));
    }
    logger.info("--> search with test, alias0 and alias1, should find two");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch("test", "alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
        assertThat(client.prepareCount("test", "alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(2l));
    }
}
/**
 * Routed aliases spanning two indices: "alias-ab" maps to test-a with search
 * routing 0 and to test-b with search routing 1, so searching it must apply
 * each routing only to its own index.
 */
@Test
public void testAliasSearchRoutingWithTwoIndices() throws Exception {
    try {
        client.admin().indices().prepareDelete("test-a").execute().actionGet();
        client.admin().indices().prepareDelete("test-b").execute().actionGet();
    } catch (Exception e) {
        // ignore -- indices may not exist yet
    }
    client.admin().indices().prepareCreate("test-a").execute().actionGet();
    client.admin().indices().prepareCreate("test-b").execute().actionGet();
    client.admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
    client.admin().indices().prepareAliases()
            .addAliasAction(newAddAliasAction("test-a", "alias-a0").routing("0"))
            .addAliasAction(newAddAliasAction("test-a", "alias-a1").routing("1"))
            .addAliasAction(newAddAliasAction("test-b", "alias-b0").routing("0"))
            .addAliasAction(newAddAliasAction("test-b", "alias-b1").routing("1"))
            .addAliasAction(newAddAliasAction("test-a", "alias-ab").searchRouting("0"))
            .addAliasAction(newAddAliasAction("test-b", "alias-ab").searchRouting("1"))
            .execute().actionGet();
    logger.info("--> indexing with id [1], and routing [0] using alias to test-a");
    client.prepareIndex("alias-a0", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet();
    logger.info("--> verifying get with no routing, should not find anything");
    // NOTE(review): index "test" is never created by this test (only test-a /
    // test-b); this get looks like a copy-paste from testAliasSearchRouting and
    // presumably only works because "test" lingers from earlier tests -- confirm.
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false));
    }
    logger.info("--> verifying get with routing, should find");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("alias-a0", "type1", "1").execute().actionGet().isExists(), equalTo(true));
    }
    // NOTE(review): log message says id [0] but the document id used below is "1".
    logger.info("--> indexing with id [0], and routing [1] using alias to test-b");
    client.prepareIndex("alias-b1", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet();
    logger.info("--> verifying get with no routing, should not find anything");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false));
    }
    logger.info("--> verifying get with routing, should find");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("alias-b1", "type1", "1").execute().actionGet().isExists(), equalTo(true));
    }
    logger.info("--> search with alias-a1,alias-b0, should not find");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch("alias-a1", "alias-b0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0l));
        assertThat(client.prepareCount("alias-a1", "alias-b0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(0l));
    }
    logger.info("--> search with alias-ab, should find two");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch("alias-ab").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
        assertThat(client.prepareCount("alias-ab").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(2l));
    }
    logger.info("--> search with alias-a0,alias-b1 should find two");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch("alias-a0", "alias-b1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
        assertThat(client.prepareCount("alias-a0", "alias-b1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(2l));
    }
}
/**
 * Wildcard search ("index_*") must expand to both a concrete index (index_2)
 * and an index reached through a routed alias (index_1 -> index, routing "1"),
 * finding the document indexed through each.
 */
@Test
public void testAliasSearchRoutingWithConcreteAndAliasedIndices() throws Exception {
    try {
        client.admin().indices().prepareDelete("index").execute().actionGet();
        client.admin().indices().prepareDelete("index_2").execute().actionGet();
    } catch (Exception e) {
        // ignore -- indices may not exist yet
    }
    client.admin().indices().prepareCreate("index").execute().actionGet();
    client.admin().indices().prepareCreate("index_2").execute().actionGet();
    // Wait for pending cluster tasks too, consistent with the other tests in this class.
    client.admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
    client.admin().indices().prepareAliases()
            .addAliasAction(newAddAliasAction("index", "index_1").routing("1"))
            .execute().actionGet();

    logger.info("--> indexing on index_1 which is an alias for index with routing [1]");
    client.prepareIndex("index_1", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet();
    logger.info("--> indexing on index_2 which is a concrete index");
    client.prepareIndex("index_2", "type2", "2").setSource("field", "value2").setRefresh(true).execute().actionGet();

    logger.info("--> search all on index_* should find two");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch("index_*").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
    }
}
/**
 * With a mapping-level required _routing, indexing without routing must be
 * rejected with a RoutingMissingException, while deletes without routing are
 * broadcast to all shards and still remove the document.
 * NOTE(review): despite the method name, no alias is involved in this test.
 */
@Test
public void testRequiredRoutingMappingWithAlias() throws Exception {
    try {
        client.admin().indices().prepareDelete("test").execute().actionGet();
    } catch (Exception e) {
        // ignore -- index may not exist yet
    }
    client.admin().indices().prepareCreate("test")
            .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_routing").field("required", true).endObject().endObject().endObject())
            .execute().actionGet();
    client.admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();

    logger.info("--> indexing with id [1], and routing [0]");
    client.prepareIndex("test", "type1", "1").setRouting("0").setSource("field", "value1").setRefresh(true).execute().actionGet();
    logger.info("--> verifying get with no routing, should not find anything");

    logger.info("--> indexing with id [1], with no routing, should fail");
    try {
        client.prepareIndex("test", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet();
        // Was "assert false;", which is a no-op unless the JVM runs with -ea;
        // AssertionError is an Error, so the catch below cannot swallow it.
        throw new AssertionError("indexing without routing should have failed");
    } catch (ElasticSearchException e) {
        assertThat(e.unwrapCause(), instanceOf(RoutingMissingException.class));
    }

    logger.info("--> verifying get with routing, should find");
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true));
    }

    logger.info("--> deleting with no routing, should broadcast the delete since _routing is required");
    client.prepareDelete("test", "type1", "1").setRefresh(true).execute().actionGet();
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false));
        assertThat(client.prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(false));
    }

    logger.info("--> indexing with id [1], and routing [0]");
    client.prepareIndex("test", "type1", "1").setRouting("0").setSource("field", "value1").setRefresh(true).execute().actionGet();
    logger.info("--> verifying get with no routing, should not find anything");

    logger.info("--> bulk deleting with no routing, should broadcast the delete since _routing is required");
    client.prepareBulk().add(Requests.deleteRequest("test").type("type1").id("1")).execute().actionGet();
    client.admin().indices().prepareRefresh().execute().actionGet();
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false));
        assertThat(client.prepareGet("test", "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(false));
    }
}
@Test
public void testIndexingAliasesOverTime() throws Exception {
    // Clean slate: delete any pre-existing "test" index; ignore "index missing" failures.
    try {
        client.admin().indices().prepareDelete("test").execute().actionGet();
    } catch (Exception e) {
        // ignore
    }
    client.admin().indices().prepareCreate("test").execute().actionGet();
    // Wait for the cluster to settle (green) before indexing.
    client.admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
    logger.info("--> creating alias with routing [3]");
    client.admin().indices().prepareAliases()
    .addAliasAction(newAddAliasAction("test", "alias").routing("3"))
    .execute().actionGet();
    logger.info("--> indexing with id [0], and routing [3]");
    // Indexing through the alias applies the alias routing value [3] to the document.
    client.prepareIndex("alias", "type1", "0").setSource("field", "value1").setRefresh(true).execute().actionGet();
    logger.info("--> verifying get with no routing, should not find anything");
    // NOTE(review): the log line above announces a "no routing" verification but no
    // assertion follows — presumably lost in an edit; confirm and either add the
    // check or remove the log statement.
    logger.info("--> verifying get and search with routing, should find");
    // Repeat the lookups to hit different shard copies / request rounds.
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "0").setRouting("3").execute().actionGet().isExists(), equalTo(true));
        assertThat(client.prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
        assertThat(client.prepareCount("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(1l));
    }
    logger.info("--> creating alias with routing [4]");
    // Re-adding the alias replaces its routing: searches now route to [4] only.
    client.admin().indices().prepareAliases()
    .addAliasAction(newAddAliasAction("test", "alias").routing("4"))
    .execute().actionGet();
    logger.info("--> verifying search with wrong routing should not find");
    // Document [0] lives on the shard for routing [3], so routing [4] must miss it.
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0l));
        assertThat(client.prepareCount("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(0l));
    }
    logger.info("--> creating alias with search routing [3,4] and index routing 4");
    // Split routing: searches fan out over both [3] and [4]; new writes go to [4].
    client.admin().indices().prepareAliases()
    .addAliasAction(newAddAliasAction("test", "alias").searchRouting("3,4").indexRouting("4"))
    .execute().actionGet();
    logger.info("--> indexing with id [1], and routing [4]");
    client.prepareIndex("alias", "type1", "1").setSource("field", "value2").setRefresh(true).execute().actionGet();
    logger.info("--> verifying get with no routing, should not find anything");
    // NOTE(review): same as above — announced "no routing" check is missing.
    logger.info("--> verifying get and search with routing, should find");
    // With search routing [3,4] both documents are visible through the alias.
    for (int i = 0; i < 5; i++) {
        assertThat(client.prepareGet("test", "type1", "0").setRouting("3").execute().actionGet().isExists(), equalTo(true));
        assertThat(client.prepareGet("test", "type1", "1").setRouting("4").execute().actionGet().isExists(), equalTo(true));
        assertThat(client.prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
        assertThat(client.prepareCount("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getCount(), equalTo(2l));
    }
}
}
| |
package com.shop.data.services;
import com.configuration.DataBaseTestConfiguration;
import com.shop.data.tables.Book;
import com.shop.data.tables.Category;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.LinkedList;
import static org.junit.Assert.*;
/**
 * Integration tests for {@link CategoriesService}: saving, looking up and deleting
 * {@link Category} entities against the test database provided by
 * {@link DataBaseTestConfiguration}.
 *
 * <p>A fresh fixture collection is built before each test and deleted afterwards so
 * every test runs against a clean state.
 */
public class CategoriesServiceTest extends DataBaseTestConfiguration {
    @Autowired
    private CategoriesService service;
    @Autowired
    private BooksService booksService;
    // Fixture categories rebuilt before every test (see beforeEachTest()).
    private LinkedList<Category> categories;

    /** Builds the fixture categories. (Renamed from BeforeEachTest for lowerCamelCase.) */
    @Before
    public void beforeEachTest() {
        categories = createCategoriesCollection();
    }

    /** Removes the fixture categories so tests stay independent of each other. */
    @After
    public void afterEachTest() {
        service.delete(categories);
    }

    @Test
    public void save() {
        Category category = categories.getFirst();
        service.save(category);
        checker(category);
    }

    // NOTE(review): exact duplicate of save(); kept so the suite's surface is
    // unchanged, but one of the two should be removed.
    @Test
    public void saveOne() {
        Category category = categories.getFirst();
        service.save(category);
        checker(category);
    }

    @Test
    public void saveCollection() {
        LinkedList<Category> categories = this.categories;
        service.save(categories);
        categories.forEach(
                x -> checker(x)
        );
    }

    /** Saving null must be a silent no-op, not an exception. */
    @Test
    public void saveNull() {
        try {
            service.save((Category) null);
        } catch (Exception e) {
            assertNull(e);
        }
    }

    /** Category names must be unique: saving two with the same name is rejected. */
    @Test
    public void saveOneWithExistName() {
        categories.add(new Category("category " + 3));
        categories.add(new Category("category " + 3));
        service.save(categories);
        // Count how often each persisted name occurs; >1 means a duplicate slipped in.
        for (Category x : service.findAll()) {
            int numberOfSameObject = 0;
            for (Category x1 : service.findAll()) {
                if (x.getName().equals(x1.getName()))
                    numberOfSameObject++;
                if (numberOfSameObject > 1)
                    fail("can't save two categories with same name");
            }
        }
    }

    /** Re-saving a loaded category after replacing its book list persists the change. */
    @Test
    public void updateCategoryWithSetBooks() {
        Category category = categories.getFirst();
        service.save(category);
        assertTrue(category.equals(service.findOne(category.getName())));
        Category categoryToUpdate = service.findOne(category.getName());
        LinkedList<Book> books = new LinkedList<>();
        books.add(new Book("avcx"));
        books.add(new Book("avcx1"));
        categoryToUpdate.setBooks(books);
        service.save(categoryToUpdate);
        Category one = service.findOne(categoryToUpdate.getName());
        assertTrue(categoryToUpdate.equals(one));
    }

    /** Re-saving a loaded category after appending to its book list persists the change. */
    @Test
    public void updateCategoryWithAddBooks() {
        Category category = categories.getFirst();
        service.save(category);
        assertTrue(category.equals(service.findOne(category.getName())));
        Category categoryToUpdate = service.findOne(category.getName());
        LinkedList<Book> books = new LinkedList<>();
        books.add(new Book("avcx"));
        books.add(new Book("avcx1"));
        categoryToUpdate.getBooks().addAll(books);
        service.save(categoryToUpdate);
        assertTrue(categoryToUpdate.equals(service.findOne(category.getName())));
    }

    @Test
    public void findOne() {
        service.save(categories.getFirst());
        Category category = service.findOne(categories.getFirst());
        assertNotNull(category);
    }

    /** Every lookup overload must tolerate null input without throwing. */
    @Test
    public void findOneWithNull() {
        try {
            service.findOne((Category) null);
            service.findOneByName((Category) null);
            service.findOne((Category) null);
            service.findOneByName((String) null);
        } catch (Exception e) {
            assertNull(e);
        }
    }

    @Test
    public void findOneById() {
        service.save(categories.getFirst());
        Category category = service.findOne(categories.getFirst().getId());
        assertNotNull(category);
    }

    // Renamed from findOneByNameWitchDoesNotExistWithSring (typos: Witch/Sring).
    @Test
    public void findOneByNameWhichDoesNotExistWithString() {
        Category category = service.findOneByName("bla bla bla");
        assertNull(category);
    }

    // Renamed from findOneByNameWitchDoesNotExistWithObject (typo: Witch).
    @Test
    public void findOneByNameWhichDoesNotExistWithObject() {
        Category category = service.findOneByName(new Category("bla bla bla"));
        assertNull(category);
    }

    @Test
    public void findOneByNameWithString() {
        service.save(categories.getFirst());
        Category category = service.findOneByName(categories.getFirst().getName());
        assertNotNull(category);
    }

    /** A category whose name has been nulled out cannot be found by object lookup. */
    @Test
    public void findOneWithStringThrowException() {
        service.save(categories.getFirst());
        categories.getFirst().setName(null);
        Category category = service.findOne(categories.getFirst());
        assertNull(category);
    }

    @Test
    public void findOneByNameWithObject() {
        service.save(categories.getFirst());
        Category category = service.findOneByName(categories.getFirst());
        assertNotNull(category);
    }

    /** Every category returned by findAll() must be retrievable by its id. */
    @Test
    public void findAll() {
        Iterable<Category> categories = service.findAll();
        categories.forEach(
                x -> assertNotNull(service.findOne(x.getId()))
        );
    }

    @Test
    public void delete() {
        Category category = categories.getFirst();
        service.save(category);
        service.delete(category);
        assertNull(service.findOne(category.getId()));
    }

    @Test
    public void deleteById() {
        Category category = categories.getFirst();
        service.save(category);
        service.delete(category.getId());
        assertNull(service.findOne(category.getId()));
    }

    @Test
    public void deleteCollection() {
        LinkedList<Category> categories = this.categories;
        service.save(categories);
        service.delete(categories);
        categories.forEach(
                x -> assertNull(service.findOne(x.getId()))
        );
    }

    /**
     * Builds the test fixture: categories "category 0".."category 3".
     * Note the last entry duplicates no existing name here; duplicates are added
     * explicitly by saveOneWithExistName().
     */
    public LinkedList<Category> createCategoriesCollection() {
        LinkedList<Category> categoriesToReturn = new LinkedList<>();
        for (int i = 0; i < 3; i++) {
            // was misleadingly named "book" — it is a Category
            Category category = new Category("category " + i);
            categoriesToReturn.add(category);
        }
        categoriesToReturn.add(new Category("category " + 3));
        return categoriesToReturn;
    }

    /** Asserts the category and each of its books were persisted. */
    private void checker(Category category) {
        category.getBooks().forEach(
                x -> assertNotNull(booksService.findOne(x))
        );
        assertTrue(category.equals(service.findOne(category.getId())));
    }
}
| |
package org.ringingmaster.engine.parser.assignparsetype;
import org.junit.Test;
import org.ringingmaster.engine.NumberOfBells;
import org.ringingmaster.engine.composition.MutableComposition;
import org.ringingmaster.engine.composition.compositiontype.CompositionType;
import org.ringingmaster.engine.notation.Notation;
import org.ringingmaster.engine.notation.NotationBuilder;
import org.ringingmaster.engine.parser.parse.Parse;
import static org.junit.Assert.assertEquals;
import static org.ringingmaster.engine.parser.AssertParse.assertParse;
import static org.ringingmaster.engine.parser.AssertParse.section;
import static org.ringingmaster.engine.parser.AssertParse.unparsed;
import static org.ringingmaster.engine.parser.AssertParse.valid;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.CALL;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.CALL_MULTIPLIER;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.DEFAULT_CALL_MULTIPLIER;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.DEFINITION;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.DEFINITION_MULTIPLIER;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.MULTIPLIER_GROUP_CLOSE;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.MULTIPLIER_GROUP_OPEN;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.MULTIPLIER_GROUP_OPEN_MULTIPLIER;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.PLAIN_LEAD;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.PLAIN_LEAD_MULTIPLIER;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.SPLICE;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.SPLICE_MULTIPLIER;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.VARIANCE_CLOSE;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.VARIANCE_DETAIL;
import static org.ringingmaster.engine.parser.assignparsetype.ParseType.VARIANCE_OPEN;
import static org.ringingmaster.engine.composition.TableType.COMPOSITION_TABLE;
import static org.ringingmaster.engine.composition.compositiontype.CompositionType.COURSE_BASED;
import static org.ringingmaster.engine.composition.tableaccess.DefinitionTableAccess.DEFINITION_COLUMN;
/**
 * Tests that {@link AssignParseType} assigns the various multiplier parse types
 * (call, multiplier-group, plain-lead, definition and splice multipliers) to the
 * correct character sections of a composition cell.
 *
 * @author Steve Lake
 */
public class AssignParseTypeMULTIPLIERTest {
    /** An empty composition parses to an empty cell grid. */
    @Test
    public void parsingEmptyParseReturnsEmptyParse() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), null);
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertEquals(0, parse.allCompositionCells().getRowSize());
        assertEquals(0, parse.allCompositionCells().getColumnSize());
    }

    /** Cell contents survive parsing unchanged in every table position. */
    @Test
    public void parsingAllCellTypesReturnsOriginals() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), null);
        composition.setCompositionType(COURSE_BASED);
        composition.addCharacters(COMPOSITION_TABLE, 0,0, "CALL_POSITION");
        composition.addCharacters(COMPOSITION_TABLE, 1,0, "MAIN_BODY");
        composition.addCharacters(COMPOSITION_TABLE, 1,1, "SPLICE");
        composition.addCharacters(COMPOSITION_TABLE, 2,0, "CALL");// To force the Parse to be replaced
        composition.addCharacters(COMPOSITION_TABLE, 2,1, "CALL");// To force the Parse to be replaced
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertEquals(3, parse.allCompositionCells().getRowSize());
        assertEquals(2, parse.allCompositionCells().getColumnSize());
        assertEquals("CALL_POSITION", parse.allCompositionCells().get(0,0).getCharacters());
        assertEquals("MAIN_BODY", parse.allCompositionCells().get(1,0).getCharacters());
        assertEquals("SPLICE", parse.allCompositionCells().get(1,1).getCharacters());
    }

    // "-2": the call "-" followed by a bare digit parsed as a default call multiplier.
    @Test
    public void correctlyParseSingleDefaultCallMultiplier() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "-2");
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,0), valid(CALL), valid(DEFAULT_CALL_MULTIPLIER));
    }

    // "-22": two trailing digits form one two-character default call multiplier.
    @Test
    public void correctlyParseMultiDefaultCallMultiplier() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "-22");
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,0), valid(CALL), valid(2,DEFAULT_CALL_MULTIPLIER));
    }

    // A digit before whitespace stays a default call multiplier; the space is unparsed.
    @Test
    public void correctlyParseDefaultCallMultiplierBeforeWhitespace() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "2 ");
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,0), valid(DEFAULT_CALL_MULTIPLIER), unparsed(1));
    }

    // A digit immediately before a variance "[...]" is still a default call multiplier.
    @Test
    public void correctlyParseDefaultCallMultiplierBeforeVariance() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "6[-o7]");
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,0), valid(DEFAULT_CALL_MULTIPLIER), valid(section(VARIANCE_OPEN), section(2,VARIANCE_DETAIL)), valid(DEFAULT_CALL_MULTIPLIER), valid(VARIANCE_CLOSE));
    }

    // "2-": a digit directly before a call is a CALL_MULTIPLIER, not a default multiplier.
    @Test
    public void correctlyParseCallMultiplier() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "2-");
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,0), valid(section(CALL_MULTIPLIER), section(CALL)));
    }

    @Test
    public void correctlyParseMultiCallMultiplier() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "28-");
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,0), valid(section(2,CALL_MULTIPLIER), section(CALL)));
    }

    // Digits before "(" become a multiplier on the group that follows.
    @Test
    public void correctlyParseGroupMultiplier() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "6(7)");
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,0), valid(section(MULTIPLIER_GROUP_OPEN_MULTIPLIER), section(MULTIPLIER_GROUP_OPEN)), valid(DEFAULT_CALL_MULTIPLIER), valid(MULTIPLIER_GROUP_CLOSE));
    }

    @Test
    public void correctlyParseMultiGroupMultiplier() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "624(");
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,0), valid(section(3, MULTIPLIER_GROUP_OPEN_MULTIPLIER), section(MULTIPLIER_GROUP_OPEN)));
    }

    // "p" is this notation's splice identifier used as plain lead; digits before it multiply it.
    @Test
    public void correctlyParsePlainLeadMultiplier() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "3p");
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,0), valid(section(PLAIN_LEAD_MULTIPLIER), section(PLAIN_LEAD)));
    }

    @Test
    public void correctlyParseMultiPlainLeadMultiplier() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "434p");
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,0), valid(section(3, PLAIN_LEAD_MULTIPLIER), section(PLAIN_LEAD)));
    }

    // "def1" is a shorthand registered by buildSingleCellComposition(); "2def1" multiplies it.
    @Test
    public void correctlyParseDefinitionMultiplier() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "2def1");
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,0), valid(section(1, DEFINITION_MULTIPLIER), section(4, DEFINITION)));
    }

    @Test
    public void correctlyParseMultiDefinitionMultiplier() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "243def1");
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,0), valid(section(3, DEFINITION_MULTIPLIER), section(4, DEFINITION)));
    }

    // In a spliced composition, digits before the splice identifier multiply the splice.
    @Test
    public void correctlyParseSpliceMultiplier() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "DUMMY");
        composition.addCharacters(COMPOSITION_TABLE, 0, 1, "2p");
        composition.setSpliced(true);
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,1), valid(section(1, SPLICE_MULTIPLIER), section(SPLICE)));
    }

    @Test
    public void correctlyParseMultiSpliceMultiplier() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "DUMMY");
        composition.addCharacters(COMPOSITION_TABLE, 0, 1, "392p");
        composition.setSpliced(true);
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,1), valid(section(3, SPLICE_MULTIPLIER), section(SPLICE)));
    }

    // Calls ("-") are not valid in a splice column, so the whole "392-" stays unparsed.
    @Test
    public void standAloneNumbersInSplicedUnparsed() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "DUMMY");
        composition.addCharacters(COMPOSITION_TABLE, 0, 1, "392-");
        composition.setSpliced(true);
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.allCompositionCells().get(0,1), unparsed(4));
    }

    // Definition "def2" contains "2"; it parses as a default call multiplier when the
    // definition is referenced from the main (non-spliced) cells.
    @Test
    public void multiplierWorksInDefinition() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "def2");
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.findDefinitionByShorthand("def2").get().get(0,DEFINITION_COLUMN), valid(DEFAULT_CALL_MULTIPLIER));
    }

    @Test
    public void multiplierDoesNotAddDefaultCallWhenUsedOnlyInSpliceInDefinition() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "-");
        composition.addCharacters(COMPOSITION_TABLE, 0, 1, "def2");
        composition.setSpliced(true);
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.findDefinitionByShorthand("def2").get().get(0,DEFINITION_COLUMN), unparsed());
    }

    @Test
    public void multiplierDoesAddDefaultCallWhenUsedInMainCellsInDefinition() {
        MutableComposition composition = buildSingleCellComposition(buildPlainBobMinor(), "def2");
        composition.addCharacters(COMPOSITION_TABLE, 0, 1, "-");
        composition.setSpliced(true);
        Parse parse = new AssignParseType()
                .apply(composition.get());
        assertParse(parse.findDefinitionByShorthand("def2").get().get(0,DEFINITION_COLUMN), valid(DEFAULT_CALL_MULTIPLIER));
    }

    //TODO need tests of definition area

    /** Plain Bob Minor with a default call ("-" Bob), a single, and splice id "p". */
    private Notation buildPlainBobMinor() {
        return NotationBuilder.getInstance()
                .setNumberOfWorkingBells(NumberOfBells.BELLS_6)
                .setName("Plain Bob")
                .setFoldedPalindromeNotationShorthand("x16x16x16", "12")
                .addCall("Bob", "-", "14", true)
                .addCall("Single", "s", "1234", false)
                .addCallInitiationRow(7)
                .addMethodCallingPosition("W", 7, 1)
                .addMethodCallingPosition("H", 7, 2)
                .setSpliceIdentifier("p")
                .build();
    }

    // NOTE(review): currently unused by any test in this class — presumably intended
    // for future no-default-call multiplier tests; confirm before deleting.
    private Notation buildLittleBobMinorWithNoDefaultCall() {
        return NotationBuilder.getInstance()
                .setNumberOfWorkingBells(NumberOfBells.BELLS_6)
                .setName("Little Bob")
                .setFoldedPalindromeNotationShorthand("x16x14", "12")
                .addCallInitiationRow(7)
                .addMethodCallingPosition("W", 7, 1)
                .addMethodCallingPosition("H", 7, 2)
                .setSpliceIdentifier("l")
                .build();
    }

    /**
     * Builds a lead-based, non-spliced composition containing the given notation,
     * optional cell characters at (0,0), and two definitions: def1 = "-P", def2 = "2".
     */
    private MutableComposition buildSingleCellComposition(Notation notation, String characters) {
        MutableComposition composition = new MutableComposition();
        composition.setNumberOfBells(notation.getNumberOfWorkingBells());
        if (characters != null) {
            composition.addCharacters(COMPOSITION_TABLE, 0, 0, characters);
        }
        composition.addNotation(notation);
        composition.setCompositionType(CompositionType.LEAD_BASED);
        composition.setSpliced(false);
        composition.addDefinition("def1", "-P");
        composition.addDefinition("def2", "2");
        return composition;
    }
}
| |
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.imtopsales.vysper.util.google;
import java.util.NoSuchElementException;
/**
* Simple static methods to be called at the start of your own methods to verify
* correct arguments and state. This allows constructs such as
* <pre>
* if (count <= 0) {
* throw new IllegalArgumentException("must be positive: " + count);
* }</pre>
*
* to be replaced with the more compact
* <pre>
* checkArgument(count > 0, "must be positive: %s", count);</pre>
*
* Note that the sense of the expression is inverted; with {@code Preconditions}
* you declare what you expect to be <i>true</i>, just as you do with an
* <a href="http://java.sun.com/j2se/1.5.0/docs/guide/language/assert.html">
* {@code assert}</a> or a JUnit {@code assertTrue} call.
*
* <p><b>Warning:</b> only the {@code "%s"} specifier is recognized as a
* placeholder in these messages, not the full range of {@link
* String#format(String, Object[])} specifiers.
*
* <p>Take care not to confuse precondition checking with other similar types
* of checks! Precondition exceptions -- including those provided here, but also
* {@link IndexOutOfBoundsException}, {@link NoSuchElementException}, {@link
* UnsupportedOperationException} and others -- are used to signal that the
* <i>calling method</i> has made an error. This tells the caller that it should
* not have invoked the method when it did, with the arguments it did, or
* perhaps ever. Postcondition or other invariant failures should not throw
* these types of exceptions.
*
* <p>See the Guava User Guide on <a href=
* "http://code.google.com/p/guava-libraries/wiki/PreconditionsExplained">
* using {@code Preconditions}</a>.
*
* @author Kevin Bourrillion
* @since 2.0 (imported from Google Collections Library)
*/
public final class Preconditions {
private Preconditions() {}
/**
 * Validates a condition on the caller's arguments.
 *
 * @param expression the condition that must hold
 * @throws IllegalArgumentException if {@code expression} is false
 */
public static void checkArgument(boolean expression) {
    if (expression) {
        return; // precondition satisfied
    }
    throw new IllegalArgumentException();
}
/**
 * Validates a condition on the caller's arguments, failing with the supplied message.
 *
 * @param expression the condition that must hold
 * @param errorMessage message used on failure, rendered via {@link String#valueOf(Object)}
 * @throws IllegalArgumentException if {@code expression} is false
 */
public static void checkArgument(
        boolean expression, Object errorMessage) {
    if (expression) {
        return; // precondition satisfied
    }
    throw new IllegalArgumentException(String.valueOf(errorMessage));
}
/**
 * Validates a condition on the caller's arguments, failing with a templated message.
 *
 * <p>Each {@code %s} placeholder in {@code errorMessageTemplate} is replaced by the
 * matching entry of {@code errorMessageArgs}; surplus arguments are appended in
 * square braces and surplus placeholders are left alone.
 *
 * @param expression the condition that must hold
 * @param errorMessageTemplate the message template (only {@code %s} is recognized)
 * @param errorMessageArgs values substituted into the template via
 *     {@link String#valueOf(Object)}
 * @throws IllegalArgumentException if {@code expression} is false
 * @throws NullPointerException if the check fails and either {@code
 *     errorMessageTemplate} or {@code errorMessageArgs} is null
 */
public static void checkArgument(boolean expression,
        String errorMessageTemplate,
        Object... errorMessageArgs) {
    if (expression) {
        return; // precondition satisfied; template never evaluated
    }
    throw new IllegalArgumentException(
            format(errorMessageTemplate, errorMessageArgs));
}
/**
 * Validates a condition on the calling object's state (not on its arguments).
 *
 * @param expression the condition that must hold
 * @throws IllegalStateException if {@code expression} is false
 */
public static void checkState(boolean expression) {
    if (expression) {
        return; // state is consistent
    }
    throw new IllegalStateException();
}
/**
 * Validates a condition on the calling object's state, failing with the supplied message.
 *
 * @param expression the condition that must hold
 * @param errorMessage message used on failure, rendered via {@link String#valueOf(Object)}
 * @throws IllegalStateException if {@code expression} is false
 */
public static void checkState(
        boolean expression, Object errorMessage) {
    if (expression) {
        return; // state is consistent
    }
    throw new IllegalStateException(String.valueOf(errorMessage));
}
/**
 * Validates a condition on the calling object's state, failing with a templated message.
 *
 * <p>Each {@code %s} placeholder in {@code errorMessageTemplate} is replaced by the
 * matching entry of {@code errorMessageArgs}; surplus arguments are appended in
 * square braces and surplus placeholders are left alone.
 *
 * @param expression the condition that must hold
 * @param errorMessageTemplate the message template (only {@code %s} is recognized)
 * @param errorMessageArgs values substituted into the template via
 *     {@link String#valueOf(Object)}
 * @throws IllegalStateException if {@code expression} is false
 * @throws NullPointerException if the check fails and either {@code
 *     errorMessageTemplate} or {@code errorMessageArgs} is null
 */
public static void checkState(boolean expression,
        String errorMessageTemplate,
        Object... errorMessageArgs) {
    if (expression) {
        return; // state is consistent; template never evaluated
    }
    throw new IllegalStateException(
            format(errorMessageTemplate, errorMessageArgs));
}
/**
 * Rejects a null argument.
 *
 * @param reference the reference to validate
 * @return {@code reference}, guaranteed non-null
 * @throws NullPointerException if {@code reference} is null
 */
public static <T> T checkNotNull(T reference) {
    if (reference != null) {
        return reference;
    }
    throw new NullPointerException();
}
/**
 * Rejects a null argument, failing with the supplied message.
 *
 * @param reference the reference to validate
 * @param errorMessage message used on failure, rendered via {@link String#valueOf(Object)}
 * @return {@code reference}, guaranteed non-null
 * @throws NullPointerException if {@code reference} is null
 */
public static <T> T checkNotNull(T reference, Object errorMessage) {
    if (reference != null) {
        return reference;
    }
    throw new NullPointerException(String.valueOf(errorMessage));
}
/**
 * Rejects a null argument, failing with a templated message.
 *
 * <p>Each {@code %s} placeholder in {@code errorMessageTemplate} is replaced by the
 * matching entry of {@code errorMessageArgs}; surplus arguments are appended in
 * square braces and surplus placeholders are left alone.
 *
 * @param reference the reference to validate
 * @param errorMessageTemplate the message template (only {@code %s} is recognized)
 * @param errorMessageArgs values substituted into the template via
 *     {@link String#valueOf(Object)}
 * @return {@code reference}, guaranteed non-null
 * @throws NullPointerException if {@code reference} is null
 */
public static <T> T checkNotNull(T reference,
        String errorMessageTemplate,
        Object... errorMessageArgs) {
    if (reference != null) {
        return reference;
    }
    // A null template or args array is tolerated: format() does the right thing.
    throw new NullPointerException(
            format(errorMessageTemplate, errorMessageArgs));
}
/*
* All recent hotspots (as of 2009) *really* like to have the natural code
*
* if (guardExpression) {
* throw new BadException(messageExpression);
* }
*
* refactored so that messageExpression is moved to a separate
* String-returning method.
*
* if (guardExpression) {
* throw new BadException(badMsg(...));
* }
*
* The alternative natural refactorings into void or Exception-returning
* methods are much slower. This is a big deal - we're talking factors of
* 2-8 in microbenchmarks, not just 10-20%. (This is a hotspot optimizer
* bug, which should be fixed, but that's a separate, big project).
*
* The coding pattern above is heavily used in java.util, e.g. in ArrayList.
* There is a RangeCheckMicroBenchmark in the JDK that was used to test this.
*
* But the methods in this class want to throw different exceptions,
* depending on the args, so it appears that this pattern is not directly
* applicable. But we can use the ridiculous, devious trick of throwing an
* exception in the middle of the construction of another exception.
* Hotspot is fine with that.
*/
/**
 * Ensures that {@code index} specifies a valid <i>element</i> in an array,
 * list or string of size {@code size}. An element index may range from zero,
 * inclusive, to {@code size}, exclusive.
 *
 * @param index a user-supplied index identifying an element of an array, list
 * or string
 * @param size the size of that array, list or string
 * @return the value of {@code index}
 * @throws IndexOutOfBoundsException if {@code index} is negative or is not
 * less than {@code size}
 * @throws IllegalArgumentException if {@code size} is negative
 */
public static int checkElementIndex(int index, int size) {
    // Delegates to the describing overload with the default description "index".
    return checkElementIndex(index, size, "index");
}
/**
 * Validates that {@code index} addresses an existing <i>element</i> of a container
 * of size {@code size}, i.e. {@code 0 <= index < size}.
 *
 * @param index a user-supplied index identifying an element of an array, list
 *     or string
 * @param size the size of that array, list or string
 * @param desc the text to use to describe this index in an error message
 * @return the value of {@code index}
 * @throws IndexOutOfBoundsException if {@code index} is negative or is not
 *     less than {@code size}
 * @throws IllegalArgumentException if {@code size} is negative
 */
public static int checkElementIndex(
        int index, int size, String desc) {
    // Single in-range test on the happy path, kept hotspot-friendly: the message is
    // built in a separate method (see the explanatory comment earlier in this file).
    if (index >= 0 && index < size) {
        return index;
    }
    throw new IndexOutOfBoundsException(badElementIndex(index, size, desc));
}
/** Builds the failure message for {@code checkElementIndex}; checks arguments in the
 * same order as the original: negative index first, then negative size. */
private static String badElementIndex(int index, int size, String desc) {
    if (index < 0) {
        return format("%s (%s) must not be negative", desc, index);
    }
    if (size < 0) {
        throw new IllegalArgumentException("negative size: " + size);
    }
    // Remaining case: index >= size.
    return format("%s (%s) must be less than size (%s)", desc, index, size);
}
/**
 * Ensures that {@code index} specifies a valid <i>position</i> in an array,
 * list or string of size {@code size}. A position index may range from zero
 * to {@code size}, inclusive.
 *
 * @param index a user-supplied index identifying a position in an array, list
 * or string
 * @param size the size of that array, list or string
 * @return the value of {@code index}
 * @throws IndexOutOfBoundsException if {@code index} is negative or is
 * greater than {@code size}
 * @throws IllegalArgumentException if {@code size} is negative
 */
public static int checkPositionIndex(int index, int size) {
    // Delegates to the describing overload with the default description "index".
    return checkPositionIndex(index, size, "index");
}
/**
 * Ensures that {@code index} specifies a valid <i>position</i> in an array,
 * list or string of size {@code size}. Unlike an element index, a position
 * index may equal {@code size} (one past the last element).
 *
 * @param index a user-supplied index identifying a position in an array, list
 *     or string
 * @param size the size of that array, list or string
 * @param desc the text to use to describe this index in an error message
 * @return the value of {@code index}
 * @throws IndexOutOfBoundsException if {@code index} is negative or is
 *     greater than {@code size}
 * @throws IllegalArgumentException if {@code size} is negative
 */
public static int checkPositionIndex(
    int index, int size, String desc) {
  // Positive-form range test, equivalent to !(index < 0 || index > size).
  if (0 <= index && index <= size) {
    return index;
  }
  throw new IndexOutOfBoundsException(badPositionIndex(index, size, desc));
}
// Builds the exception message for an out-of-range position index. A negative
// size indicates a programming error by the caller and is surfaced as
// IllegalArgumentException instead of a formatted message.
private static String badPositionIndex(int index, int size, String desc) {
  if (index < 0) {
    return format("%s (%s) must not be negative", desc, index);
  }
  if (size < 0) {
    throw new IllegalArgumentException("negative size: " + size);
  }
  // Only remaining possibility: index > size.
  return format("%s (%s) must not be greater than size (%s)", desc, index, size);
}
/**
 * Ensures that {@code start} and {@code end} specify valid <i>positions</i>
 * in an array, list or string of size {@code size}, and are in order. A
 * position index may range from zero to {@code size}, inclusive.
 *
 * @param start a user-supplied index identifying a starting position in an
 *     array, list or string
 * @param end a user-supplied index identifying an ending position in an array,
 *     list or string
 * @param size the size of that array, list or string
 * @throws IndexOutOfBoundsException if either index is negative or is
 *     greater than {@code size}, or if {@code end} is less than {@code start}
 * @throws IllegalArgumentException if {@code size} is negative
 */
public static void checkPositionIndexes(int start, int end, int size) {
  // De Morgan rewrite of (start < 0 || end < start || end > size);
  // hotspot optimizes both forms identically.
  boolean inOrder = start >= 0 && end >= start && end <= size;
  if (!inOrder) {
    throw new IndexOutOfBoundsException(badPositionIndexes(start, end, size));
  }
}
// Picks the most specific message for an invalid (start, end) pair: first an
// out-of-range start, then an out-of-range end, otherwise the indices must be
// out of order (end < start).
private static String badPositionIndexes(int start, int end, int size) {
  if (start < 0 || start > size) {
    return badPositionIndex(start, size, "start index");
  }
  if (end < 0 || end > size) {
    return badPositionIndex(end, size, "end index");
  }
  // Only remaining possibility: end < start.
  return format("end index (%s) must not be less than start index (%s)", end, start);
}
/**
 * Substitutes each {@code %s} in {@code template} with an argument. These
 * are matched by position - the first {@code %s} gets {@code args[0]}, etc.
 * If there are more arguments than placeholders, the unmatched arguments will
 * be appended to the end of the formatted message in square braces.
 *
 * @param template a non-null string containing 0 or more {@code %s}
 *     placeholders.
 * @param args the arguments to be substituted into the message
 *     template. Arguments are converted to strings using
 *     {@link String#valueOf(Object)}. Arguments can be null.
 */
static String format(String template,
    Object... args) {
  template = String.valueOf(template); // null -> "null"
  // Rough pre-sizing: template plus ~16 chars per substituted argument.
  StringBuilder sb = new StringBuilder(template.length() + 16 * args.length);
  int copiedUpTo = 0;  // position in template copied so far
  int argIndex = 0;    // next argument to substitute
  for (; argIndex < args.length; argIndex++) {
    int placeholder = template.indexOf("%s", copiedUpTo);
    if (placeholder == -1) {
      break;  // no more placeholders; leftover args handled below
    }
    sb.append(template, copiedUpTo, placeholder);
    sb.append(args[argIndex]);
    copiedUpTo = placeholder + 2;  // skip past "%s"
  }
  sb.append(template, copiedUpTo, template.length());
  // Any arguments without a matching placeholder go at the end in brackets.
  if (argIndex < args.length) {
    sb.append(" [");
    sb.append(args[argIndex++]);
    while (argIndex < args.length) {
      sb.append(", ").append(args[argIndex++]);
    }
    sb.append(']');
  }
  return sb.toString();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations;
import com.carrotsearch.hppc.IntHashSet;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.Sum;
import org.elasticsearch.test.ESIntegTestCase;
import org.junit.After;
import org.junit.Before;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.max;
import static org.elasticsearch.search.aggregations.AggregationBuilders.min;
import static org.elasticsearch.search.aggregations.AggregationBuilders.percentiles;
import static org.elasticsearch.search.aggregations.AggregationBuilders.range;
import static org.elasticsearch.search.aggregations.AggregationBuilders.stats;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.core.IsNull.notNullValue;
/**
 * Additional tests that aim at testing more complex aggregation trees on larger random datasets, so that things like
 * the growth of dynamic arrays is tested.
 */
public class EquivalenceIT extends ESIntegTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singleton(CustomScriptPlugin.class);
    }

    /** Registers the mock script used by {@link #testDuelTermsHistogram()}. */
    public static class CustomScriptPlugin extends MockScriptPlugin {
        @Override
        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
            return Collections.singletonMap("floor(_value / interval)", vars -> {
                Double value = (Double) vars.get("_value");
                Integer interval = (Integer) vars.get("interval");
                return Math.floor(value / interval.doubleValue());
            });
        }
    }

    // NOTE: private @Before/@After hooks rely on the randomized test runner, which
    // (unlike plain JUnit 4) accepts non-public hook methods.
    @Before
    private void setupMaxBuckets() {
        // disables the max bucket limit for this test
        client().admin().cluster().prepareUpdateSettings()
            .setTransientSettings(Collections.singletonMap("search.max_buckets", Integer.MAX_VALUE))
            .get();
    }

    @After
    private void cleanupMaxBuckets() {
        // restore the default by clearing the transient setting
        client().admin().cluster().prepareUpdateSettings()
            .setTransientSettings(Collections.singletonMap("search.max_buckets", null))
            .get();
    }

    // Make sure that unordered, reversed, disjoint and/or overlapping ranges are supported
    // Duel with filters
    public void testRandomRanges() throws Exception {
        final int numDocs = scaledRandomIntBetween(500, 5000);
        final double[][] docs = new double[numDocs][];
        for (int i = 0; i < numDocs; ++i) {
            final int numValues = randomInt(5);
            docs[i] = new double[numValues];
            for (int j = 0; j < numValues; ++j) {
                docs[i][j] = randomDouble() * 100;
            }
        }
        prepareCreate("idx")
            .addMapping("type", jsonBuilder()
                .startObject()
                    .startObject("type")
                        .startObject("properties")
                            .startObject("values")
                                .field("type", "double")
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject()).execute().actionGet();
        for (int i = 0; i < docs.length; ++i) {
            XContentBuilder source = jsonBuilder()
                .startObject()
                .startArray("values");
            for (int j = 0; j < docs[i].length; ++j) {
                source = source.value(docs[i][j]);
            }
            source = source.endArray().endObject();
            client().prepareIndex("idx", "type").setSource(source).execute().actionGet();
        }
        assertNoFailures(client().admin().indices().prepareRefresh("idx").
                setIndicesOptions(IndicesOptions.lenientExpandOpen())
                .get());
        // Build random ranges: unbounded-below, unbounded-above, or bounded (possibly reversed/empty).
        final int numRanges = randomIntBetween(1, 20);
        final double[][] ranges = new double[numRanges][];
        for (int i = 0; i < ranges.length; ++i) {
            switch (randomInt(2)) {
            case 0:
                ranges[i] = new double[] { Double.NEGATIVE_INFINITY, randomInt(100) };
                break;
            case 1:
                ranges[i] = new double[] { randomInt(100), Double.POSITIVE_INFINITY };
                break;
            case 2:
                ranges[i] = new double[] { randomInt(100), randomInt(100) };
                break;
            default:
                throw new AssertionError();
            }
        }
        RangeAggregationBuilder query = range("range").field("values");
        for (int i = 0; i < ranges.length; ++i) {
            String key = Integer.toString(i);
            if (ranges[i][0] == Double.NEGATIVE_INFINITY) {
                query.addUnboundedTo(key, ranges[i][1]);
            } else if (ranges[i][1] == Double.POSITIVE_INFINITY) {
                query.addUnboundedFrom(key, ranges[i][0]);
            } else {
                query.addRange(key, ranges[i][0], ranges[i][1]);
            }
        }
        // One range query filter per range bucket, so bucket doc counts can be duelled against filters.
        SearchRequestBuilder reqBuilder = client().prepareSearch("idx").addAggregation(query);
        for (int i = 0; i < ranges.length; ++i) {
            RangeQueryBuilder filter = QueryBuilders.rangeQuery("values");
            if (ranges[i][0] != Double.NEGATIVE_INFINITY) {
                filter = filter.from(ranges[i][0]);
            }
            if (ranges[i][1] != Double.POSITIVE_INFINITY){
                filter = filter.to(ranges[i][1]);
            }
            reqBuilder = reqBuilder.addAggregation(filter("filter" + i, filter));
        }
        SearchResponse resp = reqBuilder.execute().actionGet();
        Range range = resp.getAggregations().get("range");
        List<? extends Bucket> buckets = range.getBuckets();
        HashMap<String, Bucket> bucketMap = new HashMap<>(buckets.size());
        for (Bucket bucket : buckets) {
            bucketMap.put(bucket.getKeyAsString(), bucket);
        }
        // Recompute expected per-range doc counts client-side and compare against both aggs.
        for (int i = 0; i < ranges.length; ++i) {
            long count = 0;
            for (double[] values : docs) {
                for (double value : values) {
                    if (value >= ranges[i][0] && value < ranges[i][1]) {
                        ++count;
                        break;
                    }
                }
            }
            final Range.Bucket bucket = bucketMap.get(Integer.toString(i));
            assertEquals(bucket.getKeyAsString(), Integer.toString(i), bucket.getKeyAsString());
            assertEquals(bucket.getKeyAsString(), count, bucket.getDocCount());
            final Filter filter = resp.getAggregations().get("filter" + i);
            assertThat(filter.getDocCount(), equalTo(count));
        }
    }

    // test long/double/string terms aggs with high number of buckets that require array growth
    public void testDuelTerms() throws Exception {
        final int numDocs = scaledRandomIntBetween(1000, 2000);
        final int maxNumTerms = randomIntBetween(10, 5000);
        final IntHashSet valuesSet = new IntHashSet();
        cluster().wipeIndices("idx");
        prepareCreate("idx")
            .addMapping("type", jsonBuilder()
                .startObject()
                    .startObject("type")
                        .startObject("properties")
                            .startObject("num")
                                .field("type", "double")
                            .endObject()
                            .startObject("string_values")
                                .field("type", "keyword")
                                .startObject("fields")
                                    .startObject("doc_values")
                                        .field("type", "keyword")
                                        .field("index", false)
                                    .endObject()
                                .endObject()
                            .endObject()
                            .startObject("long_values")
                                .field("type", "long")
                            .endObject()
                            .startObject("double_values")
                                .field("type", "double")
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject()).execute().actionGet();
        List<IndexRequestBuilder> indexingRequests = new ArrayList<>();
        for (int i = 0; i < numDocs; ++i) {
            // Each doc carries the same random ints as long, double and string fields
            // so the three terms aggregations must agree on buckets and counts.
            final int[] values = new int[randomInt(4)];
            for (int j = 0; j < values.length; ++j) {
                values[j] = randomInt(maxNumTerms - 1) - 1000;
                valuesSet.add(values[j]);
            }
            XContentBuilder source = jsonBuilder()
                .startObject()
                .field("num", randomDouble())
                .startArray("long_values");
            for (int j = 0; j < values.length; ++j) {
                source = source.value(values[j]);
            }
            source = source.endArray().startArray("double_values");
            for (int j = 0; j < values.length; ++j) {
                source = source.value((double) values[j]);
            }
            source = source.endArray().startArray("string_values");
            for (int j = 0; j < values.length; ++j) {
                source = source.value(Integer.toString(values[j]));
            }
            source = source.endArray().endObject();
            indexingRequests.add(client().prepareIndex("idx", "type").setSource(source));
        }
        indexRandom(true, indexingRequests);
        assertNoFailures(client().admin().indices().prepareRefresh("idx")
                .setIndicesOptions(IndicesOptions.lenientExpandOpen())
                .execute().get());
        SearchResponse resp = client().prepareSearch("idx")
                .addAggregation(
                        terms("long")
                                .field("long_values")
                                .size(maxNumTerms)
                                .collectMode(randomFrom(SubAggCollectionMode.values()))
                                .subAggregation(min("min").field("num")))
                .addAggregation(
                        terms("double")
                                .field("double_values")
                                .size(maxNumTerms)
                                .collectMode(randomFrom(SubAggCollectionMode.values()))
                                .subAggregation(max("max").field("num")))
                .addAggregation(
                        terms("string_map")
                                .field("string_values")
                                .collectMode(randomFrom(SubAggCollectionMode.values()))
                                .executionHint(TermsAggregatorFactory.ExecutionMode.MAP.toString())
                                .size(maxNumTerms)
                                .subAggregation(stats("stats").field("num")))
                .addAggregation(
                        terms("string_global_ordinals")
                                .field("string_values")
                                .collectMode(randomFrom(SubAggCollectionMode.values()))
                                .executionHint(TermsAggregatorFactory.ExecutionMode.GLOBAL_ORDINALS.toString())
                                .size(maxNumTerms)
                                .subAggregation(extendedStats("stats").field("num")))
                .addAggregation(
                        terms("string_global_ordinals_doc_values")
                                .field("string_values.doc_values")
                                .collectMode(randomFrom(SubAggCollectionMode.values()))
                                .executionHint(TermsAggregatorFactory.ExecutionMode.GLOBAL_ORDINALS.toString())
                                .size(maxNumTerms)
                                .subAggregation(extendedStats("stats").field("num")))
                .execute().actionGet();
        assertAllSuccessful(resp);
        assertEquals(numDocs, resp.getHits().getTotalHits());
        final Terms longTerms = resp.getAggregations().get("long");
        final Terms doubleTerms = resp.getAggregations().get("double");
        final Terms stringMapTerms = resp.getAggregations().get("string_map");
        final Terms stringGlobalOrdinalsTerms = resp.getAggregations().get("string_global_ordinals");
        final Terms stringGlobalOrdinalsDVTerms = resp.getAggregations().get("string_global_ordinals_doc_values");
        assertEquals(valuesSet.size(), longTerms.getBuckets().size());
        assertEquals(valuesSet.size(), doubleTerms.getBuckets().size());
        assertEquals(valuesSet.size(), stringMapTerms.getBuckets().size());
        assertEquals(valuesSet.size(), stringGlobalOrdinalsTerms.getBuckets().size());
        assertEquals(valuesSet.size(), stringGlobalOrdinalsDVTerms.getBuckets().size());
        for (Terms.Bucket bucket : longTerms.getBuckets()) {
            final Terms.Bucket doubleBucket = doubleTerms.getBucketByKey(Double.toString(Long.parseLong(bucket.getKeyAsString())));
            final Terms.Bucket stringMapBucket = stringMapTerms.getBucketByKey(bucket.getKeyAsString());
            final Terms.Bucket stringGlobalOrdinalsBucket = stringGlobalOrdinalsTerms.getBucketByKey(bucket.getKeyAsString());
            final Terms.Bucket stringGlobalOrdinalsDVBucket = stringGlobalOrdinalsDVTerms.getBucketByKey(bucket.getKeyAsString());
            assertNotNull(doubleBucket);
            assertNotNull(stringMapBucket);
            assertNotNull(stringGlobalOrdinalsBucket);
            assertNotNull(stringGlobalOrdinalsDVBucket);
            assertEquals(bucket.getDocCount(), doubleBucket.getDocCount());
            assertEquals(bucket.getDocCount(), stringMapBucket.getDocCount());
            assertEquals(bucket.getDocCount(), stringGlobalOrdinalsBucket.getDocCount());
            assertEquals(bucket.getDocCount(), stringGlobalOrdinalsDVBucket.getDocCount());
        }
    }

    // Duel between histograms and scripted terms
    public void testDuelTermsHistogram() throws Exception {
        prepareCreate("idx")
            .addMapping("type", jsonBuilder()
                .startObject()
                    .startObject("type")
                        .startObject("properties")
                            .startObject("num")
                                .field("type", "double")
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject()).execute().actionGet();
        final int numDocs = scaledRandomIntBetween(500, 5000);
        final int maxNumTerms = randomIntBetween(10, 2000);
        final int interval = randomIntBetween(1, 100);
        final Integer[] values = new Integer[maxNumTerms];
        for (int i = 0; i < values.length; ++i) {
            values[i] = randomInt(maxNumTerms * 3) - maxNumTerms;
        }
        for (int i = 0; i < numDocs; ++i) {
            XContentBuilder source = jsonBuilder()
                .startObject()
                .field("num", randomDouble())
                .startArray("values");
            final int numValues = randomInt(4);
            for (int j = 0; j < numValues; ++j) {
                source = source.value(randomFrom(values));
            }
            source = source.endArray().endObject();
            client().prepareIndex("idx", "type").setSource(source).execute().actionGet();
        }
        assertNoFailures(client().admin().indices().prepareRefresh("idx")
                .setIndicesOptions(IndicesOptions.lenientExpandOpen())
                .execute().get());
        Map<String, Object> params = new HashMap<>();
        params.put("interval", interval);
        // The scripted terms agg buckets floor(value / interval), which must line up
        // one-to-one with the histogram buckets of the same interval.
        SearchResponse resp = client().prepareSearch("idx")
                .addAggregation(
                        terms("terms")
                                .field("values")
                                .collectMode(randomFrom(SubAggCollectionMode.values()))
                                .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "floor(_value / interval)", params))
                                .size(maxNumTerms))
                .addAggregation(
                        histogram("histo")
                                .field("values")
                                .interval(interval)
                                .minDocCount(1))
                .execute().actionGet();
        assertSearchResponse(resp);
        Terms terms = resp.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        Histogram histo = resp.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(terms.getBuckets().size(), equalTo(histo.getBuckets().size()));
        for (Histogram.Bucket bucket : histo.getBuckets()) {
            final double key = ((Number) bucket.getKey()).doubleValue() / interval;
            final Terms.Bucket termsBucket = terms.getBucketByKey(String.valueOf(key));
            assertEquals(bucket.getDocCount(), termsBucket.getDocCount());
        }
    }

    public void testLargeNumbersOfPercentileBuckets() throws Exception {
        // test high numbers of percentile buckets to make sure paging and release work correctly
        prepareCreate("idx")
            .addMapping("type", jsonBuilder()
                .startObject()
                    .startObject("type")
                        .startObject("properties")
                            .startObject("double_value")
                                .field("type", "double")
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject()).execute().actionGet();
        final int numDocs = scaledRandomIntBetween(2500, 5000);
        logger.info("Indexing [{}] docs", numDocs);
        List<IndexRequestBuilder> indexingRequests = new ArrayList<>();
        for (int i = 0; i < numDocs; ++i) {
            indexingRequests.add(client().prepareIndex("idx", "type", Integer.toString(i)).setSource("double_value", randomDouble()));
        }
        indexRandom(true, indexingRequests);
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(
                        terms("terms")
                                .field("double_value")
                                .collectMode(randomFrom(SubAggCollectionMode.values()))
                                .subAggregation(percentiles("pcts").field("double_value")))
                .execute().actionGet();
        assertAllSuccessful(response);
        assertEquals(numDocs, response.getHits().getTotalHits());
    }

    // https://github.com/elastic/elasticsearch/issues/6435
    public void testReduce() throws Exception {
        createIndex("idx");
        final int value = randomIntBetween(0, 10);
        indexRandom(true, client().prepareIndex("idx", "type").setSource("f", value));
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(filter("filter", QueryBuilders.matchAllQuery())
                        .subAggregation(range("range")
                                .field("f")
                                .addUnboundedTo(6)
                                .addUnboundedFrom(6)
                                .subAggregation(sum("sum").field("f"))))
                .execute().actionGet();
        assertSearchResponse(response);
        Filter filter = response.getAggregations().get("filter");
        assertNotNull(filter);
        assertEquals(1, filter.getDocCount());
        Range range = filter.getAggregations().get("range");
        assertThat(range, notNullValue());
        assertThat(range.getName(), equalTo("range"));
        List<? extends Bucket> buckets = range.getBuckets();
        assertThat(buckets.size(), equalTo(2));
        // Bucket 0: (-inf, 6.0); contains the doc iff value < 6
        Range.Bucket bucket = buckets.get(0);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("*-6.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
        assertThat(bucket.getDocCount(), equalTo(value < 6 ? 1L : 0L));
        Sum sum = bucket.getAggregations().get("sum");
        assertEquals(value < 6 ? value : 0, sum.getValue(), 0d);
        // Bucket 1: [6.0, +inf); contains the doc iff value >= 6
        bucket = buckets.get(1);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("6.0-*"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(bucket.getDocCount(), equalTo(value >= 6 ? 1L : 0L));
        sum = bucket.getAggregations().get("sum");
        assertEquals(value >= 6 ? value : 0, sum.getValue(), 0d);
    }

    /**
     * Asserts that two {@link Terms} aggregations have the same buckets (same size,
     * and pairwise-equal keys and doc counts, in iteration order).
     */
    private void assertEquals(Terms t1, Terms t2) {
        List<? extends Terms.Bucket> t1Buckets = t1.getBuckets();
        // BUG FIX: this previously read t1.getBuckets() again, so the helper
        // compared t1 against itself and never actually validated t2.
        List<? extends Terms.Bucket> t2Buckets = t2.getBuckets();
        assertEquals(t1Buckets.size(), t2Buckets.size());
        for (Iterator<? extends Terms.Bucket> it1 = t1Buckets.iterator(), it2 = t2Buckets.iterator(); it1.hasNext(); ) {
            final Terms.Bucket b1 = it1.next();
            final Terms.Bucket b2 = it2.next();
            assertEquals(b1.getDocCount(), b2.getDocCount());
            assertEquals(b1.getKey(), b2.getKey());
        }
    }

    // Depth-first and breadth-first collection must produce identical aggregation trees.
    public void testDuelDepthBreadthFirst() throws Exception {
        createIndex("idx");
        final int numDocs = randomIntBetween(100, 500);
        List<IndexRequestBuilder> reqs = new ArrayList<>();
        for (int i = 0; i < numDocs; ++i) {
            final int v1 = randomInt(1 << randomInt(7));
            final int v2 = randomInt(1 << randomInt(7));
            final int v3 = randomInt(1 << randomInt(7));
            reqs.add(client().prepareIndex("idx", "type").setSource("f1", v1, "f2", v2, "f3", v3));
        }
        indexRandom(true, reqs);
        final SearchResponse r1 = client().prepareSearch("idx").addAggregation(
                terms("f1").field("f1").collectMode(SubAggCollectionMode.DEPTH_FIRST)
                .subAggregation(terms("f2").field("f2").collectMode(SubAggCollectionMode.DEPTH_FIRST)
                .subAggregation(terms("f3").field("f3").collectMode(SubAggCollectionMode.DEPTH_FIRST)))).get();
        assertSearchResponse(r1);
        final SearchResponse r2 = client().prepareSearch("idx").addAggregation(
                terms("f1").field("f1").collectMode(SubAggCollectionMode.BREADTH_FIRST)
                .subAggregation(terms("f2").field("f2").collectMode(SubAggCollectionMode.BREADTH_FIRST)
                .subAggregation(terms("f3").field("f3").collectMode(SubAggCollectionMode.BREADTH_FIRST)))).get();
        assertSearchResponse(r2);
        final Terms t1 = r1.getAggregations().get("f1");
        final Terms t2 = r2.getAggregations().get("f1");
        assertEquals(t1, t2);
        for (Terms.Bucket b1 : t1.getBuckets()) {
            final Terms.Bucket b2 = t2.getBucketByKey(b1.getKeyAsString());
            final Terms sub1 = b1.getAggregations().get("f2");
            final Terms sub2 = b2.getAggregations().get("f2");
            assertEquals(sub1, sub2);
            for (Terms.Bucket subB1 : sub1.getBuckets()) {
                final Terms.Bucket subB2 = sub2.getBucketByKey(subB1.getKeyAsString());
                final Terms subSub1 = subB1.getAggregations().get("f3");
                final Terms subSub2 = subB2.getAggregations().get("f3");
                assertEquals(subSub1, subSub2);
            }
        }
    }
}
| |
/*
* Copyright (c) 2022, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.alg.fiducial.square;
import boofcv.abst.filter.binary.BinaryContourFinder;
import boofcv.abst.filter.binary.BinaryContourHelper;
import boofcv.abst.filter.binary.InputToBinary;
import boofcv.abst.geo.Estimate1ofEpipolar;
import boofcv.abst.geo.RefineEpipolar;
import boofcv.alg.distort.ImageDistort;
import boofcv.alg.distort.LensDistortionNarrowFOV;
import boofcv.alg.distort.PixelTransformCached_F32;
import boofcv.alg.distort.PointTransformHomography_F32;
import boofcv.alg.interpolate.InterpolatePixelS;
import boofcv.alg.shapes.polygon.DetectPolygonBinaryGrayRefine;
import boofcv.alg.shapes.polygon.DetectPolygonFromContour;
import boofcv.core.image.border.FactoryImageBorder;
import boofcv.factory.distort.FactoryDistort;
import boofcv.factory.geo.EpipolarError;
import boofcv.factory.geo.FactoryMultiView;
import boofcv.factory.interpolate.FactoryInterpolation;
import boofcv.misc.BoofMiscOps;
import boofcv.struct.ConfigLength;
import boofcv.struct.border.BorderType;
import boofcv.struct.distort.*;
import boofcv.struct.geo.AssociatedPair;
import boofcv.struct.image.GrayF32;
import boofcv.struct.image.GrayU8;
import boofcv.struct.image.ImageGray;
import georegression.geometry.UtilPolygons2D_F64;
import georegression.struct.ConvertFloatType;
import georegression.struct.homography.Homography2D_F64;
import georegression.struct.point.Point2D_F32;
import georegression.struct.point.Point2D_F64;
import georegression.struct.shapes.Polygon2D_F64;
import lombok.Getter;
import lombok.Setter;
import org.ddogleg.struct.DogArray;
import org.ddogleg.struct.VerbosePrint;
import org.ejml.UtilEjml;
import org.ejml.data.DMatrixRMaj;
import org.ejml.ops.DConvertMatrixStruct;
import org.jetbrains.annotations.Nullable;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
/**
* <p>
* Base class for square fiducial detectors. Searches for quadrilaterals inside the image with a black border
* and inner contours. It then removes perspective and lens distortion from the candidate quadrilateral and
* rendered onto a new image. The just mentioned image is then passed on to the class which extends this one.
* After being processed by the extending class, the corners are rotated to match and the 3D pose of the
* target found. Lens distortion is removed sparsely for performance reasons.
* </p>
*
* <p>
* Must call {@link #configure} before it can process an image.
* </p>
*
* <p>
* Target orientation. Corner 0 = (-r,r), 1 = (r,r) , 2 = (r,-r) , 3 = (-r,-r).
* </p>
*
* @author Peter Abeles
*/
// TODO create unit test for bright object
public abstract class BaseDetectFiducialSquare<T extends ImageGray<T>> implements VerbosePrint {
// Storage for the found fiducials
private final DogArray<FoundFiducial> found = new DogArray<>(FoundFiducial::new);
/** converts input image into a binary image */
@Getter InputToBinary<T> inputToBinary;
/** Detects the squares */
@Getter DetectPolygonBinaryGrayRefine<T> squareDetector;
// Helps adjust the binary image for input into the contour finding algorithm
BinaryContourHelper contourHelper;
// image with lens and perspective distortion removed from it
GrayF32 square;
// Used to compute/remove perspective distortion
private final Estimate1ofEpipolar computeHomography = FactoryMultiView.homographyDLT(true);
private final RefineEpipolar refineHomography = FactoryMultiView.homographyRefine(1e-4, 100, EpipolarError.SAMPSON);
private final DMatrixRMaj H = new DMatrixRMaj(3, 3);
private final DMatrixRMaj H_refined = new DMatrixRMaj(3, 3);
private final Homography2D_F64 H_fixed = new Homography2D_F64();
private final List<AssociatedPair> pairsRemovePerspective = new ArrayList<>();
private final ImageDistort<T, GrayF32> removePerspective;
private final PointTransformHomography_F32 transformHomography = new PointTransformHomography_F32();
private Point2Transform2_F64 undistToDist = new DoNothing2Transform2_F64();
/** How wide the border is relative to the fiducial's total width */
protected @Getter double borderWidthFraction;
// the minimum fraction of border pixels which must be black for it to be considered a fiducial
private final double minimumBorderBlackFraction;
// Storage for results of fiducial reading
private final Result result = new Result();
/** type of input image */
private @Getter final Class<T> inputType;
/** Smallest allowed aspect ratio between the smallest and largest side in a polygon */
private @Getter @Setter double thresholdSideRatio = 0.05;
@Nullable PrintStream verbose;
/**
 * Configures the detector.
 *
 * @param inputToBinary Converts input image into a binary image
 * @param squareDetector Detects the quadrilaterals in the image
 * @param binaryCopy If true a copy is created of the binary image and it's not modified.
 * @param borderWidthFraction Fraction of the fiducial's width that the border occupies. 0.25 is recommended.
 * @param minimumBorderBlackFraction Minimum fraction of pixels inside the border which must be black. Try 0.65
 * @param squarePixels Number of pixels wide the undistorted square image of the fiducial's interior is.
 * This will include the black border.
 * @param inputType Type of input image it's processing
 */
protected BaseDetectFiducialSquare( InputToBinary<T> inputToBinary,
                                    DetectPolygonBinaryGrayRefine<T> squareDetector,
                                    boolean binaryCopy,
                                    double borderWidthFraction, double minimumBorderBlackFraction,
                                    int squarePixels,
                                    Class<T> inputType ) {
    // Fiducials are dark quadrilaterals: counter-clockwise (up-Y), convex, exactly 4 sides
    squareDetector.getDetector().setOutputClockwiseUpY(false);
    squareDetector.getDetector().setConvex(true);
    squareDetector.getDetector().setNumberOfSides(4, 4);

    if (borderWidthFraction <= 0 || borderWidthFraction >= 0.5)
        throw new RuntimeException("Border width fraction must be 0 < x < 0.5");

    this.borderWidthFraction = borderWidthFraction;
    this.minimumBorderBlackFraction = minimumBorderBlackFraction;
    this.inputToBinary = inputToBinary;
    this.squareDetector = squareDetector;
    this.inputType = inputType;
    this.square = new GrayF32(squarePixels, squarePixels);

    // One associated pair per quadrilateral corner, reused across detections
    int corner = 0;
    while (corner < 4) {
        pairsRemovePerspective.add(new AssociatedPair());
        corner++;
    }

    // this combines two separate sources of distortion together so that it can be removed in the final image which
    // is sent to fiducial decoder
    InterpolatePixelS<T> interpolator = FactoryInterpolation.nearestNeighborPixelS(inputType);
    interpolator.setBorder(FactoryImageBorder.single(BorderType.EXTENDED, inputType));
    removePerspective = FactoryDistort.distortSB(false, interpolator, GrayF32.class);

    // if no camera parameters is specified default to this
    removePerspective.setModel(new PointToPixelTransform_F32(transformHomography));

    contourHelper = new BinaryContourHelper(squareDetector.getDetector().getContourFinder(), binaryCopy);
}
/**
 * Specifies the image's intrinsic parameters and target size
 *
 * @param distortion Lens distortion
 * @param width Image width
 * @param height Image height
 * @param cache If there's lens distortion should it cache the transforms? Speeds it up by about 12%. Ignored
 * if no lens distortion
 */
public void configure( @Nullable LensDistortionNarrowFOV distortion, int width, int height, boolean cache ) {
    if (distortion == null) {
        // No lens model: only the homography maps the square back into the input image
        removePerspective.setModel(new PointToPixelTransform_F32(transformHomography));
        squareDetector.setLensDistortion(width, height, null, null);
        undistToDist = new DoNothing2Transform2_F64();
        return;
    }

    Point2Transform2_F32 pointDistToUndist = distortion.undistort_F32(true, true);
    Point2Transform2_F32 pointUndistToDist = distortion.distort_F32(true, true);

    // Sanity check to see if the camera model has no lens distortion. If there is no lens distortion then
    // there's no need to do distort/undistort the image and everything will run faster
    Point2D_F32 sample = new Point2D_F32();
    pointDistToUndist.compute(0, 0, sample);
    if (sample.norm() <= UtilEjml.TEST_F32) {
        configure(null, width, height, false);
        return;
    }

    // Pixel-level versions of the transforms; renamed locals avoid shadowing the undistToDist field
    PixelTransform<Point2D_F32> pixelDistToUndist = new PointToPixelTransform_F32(pointDistToUndist);
    PixelTransform<Point2D_F32> pixelUndistToDist = new PointToPixelTransform_F32(pointUndistToDist);
    if (cache) {
        pixelDistToUndist = new PixelTransformCached_F32(width, height, pixelDistToUndist);
        pixelUndistToDist = new PixelTransformCached_F32(width, height, pixelUndistToDist);
    }
    squareDetector.setLensDistortion(width, height, pixelDistToUndist, pixelUndistToDist);

    // provide intrinsic camera parameters: square -> undistorted (homography) -> distorted input
    Point2Transform2_F32 pointSquareToInput =
            new SequencePoint2Transform2_F32(transformHomography, pointUndistToDist);
    removePerspective.setModel(new PointToPixelTransform_F32(pointSquareToInput));

    this.undistToDist = distortion.distort_F64(true, true);
}
// Workspace polygon used by process() to pick the vertex ordering that samples
// most inside the image. See the "interpolation hack" comment in process().
private final Polygon2D_F64 interpolationHack = new Polygon2D_F64(4);
// Candidate quadrilaterals found by squareDetector (undistorted pixels) and
// their associated contour metadata, index-aligned with each other
List<Polygon2D_F64> candidates = new ArrayList<>();
List<DetectPolygonFromContour.Info> candidatesInfo = new ArrayList<>();
/**
 * Examines the input image to detect fiducials inside it
 *
 * @param gray Undistorted input image
 */
public void process( T gray ) {
	configureContourDetector(gray);
	contourHelper.reshape(gray.width, gray.height);

	// binarize the input, then detect candidate quadrilaterals in the binary image
	inputToBinary.process(gray, contourHelper.withoutPadding());
	squareDetector.process(gray, contourHelper.padded());
	squareDetector.refineAll();
	// These are in undistorted pixels
	squareDetector.getPolygons(candidates, candidatesInfo);

	found.reset();

	if (verbose != null) verbose.println("---------- Got Polygons! " + candidates.size());
	for (int i = 0; i < candidates.size(); i++) {
		// compute the homography from the input image to an undistorted square image
		// If lens distortion has been specified this polygon will be in undistorted pixels
		Polygon2D_F64 p = candidates.get(i);

		// System.out.println(i+" processing... "+p.areaSimple()+" at "+p.get(0));

		// sanity check before processing
		if (!checkSideSize(p)) {
			if (verbose != null) verbose.println("_ rejected side aspect ratio or size");
			continue;
		}

		// REMOVE EVENTUALLY This is a hack around how interpolation is performed
		// Using a surface integral instead would remove the need for this. Basically by having it start
		// interpolating from the lower extent it samples inside the image more
		// A good unit test to see if this hack is no longer needed is to rotate the order of the polygon and
		// see if it returns the same undistorted image each time
		double best = Double.MAX_VALUE;
		for (int j = 0; j < 4; j++) {
			// NOTE: shiftDown() rotates the vertex ordering each iteration, so get(0)
			// visits every corner. The local 'found' shadows the class field 'found'.
			double found = p.get(0).normSq();
			if (found < best) {
				best = found;
				interpolationHack.setTo(p);
			}
			UtilPolygons2D_F64.shiftDown(p);
		}
		p.setTo(interpolationHack);

		// remember, visual clockwise isn't the same as math clockwise, hence
		// counter clockwise visual to the clockwise quad
		pairsRemovePerspective.get(0).setTo(0, 0, p.get(0).x, p.get(0).y);
		pairsRemovePerspective.get(1).setTo(square.width, 0, p.get(1).x, p.get(1).y);
		pairsRemovePerspective.get(2).setTo(square.width, square.height, p.get(2).x, p.get(2).y);
		pairsRemovePerspective.get(3).setTo(0, square.height, p.get(3).x, p.get(3).y);

		if (!computeHomography.process(pairsRemovePerspective, H)) {
			if (verbose != null) verbose.println("_ rejected initial homography");
			continue;
		}

		// refine homography estimate
		if (!refineHomography.fitModel(pairsRemovePerspective, H, H_refined)) {
			if (verbose != null) verbose.println("_ rejected refine homography");
			continue;
		}

		// pass the found homography onto the image transform
		DConvertMatrixStruct.convert(H_refined, H_fixed);
		ConvertFloatType.convert(H_fixed, transformHomography.getModel());

		// TODO Improve how perspective is removed
		// The current method introduces artifacts. If the "square" is larger
		// than the detected region and bilinear interpolation is used then pixels outside will influence the
		// value of pixels inside and shift things over. this is all bad

		// remove the perspective distortion and process it
		removePerspective.apply(gray, square);
		DetectPolygonFromContour.Info info = candidatesInfo.get(i);

		// see if the black border is actually black
		if (minimumBorderBlackFraction > 0) {
			// threshold halfway between the measured inside/outside edge intensities
			double pixelThreshold = (info.edgeInside + info.edgeOutside)/2;
			double foundFraction = computeFractionBoundary((float)pixelThreshold);
			if (foundFraction < minimumBorderBlackFraction) {
				if (verbose != null) verbose.println("_ rejected black border fraction " + foundFraction);
				continue;
			}
		}
		if (processSquare(square, result, info.edgeInside, info.edgeOutside)) {
			prepareForOutput(p, result);
			if (verbose != null) verbose.println("_ accepted!");
		} else {
			if (verbose != null) verbose.println("_ rejected process square");
		}
	}
}
/**
 * Sanity check on the polygon's side lengths: rejects candidates that are too
 * small to decode or so skewed by perspective that decoding is unlikely.
 */
private boolean checkSideSize( Polygon2D_F64 p ) {
	double longest = 0;
	double shortest = Double.MAX_VALUE;
	for (int idx = 0; idx < p.size(); idx++) {
		double length = p.getSideLength(idx);
		if (length > longest)
			longest = length;
		if (length < shortest)
			shortest = length;
	}
	// a side this small can't be decoded reliably
	if (shortest < 10)
		return false;
	// reject shapes under extreme perspective distortion
	return shortest/longest >= thresholdSideRatio;
}
/**
 * Tunes the contour detector for the current image size. Capping the contour
 * length and discarding inner contours improves speed and significantly
 * reduces the memory footprint.
 */
private void configureContourDetector( T gray ) {
	// upper bound on a square's contour length derived from the smallest image dimension
	int smallestSide = Math.min(gray.width, gray.height);
	BinaryContourFinder finder = squareDetector.getDetector().getContourFinder();
	finder.setMaxContour(ConfigLength.fixed(smallestSide*4)); // TODO this should not be hardcoded
	finder.setSaveInnerContour(false);
}
/**
 * Computes the fraction of pixels inside the image border which are black
 *
 * @param pixelThreshold Pixel's less than this value are considered black
 * @return fraction of border that's black
 */
protected double computeFractionBoundary( float pixelThreshold ) {
	// TODO ignore outer pixels from this computation. Will require 8 regions (4 corners + top/bottom + left/right)

	// Index arithmetic below assumes square's stride equals its width, which holds
	// because 'square' is allocated directly via new GrayF32(w, w) in the constructor.
	final int w = square.width;
	// border thickness in pixels of the rendered square
	int radius = (int)(w*borderWidthFraction);
	int innerWidth = w - 2*radius;
	// total number of pixels belonging to the border region
	int total = w*w - innerWidth*innerWidth;
	int count = 0;
	// top and bottom strips: 'radius' rows each, spanning the full width
	for (int y = 0; y < radius; y++) {
		int indexTop = y*w;
		int indexBottom = (w - radius + y)*w;
		for (int x = 0; x < w; x++) {
			if (square.data[indexTop++] < pixelThreshold)
				count++;
			if (square.data[indexBottom++] < pixelThreshold)
				count++;
		}
	}
	// left and right strips for the remaining middle rows, 'radius' columns each
	for (int y = radius; y < w - radius; y++) {
		int indexLeft = y*w;
		int indexRight = y*w + w - radius;
		for (int x = 0; x < radius; x++) {
			if (square.data[indexLeft++] < pixelThreshold)
				count++;
			if (square.data[indexRight++] < pixelThreshold)
				count++;
		}
	}
	return count/(double)total;
}
/**
 * Takes the found quadrilateral and the computed 3D information and prepares it for output
 *
 * @param imageShape quadrilateral vertices in undistorted image pixels; rotated in place
 * @param result decoded target id, rotation, and fit error
 */
private void prepareForOutput( Polygon2D_F64 imageShape, Result result ) {
	// the rotation estimate, apply in counter clockwise direction
	// since result.rotation is a clockwise rotation in the visual sense, which
	// is CCW on the grid
	int rotationCCW = (4 - result.rotation)%4;
	for (int j = 0; j < rotationCCW; j++) {
		UtilPolygons2D_F64.shiftUp(imageShape);
	}

	// save the results for output
	FoundFiducial f = found.grow();
	f.id = result.which;
	f.encodingError = result.error;
	for (int i = 0; i < 4; i++) {
		Point2D_F64 a = imageShape.get(i);
		// convert corners back into distorted (raw image) pixel coordinates
		undistToDist.compute(a.x, a.y, f.distortedPixels.get(i));
	}
}
/**
 * Returns the list of fiducials accepted by the most recent call to process().
 * The array is reset at the start of each process() call.
 */
public DogArray<FoundFiducial> getFound() {
	return found;
}
/**
 * Processes the detected square and matches it to a known fiducial. Black border
 * is included.
 *
 * @param square Image of the undistorted square
 * @param result Which target and its orientation was found; written to on success
 * @param edgeInside Average pixel value along edge inside
 * @param edgeOutside Average pixel value along edge outside
 * @return true if the square matches a known target.
 */
protected abstract boolean processSquare( GrayF32 square, Result result, double edgeInside, double edgeOutside );
// Enables verbose diagnostic output; 'configuration' flags are currently ignored.
@Override public void setVerbose( @Nullable PrintStream out, @Nullable Set<String> configuration ) {
	verbose = BoofMiscOps.addPrefix(this, out);
}
// Returns the binarized input image (without padding) from the last process() call.
public GrayU8 getBinary() {
	return contourHelper.withoutPadding();
}
// Value object describing a decoded fiducial: which target, its size, orientation,
// and how well the observation matched
public static class Result {
	// which known target was matched
	int which;
	// length of one of the sides in world units
	double lengthSide;
	// amount of clockwise rotation. Each value = +90 degrees
	// Just to make things confusing, the rotation is done in the visual clockwise, which
	// is a counter-clockwise rotation when you look at the actual coordinates
	int rotation;
	// Optional error. How good of a fit the observed pattern is to the observed marker
	double error;
}
}
| |
/**
*
* Statement object for the tinySQL driver
*
* A lot of this code is based on or directly taken from
* George Reese's (borg@imaginary.com) mSQL driver.
*
* So, it's probably safe to say:
*
* Portions of this code Copyright (c) 1996 George Reese
*
* The rest of it:
*
* Copyright 1996, Brian C. Jepson
* (bjepson@ids.net)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
*/
package ORG.as220.tinySQL;
import ORG.as220.tinySQL.sqlparser.SQLStatement;
import ORG.as220.tinySQL.sqlparser.SQLStatementBatch;
import ORG.as220.tinySQL.util.Log;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.util.Vector;
//import checkers.inference.ownership.quals.*;
/**
 * Statement implementation for the tinySQL driver.
 *
 * @author mgs - SQLStatement statement added to hold the last statement
 */
public class tinySQLStatement implements Statement
{
    /**
     * Holds the last parsed statement. execute() has to be synchronized,
     * to guarantee thread-safety.
     */
    private SQLStatement statement;

    /**
     * A Vector holding all result sets opened by this statement.
     * Maintained via the onCreateResultSet()/onCloseResultSet() callbacks.
     */
    private /*@RepRep*/ Vector results;

    /**
     * The connection that created this statement; used to execute queries.
     */
    private tinySQLConnection connection;

    /**
     * A result set returned from this query.
     */
    private tinySQLResultSet result;

    /**
     * The max field size for tinySQL.
     * This can be pretty big, before things start to break. 0 = no limit set.
     */
    private int max_field_size = 0;

    /**
     * The max rows supported by tinySQL.
     * Setting 0 signals we don't know of a limit.
     */
    private int max_rows = 0;

    /**
     * The number of seconds the driver will allow for a SQL statement to
     * execute before giving up. The default is to wait forever (0).
     */
    private int timeout = 0;

    /**
     * How many rows to fetch in a single run. Default is 2048 rows.
     * (The previous comment claimed 4096, contradicting the actual value.)
     */
    private int fetchsize = 2048;

    /**
     * The default resultset type.
     */
    private int defaultResultSetType = ResultSet.TYPE_SCROLL_INSENSITIVE;

    /**
     * The default fetch direction for ResultSets.
     */
    private int defaultDirection = ResultSet.FETCH_UNKNOWN;

    /**
     * The statement batch of this statement.
     */
    private /*@NoRep*/ SQLStatementBatch batch;

    /**
     * Constructs a new tinySQLStatement object.
     *
     * @param conn the tinySQLConnection object
     */
    public tinySQLStatement(tinySQLConnection conn)
    {
        connection = conn;
        results = new Vector();
        batch = new SQLStatementBatch(( /*@NoRep*/ tinySQLStatement)this);
    }

    /**
     * Execute an SQL statement and return a result set.
     *
     * @see java.sql.Statement#executeQuery
     * @exception SQLException raised for any errors
     * @param sql the SQL statement string
     * @return the result set from the query
     */
    public synchronized ResultSet executeQuery(String sql)
        throws SQLException
    {
        boolean result = execute(sql);
        if (result == true)
            return getResultSet();
        else
            throw new tinySQLException("Statement returned an UpdateCount\n" + sql);
    }

    /**
     * Execute an update, insert, delete, create table, etc. This can
     * be anything that doesn't return rows.
     *
     * @see java.sql.Statement#executeUpdate
     * @exception java.sql.SQLException thrown when an error occurs executing
     * the SQL
     * @return either the row count for INSERT, UPDATE or DELETE or 0 for SQL statements that return nothing
     */
    public synchronized int executeUpdate(String sql) throws SQLException
    {
        boolean result = execute(sql);
        if (result == false)
            return getUpdateCount();
        else
            throw new tinySQLException("Statement returned a ResultSet");
    }

    /**
     * Executes some SQL and returns true or false, depending on
     * the success. The result set is stored in result, and can
     * be retrieved with getResultSet();
     *
     * @see java.sql.Statement#execute
     * @exception SQLException raised for any errors
     * @param sql the SQL to be executed
     * @return true if there is a result set available
     */
    public synchronized boolean execute(String sql) throws SQLException
    {
        // parse the statement, remember it, then execute it
        setStatement(connection.getDatabaseEngine().parse(( /*@NoRep*/ tinySQLStatement)this, sql));
        return statement.execute();
    }

    /**
     * Closes all result sets opened by this statement.
     *
     * Iterates over a snapshot because closing a result set triggers
     * onCloseResultSet(), which removes it from the live 'results' list.
     *
     * @see java.sql.Statement#close
     */
    public void close() throws SQLException
    {
        Vector v = new Vector(results);
        for (int i = 0; i < v.size(); i++)
        {
            // BUGFIX: read from the snapshot 'v', not the live 'results' list,
            // which shrinks while result sets deregister themselves during close()
            tinySQLResultSet res = (tinySQLResultSet) v.elementAt(i);
            res.close();
        }
    }

    /**
     * Returns the last result set.
     *
     * @see java.sql.Statement#getResultSet
     * @return null if no result set is available, otherwise a result set
     */
    public ResultSet getResultSet() throws SQLException
    {
        return statement.getResultSet();
    }

    /**
     * CallBack function called from tinySQLResultSet when the resultset
     * is opened. Adds it to the list of open results.
     */
    public void onCreateResultSet(tinySQLResultSet result)
    {
        Log.debug("Statement: ResultSet opened");
        results.add(result);
    }

    /**
     * CallBack function called from tinySQLResultSet when the resultset
     * is closed. Removes it from the list of open results.
     */
    public void onCloseResultSet(tinySQLResultSet result)
    {
        Log.debug("Statement: ResultSet close");
        results.remove(result);
    }

    /**
     * Return the update count of the last operation, as reported by the
     * parsed statement.
     *
     * @see java.sql.Statement#getUpdateCount
     * @return the update count of the last executed statement
     */
    public int getUpdateCount() throws SQLException
    {
        return statement.getUpdateCount();
    }

    /**
     * This returns true if there are any pending result sets. This
     * should only be true after invoking execute().
     *
     * @see java.sql.Statement#getMoreResults
     * @return true if rows are to be gotten
     */
    public boolean getMoreResults() throws SQLException
    {
        return statement.getMoreResults();
    }

    /**
     * Get the maximum field size to return in a result set.
     *
     * @see java.sql.Statement#getMaxFieldSize
     * @return the value of max field size
     */
    public int getMaxFieldSize() throws SQLException
    {
        return max_field_size;
    }

    /**
     * Set the max field size.
     *
     * @see java.sql.Statement#setMaxFieldSize
     * @param max the maximum field size
     */
    public void setMaxFieldSize(int max) throws SQLException
    {
        max_field_size = max;
    }

    /**
     * Get the maximum row count that can be returned by a result set.
     *
     * @see java.sql.Statement#getMaxRows
     * @return the maximum rows
     */
    public int getMaxRows() throws SQLException
    {
        return max_rows;
    }

    /**
     * Set the maximum row count that can be returned by a result set.
     *
     * @see java.sql.Statement#setMaxRows
     * @param max the max rows
     */
    public void setMaxRows(int max) throws SQLException
    {
        max_rows = max;
    }

    /**
     * If escape scanning is on (the default) the driver will do
     * escape substitution before sending the SQL to the database.
     *
     * @see java.sql.Statement#setEscapeProcessing
     * @param enable ignored; the parser always processes escapes
     */
    public void setEscapeProcessing(boolean enable)
        throws SQLException
    {
        // ignore this call as the parser will always process escapes
    }

    /**
     * Discover the query timeout.
     *
     * @see java.sql.Statement#getQueryTimeout
     * @see #setQueryTimeout
     * @return the timeout value for this statement
     */
    public int getQueryTimeout() throws SQLException
    {
        return timeout;
    }

    /**
     * Set the query timeout.
     *
     * @see java.sql.Statement#setQueryTimeout
     * @see #getQueryTimeout
     * @param x the new query timeout value
     */
    public void setQueryTimeout(int x) throws SQLException
    {
        timeout = x;
    }

    /**
     * This can be used by another thread to cancel a statement. This
     * doesn't matter for tinySQL, as far as I can tell.
     *
     * @see java.sql.Statement#cancel
     */
    public void cancel()
    {
        // not yet ...
    }

    /**
     * Get the warning chain associated with this Statement.
     *
     * @see java.sql.Statement#getWarnings
     * @return the chain of warnings; always null, tinySQL records no warnings
     */
    public final SQLWarning getWarnings() throws SQLException
    {
        return null;
    }

    /**
     * Clear the warning chain associated with this Statement.
     *
     * @see java.sql.Statement#clearWarnings
     */
    public void clearWarnings() throws SQLException
    {
    }

    /**
     * Sets the cursor name for this connection. Presently unsupported.
     */
    public void setCursorName(String unused) throws SQLException
    {
        throw new SQLException("tinySQL does not support cursors.");
    }

    //--------------------------JDBC 2.0-----------------------------

    /**
     * JDBC 2.0
     *
     * Gives the driver a hint as to the direction in which the rows in a
     * result set will be processed. The hint applies only to result sets
     * created using this Statement object.
     *
     * @param direction the initial direction for processing rows
     * @exception SQLException if a database access error occurs
     * or the given direction is not one of ResultSet.FETCH_FORWARD,
     * ResultSet.FETCH_REVERSE, or ResultSet.FETCH_UNKNOWN
     */
    public void setFetchDirection(int direction) throws SQLException
    {
        this.defaultDirection = direction;
    }

    /**
     * JDBC 2.0
     *
     * Retrieves the direction for fetching rows from database tables that
     * is the default for result sets generated from this
     * <code>Statement</code> object.
     *
     * @return the default fetch direction for result sets generated
     * from this <code>Statement</code> object
     * @exception SQLException if a database access error occurs
     */
    public int getFetchDirection() throws SQLException
    {
        return this.defaultDirection;
    }

    /**
     * JDBC 2.0
     *
     * Gives the JDBC driver a hint as to the number of rows that should
     * be fetched from the database when more rows are needed. If the value
     * specified is zero, then the hint is ignored.
     *
     * @param rows the number of rows to fetch
     * @exception SQLException if a database access error occurs, or the
     * condition 0 <= rows <= this.getMaxRows() is not satisfied.
     */
    public void setFetchSize(int rows) throws SQLException
    {
        // JDBC contract: 0 <= rows <= getMaxRows(). A max-rows value of 0 means
        // "no limit", so the upper bound applies only when a limit is set.
        // BUGFIX: the previous check rejected rows == 0 (legal: hint ignored)
        // and, because max_rows defaults to 0, rejected every positive value too.
        if (rows < 0 || (this.getMaxRows() > 0 && rows > this.getMaxRows()))
            throw new SQLException("Condition 0 <= rows <= this.getMaxRows() is not satisfied");
        if (rows != 0)
            fetchsize = rows;
    }

    /**
     * JDBC 2.0
     *
     * Retrieves the number of result set rows that is the default
     * fetch size for result sets generated from this
     * <code>Statement</code> object.
     *
     * @return the default fetch size for result sets generated
     * from this <code>Statement</code> object
     * @exception SQLException if a database access error occurs
     */
    public int getFetchSize() throws SQLException
    {
        return fetchsize;
    }

    /**
     * JDBC 2.0
     *
     * Retrieves the result set concurrency; tinySQL is read-only.
     */
    public int getResultSetConcurrency() throws SQLException
    {
        return ResultSet.CONCUR_READ_ONLY;
    }

    /**
     * JDBC 2.0
     *
     * Determine the result set type.
     */
    public int getResultSetType() throws SQLException
    {
        return defaultResultSetType;
    }

    /**
     * Sets the type used for result sets created by this statement.
     */
    public void setDefaultResultSetType(int type)
    {
        this.defaultResultSetType = type;
    }

    /**
     * JDBC 2.0
     *
     * Adds a SQL command to the current batch of commands for the statement.
     * This method is optional.
     *
     * @param sql typically this is a static SQL INSERT or UPDATE statement
     * @exception SQLException if a database access error occurs, or the
     * driver does not support batch statements
     */
    public void addBatch(String sql)
        throws SQLException
    {
        statement = connection.getDatabaseEngine().parse(( /*@NoRep*/ tinySQLStatement)this, sql);
        Vector params = statement.getParameters();
        if (params.size() != 0)
        {
            throw new tinySQLException("Parameterized statements are not supported, use PreparedStatement for Parameters");
        }
        addBatch(statement);
    }

    /**
     * JDBC 2.0
     *
     * Adds an already-parsed SQL command to the current batch of commands.
     *
     * @param sql the parsed statement to add
     * @exception SQLException if a database access error occurs, or the
     * driver does not support batch statements
     */
    protected void addBatch(SQLStatement sql) throws SQLException
    {
        batch.add(sql);
    }

    /**
     * JDBC 2.0
     *
     * Makes the set of commands in the current batch empty.
     * This method is optional.
     *
     * @exception SQLException if a database access error occurs or the
     * driver does not support batch statements
     */
    public void clearBatch() throws SQLException
    {
        batch.clear();
    }

    /**
     * JDBC 2.0
     *
     * Submits a batch of commands to the database for execution.
     * This method is optional.
     *
     * @return an array of update counts containing one element for each
     * command in the batch, ordered according to the order in which
     * commands were inserted into the batch
     * @exception SQLException if a database access error occurs or the
     * driver does not support batch statements
     */
    public int[] executeBatch() throws SQLException
    {
        return batch.executeAll();
    }

    /**
     * JDBC 2.0
     *
     * Returns the <code>Connection</code> object
     * that produced this <code>Statement</code> object.
     *
     * @return the connection that produced this statement
     * @exception SQLException if a database access error occurs
     */
    public Connection getConnection() throws SQLException
    {
        return connection;
    }

    /** Returns the last parsed statement, or null if none has been parsed. */
    protected SQLStatement getStatement()
    {
        return statement;
    }

    /** Records the last parsed statement. */
    protected void setStatement(SQLStatement statement)
    {
        this.statement = statement;
    }

    // ----- JDBC 3.0/4.0 methods: not implemented by tinySQL -----

    @Override
    public boolean isWrapperFor(Class<?> iface) throws SQLException {
        // not implemented
        return false;
    }

    @Override
    public <T> T unwrap(Class<T> iface) throws SQLException {
        // not implemented
        return null;
    }

    @Override
    public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
        // not implemented
        return false;
    }

    @Override
    public boolean execute(String sql, int[] columnIndexes) throws SQLException {
        // not implemented
        return false;
    }

    @Override
    public boolean execute(String sql, String[] columnNames) throws SQLException {
        // not implemented
        return false;
    }

    @Override
    public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
        // not implemented
        return 0;
    }

    @Override
    public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
        // not implemented
        return 0;
    }

    @Override
    public int executeUpdate(String sql, String[] columnNames) throws SQLException {
        // not implemented
        return 0;
    }

    @Override
    public ResultSet getGeneratedKeys() throws SQLException {
        // not implemented
        return null;
    }

    @Override
    public boolean getMoreResults(int current) throws SQLException {
        // not implemented
        return false;
    }

    @Override
    public int getResultSetHoldability() throws SQLException {
        // not implemented
        return 0;
    }

    @Override
    public boolean isClosed() throws SQLException {
        // not implemented; close() is not tracked
        return false;
    }

    @Override
    public boolean isPoolable() throws SQLException {
        // not implemented
        return false;
    }

    @Override
    public void setPoolable(boolean poolable) throws SQLException {
        // not implemented
    }

    @Override
    public void closeOnCompletion() throws SQLException {
        // not implemented
    }

    @Override
    public boolean isCloseOnCompletion() throws SQLException {
        // not implemented
        return false;
    }
}
| |
/*
* Copyright (c) 2017, Computing Science 301F17T31, University of Alberta - All Right Reserved
* You may use, distribute, or modify this code under terms and conditions of the Code of Student Behaviour at * University of Alberta
* You can find a copy of lisense in this project. Otherwise please contact contact@abc.ca
*/
package com.example.haotianzhu.noname;
import android.app.DatePickerDialog;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.net.Uri;
import android.provider.MediaStore;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Base64;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.DatePicker;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
/**
* This activity class is used to update data for an existing habitEvent including its title, habitType, comment etc.
* @author Haotian Zhu
* @author Yuhang Xiong
* @author Yuntian Zhang
* @author Wenhan Yang*/
public class EditEventActivity extends AppCompatActivity implements DatePickerDialog.OnDateSetListener {

    /** Request code used when launching the gallery picker. */
    private static final int GALLERY_REQUEST = 100;

    // Event being edited. Instance field (was static): a private static reference
    // served no cross-instance purpose and kept model data alive past the
    // Activity's lifetime.
    private habitEvent editEvent;
    private String userID;
    private String title;
    private String comment;
    private EditText titleText;
    private EditText commentText;
    private TextView endDateText;
    private int day, month, year;
    private String habitType;
    private TextView habitTypeText;
    private ImageView imageView;
    private Bitmap bmimage;
    private Uri imageUri;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_edit_event);
        editEvent = (habitEvent) getIntent().getBundleExtra("send").getSerializable("EditEvent");

        Button doneButton = (Button) findViewById(R.id.done);
        Button cancelButton = (Button) findViewById(R.id.cancel);
        Button deleteButton = (Button) findViewById(R.id.delete);
        titleText = (EditText) findViewById(R.id.eventTitle);
        commentText = (EditText) findViewById(R.id.eventComment);
        endDateText = (TextView) findViewById(R.id.endDateText);
        habitTypeText = (TextView) findViewById(R.id.habitType);

        // populate local copies of the event's current values
        userID = editEvent.getUserID();
        title = editEvent.getTitle();
        comment = editEvent.getComment();
        year = editEvent.getYear();
        month = editEvent.getMonth();
        day = editEvent.getDay();
        habitType = editEvent.getHabitType();

        // show current values as hints so empty fields mean "keep unchanged"
        titleText.setHint("Title: " + title);
        commentText.setHint("Comment: " + comment);
        habitTypeText.setHint("HabitType " + habitType);

        // restore the event's photo, if any, from its Base64 representation
        imageView = (ImageView) findViewById(R.id.imageView);
        String bmString = editEvent.getPhoto();
        if (bmString != null) {
            byte[] decodedString = Base64.decode(bmString, Base64.DEFAULT);
            bmimage = BitmapFactory.decodeByteArray(decodedString, 0, decodedString.length);
            imageView.setImageBitmap(bmimage);
        }
        if (imageView.getDrawable() == null) {
            // no photo: keep a fixed-height placeholder so the view stays tappable
            imageView.requestLayout();
            imageView.getLayoutParams().height = 200;
            imageView.getLayoutParams().width = LinearLayout.LayoutParams.MATCH_PARENT;
        }
        imageView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                openGallery();
            }
        });

        // set result as canceled and finish this activity, go back to last controller
        cancelButton.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                setResult(RESULT_CANCELED);
                finish();
            }
        });

        // -100 is the caller's sentinel result code for "delete this event"
        deleteButton.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                setResult(-100);
                Log.i("delete", "here is delete");
                finish();
            }
        });

        // press done => check result ok
        // if ok setResult and bundles results and return back to last controller
        doneButton.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                // empty fields mean "keep the existing value"
                if (!titleText.getText().toString().equals("")) {
                    title = titleText.getText().toString().toLowerCase();
                    editEvent.editTitle(title);
                }
                if (!commentText.getText().toString().equals("")) {
                    comment = commentText.getText().toString().toLowerCase();
                    editEvent.editComment(comment);
                }
                if (imageView.getDrawable() != null) {
                    editEvent.setPhoto(bitmapToString(bmimage));
                } else {
                    editEvent.deltePhoto();
                }
                Log.i("titlehere", editEvent.getTitle());
                Elasticsearch.UpdateEventTask updateEventTask = new Elasticsearch.UpdateEventTask();
                updateEventTask.execute(editEvent);
                Intent resultIntent = new Intent();
                Bundle result = new Bundle();
                result.putSerializable("EditEvent", editEvent);
                resultIntent.putExtra("result", result);
                setResult(RESULT_OK, resultIntent);
                finish();
            }
        });
    }

    /** Formats a date triple for display in the end-date text view. */
    public String dateToString(int y, int m, int d) {
        return " year: " + y + " month: " + (m) + " day: " + d;
    }

    @Override
    public void onDateSet(DatePicker datePicker, int i, int i1, int i2) {
        year = i;
        // DatePicker months are 0-based; stored months are 1-based
        month = i1 + 1;
        day = i2;
        endDateText.setText(dateToString(year, month, day));
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // BUGFIX: forward to the superclass so framework/fragment plumbing still works
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == GALLERY_REQUEST) {
            // BUGFIX: guard against a null intent / null URI from the picker
            if (resultCode == RESULT_OK && data != null && data.getData() != null) {
                imageUri = data.getData();
                InputStream inputStream;
                try {
                    inputStream = getContentResolver().openInputStream(imageUri);
                    bmimage = BitmapFactory.decodeStream(inputStream);
                    imageView.setImageBitmap(bmimage);
                } catch (Exception e) {
                    e.printStackTrace();
                }
                imageView.requestLayout();
                imageView.getLayoutParams().height = LinearLayout.LayoutParams.WRAP_CONTENT;
                imageView.getLayoutParams().width = LinearLayout.LayoutParams.WRAP_CONTENT;
            }
        }
    }

    /** Launches the system gallery so the user can pick a new photo. */
    private void openGallery() {
        Intent gallery = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.INTERNAL_CONTENT_URI);
        startActivityForResult(gallery, GALLERY_REQUEST);
    }

    /** Encodes a bitmap as a Base64 PNG string for storage on the event. */
    public String bitmapToString(Bitmap bm) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        bm.compress(Bitmap.CompressFormat.PNG, 100, baos);
        byte[] b = baos.toByteArray();
        return Base64.encodeToString(b, Base64.DEFAULT);
    }

    /** Returns true when connected to a mobile or wifi network. */
    private Boolean checkIfOffline() {
        ConnectivityManager connectivityManager =
                (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
        // BUGFIX: getNetworkInfo() returns null when the device lacks that radio
        // (e.g. wifi-only tablets have no TYPE_MOBILE info) -- guard against NPE
        NetworkInfo mobile = connectivityManager.getNetworkInfo(ConnectivityManager.TYPE_MOBILE);
        NetworkInfo wifi = connectivityManager.getNetworkInfo(ConnectivityManager.TYPE_WIFI);
        boolean online = (mobile != null && mobile.getState() == NetworkInfo.State.CONNECTED)
                || (wifi != null && wifi.getState() == NetworkInfo.State.CONNECTED);
        return online;
    }
}
| |
/*******************************************************************************
* Copyright (c) 2011, Daniel Murphy
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
/*
* JBox2D - A Java Port of Erin Catto's Box2D
*
* JBox2D homepage: http://jbox2d.sourceforge.net/
* Box2D homepage: http://www.box2d.org
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any damages
* arising from the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software
* in a product, an acknowledgment in the product documentation would be
* appreciated but is not required.
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
* 3. This notice may not be removed or altered from any source distribution.
*/
package org.jbox2d.common;
import java.util.Random;
/**
 * A few math methods that don't fit very well anywhere else.
 *
 * <p>Most operations come in two flavors selected by compile-time flags on the
 * project's {@code Settings} class (not visible here): an exact
 * {@link StrictMath} delegate and a faster approximation (lookup tables for
 * sin/cos, branchy casts for floor/ceil/round, a bit-level hack for pow).
 */
public class MathUtils {
    /** Float-precision pi. */
    public static final float PI = (float) Math.PI;
    /** 2*pi, a full turn in radians. */
    public static final float TWOPI = (float) (Math.PI * 2);
    /** 1/pi. */
    public static final float INV_PI = 1f / PI;
    /** pi/2. */
    public static final float HALF_PI = PI / 2;
    /** pi/4. */
    public static final float QUARTER_PI = PI / 4;
    /** 3*pi/2. */
    public static final float THREE_HALVES_PI = TWOPI - HALF_PI;
    /**
     * Degrees to radians conversion factor
     */
    public static final float DEG2RAD = PI / 180;
    /**
     * Radians to degrees conversion factor
     */
    public static final float RAD2DEG = 180 / PI;
    // 2^23, the size of a float's mantissa field; used by the fastPow bit hack.
    private static final float SHIFT23 = 1 << 23;
    private static final float INV_SHIFT23 = 1.0f / SHIFT23;
    // Sin/cos lookup tables sampled every SINCOS_LUT_PRECISION radians over
    // [0, 2*pi). Public so hot loops can index them directly.
    public static final float[] sinLUT = new float[Settings.SINCOS_LUT_LENGTH];
    public static final float[] cosLUT = new float[Settings.SINCOS_LUT_LENGTH];
    static {
        // Fill the tables once at class-load time.
        for (int i = 0; i < Settings.SINCOS_LUT_LENGTH; i++) {
            sinLUT[i] = (float) Math.sin(i * Settings.SINCOS_LUT_PRECISION);
            cosLUT[i] = (float) Math.cos(i * Settings.SINCOS_LUT_PRECISION);
        }
    }
    /**
     * Sine of x (radians): table-based when Settings.SINCOS_LUT_ENABLED,
     * otherwise exact via StrictMath.
     */
    public static final float sin(float x) {
        if (Settings.SINCOS_LUT_ENABLED) {
            return sinLUT(x);
        }
        else {
            return (float) StrictMath.sin(x);
        }
    }
    /**
     * Table-based sine of x (radians). The angle is first normalized into
     * [0, 2*pi); with SINCOS_LUT_LERP the two neighboring table samples are
     * linearly interpolated, otherwise the nearest sample is returned.
     */
    public static final float sinLUT(float x) {
        // Normalize x into [0, TWOPI).
        x %= TWOPI;
        while (x < 0) {
            x += TWOPI;
        }
        if (Settings.SINCOS_LUT_LERP) {
            // Convert the angle to a (fractional) table position.
            x /= Settings.SINCOS_LUT_PRECISION;
            final int index = (int) x;
            if (index != 0) {
                // Keep only the fractional part: x is in [index, index + 1),
                // so x % index == x - index for index >= 1.
                x %= index;
            }
            // At the last sample, wrap: the next sample is table entry 0.
            if (index == Settings.SINCOS_LUT_LENGTH - 1) {
                return ((1 - x) * sinLUT[index] + x * sinLUT[0]);
            }
            else {
                return ((1 - x) * sinLUT[index] + x * sinLUT[index + 1]);
            }
        }
        else {
            // Nearest-sample lookup; the % guards round() landing on LENGTH.
            return sinLUT[MathUtils.round(x / Settings.SINCOS_LUT_PRECISION) % Settings.SINCOS_LUT_LENGTH];
        }
    }
    /**
     * Cosine of x (radians): table-based when Settings.SINCOS_LUT_ENABLED
     * (same normalization/interpolation scheme as sinLUT), otherwise exact.
     */
    public static final float cos(float x) {
        if (Settings.SINCOS_LUT_ENABLED) {
            // Normalize x into [0, TWOPI).
            x %= TWOPI;
            while (x < 0) {
                x += TWOPI;
            }
            if (Settings.SINCOS_LUT_LERP) {
                x /= Settings.SINCOS_LUT_PRECISION;
                final int index = (int) x;
                if (index != 0) {
                    // Fractional part of the table position (see sinLUT).
                    x %= index;
                }
                // At the last sample, wrap: the next sample is table entry 0.
                if (index == Settings.SINCOS_LUT_LENGTH - 1) {
                    return ((1 - x) * cosLUT[index] + x * cosLUT[0]);
                }
                else {
                    return ((1 - x) * cosLUT[index] + x * cosLUT[index + 1]);
                }
            }
            else {
                return cosLUT[MathUtils.round(x / Settings.SINCOS_LUT_PRECISION) % Settings.SINCOS_LUT_LENGTH];
            }
        }
        else {
            return (float) StrictMath.cos(x);
        }
    }
    /** Absolute value; branchy form when FAST_MATH (note: x > 0 maps -0.0f to -0.0f). */
    public static final float abs(final float x) {
        if (Settings.FAST_MATH) {
            return x > 0 ? x : -x;
        }
        else {
            return Math.abs(x);
        }
    }
    /** Branchless absolute value: y is the sign mask (0 or -1), (x^y)-y negates when negative. */
    public static final int abs(int x) {
        int y = x >> 31;
        return (x ^ y) - y;
    }
    /** floor(x) as an int; cast-and-adjust when FAST_MATH, else Math.floor. */
    public static final int floor(final float x) {
        if (Settings.FAST_MATH) {
            int y = (int) x;
            // Casting truncates toward zero; step down for negative non-integers.
            if (x < 0 && x != y) {
                y--;
            }
            return y;
        }
        else {
            return (int) Math.floor(x);
        }
    }
    /** ceil(x) as an int; cast-and-adjust when FAST_MATH, else Math.ceil. */
    public static final int ceil(final float x) {
        if (Settings.FAST_MATH) {
            int y = (int) x;
            // Casting truncates toward zero; step up for positive non-integers.
            if (x > 0 && x != y) {
                y++;
            }
            return y;
        }
        else {
            return (int) Math.ceil(x);
        }
    }
    /** Round to nearest int; floor(x + 0.5) when FAST_MATH, else StrictMath.round. */
    public static final int round(final float x) {
        if (Settings.FAST_MATH) {
            return floor(x + .5f);
        }
        else {
            return StrictMath.round(x);
        }
    }
    /**
     * Rounds up the value to the nearest higher power^2 value.
     * Returns 1 for x <= 1; returns x itself when x is already a power of two.
     *
     * @param x
     * @return power^2 value
     */
    public static final int ceilPowerOf2(int x) {
        int pow2 = 1;
        while (pow2 < x) {
            pow2 <<= 1;
        }
        return pow2;
    }
    /** Larger of a and b. */
    public final static float max(final float a, final float b) {
        return a > b ? a : b;
    }
    /** Larger of a and b. */
    public final static int max(final int a, final int b) {
        return a > b ? a : b;
    }
    /** Smaller of a and b. */
    public final static float min(final float a, final float b) {
        return a < b ? a : b;
    }
    /** Smaller of a and b. */
    public final static int min(final int a, final int b) {
        return a < b ? a : b;
    }
    /**
     * Linearly remaps val from the range [fromMin, fromMax] into
     * [toMin, toMax] (no clamping; values outside the source range
     * extrapolate).
     */
    public final static float map(final float val, final float fromMin, final float fromMax, final float toMin,
            final float toMax) {
        final float mult = (val - fromMin) / (fromMax - fromMin);
        final float res = toMin + mult * (toMax - toMin);
        return res;
    }
    /** Returns the closest value to 'a' that is in between 'low' and 'high' */
    public final static float clamp(final float a, final float low, final float high) {
        return max(low, min(a, high));
    }
    /** Component-wise clamp of a into [low, high]; allocates and returns a new Vec2. */
    public final static Vec2 clamp(final Vec2 a, final Vec2 low, final Vec2 high) {
        final Vec2 min = new Vec2();
        min.x = a.x < high.x ? a.x : high.x;
        min.y = a.y < high.y ? a.y : high.y;
        min.x = low.x > min.x ? low.x : min.x;
        min.y = low.y > min.y ? low.y : min.y;
        return min;
    }
    /** Allocation-free variant of clamp(Vec2): writes the clamped result into dest. */
    public final static void clampToOut(final Vec2 a, final Vec2 low, final Vec2 high, final Vec2 dest) {
        dest.x = a.x < high.x ? a.x : high.x;
        dest.y = a.y < high.y ? a.y : high.y;
        dest.x = low.x > dest.x ? low.x : dest.x;
        dest.y = low.y > dest.y ? low.y : dest.y;
    }
    /**
     * Next Largest Power of 2: Given a binary integer value x, the next largest
     * power of 2 can be computed by a SWAR algorithm that recursively "folds"
     * the upper bits into the lower bits. This process yields a bit vector with
     * the same most significant 1 as x, but all 1's below it. Adding 1 to that
     * value yields the next largest power of 2.
     * (For x that is already a power of two this returns 2*x; for x == 0 it
     * returns 1.)
     */
    public final static int nextPowerOfTwo(int x) {
        x |= x >> 1;
        x |= x >> 2;
        x |= x >> 4;
        x |= x >> 8;
        x |= x >> 16;
        return x + 1;
    }
    /** True iff x is positive and has exactly one bit set. */
    public final static boolean isPowerOfTwo(final int x) {
        return x > 0 && (x & x - 1) == 0;
    }
    /**
     * Fast, rough approximation of a^b for positive a, via bit-level
     * manipulation of the float representation (the raw exponent bits act as
     * a cheap log2/exp2, with a quadratic correction term). Same scheme as
     * pow() below, but applied unconditionally.
     */
    public static final float fastPow(float a, float b) {
        // x ~= log2(a): raw bits scaled by 2^-23, minus the exponent bias.
        float x = Float.floatToRawIntBits(a);
        x *= INV_SHIFT23;
        x -= 127;
        // y = fractional part of x (branch handles negatives).
        float y = x - (x >= 0 ? (int) x : (int) x - 1);
        b *= x + (y - y * y) * 0.346607f;
        y = b - (b >= 0 ? (int) b : (int) b - 1);
        y = (y - y * y) * 0.33971f;
        // Re-bias and reinterpret the bits as the result ~= 2^(b*log2(a)).
        return Float.intBitsToFloat((int) ((b + 127 - y) * SHIFT23));
    }
    /** atan2(y, x): fast rational approximation when FAST_MATH, else exact. */
    public static final float atan2(final float y, final float x) {
        if (Settings.FAST_MATH) {
            return fastAtan2(y, x);
        }
        else {
            return (float) StrictMath.atan2(y, x);
        }
    }
    /**
     * Fast atan2 approximation using the rational form
     * atan(z) ~= z / (1 + 0.28*z^2) for |z| < 1 and its reciprocal-argument
     * identity otherwise, with quadrant fix-ups to match atan2's range.
     */
    public static final float fastAtan2(float y, float x) {
        if (x == 0.0f) {
            // Points on the y axis: +-pi/2, with atan2(0, 0) defined as 0.
            if (y > 0.0f)
                return HALF_PI;
            if (y == 0.0f)
                return 0.0f;
            return -HALF_PI;
        }
        float atan;
        final float z = y / x;
        if (abs(z) < 1.0f) {
            atan = z / (1.0f + 0.28f * z * z);
            // x < 0 means quadrants II/III: shift by +-pi.
            if (x < 0.0f) {
                if (y < 0.0f)
                    return atan - PI;
                return atan + PI;
            }
        }
        else {
            // |z| >= 1: use atan(z) = pi/2 - atan(1/z) to keep the
            // approximation in its accurate range.
            atan = HALF_PI - z / (z * z + 0.28f);
            if (y < 0.0f)
                return atan - PI;
        }
        return atan;
    }
    /**
     * Rewrites theta to an equivalent angle (each step preserves sin(theta):
     * subtracting 2*pi, then reflecting via pi - theta), nudging it toward the
     * primary range of the sine approximations.
     * NOTE(review): for theta well below -pi a single pass does not land in
     * [-pi/2, pi/2] — presumably callers pass angles near the principal range;
     * confirm before relying on the reduced value's magnitude.
     */
    public static final float reduceAngle(float theta) {
        theta %= TWOPI;
        if (abs(theta) > PI) {
            theta = theta - TWOPI;
        }
        if (abs(theta) > HALF_PI) {
            theta = PI - theta;
        }
        return theta;
    }
    /**
     * Computes a fast approximation to <code>Math.pow(a, b)</code>.
     * Adapted from <url>http://www.dctsystems.co.uk/Software/power.html</url>.
     * Only approximate when Settings.FAST_MATH; otherwise delegates to
     * Math.pow. See fastPow for the bit-hack details.
     *
     * @param a
     * a positive number
     * @param b
     * a number
     * @return a^b
     */
    // UNTESTED
    public static final float pow(final float a, float b) {
        // adapted from: http://www.dctsystems.co.uk/Software/power.html
        if (Settings.FAST_MATH) {
            float x = Float.floatToRawIntBits(a);
            x *= 1.0f / (1 << 23);
            x = x - 127;
            float y = x - MathUtils.floor(x);
            b *= x + (y - y * y) * 0.346607f;
            y = b - MathUtils.floor(b);
            y = (y - y * y) * 0.33971f;
            return Float.intBitsToFloat((int) ((b + 127 - y) * (1 << 23)));
        }
        else {
            return (float) Math.pow(a, b);
        }
    }
    /** Uniform random float in [argLow, argHigh) using Math.random(). */
    public static final float randomFloat(float argLow, float argHigh) {
        return (float) Math.random() * (argHigh - argLow) + argLow;
    }
    /** Uniform random float in [argLow, argHigh) drawn from the given Random. */
    public static final float randomFloat(Random r, float argLow, float argHigh) {
        return r.nextFloat() * (argHigh - argLow) + argLow;
    }
    /** Square root as float (always exact StrictMath; no fast variant). */
    public static final float sqrt(float x) {
        return (float) StrictMath.sqrt(x);
    }
    /** Squared Euclidean distance between v1 and v2 (avoids the sqrt). */
    public final static float distanceSquared(Vec2 v1, Vec2 v2) {
        float dx = (v1.x - v2.x);
        float dy = (v1.y - v2.y);
        return dx * dx + dy * dy;
    }
    /** Euclidean distance between v1 and v2. */
    public final static float distance(Vec2 v1, Vec2 v2) {
        return sqrt(distanceSquared(v1, v2));
    }
}
// SINCOS accuracy and speed chart
//
// Tables: 200
// Most Precise Table: 1.0E-5
// Least Precise Table: 0.01
// Accuracy Iterations: 1000000
// Speed Trials: 20
// Speed Iterations: 10000
// constructing tables
// doing accuracy tests
// Accuracy results, average displacement
// Table precision Not lerped Lerped Difference
// 9.99999E-6 1.59338E-6 6.33411E-8 1.53004E-6
// 5.99499E-5 9.52019E-6 5.42142E-8 9.46598E-6
// 1.09899E-4 1.75029E-5 5.53918E-8 1.74475E-5
// 1.59850E-4 2.54499E-5 5.99911E-8 2.53899E-5
// 2.09799E-4 3.33762E-5 5.96989E-8 3.33165E-5
// 2.59749E-4 4.13445E-5 5.60582E-8 4.12885E-5
// 3.09700E-4 4.92908E-5 6.12737E-8 4.92296E-5
// 3.59650E-4 5.72404E-5 5.88096E-8 5.71816E-5
// 4.09599E-4 6.50985E-5 6.21300E-8 6.50363E-5
// 4.59550E-4 7.31303E-5 5.78407E-8 7.30725E-5
// 5.09500E-4 8.10196E-5 5.92192E-8 8.09603E-5
// 5.59450E-4 8.90484E-5 6.07958E-8 8.89876E-5
// 6.09400E-4 9.69890E-5 7.20343E-8 9.69170E-5
// 6.59350E-4 1.04928E-4 6.93258E-8 1.04858E-4
// 7.09300E-4 1.13019E-4 7.11427E-8 1.12947E-4
// 7.59250E-4 1.20989E-4 7.06747E-8 1.20918E-4
// 8.09199E-4 1.28859E-4 7.89727E-8 1.28780E-4
// 8.59150E-4 1.36890E-4 7.43000E-8 1.36815E-4
// 9.09100E-4 1.44773E-4 8.13186E-8 1.44692E-4
// 9.59050E-4 1.52582E-4 8.60978E-8 1.52496E-4
// 0.00100899 1.60769E-4 8.80632E-8 1.60681E-4
// 0.00105894 1.68485E-4 9.66496E-8 1.68389E-4
// 0.00110889 1.76573E-4 9.58513E-8 1.76478E-4
// 0.00115885 1.84327E-4 1.06905E-7 1.84220E-4
// 0.00120880 1.92196E-4 1.19286E-7 1.92077E-4
// 0.00125874 2.00460E-4 1.28806E-7 2.00331E-4
// 0.00130869 2.08042E-4 1.19392E-7 2.07923E-4
// 0.00135865 2.16544E-4 1.44530E-7 2.16399E-4
// 0.00140860 2.24021E-4 1.49854E-7 2.23871E-4
// 0.00145854 2.31827E-4 1.54289E-7 2.31673E-4
// 0.00150850 2.40446E-4 1.44934E-7 2.40301E-4
// 0.00155845 2.47795E-4 1.76676E-7 2.47619E-4
// 0.00160839 2.56248E-4 1.82030E-7 2.56066E-4
// 0.00165834 2.64036E-4 1.87004E-7 2.63849E-4
// 0.00170829 2.71632E-4 1.70919E-7 2.71461E-4
// 0.00175824 2.79823E-4 2.13218E-7 2.79610E-4
// 0.00180820 2.87772E-4 2.19380E-7 2.87553E-4
// 0.00185815 2.95340E-4 2.30138E-7 2.95110E-4
// 0.00190809 3.03284E-4 2.31016E-7 3.03052E-4
// 0.00195805 3.12023E-4 2.42544E-7 3.11780E-4
// 0.00200800 3.19148E-4 2.29997E-7 3.18918E-4
// 0.00205795 3.27906E-4 2.43252E-7 3.27663E-4
// 0.00210790 3.35110E-4 2.93011E-7 3.34817E-4
// 0.00215785 3.43163E-4 3.08860E-7 3.42854E-4
// 0.00220780 3.51323E-4 3.00627E-7 3.51023E-4
// 0.00225775 3.59100E-4 3.04569E-7 3.58795E-4
// 0.00230770 3.67002E-4 3.60740E-7 3.66641E-4
// 0.00235765 3.75095E-4 3.06903E-7 3.74788E-4
// 0.00240760 3.83322E-4 3.94474E-7 3.82927E-4
// 0.00245755 3.91025E-4 4.06166E-7 3.90619E-4
// 0.00250750 3.99437E-4 4.16229E-7 3.99021E-4
// 0.00255744 4.07290E-4 4.24778E-7 4.06866E-4
// 0.00260740 4.15094E-4 4.45167E-7 4.14649E-4
// 0.00265734 4.23249E-4 4.65449E-7 4.22784E-4
// 0.00270730 4.30602E-4 4.63318E-7 4.30139E-4
// 0.00275725 4.38493E-4 4.91764E-7 4.38002E-4
// 0.00280720 4.46582E-4 4.58033E-7 4.46123E-4
// 0.00285715 4.55229E-4 4.52032E-7 4.54777E-4
// 0.00290710 4.63309E-4 5.08717E-7 4.62800E-4
// 0.00295704 4.70165E-4 5.64299E-7 4.69601E-4
// 0.00300700 4.78655E-4 5.78840E-7 4.78076E-4
// 0.00305695 4.86344E-4 5.59216E-7 4.85785E-4
// 0.00310690 4.94030E-4 5.77222E-7 4.93453E-4
// 0.00315685 5.02929E-4 5.99904E-7 5.02329E-4
// 0.00320679 5.10313E-4 6.00234E-7 5.09713E-4
// 0.00325675 5.17840E-4 6.19932E-7 5.17220E-4
// 0.00330670 5.26580E-4 5.96121E-7 5.25984E-4
// 0.00335665 5.34068E-4 6.90259E-7 5.33378E-4
// 0.00340660 5.42020E-4 7.11015E-7 5.41309E-4
// 0.00345655 5.50108E-4 7.76143E-7 5.49332E-4
// 0.00350650 5.58001E-4 7.61866E-7 5.57239E-4
// 0.00355645 5.66693E-4 8.23322E-7 5.65870E-4
// 0.00360640 5.73096E-4 7.33265E-7 5.72362E-4
// 0.00365635 5.81372E-4 8.16293E-7 5.80555E-4
// 0.00370630 5.90145E-4 7.89829E-7 5.89355E-4
// 0.00375625 5.98363E-4 9.15203E-7 5.97448E-4
// 0.00380620 6.05866E-4 9.28685E-7 6.04937E-4
// 0.00385614 6.12882E-4 9.08723E-7 6.11974E-4
// 0.00390610 6.21721E-4 9.86004E-7 6.20735E-4
// 0.00395605 6.30258E-4 8.95759E-7 6.29363E-4
// 0.00400600 6.36307E-4 1.00618E-6 6.35301E-4
// 0.00405595 6.44074E-4 8.99361E-7 6.43175E-4
// 0.00410589 6.52858E-4 9.84549E-7 6.51873E-4
// 0.00415585 6.60822E-4 1.03719E-6 6.59785E-4
// 0.00420580 6.70495E-4 1.02555E-6 6.69470E-4
// 0.00425575 6.77049E-4 1.10163E-6 6.75948E-4
// 0.00430570 6.85498E-4 1.07438E-6 6.84424E-4
// 0.00435565 6.93418E-4 1.18165E-6 6.92236E-4
// 0.00440560 7.01235E-4 1.07350E-6 7.00162E-4
// 0.00445555 7.09658E-4 1.09891E-6 7.08559E-4
// 0.00450550 7.17409E-4 1.32089E-6 7.16088E-4
// 0.00455545 7.25054E-4 1.18787E-6 7.23867E-4
// 0.00460540 7.32882E-4 1.22598E-6 7.31656E-4
// 0.00465534 7.40989E-4 1.42403E-6 7.39565E-4
// 0.00470530 7.48101E-4 1.30740E-6 7.46794E-4
// 0.00475525 7.58028E-4 1.32205E-6 7.56706E-4
// 0.00480520 7.64519E-4 1.47233E-6 7.63047E-4
// 0.00485515 7.71333E-4 1.28258E-6 7.70050E-4
// 0.00490510 7.80806E-4 1.37027E-6 7.79436E-4
// 0.00495505 7.88841E-4 1.31033E-6 7.87531E-4
// 0.00500500 7.96140E-4 1.51493E-6 7.94625E-4
// 0.00505495 8.04295E-4 1.41243E-6 8.02883E-4
// 0.00510489 8.12005E-4 1.65101E-6 8.10354E-4
// 0.00515484 8.19689E-4 1.58882E-6 8.18101E-4
// 0.00520480 8.26688E-4 1.49957E-6 8.25188E-4
// 0.00525475 8.36964E-4 1.79928E-6 8.35165E-4
// 0.00530469 8.43189E-4 1.73475E-6 8.41454E-4
// 0.00535465 8.51817E-4 1.74640E-6 8.50071E-4
// 0.00540460 8.59760E-4 1.86507E-6 8.57895E-4
// 0.00545455 8.68007E-4 1.75090E-6 8.66256E-4
// 0.00550450 8.76977E-4 1.89013E-6 8.75087E-4
// 0.00555445 8.83886E-4 1.71558E-6 8.82170E-4
// 0.00560440 8.91885E-4 1.69996E-6 8.90185E-4
// 0.00565434 8.99345E-4 1.78683E-6 8.97559E-4
// 0.00570430 9.06780E-4 2.08313E-6 9.04697E-4
// 0.00575425 9.16952E-4 1.93477E-6 9.15017E-4
// 0.00580420 9.22571E-4 2.10614E-6 9.20465E-4
// 0.00585414 9.31692E-4 1.99170E-6 9.29700E-4
// 0.00590409 9.39484E-4 1.94949E-6 9.37535E-4
// 0.00595405 9.47631E-4 2.03346E-6 9.45597E-4
// 0.00600400 9.54711E-4 2.26698E-6 9.52444E-4
// 0.00605395 9.64223E-4 2.24711E-6 9.61975E-4
// 0.00610390 9.71415E-4 2.24109E-6 9.69174E-4
// 0.00615385 9.79862E-4 2.01206E-6 9.77850E-4
// 0.00620380 9.85571E-4 2.43784E-6 9.83133E-4
// 0.00625375 9.94922E-4 2.52600E-6 9.92396E-4
// 0.00630370 0.00100276 2.54560E-6 0.00100022
// 0.00635365 0.00101061 2.36969E-6 0.00100824
// 0.00640359 0.00101889 2.27830E-6 0.00101661
// 0.00645354 0.00102795 2.68888E-6 0.00102526
// 0.00650350 0.00103515 2.29183E-6 0.00103286
// 0.00655344 0.00104347 2.73511E-6 0.00104073
// 0.00660340 0.00105219 2.72697E-6 0.00104946
// 0.00665335 0.00105808 2.64758E-6 0.00105544
// 0.00670330 0.00106720 2.62618E-6 0.00106457
// 0.00675325 0.00107393 2.77868E-6 0.00107115
// 0.00680320 0.00108390 2.98322E-6 0.00108092
// 0.00685315 0.00109122 2.93528E-6 0.00108829
// 0.00690310 0.00109721 2.64548E-6 0.00109457
// 0.00695304 0.00110478 3.15083E-6 0.00110163
// 0.00700300 0.00111550 2.76067E-6 0.00111274
// 0.00705295 0.00112321 3.05986E-6 0.00112015
// 0.00710290 0.00113068 3.25251E-6 0.00112743
// 0.00715284 0.00113965 3.15467E-6 0.00113650
// 0.00720280 0.00114663 3.05115E-6 0.00114358
// 0.00725275 0.00115164 3.11508E-6 0.00114853
// 0.00730270 0.00116219 3.20649E-6 0.00115898
// 0.00735265 0.00117260 3.43076E-6 0.00116917
// 0.00740260 0.00117605 3.48923E-6 0.00117257
// 0.00745255 0.00118615 2.97748E-6 0.00118317
// 0.00750250 0.00119260 3.50978E-6 0.00118909
// 0.00755245 0.00120218 3.25920E-6 0.00119892
// 0.00760240 0.00120917 3.61763E-6 0.00120555
// 0.00765235 0.00121934 3.13261E-6 0.00121620
// 0.00770229 0.00122512 3.79680E-6 0.00122132
// 0.00775224 0.00123384 3.76315E-6 0.00123008
// 0.00780220 0.00124021 3.54969E-6 0.00123666
// 0.00785215 0.00124851 3.41675E-6 0.00124510
// 0.00790209 0.00125781 3.39248E-6 0.00125441
// 0.00795205 0.00126521 3.44698E-6 0.00126176
// 0.00800199 0.00127389 3.56137E-6 0.00127033
// 0.00805194 0.00128038 3.83032E-6 0.00127655
// 0.00810190 0.00129096 4.15480E-6 0.00128681
// 0.00815184 0.00129471 4.26089E-6 0.00129045
// 0.00820179 0.00130539 3.59202E-6 0.00130180
// 0.00825174 0.00131406 4.19017E-6 0.00130987
// 0.00830169 0.00131918 4.22569E-6 0.00131495
// 0.00835164 0.00132855 4.15087E-6 0.00132440
// 0.00840159 0.00133826 4.32916E-6 0.00133394
// 0.00845155 0.00134554 4.42999E-6 0.00134111
// 0.00850149 0.00135052 3.86824E-6 0.00134665
// 0.00855144 0.00135946 4.69083E-6 0.00135477
// 0.00860140 0.00136739 4.61559E-6 0.00136277
// 0.00865134 0.00137649 4.28238E-6 0.00137221
// 0.00870130 0.00138384 4.06960E-6 0.00137977
// 0.00875124 0.00139290 4.20102E-6 0.00138870
// 0.00880119 0.00140084 4.58835E-6 0.00139625
// 0.00885115 0.00140791 4.76130E-6 0.00140315
// 0.00890109 0.00141798 4.73735E-6 0.00141324
// 0.00895104 0.00142531 4.55069E-6 0.00142076
// 0.00900099 0.00143613 4.31803E-6 0.00143181
// 0.00905094 0.00144260 4.53024E-6 0.00143807
// 0.00910090 0.00144842 5.03281E-6 0.00144338
// 0.00915084 0.00145642 5.40393E-6 0.00145101
// 0.00920080 0.00146572 5.03671E-6 0.00146068
// 0.00925074 0.00147037 4.78116E-6 0.00146559
// 0.00930069 0.00147971 5.53523E-6 0.00147418
// 0.00935065 0.00148841 4.94373E-6 0.00148346
// 0.00940059 0.00149644 5.32814E-6 0.00149112
// 0.00945054 0.00150297 5.51102E-6 0.00149746
// 0.00950049 0.00151124 5.36400E-6 0.00150588
// 0.00955044 0.00152146 5.43741E-6 0.00151602
// 0.00960039 0.00152633 5.75913E-6 0.00152057
// 0.00965034 0.00153369 4.98641E-6 0.00152870
// 0.00970030 0.00154431 6.03356E-6 0.00153828
// 0.00975024 0.00155032 5.84488E-6 0.00154448
// 0.00980020 0.00156000 5.21861E-6 0.00155478
// 0.00985014 0.00156649 5.87368E-6 0.00156061
// 0.00990009 0.00157338 6.38923E-6 0.00156699
// 0.00995005 0.00158487 6.22094E-6 0.00157865
//
// Doing speed tests
// Speed results, iterations per second
// Table precision Not lerped Lerped Difference
// 9.99999E-6 1.212988E7 1.143473E7 695148.0
// 5.99499E-5 1.029751E7 1.138980E7 -1092287.0
// 1.09899E-4 3.757065E7 1.091965E7 2.665100E7
// 1.59850E-4 3.705580E7 1.146008E7 2.559571E7
// 2.09799E-4 3.882527E7 1.151821E7 2.730706E7
// 2.59749E-4 3.927689E7 1.133740E7 2.793949E7
// 3.09700E-4 3.955518E7 1.152108E7 2.803409E7
// 3.59650E-4 3.860077E7 1.133821E7 2.726256E7
// 4.09599E-4 3.857839E7 1.130567E7 2.727272E7
// 4.59550E-4 3.755295E7 1.140718E7 2.614577E7
// 5.09500E-4 3.799789E7 1.158911E7 2.640877E7
// 5.59450E-4 3.804636E7 1.153696E7 2.650939E7
// 6.09400E-4 3.733554E7 1.137089E7 2.596465E7
// 6.59350E-4 3.898913E7 1.060818E7 2.838095E7
// 7.09300E-4 3.895097E7 1.142511E7 2.752586E7
// 7.59250E-4 3.818386E7 1.135579E7 2.682806E7
// 8.09199E-4 3.973571E7 1.152943E7 2.820627E7
// 8.59150E-4 3.966203E7 1.142634E7 2.823568E7
// 9.09100E-4 3.954913E7 1.137365E7 2.817547E7
// 9.59050E-4 3.962797E7 1.140886E7 2.821910E7
// 0.00100899 3.954863E7 1.133789E7 2.821074E7
// 0.00105894 3.961207E7 1.149570E7 2.811637E7
// 0.00110889 3.969774E7 1.128002E7 2.841772E7
// 0.00115885 3.978212E7 1.134445E7 2.843767E7
// 0.00120880 3.981473E7 1.151025E7 2.830447E7
// 0.00125874 3.966584E7 1.153041E7 2.813543E7
// 0.00130869 3.972691E7 1.142137E7 2.830554E7
// 0.00135865 3.978432E7 1.157375E7 2.821056E7
// 0.00140860 3.969332E7 1.136426E7 2.832906E7
// 0.00145854 3.972309E7 1.100793E7 2.871516E7
// 0.00150850 3.980202E7 1.106633E7 2.873568E7
// 0.00155845 3.958305E7 1.148122E7 2.810182E7
// 0.00160839 3.964443E7 1.130897E7 2.833546E7
// 0.00165834 3.969555E7 1.114758E7 2.854796E7
// 0.00170829 3.969663E7 1.136931E7 2.832731E7
// 0.00175824 3.985185E7 1.138785E7 2.846399E7
// 0.00180820 3.972582E7 1.158578E7 2.814004E7
// 0.00185815 3.977380E7 1.143126E7 2.834253E7
// 0.00190809 3.977327E7 1.153139E7 2.824188E7
// 0.00195805 3.985576E7 1.175924E7 2.809651E7
// 0.00200800 3.975390E7 1.129532E7 2.845858E7
// 0.00205795 3.979427E7 1.097670E7 2.881756E7
// 0.00210790 3.978652E7 1.147993E7 2.830659E7
// 0.00215785 3.970983E7 1.180000E7 2.790983E7
// 0.00220780 3.967958E7 1.152855E7 2.815103E7
// 0.00225775 3.959622E7 1.153845E7 2.805777E7
// 0.00230770 3.959839E7 1.152039E7 2.807800E7
// 0.00235765 3.972911E7 1.153069E7 2.819842E7
// 0.00240760 3.962466E7 1.149703E7 2.812763E7
// 0.00245755 3.982470E7 1.138654E7 2.843816E7
// 0.00250750 3.971259E7 1.143460E7 2.827799E7
// 0.00255744 3.779678E7 1.133296E7 2.646381E7
// 0.00260740 3.991298E7 1.147851E7 2.843447E7
// 0.00265734 3.990072E7 1.131849E7 2.858223E7
// 0.00270730 3.983470E7 1.137383E7 2.846086E7
// 0.00275725 3.992131E7 1.135962E7 2.856169E7
// 0.00280720 3.981419E7 1.136120E7 2.845299E7
// 0.00285715 3.983858E7 1.138197E7 2.845661E7
// 0.00290710 3.984522E7 1.156832E7 2.827689E7
// 0.00295704 4.004918E7 1.161054E7 2.843864E7
// 0.00300700 3.998488E7 1.159272E7 2.839215E7
// 0.00305695 3.999268E7 1.159183E7 2.840084E7
// 0.00310690 3.990239E7 1.158475E7 2.831764E7
// 0.00315685 4.000217E7 1.163592E7 2.836625E7
// 0.00320679 3.991911E7 1.160494E7 2.831416E7
// 0.00325675 4.008167E7 1.161449E7 2.846718E7
// 0.00330670 3.999880E7 1.161049E7 2.838831E7
// 0.00335665 4.001950E7 1.158822E7 2.843128E7
// 0.00340660 3.699261E7 1.012927E7 2.686334E7
// 0.00345655 4.003574E7 1.147414E7 2.85616 E7
// 0.00350650 3.944293E7 1.140768E7 2.803525E7
// 0.00355645 3.886007E7 1.143948E7 2.742058E7
// 0.00360640 3.996422E7 1.151733E7 2.844688E7
// 0.00365635 4.006544E7 1.119618E7 2.886926E7
// 0.00370630 3.876011E7 1.114849E7 2.761162E7
// 0.00375625 3.998152E7 1.146527E7 2.851624E7
// 0.00380620 4.008001E7 1.163507E7 2.844494E7
// 0.00385614 2.526210E7 1.146550E7 1.379659E7
// 0.00390610 3.964880E7 1.144501E7 2.820378E7
// 0.00395605 3.989851E7 1.142921E7 2.846929E7
// 0.00400600 3.975673E7 1.146688E7 2.828985E7
// 0.00405595 3.972967E7 1.147970E7 2.824997E7
// 0.00410589 3.942449E7 1.145225E7 2.797224E7
// 0.00415585 3.952786E7 1.138405E7 2.814380E7
// 0.00420580 3.921339E7 1.145559E7 2.775779E7
// 0.00425575 3.939407E7 1.162912E7 2.776495E7
// 0.00430570 3.963675E7 1.138858E7 2.824817E7
// 0.00435565 3.929571E7 1.101262E7 2.828308E7
// 0.00440560 3.755492E7 1.167073E7 2.588419E7
// 0.00445555 3.967081E7 1.134149E7 2.832932E7
// 0.00450550 3.949844E7 1.142488E7 2.807356E7
// 0.00455545 3.967131E7 1.147161E7 2.819969E7
// 0.00460540 3.953931E7 1.152744E7 2.801186E7
// 0.00465534 3.806153E7 1.151409E7 2.654743E7
// 0.00470530 3.966528E7 1.095654E7 2.870873E7
// 0.00475525 3.911213E7 9764338.0 2.93478 E7
// 0.00480520 3.682139E7 1.135475E7 2.546663E7
// 0.00485515 3.878268E7 1.145206E7 2.733061E7
// 0.00490510 3.962413E7 1.144730E7 2.817682E7
// 0.00495505 3.988569E7 1.140382E7 2.848187E7
// 0.00500500 3.966532E7 1.154128E7 2.812403E7
// 0.00505495 3.932646E7 1.135525E7 2.797121E7
// 0.00510489 3.973023E7 1.146642E7 2.826381E7
// 0.00515484 3.9487 E7 9638301.0 2.984869E7
// 0.00520480 3.936487E7 1.145619E7 2.790867E7
// 0.00525475 3.903806E7 1.152489E7 2.751317E7
// 0.00530469 3.911161E7 1.148624E7 2.762536E7
// 0.00535465 3.989463E7 6311543.0 3.358308E7
// 0.00540460 3.720507E7 1.142711E7 2.577795E7
// 0.00545455 3.759285E7 1.061498E7 2.697787E7
// 0.00550450 3.976554E7 1.157998E7 2.818556E7
// 0.00555445 3.714764E7 1.143748E7 2.571016E7
// 0.00560440 3.975560E7 1.165728E7 2.809832E7
// 0.00565434 3.954207E7 1.146086E7 2.808120E7
// 0.00570430 3.997480E7 8339432.0 3.163537E7
// 0.00575425 3.993465E7 1.144017E7 2.849448E7
// 0.00580420 3.944671E7 1.132619E7 2.812051E7
// 0.00585414 4.032495E7 1.147464E7 2.885031E7
// 0.00590409 3.928978E7 1.096288E7 2.832690E7
// 0.00595405 3.984080E7 1.162619E7 2.821461E7
// 0.00600400 3.979707E7 1.123276E7 2.856431E7
// 0.00605395 3.970712E7 1.167111E7 2.803600E7
// 0.00610390 3.988625E7 1.141450E7 2.847175E7
// 0.00615385 3.857478E7 1.159193E7 2.698285E7
// 0.00620380 3.734483E7 1.06283 E7 2.671653E7
// 0.00625375 4.009233E7 1.167649E7 2.841584E7
// 0.00630370 3.971646E7 1.150355E7 2.821291E7
// 0.00635365 4.000944E7 1.166616E7 2.834327E7
// 0.00640359 3.999268E7 1.152308E7 2.84696 E7
// 0.00645354 4.037726E7 1.106659E7 2.931066E7
// 0.00650350 4.031984E7 1.124993E7 2.906991E7
// 0.00655344 4.043771E7 1.157880E7 2.885890E7
// 0.00660340 4.025918E7 1.148233E7 2.877685E7
// 0.00665335 3.855504E7 1.107494E7 2.748010E7
// 0.00670330 4.003965E7 1.109286E7 2.894678E7
// 0.00675325 4.015644E7 1.138803E7 2.876840E7
// 0.00680320 3.849436E7 1.150299E7 2.699136E7
// 0.00685315 3.734721E7 1.154417E7 2.580303E7
// 0.00690310 3.865233E7 1.162732E7 2.702500E7
// 0.00695304 4.007045E7 1.151289E7 2.855756E7
// 0.00700300 3.962907E7 1.141404E7 2.821503E7
// 0.00705295 4.001670E7 1.115045E7 2.886625E7
// 0.00710290 4.000608E7 1.164467E7 2.836140E7
// 0.00715284 3.953113E7 1.151858E7 2.801255E7
// 0.00720280 4.013730E7 1.154780E7 2.858949E7
// 0.00725275 3.982857E7 1.128090E7 2.854766E7
// 0.00730270 3.441000E7 1.117891E7 2.323109E7
// 0.00735265 3.993633E7 1.127043E7 2.866590E7
// 0.00740260 3.826141E7 1.147998E7 2.678143E7
// 0.00745255 3.985519E7 1.168006E7 2.817513E7
// 0.00750250 4.002453E7 1.122501E7 2.879952E7
// 0.00755245 4.017108E7 1.152725E7 2.864383E7
// 0.00760240 3.955083E7 1.117455E7 2.837628E7
// 0.00765235 3.959622E7 1.077040E7 2.882581E7
// 0.00770229 4.021341E7 1.127083E7 2.894258E7
// 0.00775224 4.013672E7 1.122954E7 2.890717E7
// 0.00780220 4.032495E7 1.169021E7 2.863473E7
// 0.00785215 4.007719E7 1.154487E7 2.853232E7
// 0.00790209 3.891023E7 1.154607E7 2.736415E7
// 0.00795205 3.961808E7 1.151492E7 2.810315E7
// 0.00800199 4.009456E7 1.127770E7 2.881686E7
// 0.00805194 4.021115E7 1.177109E7 2.844006E7
// 0.00810190 4.027052E7 1.154980E7 2.872071E7
// 0.00815184 3.971204E7 1.154673E7 2.816531E7
// 0.00820179 4.033405E7 1.149426E7 2.883978E7
// 0.00825174 4.008169E7 1.134144E7 2.874024E7
// 0.00830169 4.026599E7 1.119600E7 2.906999E7
// 0.00835164 4.031584E7 1.117050E7 2.914534E7
// 0.00840159 3.871400E7 1.131670E7 2.739729E7
// 0.00845155 3.993575E7 1.14474 E7 2.848835E7
// 0.00850149 3.998821E7 1.109747E7 2.889074E7
// 0.00855144 4.022637E7 1.125222E7 2.897415E7
// 0.00860140 4.0363 E7 1.120748E7 2.915551E7
// 0.00865134 3.962578E7 1.140164E7 2.822414E7
// 0.00870130 4.017673E7 1.101355E7 2.916317E7
// 0.00875124 4.030113E7 1.114606E7 2.915507E7
// 0.00880119 4.033460E7 1.120691E7 2.912769E7
// 0.00885115 4.022183E7 1.008258E7 3.013924E7
// 0.00890109 3.970488E7 9142277.0 3.056260E7
// 0.00895104 4.016546E7 1.143081E7 2.873465E7
// 0.00900099 4.018854E7 1.169480E7 2.849373E7
// 0.00905094 4.024163E7 1.149980E7 2.874182E7
// 0.00910090 4.031982E7 9861174.0 3.045865E7
// 0.00915084 3.807014E7 1.124030E7 2.682983E7
// 0.00920080 3.625451E7 1.145481E7 2.479969E7
// 0.00925074 4.013957E7 1.150738E7 2.863218E7
// 0.00930069 4.042912E7 1.145931E7 2.896981E7
// 0.00935065 4.039945E7 1.117080E7 2.922865E7
// 0.00940059 4.034598E7 1.138337E7 2.896260E7
// 0.00945054 3.914958E7 1.168921E7 2.746036E7
// 0.00950049 4.020942E7 1.149976E7 2.870966E7
// 0.00955044 4.011481E7 1.149786E7 2.861694E7
// 0.00960039 4.032667E7 1.162940E7 2.869727E7
// 0.00965034 3.892767E7 1.145871E7 2.746895E7
// 0.00970030 4.051375E7 1.145028E7 2.906346E7
// 0.00975024 4.037726E7 1.164960E7 2.872765E7
// 0.00980020 4.047770E7 1.142615E7 2.905155E7
// 0.00985014 4.051839E7 1.146895E7 2.904944E7
// 0.00990009 4.064258E7 1.156954E7 2.907303E7
// 0.00995005 4.022187E7 1.144438E7 2.877749E7
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.client;
import com.google.common.base.Splitter;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import io.airlift.http.client.FullJsonResponseHandler;
import io.airlift.http.client.HttpClient;
import io.airlift.http.client.HttpClient.HttpResponseFuture;
import io.airlift.http.client.HttpStatus;
import io.airlift.http.client.Request;
import io.airlift.json.JsonCodec;
import io.airlift.units.Duration;
import javax.annotation.concurrent.ThreadSafe;
import java.io.Closeable;
import java.net.URI;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_CLEAR_SESSION;
import static com.facebook.presto.client.PrestoHeaders.PRESTO_SET_SESSION;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.net.HttpHeaders.USER_AGENT;
import static io.airlift.http.client.FullJsonResponseHandler.JsonResponse;
import static io.airlift.http.client.FullJsonResponseHandler.createFullJsonResponseHandler;
import static io.airlift.http.client.HttpStatus.Family;
import static io.airlift.http.client.HttpStatus.familyForStatusCode;
import static io.airlift.http.client.HttpUriBuilder.uriBuilderFrom;
import static io.airlift.http.client.Request.Builder.prepareDelete;
import static io.airlift.http.client.Request.Builder.prepareGet;
import static io.airlift.http.client.Request.Builder.preparePost;
import static io.airlift.http.client.StaticBodyGenerator.createStaticBodyGenerator;
import static io.airlift.http.client.StatusResponseHandler.StatusResponse;
import static io.airlift.http.client.StatusResponseHandler.createStatusResponseHandler;
import static java.lang.String.format;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.MINUTES;
/**
 * Client for running a single statement against a Presto coordinator and
 * paging through its results.
 * <p>
 * The query is POSTed to {@code /v1/statement} in the constructor; callers then
 * repeatedly invoke {@link #advance()} to follow the server-supplied
 * {@code nextUri} until results are exhausted, the query fails, or the client
 * is closed. Session property changes requested by the server via the
 * set/clear session response headers are accumulated and exposed through
 * {@link #getSetSessionProperties()} and {@link #getResetSessionProperties()}.
 */
@ThreadSafe
public class StatementClient
        implements Closeable
{
    // Splits "name=value" session headers into at most two trimmed parts
    private static final Splitter SESSION_HEADER_SPLITTER = Splitter.on('=').limit(2).trimResults();
    private static final String USER_AGENT_VALUE = StatementClient.class.getSimpleName() +
            "/" +
            firstNonNull(StatementClient.class.getPackage().getImplementationVersion(), "unknown");

    private final HttpClient httpClient;
    private final FullJsonResponseHandler<QueryResults> responseHandler;
    private final boolean debug;
    private final String query;
    // Most recent results page; set in the constructor and on every successful advance()
    private final AtomicReference<QueryResults> currentResults = new AtomicReference<>();
    private final Map<String, String> setSessionProperties = new ConcurrentHashMap<>();
    private final Set<String> resetSessionProperties = Sets.newConcurrentHashSet();
    private final AtomicBoolean closed = new AtomicBoolean();
    // "gone" means the server became unreachable or returned a fatal response
    private final AtomicBoolean gone = new AtomicBoolean();
    private final AtomicBoolean valid = new AtomicBoolean(true);
    private final String timeZoneId;

    /**
     * Creates the client and immediately submits the query.
     *
     * @throws RuntimeException if the initial request does not return HTTP 200
     *         with a decodable {@link QueryResults} body
     */
    public StatementClient(HttpClient httpClient, JsonCodec<QueryResults> queryResultsCodec, ClientSession session, String query)
    {
        checkNotNull(httpClient, "httpClient is null");
        checkNotNull(queryResultsCodec, "queryResultsCodec is null");
        checkNotNull(session, "session is null");
        checkNotNull(query, "query is null");

        this.httpClient = httpClient;
        this.responseHandler = createFullJsonResponseHandler(queryResultsCodec);
        this.debug = session.isDebug();
        this.timeZoneId = session.getTimeZoneId();
        this.query = query;

        Request request = buildQueryRequest(session, query);
        JsonResponse<QueryResults> response = httpClient.execute(request, responseHandler);
        if (response.getStatusCode() != HttpStatus.OK.code() || !response.hasValue()) {
            throw requestFailedException("starting query", request, response);
        }
        processResponse(response);
    }

    /** Builds the initial POST request carrying the query text and the session metadata headers. */
    private static Request buildQueryRequest(ClientSession session, String query)
    {
        Request.Builder builder = preparePost()
                .setUri(uriBuilderFrom(session.getServer()).replacePath("/v1/statement").build())
                .setBodyGenerator(createStaticBodyGenerator(query, UTF_8));

        if (session.getUser() != null) {
            builder.setHeader(PrestoHeaders.PRESTO_USER, session.getUser());
        }
        if (session.getSource() != null) {
            builder.setHeader(PrestoHeaders.PRESTO_SOURCE, session.getSource());
        }
        if (session.getCatalog() != null) {
            builder.setHeader(PrestoHeaders.PRESTO_CATALOG, session.getCatalog());
        }
        if (session.getSchema() != null) {
            builder.setHeader(PrestoHeaders.PRESTO_SCHEMA, session.getSchema());
        }
        builder.setHeader(PrestoHeaders.PRESTO_TIME_ZONE, session.getTimeZoneId());
        builder.setHeader(PrestoHeaders.PRESTO_LANGUAGE, session.getLocale().toLanguageTag());
        builder.setHeader(USER_AGENT, USER_AGENT_VALUE);

        Map<String, String> property = session.getProperties();
        for (Entry<String, String> entry : property.entrySet()) {
            builder.addHeader(PrestoHeaders.PRESTO_SESSION, entry.getKey() + "=" + entry.getValue());
        }
        return builder.build();
    }

    public String getQuery()
    {
        return query;
    }

    public String getTimeZoneId()
    {
        return timeZoneId;
    }

    public boolean isDebug()
    {
        return debug;
    }

    public boolean isClosed()
    {
        return closed.get();
    }

    public boolean isGone()
    {
        return gone.get();
    }

    public boolean isFailed()
    {
        return currentResults.get().getError() != null;
    }

    public StatementStats getStats()
    {
        return currentResults.get().getStats();
    }

    /**
     * Returns the current results page.
     *
     * @throws IllegalStateException if the cursor has moved past the end
     */
    public QueryResults current()
    {
        checkState(isValid(), "current position is not valid (cursor past end)");
        return currentResults.get();
    }

    /**
     * Returns the last results page; only callable once the cursor is
     * exhausted or the query has failed.
     */
    public QueryResults finalResults()
    {
        checkState((!isValid()) || isFailed(), "current position is still valid");
        return currentResults.get();
    }

    public Map<String, String> getSetSessionProperties()
    {
        return ImmutableMap.copyOf(setSessionProperties);
    }

    public Set<String> getResetSessionProperties()
    {
        return ImmutableSet.copyOf(resetSessionProperties);
    }

    public boolean isValid()
    {
        return valid.get() && (!isGone()) && (!isClosed());
    }

    /**
     * Fetches the next results page, retrying with linear back-off for up to
     * two minutes while the server reports SERVICE_UNAVAILABLE or the request
     * fails transiently.
     *
     * @return {@code true} if a new page was fetched, {@code false} if the
     *         cursor is exhausted or the client was closed
     * @throws RuntimeException if fetching ultimately fails
     */
    public boolean advance()
    {
        URI nextUri = current().getNextUri();
        if (isClosed() || (nextUri == null)) {
            valid.set(false);
            return false;
        }

        Request request = prepareGet()
                .setHeader(USER_AGENT, USER_AGENT_VALUE)
                .setUri(nextUri)
                .build();

        Exception cause = null;
        long start = System.nanoTime();
        long attempts = 0;
        do {
            // back-off on retry
            if (attempts > 0) {
                try {
                    MILLISECONDS.sleep(attempts * 100);
                }
                catch (InterruptedException e) {
                    close();
                    // restore the interrupt status so callers up the stack can observe it
                    // (the original code only queried isInterrupted(), which clears nothing
                    // and restores nothing)
                    Thread.currentThread().interrupt();
                    throw new RuntimeException("StatementClient thread was interrupted");
                }
            }
            attempts++;

            JsonResponse<QueryResults> response;
            try {
                response = httpClient.execute(request, responseHandler);
            }
            catch (RuntimeException e) {
                cause = e;
                continue;
            }

            if (response.getStatusCode() == HttpStatus.OK.code() && response.hasValue()) {
                processResponse(response);
                return true;
            }
            // SERVICE_UNAVAILABLE is considered transient; anything else is fatal
            if (response.getStatusCode() != HttpStatus.SERVICE_UNAVAILABLE.code()) {
                throw requestFailedException("fetching next", request, response);
            }
        }
        while ((System.nanoTime() - start) < MINUTES.toNanos(2) && !isClosed());

        gone.set(true);
        throw new RuntimeException("Error fetching next", cause);
    }

    /** Records session-property changes announced by the server and stores the new results page. */
    private void processResponse(JsonResponse<QueryResults> response)
    {
        for (String setSession : response.getHeaders().get(PRESTO_SET_SESSION)) {
            List<String> keyValue = SESSION_HEADER_SPLITTER.splitToList(setSession);
            if (keyValue.size() != 2) {
                continue;
            }
            // size is exactly 2 here, so the value is always present
            setSessionProperties.put(keyValue.get(0), keyValue.get(1));
        }
        for (String clearSession : response.getHeaders().get(PRESTO_CLEAR_SESSION)) {
            resetSessionProperties.add(clearSession);
        }
        currentResults.set(response.getValue());
    }

    private RuntimeException requestFailedException(String task, Request request, JsonResponse<QueryResults> response)
    {
        gone.set(true);
        if (!response.hasValue()) {
            return new RuntimeException(format("Error %s at %s returned an invalid response: %s", task, request.getUri(), response), response.getException());
        }
        return new RuntimeException(format("Error %s at %s returned %s: %s", task, request.getUri(), response.getStatusCode(), response.getStatusMessage()));
    }

    /**
     * Attempts a partial cancel of the current leaf stage.
     *
     * @return {@code true} if the server acknowledged the cancel within the
     *         timeout, {@code false} if there is nothing to cancel or the
     *         request timed out
     */
    public boolean cancelLeafStage(Duration timeout)
    {
        checkState(!isClosed(), "client is closed");

        URI uri = current().getPartialCancelUri();
        if (uri == null) {
            return false;
        }

        Request request = prepareDelete()
                .setHeader(USER_AGENT, USER_AGENT_VALUE)
                .setUri(uri)
                .build();

        HttpResponseFuture<StatusResponse> response = httpClient.executeAsync(request, createStatusResponseHandler());
        try {
            StatusResponse status = response.get(timeout.toMillis(), MILLISECONDS);
            return familyForStatusCode(status.getStatusCode()) == Family.SUCCESSFUL;
        }
        catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw Throwables.propagate(e);
        }
        catch (ExecutionException e) {
            throw Throwables.propagate(e.getCause());
        }
        catch (TimeoutException e) {
            return false;
        }
    }

    @Override
    public void close()
    {
        // Best-effort: fire an async DELETE at the next URI so the server can
        // clean up the query; the response is intentionally ignored.
        if (!closed.getAndSet(true)) {
            URI uri = currentResults.get().getNextUri();
            if (uri != null) {
                Request request = prepareDelete()
                        .setHeader(USER_AGENT, USER_AGENT_VALUE)
                        .setUri(uri)
                        .build();
                httpClient.executeAsync(request, createStatusResponseHandler());
            }
        }
    }
}
| |
package com.ycj.ycjlibrary.smartTab;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.View;
import android.widget.LinearLayout;
import com.ycj.ycjlibrary.R;
/**
 * Child strip of {@code SmartTabLayout} that hosts the tab views and draws the
 * moving selection indicator, the optional top/bottom border lines and the
 * vertical dividers between tabs. All appearance values are resolved once from
 * the {@code stl_SmartTabLayout} styleable attributes in the constructor.
 */
class SmartTabStrip extends LinearLayout {

    /* Indicator gravity values (must match the stl_indicatorGravity attr enum). */
    private static final int GRAVITY_BOTTOM = 0;
    private static final int GRAVITY_TOP = 1;
    private static final int GRAVITY_CENTER = 2;

    /* Sentinel meaning "indicator spans the whole tab width". */
    private static final int AUTO_WIDTH = -1;

    private static final int DEFAULT_TOP_BORDER_THICKNESS_DIPS = 0;
    private static final byte DEFAULT_TOP_BORDER_COLOR_ALPHA = 0x26;
    private static final int DEFAULT_BOTTOM_BORDER_THICKNESS_DIPS = 2;
    private static final byte DEFAULT_BOTTOM_BORDER_COLOR_ALPHA = 0x26;
    private static final int SELECTED_INDICATOR_THICKNESS_DIPS = 8;
    private static final int DEFAULT_SELECTED_INDICATOR_COLOR = 0xFF33B5E5;
    private static final float DEFAULT_INDICATOR_CORNER_RADIUS = 0f;
    private static final int DEFAULT_DIVIDER_THICKNESS_DIPS = 1;
    private static final byte DEFAULT_DIVIDER_COLOR_ALPHA = 0x20;
    private static final float DEFAULT_DIVIDER_HEIGHT = 0.5f;
    private static final boolean DEFAULT_INDICATOR_IN_CENTER = false;
    private static final boolean DEFAULT_INDICATOR_IN_FRONT = false;
    private static final boolean DEFAULT_INDICATOR_WITHOUT_PADDING = false;
    private static final int DEFAULT_INDICATOR_GRAVITY = GRAVITY_BOTTOM;
    private static final boolean DEFAULT_DRAW_DECORATION_AFTER_TAB = false;

    private final int topBorderThickness;
    private final int topBorderColor;
    private final int bottomBorderThickness;
    private final int bottomBorderColor;
    private final Paint borderPaint;
    private final RectF indicatorRectF = new RectF();
    private final boolean indicatorWithoutPadding;
    private final boolean indicatorAlwaysInCenter;
    private final boolean indicatorInFront;
    private final int indicatorThickness;
    private final int indicatorWidth;
    private final int indicatorGravity;
    private final float indicatorCornerRadius;
    private final Paint indicatorPaint;
    private final int dividerThickness;
    private final Paint dividerPaint;
    private final float dividerHeight;
    private final SimpleTabColorizer defaultTabColorizer;
    private final boolean drawDecorationAfterTab;

    // Last fully-settled page (offset == 0); used to detect page changes.
    private int lastPosition;
    private int selectedPosition;
    // Scroll offset [0, 1) between selectedPosition and selectedPosition + 1.
    private float selectionOffset;
    private SmartTabIndicationInterpolator indicationInterpolator;
    private SmartTabLayout.TabColorizer customTabColorizer;

    SmartTabStrip(Context context, AttributeSet attrs) {
        // Note: attrs are parsed manually below rather than passed to super.
        super(context);
        setWillNotDraw(false);

        final float density = getResources().getDisplayMetrics().density;

        TypedValue outValue = new TypedValue();
        context.getTheme().resolveAttribute(android.R.attr.colorForeground, outValue, true);
        final int themeForegroundColor = outValue.data;

        boolean indicatorWithoutPadding = DEFAULT_INDICATOR_WITHOUT_PADDING;
        boolean indicatorInFront = DEFAULT_INDICATOR_IN_FRONT;
        boolean indicatorAlwaysInCenter = DEFAULT_INDICATOR_IN_CENTER;
        int indicationInterpolatorId = SmartTabIndicationInterpolator.ID_SMART;
        int indicatorGravity = DEFAULT_INDICATOR_GRAVITY;
        int indicatorColor = DEFAULT_SELECTED_INDICATOR_COLOR;
        int indicatorColorsId = NO_ID;
        int indicatorThickness = (int) (SELECTED_INDICATOR_THICKNESS_DIPS * density);
        int indicatorWidth = AUTO_WIDTH;
        float indicatorCornerRadius = DEFAULT_INDICATOR_CORNER_RADIUS * density;
        int overlineColor = setColorAlpha(themeForegroundColor, DEFAULT_TOP_BORDER_COLOR_ALPHA);
        int overlineThickness = (int) (DEFAULT_TOP_BORDER_THICKNESS_DIPS * density);
        int underlineColor = setColorAlpha(themeForegroundColor, DEFAULT_BOTTOM_BORDER_COLOR_ALPHA);
        int underlineThickness = (int) (DEFAULT_BOTTOM_BORDER_THICKNESS_DIPS * density);
        int dividerColor = setColorAlpha(themeForegroundColor, DEFAULT_DIVIDER_COLOR_ALPHA);
        int dividerColorsId = NO_ID;
        int dividerThickness = (int) (DEFAULT_DIVIDER_THICKNESS_DIPS * density);
        boolean drawDecorationAfterTab = DEFAULT_DRAW_DECORATION_AFTER_TAB;

        TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.stl_SmartTabLayout);
        indicatorAlwaysInCenter = a.getBoolean(
                R.styleable.stl_SmartTabLayout_stl_indicatorAlwaysInCenter, indicatorAlwaysInCenter);
        indicatorWithoutPadding = a.getBoolean(
                R.styleable.stl_SmartTabLayout_stl_indicatorWithoutPadding, indicatorWithoutPadding);
        indicatorInFront = a.getBoolean(
                R.styleable.stl_SmartTabLayout_stl_indicatorInFront, indicatorInFront);
        indicationInterpolatorId = a.getInt(
                R.styleable.stl_SmartTabLayout_stl_indicatorInterpolation, indicationInterpolatorId);
        indicatorGravity = a.getInt(
                R.styleable.stl_SmartTabLayout_stl_indicatorGravity, indicatorGravity);
        indicatorColor = a.getColor(
                R.styleable.stl_SmartTabLayout_stl_indicatorColor, indicatorColor);
        indicatorColorsId = a.getResourceId(
                R.styleable.stl_SmartTabLayout_stl_indicatorColors, indicatorColorsId);
        indicatorThickness = a.getDimensionPixelSize(
                R.styleable.stl_SmartTabLayout_stl_indicatorThickness, indicatorThickness);
        indicatorWidth = a.getLayoutDimension(
                R.styleable.stl_SmartTabLayout_stl_indicatorWidth, indicatorWidth);
        indicatorCornerRadius = a.getDimension(
                R.styleable.stl_SmartTabLayout_stl_indicatorCornerRadius, indicatorCornerRadius);
        overlineColor = a.getColor(
                R.styleable.stl_SmartTabLayout_stl_overlineColor, overlineColor);
        overlineThickness = a.getDimensionPixelSize(
                R.styleable.stl_SmartTabLayout_stl_overlineThickness, overlineThickness);
        underlineColor = a.getColor(
                R.styleable.stl_SmartTabLayout_stl_underlineColor, underlineColor);
        underlineThickness = a.getDimensionPixelSize(
                R.styleable.stl_SmartTabLayout_stl_underlineThickness, underlineThickness);
        dividerColor = a.getColor(
                R.styleable.stl_SmartTabLayout_stl_dividerColor, dividerColor);
        dividerColorsId = a.getResourceId(
                R.styleable.stl_SmartTabLayout_stl_dividerColors, dividerColorsId);
        dividerThickness = a.getDimensionPixelSize(
                R.styleable.stl_SmartTabLayout_stl_dividerThickness, dividerThickness);
        drawDecorationAfterTab = a.getBoolean(
                R.styleable.stl_SmartTabLayout_stl_drawDecorationAfterTab, drawDecorationAfterTab);
        a.recycle();

        // A color-array resource, when given, overrides the single-color attribute.
        final int[] indicatorColors = (indicatorColorsId == NO_ID)
                ? new int[] { indicatorColor }
                : getResources().getIntArray(indicatorColorsId);

        final int[] dividerColors = (dividerColorsId == NO_ID)
                ? new int[] { dividerColor }
                : getResources().getIntArray(dividerColorsId);

        this.defaultTabColorizer = new SimpleTabColorizer();
        this.defaultTabColorizer.setIndicatorColors(indicatorColors);
        this.defaultTabColorizer.setDividerColors(dividerColors);

        this.topBorderThickness = overlineThickness;
        this.topBorderColor = overlineColor;
        this.bottomBorderThickness = underlineThickness;
        this.bottomBorderColor = underlineColor;
        this.borderPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        this.indicatorAlwaysInCenter = indicatorAlwaysInCenter;
        this.indicatorWithoutPadding = indicatorWithoutPadding;
        this.indicatorInFront = indicatorInFront;
        this.indicatorThickness = indicatorThickness;
        this.indicatorWidth = indicatorWidth;
        this.indicatorPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        this.indicatorCornerRadius = indicatorCornerRadius;
        this.indicatorGravity = indicatorGravity;
        this.dividerHeight = DEFAULT_DIVIDER_HEIGHT;
        this.dividerPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        this.dividerPaint.setStrokeWidth(dividerThickness);
        this.dividerThickness = dividerThickness;
        this.drawDecorationAfterTab = drawDecorationAfterTab;
        this.indicationInterpolator = SmartTabIndicationInterpolator.of(indicationInterpolatorId);
    }

    /**
     * Set the alpha value of the {@code color} to be the given {@code alpha} value.
     */
    private static int setColorAlpha(int color, byte alpha) {
        return Color.argb(alpha, Color.red(color), Color.green(color), Color.blue(color));
    }

    /**
     * Blend {@code color1} and {@code color2} using the given ratio.
     *
     * @param ratio of which to blend. 1.0 will return {@code color1}, 0.5 will give an even blend,
     * 0.0 will return {@code color2}.
     */
    private static int blendColors(int color1, int color2, float ratio) {
        final float inverseRation = 1f - ratio;
        float r = (Color.red(color1) * ratio) + (Color.red(color2) * inverseRation);
        float g = (Color.green(color1) * ratio) + (Color.green(color2) * inverseRation);
        float b = (Color.blue(color1) * ratio) + (Color.blue(color2) * inverseRation);
        return Color.rgb((int) r, (int) g, (int) b);
    }

    void setIndicationInterpolator(SmartTabIndicationInterpolator interpolator) {
        indicationInterpolator = interpolator;
        invalidate();
    }

    void setCustomTabColorizer(SmartTabLayout.TabColorizer customTabColorizer) {
        this.customTabColorizer = customTabColorizer;
        invalidate();
    }

    void setSelectedIndicatorColors(int... colors) {
        // Make sure that the custom colorizer is removed
        customTabColorizer = null;
        defaultTabColorizer.setIndicatorColors(colors);
        invalidate();
    }

    void setDividerColors(int... colors) {
        // Make sure that the custom colorizer is removed
        customTabColorizer = null;
        defaultTabColorizer.setDividerColors(colors);
        invalidate();
    }

    /** Records the current page/offset from the ViewPager and triggers a redraw. */
    void onViewPagerPageChanged(int position, float positionOffset) {
        selectedPosition = position;
        selectionOffset = positionOffset;
        if (positionOffset == 0f && lastPosition != selectedPosition) {
            lastPosition = selectedPosition;
        }
        invalidate();
    }

    boolean isIndicatorAlwaysInCenter() {
        return indicatorAlwaysInCenter;
    }

    /** Returns the custom colorizer when set, otherwise the attribute-derived default. */
    SmartTabLayout.TabColorizer getTabColorizer() {
        return (customTabColorizer != null) ? customTabColorizer : defaultTabColorizer;
    }

    @Override
    protected void onDraw(Canvas canvas) {
        if (!drawDecorationAfterTab) {
            drawDecoration(canvas);
        }
    }

    @Override
    protected void dispatchDraw(Canvas canvas) {
        super.dispatchDraw(canvas);
        // When stl_drawDecorationAfterTab is set, decorations paint on top of the tabs.
        if (drawDecorationAfterTab) {
            drawDecoration(canvas);
        }
    }

    /** Paints the over/underline, the selection indicator and the separators. */
    private void drawDecoration(Canvas canvas) {
        final int height = getHeight();
        final int width = getWidth();
        final int tabCount = getChildCount();
        final SmartTabLayout.TabColorizer tabColorizer = getTabColorizer();
        final boolean isLayoutRtl = Utils.isLayoutRtl(this);

        if (indicatorInFront) {
            drawOverline(canvas, 0, width);
            drawUnderline(canvas, 0, width, height);
        }

        // Thick colored underline below the current selection
        if (tabCount > 0) {
            View selectedTab = getChildAt(selectedPosition);
            int selectedStart = Utils.getStart(selectedTab, indicatorWithoutPadding);
            int selectedEnd = Utils.getEnd(selectedTab, indicatorWithoutPadding);
            int left;
            int right;
            if (isLayoutRtl) {
                left = selectedEnd;
                right = selectedStart;
            } else {
                left = selectedStart;
                right = selectedEnd;
            }

            int color = tabColorizer.getIndicatorColor(selectedPosition);
            float thickness = indicatorThickness;
            // Use the cached tabCount for consistency with the rest of this method.
            if (selectionOffset > 0f && selectedPosition < (tabCount - 1)) {
                int nextColor = tabColorizer.getIndicatorColor(selectedPosition + 1);
                if (color != nextColor) {
                    color = blendColors(nextColor, color, selectionOffset);
                }

                // Draw the selection partway between the tabs
                float startOffset = indicationInterpolator.getLeftEdge(selectionOffset);
                float endOffset = indicationInterpolator.getRightEdge(selectionOffset);
                float thicknessOffset = indicationInterpolator.getThickness(selectionOffset);

                View nextTab = getChildAt(selectedPosition + 1);
                int nextStart = Utils.getStart(nextTab, indicatorWithoutPadding);
                int nextEnd = Utils.getEnd(nextTab, indicatorWithoutPadding);
                if (isLayoutRtl) {
                    left = (int) (endOffset * nextEnd + (1.0f - endOffset) * left);
                    right = (int) (startOffset * nextStart + (1.0f - startOffset) * right);
                } else {
                    left = (int) (startOffset * nextStart + (1.0f - startOffset) * left);
                    right = (int) (endOffset * nextEnd + (1.0f - endOffset) * right);
                }
                thickness = thickness * thicknessOffset;
            }

            drawIndicator(canvas, left, right, height, thickness, color);
        }

        if (!indicatorInFront) {
            drawOverline(canvas, 0, width);
            // Use the cached width here (was getWidth()) to match the overline call above.
            drawUnderline(canvas, 0, width, height);
        }

        // Vertical separators between the titles
        drawSeparator(canvas, height, tabCount);
    }

    private void drawSeparator(Canvas canvas, int height, int tabCount) {
        if (dividerThickness <= 0) {
            return;
        }

        // dividerHeight is a fraction of the strip height, clamped to [0, 1].
        final int dividerHeightPx = (int) (Math.min(Math.max(0f, dividerHeight), 1f) * height);
        final SmartTabLayout.TabColorizer tabColorizer = getTabColorizer();

        // Vertical separators between the titles
        final int separatorTop = (height - dividerHeightPx) / 2;
        final int separatorBottom = separatorTop + dividerHeightPx;

        final boolean isLayoutRtl = Utils.isLayoutRtl(this);
        for (int i = 0; i < tabCount - 1; i++) {
            View child = getChildAt(i);
            int end = Utils.getEnd(child);
            int endMargin = Utils.getMarginEnd(child);
            int separatorX = isLayoutRtl ? end - endMargin : end + endMargin;
            dividerPaint.setColor(tabColorizer.getDividerColor(i));
            canvas.drawLine(separatorX, separatorTop, separatorX, separatorBottom, dividerPaint);
        }
    }

    private void drawIndicator(Canvas canvas, int left, int right, int height, float thickness,
            int color) {
        if (indicatorThickness <= 0 || indicatorWidth == 0) {
            return;
        }

        float center;
        float top;
        float bottom;
        switch (indicatorGravity) {
            case GRAVITY_TOP:
                center = indicatorThickness / 2f;
                top = center - (thickness / 2f);
                bottom = center + (thickness / 2f);
                break;
            case GRAVITY_CENTER:
                center = height / 2f;
                top = center - (thickness / 2f);
                bottom = center + (thickness / 2f);
                break;
            case GRAVITY_BOTTOM:
            default:
                center = height - (indicatorThickness / 2f);
                top = center - (thickness / 2f);
                bottom = center + (thickness / 2f);
                break; // defensive: keep explicit even as the last case
        }

        indicatorPaint.setColor(color);
        if (indicatorWidth == AUTO_WIDTH) {
            indicatorRectF.set(left, top, right, bottom);
        } else {
            // Fixed width: center the indicator within the tab bounds.
            float padding = (Math.abs(left - right) - indicatorWidth) / 2f;
            indicatorRectF.set(left + padding, top, right - padding, bottom);
        }
        if (indicatorCornerRadius > 0f) {
            canvas.drawRoundRect(
                    indicatorRectF, indicatorCornerRadius,
                    indicatorCornerRadius, indicatorPaint);
        } else {
            canvas.drawRect(indicatorRectF, indicatorPaint);
        }
    }

    private void drawOverline(Canvas canvas, int left, int right) {
        if (topBorderThickness <= 0) {
            return;
        }
        // Thin overline along the entire top edge
        borderPaint.setColor(topBorderColor);
        canvas.drawRect(left, 0, right, topBorderThickness, borderPaint);
    }

    private void drawUnderline(Canvas canvas, int left, int right, int height) {
        if (bottomBorderThickness <= 0) {
            return;
        }
        // Thin underline along the entire bottom edge
        borderPaint.setColor(bottomBorderColor);
        canvas.drawRect(left, height - bottomBorderThickness, right, height, borderPaint);
    }

    /** Colorizer backed by fixed arrays; colors repeat cyclically per position. */
    private static class SimpleTabColorizer implements SmartTabLayout.TabColorizer {

        private int[] indicatorColors;
        private int[] dividerColors;

        @Override
        public final int getIndicatorColor(int position) {
            return indicatorColors[position % indicatorColors.length];
        }

        @Override
        public final int getDividerColor(int position) {
            return dividerColors[position % dividerColors.length];
        }

        void setIndicatorColors(int... colors) {
            indicatorColors = colors;
        }

        void setDividerColors(int... colors) {
            dividerColors = colors;
        }
    }
}
| |
/**
* $Revision: $
* $Date: $
*
* Copyright (C) 2005-2008 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.openfire.http;
import java.net.InetAddress;
import java.util.List;
import java.util.Map;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.dom4j.DocumentException;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.dom4j.QName;
import org.eclipse.jetty.util.log.Log;
import org.jivesoftware.openfire.SessionManager;
import org.jivesoftware.openfire.StreamID;
import org.jivesoftware.openfire.auth.UnauthorizedException;
import org.jivesoftware.util.JiveConstants;
import org.jivesoftware.util.JiveGlobals;
import org.jivesoftware.util.TaskEngine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Manages sessions for all users connecting to Openfire using the HTTP binding protocol,
 * <a href="http://www.xmpp.org/extensions/xep-0124.html">XEP-0124</a>.
 */
public class HttpSessionManager {
    // SLF4J logger for this class. NOTE(review): the field name "Log" shadows the
    // org.eclipse.jetty.util.log.Log import at the top of the file; the conventional
    // lowercase "log" would avoid the collision — confirm before renaming.
    private static final Logger Log = LoggerFactory.getLogger(HttpSessionManager.class);

    private SessionManager sessionManager;
    // Active BOSH sessions keyed by stream ID; initial capacity is configurable.
    private Map<String, HttpSession> sessionMap = new ConcurrentHashMap<String, HttpSession>(
            JiveGlobals.getIntProperty("xmpp.httpbind.session.initial.count", 16));
    // Periodic reaper task scheduled in start(), cancelled in stop().
    private TimerTask inactivityTask;
    // Worker pool that routes inbound packets asynchronously (built in the constructor).
    private ThreadPoolExecutor sendPacketPool;
    // Removes a session from the map once it has been closed; the open/close
    // connection callbacks are intentionally no-ops.
    private SessionListener sessionListener = new SessionListener() {
        public void connectionOpened(HttpSession session, HttpConnection connection) {
        }

        public void connectionClosed(HttpSession session, HttpConnection connection) {
        }

        public void sessionClosed(HttpSession session) {
            sessionMap.remove(session.getStreamID().getID());
        }
    };
/**
* Creates a new HttpSessionManager instance.
*/
public HttpSessionManager() {
this.sessionManager = SessionManager.getInstance();
// Configure a pooled executor to handle async routing for incoming packets
// with a default size of 16 threads ("xmpp.httpbind.worker.threads"); also
// uses an unbounded task queue and configurable keep-alive (default: 60 secs)
// Note: server supports up to 254 client threads by default (@see HttpBindManager)
// BOSH installations expecting heavy loads may want to allocate additional threads
// to this worker pool to ensure timely delivery of inbound packets
int poolSize = JiveGlobals.getIntProperty("xmpp.httpbind.worker.threads",
// use deprecated property as default (shared with ConnectionManagerImpl)
JiveGlobals.getIntProperty("xmpp.client.processing.threads", 16));
int keepAlive = JiveGlobals.getIntProperty("xmpp.httpbind.worker.timeout", 60);
sendPacketPool = new ThreadPoolExecutor(poolSize, poolSize, keepAlive, TimeUnit.SECONDS,
new LinkedBlockingQueue<Runnable>(), // unbounded task queue
new ThreadFactory() { // custom thread factory for BOSH workers
final AtomicInteger counter = new AtomicInteger(1);
public Thread newThread(Runnable runnable) {
Thread thread = new Thread(Thread.currentThread().getThreadGroup(), runnable,
"httpbind-worker-" + counter.getAndIncrement());
thread.setDaemon(true);
return thread;
}
});
}
/**
* Starts the services used by the HttpSessionManager.
*/
public void start() {
inactivityTask = new HttpSessionReaper();
TaskEngine.getInstance().schedule(inactivityTask, 30 * JiveConstants.SECOND,
30 * JiveConstants.SECOND);
sendPacketPool.prestartCoreThread();
}
/**
* Stops any services and cleans up any resources used by the HttpSessionManager.
*/
public void stop() {
inactivityTask.cancel();
for (HttpSession session : sessionMap.values()) {
session.close();
}
sessionMap.clear();
sendPacketPool.shutdown();
}
/**
* Returns the session related to a stream id.
*
* @param streamID the stream id to retrieve the session.
* @return the session related to the provided stream id.
*/
public HttpSession getSession(String streamID) {
return sessionMap.get(streamID);
}
/**
* Creates an HTTP binding session which will allow a user to exchange packets with Openfire.
*
* @param address the internet address that was used to bind to Wildfie.
* @param rootNode the body element that was sent containing the request for a new session.
* @param connection the HTTP connection object which abstracts the individual connections to
* Openfire over the HTTP binding protocol. The initial session creation response is returned to
* this connection.
* @return the created HTTP session.
*
* @throws UnauthorizedException if the Openfire server is currently in an uninitialized state.
* Either shutting down or starting up.
* @throws HttpBindException when there is an internal server error related to the creation of
* the initial session creation response.
*/
public HttpSession createSession(InetAddress address, Element rootNode,
HttpConnection connection)
throws UnauthorizedException, HttpBindException {
// TODO Check if IP address is allowed to connect to the server
// Default language is English ("en").
String language = rootNode.attributeValue("xml:lang");
if (language == null || "".equals(language)) {
language = "en";
}
int wait = getIntAttribute(rootNode.attributeValue("wait"), 60);
int hold = getIntAttribute(rootNode.attributeValue("hold"), 1);
String version = rootNode.attributeValue("ver");
if (version == null || "".equals(version)) {
version = "1.5";
}
HttpSession session = createSession(connection.getRequestId(), address, connection);
session.setWait(Math.min(wait, getMaxWait()));
session.setHold(hold);
session.setSecure(connection.isSecure());
session.setMaxPollingInterval(getPollingInterval());
session.setMaxRequests(getMaxRequests());
session.setMaxPause(getMaxPause());
if(session.isPollingSession()) {
session.setDefaultInactivityTimeout(getPollingInactivityTimeout());
}
else {
session.setDefaultInactivityTimeout(getInactivityTimeout());
}
session.resetInactivityTimeout();
// Store language and version information in the connection.
session.setLanguage(language);
String [] versionString = version.split("\\.");
session.setMajorVersion(Integer.parseInt(versionString[0]));
session.setMinorVersion(Integer.parseInt(versionString[1]));
try {
connection.deliverBody(createSessionCreationResponse(session));
}
catch (HttpConnectionClosedException e) {
/* This won't happen here. */
}
catch (DocumentException e) {
Log.error("Error creating document", e);
throw new HttpBindException("Internal server error",
BoshBindingError.internalServerError);
}
return session;
}
/**
* Returns the maximum length of a temporary session pause (in seconds) that the client MAY
* request.
*
* @return the maximum length of a temporary session pause (in seconds) that the client MAY
* request.
*/
public int getMaxPause() {
return JiveGlobals.getIntProperty("xmpp.httpbind.client.maxpause", 300);
}
/**
* Returns the longest time (in seconds) that Openfire is allowed to wait before responding to
* any request during the session. This enables the client to prevent its TCP connection from
* expiring due to inactivity, as well as to limit the delay before it discovers any network
* failure.
*
* @return the longest time (in seconds) that Openfire is allowed to wait before responding to
* any request during the session.
*/
public int getMaxWait() {
return JiveGlobals.getIntProperty("xmpp.httpbind.client.requests.wait",
Integer.MAX_VALUE);
}
/**
* Openfire SHOULD include two additional attributes in the session creation response element,
* specifying the shortest allowable polling interval and the longest allowable inactivity
* period (both in seconds). Communication of these parameters enables the client to engage in
* appropriate behavior (e.g., not sending empty request elements more often than desired, and
* ensuring that the periods with no requests pending are never too long).
*
* @return the maximum allowable period over which a client can send empty requests to the
* server.
*/
public int getPollingInterval() {
return JiveGlobals.getIntProperty("xmpp.httpbind.client.requests.polling", 5);
}
/**
* Openfire MAY limit the number of simultaneous requests the client makes with the 'requests'
* attribute. The RECOMMENDED value is "2". Servers that only support polling behavior MUST
* prevent clients from making simultaneous requests by setting the 'requests' attribute to a
* value of "1" (however, polling is NOT RECOMMENDED). In any case, clients MUST NOT make more
* simultaneous requests than specified by the Openfire.
*
* @return the number of simultaneous requests allowable.
*/
public int getMaxRequests() {
return JiveGlobals.getIntProperty("xmpp.httpbind.client.requests.max", 2);
}
/**
* Seconds a session has to be idle to be closed. Default is 30. Sending stanzas to the
* client is not considered as activity. We are only considering the connection active when the
* client sends some data or hearbeats (i.e. whitespaces) to the server. The reason for this is
* that sending data will fail if the connection is closed. And if the thread is blocked while
* sending data (because the socket is closed) then the clean up thread will close the socket
* anyway.
*
* @return Seconds a session has to be idle to be closed.
*/
public int getInactivityTimeout() {
return JiveGlobals.getIntProperty("xmpp.httpbind.client.idle", 30);
}
/**
* Seconds a polling session has to be idle to be closed. Default is 60. Sending stanzas to the
* client is not considered as activity. We are only considering the connection active when the
* client sends some data or hearbeats (i.e. whitespaces) to the server. The reason for this is
* that sending data will fail if the connection is closed. And if the thread is blocked while
* sending data (because the socket is closed) then the clean up thread will close the socket
* anyway.
*
* @return Seconds a polling session has to be idle to be closed.
*/
public int getPollingInactivityTimeout() {
return JiveGlobals.getIntProperty("xmpp.httpbind.client.idle.polling", 60);
}
/**
* Forwards a client request, which is related to a session, to the server. A connection is
* created and queued up in the provided session. When a connection reaches the top of a queue
* any pending packets bound for the client will be forwarded to the client through the
* connection.
*
* @param rid the unique, sequential, requestID sent from the client.
* @param session the HTTP session of the client that made the request.
* @param isSecure true if the request was made over a secure channel, HTTPS, and false if it
* was not.
* @param rootNode the XML body of the request.
* @return the created HTTP connection.
*
* @throws HttpBindException for several reasons: if the encoding inside of an auth packet is
* not recognized by the server, or if the packet type is not recognized.
* @throws HttpConnectionClosedException if the session is no longer available.
*/
public HttpConnection forwardRequest(long rid, HttpSession session, boolean isSecure,
Element rootNode) throws HttpBindException,
HttpConnectionClosedException
{
//noinspection unchecked
List<Element> elements = rootNode.elements();
boolean isPoll = (elements.size() == 0);
if ("terminate".equals(rootNode.attributeValue("type")))
isPoll = false;
else if ("true".equals(rootNode.attributeValue(new QName("restart", rootNode.getNamespaceForPrefix("xmpp")))))
isPoll = false;
else if (rootNode.attributeValue("pause") != null)
isPoll = false;
HttpConnection connection = session.createConnection(rid, elements, isSecure, isPoll);
if (elements.size() > 0) {
// creates the runnable to forward the packets
new HttpPacketSender(session).init();
}
return connection;
}
private HttpSession createSession(long rid, InetAddress address, HttpConnection connection) throws UnauthorizedException {
// Create a ClientSession for this user.
StreamID streamID = SessionManager.getInstance().nextStreamID();
// Send to the server that a new client session has been created
HttpSession session = sessionManager.createClientHttpSession(rid, address, streamID, connection);
// Register that the new session is associated with the specified stream ID
sessionMap.put(streamID.getID(), session);
session.addSessionCloseListener(sessionListener);
return session;
}
private static int getIntAttribute(String value, int defaultValue) {
if (value == null || "".equals(value.trim())) {
return defaultValue;
}
try {
return Integer.valueOf(value);
}
catch (Exception ex) {
return defaultValue;
}
}
private double getDoubleAttribute(String doubleValue, double defaultValue) {
if (doubleValue == null || "".equals(doubleValue.trim())) {
return defaultValue;
}
try {
return Double.parseDouble(doubleValue);
}
catch (Exception ex) {
return defaultValue;
}
}
private String createSessionCreationResponse(HttpSession session) throws DocumentException {
Element response = DocumentHelper.createElement("body");
response.addNamespace("", "http://jabber.org/protocol/httpbind");
response.addNamespace("stream", "http://etherx.jabber.org/streams");
response.addAttribute("authid", session.getStreamID().getID());
response.addAttribute("sid", session.getStreamID().getID());
response.addAttribute("secure", Boolean.TRUE.toString());
response.addAttribute("requests", String.valueOf(session.getMaxRequests()));
response.addAttribute("inactivity", String.valueOf(session.getInactivityTimeout()));
response.addAttribute("polling", String.valueOf(session.getMaxPollingInterval()));
response.addAttribute("wait", String.valueOf(session.getWait()));
if ((session.getMajorVersion() == 1 && session.getMinorVersion() >= 6) ||
session.getMajorVersion() > 1) {
response.addAttribute("hold", String.valueOf(session.getHold()));
response.addAttribute("ack", String.valueOf(session.getLastAcknowledged()));
response.addAttribute("maxpause", String.valueOf(session.getMaxPause()));
response.addAttribute("ver", String.valueOf(session.getMajorVersion())
+ "." + String.valueOf(session.getMinorVersion()));
}
Element features = response.addElement("stream:features");
for (Element feature : session.getAvailableStreamFeaturesElements()) {
features.add(feature);
}
return response.asXML();
}
private class HttpSessionReaper extends TimerTask {
@Override
public void run() {
long currentTime = System.currentTimeMillis();
for (HttpSession session : sessionMap.values()) {
long lastActive = currentTime - session.getLastActivity();
if (Log.isDebugEnabled()) {
Log.debug("Session was last active " + lastActive + " ms ago: " + session.getAddress());
}
if (lastActive > session.getInactivityTimeout() * JiveConstants.SECOND) {
Log.info("Closing idle session: " + session.getAddress());
session.close();
}
}
}
}
/**
* A runner that guarantees that the packets per a session will be sent and
* processed in the order in which they were received.
*/
private class HttpPacketSender implements Runnable {
private HttpSession session;
HttpPacketSender(HttpSession session) {
this.session = session;
}
public void run() {
session.sendPendingPackets();
}
private void init() {
sendPacketPool.execute(this);
}
}
}
| |
/*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.tfssdk14;
import com.microsoft.tfs.core.TFSTeamProjectCollection;
import com.microsoft.tfs.core.clients.versioncontrol.GetOptions;
import com.microsoft.tfs.core.clients.versioncontrol.soapextensions.Changeset;
import com.microsoft.tfs.core.clients.versioncontrol.soapextensions.GetRequest;
import com.microsoft.tfs.core.clients.versioncontrol.specs.version.ChangesetVersionSpec;
import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.domain.materials.mercurial.StringRevision;
import com.thoughtworks.go.tfssdk14.wrapper.GoTfsVersionControlClient;
import com.thoughtworks.go.tfssdk14.wrapper.GoTfsWorkspace;
import com.thoughtworks.go.util.command.StringArgument;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.AdditionalMatchers.or;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@code TfsSDKCommand}, exercising history queries, connection
 * checks and workspace management against mocked TFS SDK wrapper objects so
 * that no real Team Foundation Server is required.
 */
class TfsSDKCommandTest {
    private TfsSDKCommand tfsCommand;
    private final String DOMAIN = "domain";
    private final String USERNAME = "username";
    private final String PASSWORD = "password";
    private final String TFS_COLLECTION = "http://some.repo.local:8000/";
    private final String TFS_PROJECT = "$/project_path";
    private final String TFS_WORKSPACE = "workspace";
    private GoTfsVersionControlClient client;
    private TFSTeamProjectCollection collection;

    @BeforeEach
    void setUp() {
        client = mock(GoTfsVersionControlClient.class);
        collection = mock(TFSTeamProjectCollection.class);
        tfsCommand = new TfsSDKCommand(client, collection, null, new StringArgument(TFS_COLLECTION), DOMAIN, USERNAME, PASSWORD, TFS_WORKSPACE, TFS_PROJECT);
    }

    @Test
    void shouldGetLatestModifications() {
        Changeset[] changeSets = getChangeSets(42);
        when(client.queryHistory(TFS_PROJECT, null, 1)).thenReturn(changeSets);
        TfsSDKCommand spy = spy(tfsCommand);
        doReturn(null).when(spy).getModifiedFiles(changeSets[0]);

        assertThat(spy.latestModification(null).isEmpty()).isFalse();

        verify(client).queryHistory(TFS_PROJECT, null, 1);
        verify(spy).getModifiedFiles(changeSets[0]);
    }

    @Test
    void shouldCheckConnectionSuccessfullyIfAllCredentialsAreValid() {
        Changeset[] changeSets = getChangeSets(42);
        when(client.queryHistory(TFS_PROJECT, null, 1)).thenReturn(changeSets);
        TfsSDKCommand spy = spy(tfsCommand);
        doReturn(null).when(spy).getModifiedFiles(changeSets[0]);

        try {
            spy.checkConnection();
        } catch (Exception e) {
            fail("Should not have thrown exception");
        }

        verify(client).queryHistory(TFS_PROJECT, null, 1);
        verify(spy).getModifiedFiles(changeSets[0]);
    }

    @Test
    void shouldThrowExceptionDuringCheckConnectionIfInvalid() {
        when(client.queryHistory(TFS_PROJECT, null, 1)).thenThrow(new RuntimeException("could not connect"));

        try {
            tfsCommand.checkConnection();
            fail("should have thrown an exception");
        } catch (RuntimeException e) {
            assertThat(e.getMessage()).isEqualTo("Failed while checking connection using Url: http://some.repo.local:8000/, Project Path: $/project_path, Username: username, Domain: domain, Root Cause: could not connect");
        }

        verify(client).queryHistory(TFS_PROJECT, null, 1);
    }

    @Test
    void shouldReturnChangeSetsFromAPreviouslyKnownRevisionUptilTheLatest() {
        Changeset[] changeSets = getChangeSets(42);
        when(client.queryHistory(eq(TFS_PROJECT), or(isNull(), any(ChangesetVersionSpec.class)), anyInt())).thenReturn(changeSets);
        TfsSDKCommand spy = spy(tfsCommand);
        doReturn(null).when(spy).getModifiedFiles(changeSets[0]);

        List<Modification> modifications = spy.modificationsSince(null, new StringRevision("2"));

        assertThat(modifications.isEmpty()).isFalse();
        verify(client, times(2)).queryHistory(eq(TFS_PROJECT), or(isNull(), any(ChangesetVersionSpec.class)), anyInt());
    }

    @Test
    void shouldCreateWorkspaceAndMapDirectory() throws Exception {
        File workingDirectory = mock(File.class);
        when(workingDirectory.exists()).thenReturn(false);
        when(workingDirectory.getCanonicalPath()).thenReturn("/some-random-path/");
        // No workspace exists yet, so checkout must create one.
        GoTfsWorkspace[] workspaces = {};
        when(client.queryWorkspaces(TFS_WORKSPACE, USERNAME)).thenReturn(workspaces);
        GoTfsWorkspace workspace = mock(GoTfsWorkspace.class);
        when(client.createWorkspace(TFS_WORKSPACE)).thenReturn(workspace);
        when(workspace.isLocalPathMapped("/some-random-path/")).thenReturn(false);
        doNothing().when(workspace).createWorkingFolder(any(com.microsoft.tfs.core.clients.versioncontrol.soapextensions.WorkingFolder.class));
        TfsSDKCommand spy = spy(tfsCommand);
        doNothing().when(spy).retrieveFiles(workingDirectory, null);

        spy.checkout(workingDirectory, null);

        verify(client, times(1)).queryWorkspaces(TFS_WORKSPACE, USERNAME);
        verify(client, times(1)).createWorkspace(TFS_WORKSPACE);
        verify(workspace, times(1)).isLocalPathMapped(anyString());
        verify(workspace, times(1)).createWorkingFolder(any(com.microsoft.tfs.core.clients.versioncontrol.soapextensions.WorkingFolder.class));
        verify(spy).retrieveFiles(workingDirectory, null);
    }

    @Test
    void shouldOnlyMapDirectoryAndNotCreateAWorkspaceIfWorkspaceIsAlreadyCreated() throws Exception {
        File workingDirectory = mock(File.class);
        when(workingDirectory.exists()).thenReturn(false);
        when(workingDirectory.getCanonicalPath()).thenReturn("/some-random-path/");
        // A workspace already exists, so checkout must only map the directory.
        GoTfsWorkspace workspace = mock(GoTfsWorkspace.class);
        GoTfsWorkspace[] workspaces = {workspace};
        when(client.queryWorkspaces(TFS_WORKSPACE, USERNAME)).thenReturn(workspaces);
        when(workspace.isLocalPathMapped("/some-random-path/")).thenReturn(false);
        doNothing().when(workspace).createWorkingFolder(any(com.microsoft.tfs.core.clients.versioncontrol.soapextensions.WorkingFolder.class));
        TfsSDKCommand spy = spy(tfsCommand);
        doNothing().when(spy).retrieveFiles(workingDirectory, null);

        spy.checkout(workingDirectory, null);

        verify(client, times(1)).queryWorkspaces(TFS_WORKSPACE, USERNAME);
        verify(client, never()).createWorkspace(TFS_WORKSPACE);
        verify(workspace, times(1)).isLocalPathMapped("/some-random-path/");
        verify(workspace, times(1)).createWorkingFolder(any(com.microsoft.tfs.core.clients.versioncontrol.soapextensions.WorkingFolder.class));
        verify(spy).retrieveFiles(workingDirectory, null);
    }

    @Test
    void shouldThrowUpWhenUrlIsInvalid() throws Exception {
        TfsSDKCommand tfsCommandForInvalidCollection = new TfsSDKCommand(null, new StringArgument("invalid_url"), DOMAIN, USERNAME, PASSWORD, TFS_WORKSPACE, TFS_PROJECT);
        try {
            tfsCommandForInvalidCollection.init();
            // Without this fail() the test silently passed when init() did not
            // throw, defeating its purpose.
            fail("should have thrown an exception for an invalid collection URL");
        } catch (RuntimeException e) {
            assertThat(e.getMessage()).isEqualTo("Unable to connect to TFS Collection invalid_url java.lang.RuntimeException: [TFS] Failed when converting the url string to a uri: invalid_url, Project Path: $/project_path, Username: username, Domain: domain");
        }
    }

    @Test
    void shouldCheckoutAllFilesWhenWorkingDirectoryIsDeleted() throws Exception {
        File workingDirectory = mock(File.class);
        when(workingDirectory.exists()).thenReturn(false);
        when(workingDirectory.getCanonicalPath()).thenReturn("canonical_path");
        // listFiles() == null simulates a deleted/never-created directory.
        when(workingDirectory.listFiles()).thenReturn(null);
        TfsSDKCommand spy = spy(tfsCommand);
        doNothing().when(spy).initializeWorkspace(workingDirectory);
        GoTfsWorkspace workspace = mock(GoTfsWorkspace.class);
        when(client.queryWorkspace(TFS_WORKSPACE, USERNAME)).thenReturn(workspace);
        doNothing().when(workspace).get(any(GetRequest.class), eq(GetOptions.GET_ALL));

        spy.checkout(workingDirectory, null);

        verify(workingDirectory).getCanonicalPath();
        verify(workingDirectory).listFiles();
        // An empty working directory triggers a full GET_ALL fetch.
        verify(workspace).get(any(GetRequest.class), eq(GetOptions.GET_ALL));
    }

    @Test
    void should_GetLatestRevisions_WhenCheckingOutToLaterRevision() throws Exception {
        File workingDirectory = mock(File.class);
        when(workingDirectory.exists()).thenReturn(false);
        when(workingDirectory.getCanonicalPath()).thenReturn("canonical_path");
        // Existing checked-out files mean only an incremental get is needed.
        File[] checkedOutFiles = {mock(File.class)};
        when(workingDirectory.listFiles()).thenReturn(checkedOutFiles);
        TfsSDKCommand spy = spy(tfsCommand);
        doNothing().when(spy).initializeWorkspace(workingDirectory);
        GoTfsWorkspace workspace = mock(GoTfsWorkspace.class);
        when(client.queryWorkspace(TFS_WORKSPACE, USERNAME)).thenReturn(workspace);
        doNothing().when(workspace).get(any(GetRequest.class), eq(GetOptions.NONE));

        spy.checkout(workingDirectory, null);

        verify(workingDirectory).getCanonicalPath();
        verify(workingDirectory).listFiles();
        verify(workspace).get(any(GetRequest.class), eq(GetOptions.NONE));
    }

    @Test
    void shouldClearWorkingDirectoryBeforeCheckingOut() {
        File workingDirectory = mock(File.class);
        when(workingDirectory.exists()).thenReturn(true);
        TfsSDKCommand spy = spy(tfsCommand);
        doNothing().when(spy).initializeWorkspace(workingDirectory);
        doNothing().when(spy).retrieveFiles(workingDirectory, null);

        spy.checkout(workingDirectory, null);

        verify(workingDirectory).exists();
    }

    @Test
    void shouldDeleteWorkspace() {
        GoTfsWorkspace workspace = mock(GoTfsWorkspace.class);
        when(client.queryWorkspace(TFS_WORKSPACE, USERNAME)).thenReturn(workspace);
        doNothing().when(client).deleteWorkspace(workspace);

        tfsCommand.deleteWorkspace();

        verify(client).queryWorkspace(TFS_WORKSPACE, USERNAME);
        verify(client).deleteWorkspace(workspace);
    }

    @Test
    void destroyShouldCloseClientAndCollection() {
        doNothing().when(client).close();
        doNothing().when(collection).close();

        tfsCommand.destroy();

        verify(client).close();
        verify(collection).close();
    }

    /** Builds a single-element changeset array with the given changeset ID. */
    private Changeset[] getChangeSets(int changeSetID) {
        Changeset oneChangeSet = new Changeset("owner", "comment", null, null);
        oneChangeSet.setChangesetID(changeSetID);
        return new Changeset[]{oneChangeSet};
    }
}
| |
package org.insightech.er.editor.view.outline;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.EditPartViewer;
import org.eclipse.gef.ui.actions.ActionRegistry;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.action.IContributionItem;
import org.eclipse.jface.action.IMenuListener;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.action.MenuManager;
import org.eclipse.jface.action.Separator;
import org.eclipse.ui.actions.ActionFactory;
import org.insightech.er.ERDiagramActivator;
import org.insightech.er.ResourceString;
import org.insightech.er.db.DBManager;
import org.insightech.er.db.DBManagerFactory;
import org.insightech.er.editor.controller.editpart.outline.group.GroupSetOutlineEditPart;
import org.insightech.er.editor.controller.editpart.outline.sequence.SequenceSetOutlineEditPart;
import org.insightech.er.editor.controller.editpart.outline.table.TableOutlineEditPart;
import org.insightech.er.editor.controller.editpart.outline.tablespace.TablespaceSetOutlineEditPart;
import org.insightech.er.editor.controller.editpart.outline.trigger.TriggerSetOutlineEditPart;
import org.insightech.er.editor.model.ERDiagram;
import org.insightech.er.editor.model.settings.Settings;
import org.insightech.er.editor.view.action.group.GroupManageAction;
import org.insightech.er.editor.view.action.outline.index.CreateIndexAction;
import org.insightech.er.editor.view.action.outline.notation.type.ChangeOutlineViewToBothAction;
import org.insightech.er.editor.view.action.outline.notation.type.ChangeOutlineViewToLogicalAction;
import org.insightech.er.editor.view.action.outline.notation.type.ChangeOutlineViewToPhysicalAction;
import org.insightech.er.editor.view.action.outline.orderby.ChangeOutlineViewOrderByLogicalNameAction;
import org.insightech.er.editor.view.action.outline.orderby.ChangeOutlineViewOrderByPhysicalNameAction;
import org.insightech.er.editor.view.action.outline.sequence.CreateSequenceAction;
import org.insightech.er.editor.view.action.outline.tablespace.CreateTablespaceAction;
import org.insightech.er.editor.view.action.outline.trigger.CreateTriggerAction;
public class ERDiagramOutlinePopupMenuManager extends MenuManager {

	/**
	 * Maps each outline edit-part type to the ID of the creation/manage action
	 * that applies when a part of that type is selected.
	 */
	private static final Map<Class<?>, String> ACTION_MAP = new HashMap<Class<?>, String>();

	static {
		ACTION_MAP.put(SequenceSetOutlineEditPart.class, CreateSequenceAction.ID);
		ACTION_MAP.put(TriggerSetOutlineEditPart.class, CreateTriggerAction.ID);
		ACTION_MAP.put(GroupSetOutlineEditPart.class, GroupManageAction.ID);
		ACTION_MAP.put(TableOutlineEditPart.class, CreateIndexAction.ID);
		ACTION_MAP.put(TablespaceSetOutlineEditPart.class, CreateTablespaceAction.ID);
	}

	private ActionRegistry actionRegistry;

	private ActionRegistry outlineActionRegistry;

	/**
	 * Builds the outline popup menu and installs a listener that refreshes
	 * action enablement and radio check-marks each time the menu opens.
	 *
	 * @param diagram the diagram the outline view belongs to.
	 * @param actionRegistry registry of editor-wide actions.
	 * @param outlineActionRegistry registry of outline-specific actions.
	 * @param editPartViewer viewer whose selection drives action enablement.
	 */
	public ERDiagramOutlinePopupMenuManager(final ERDiagram diagram,
			ActionRegistry actionRegistry,
			ActionRegistry outlineActionRegistry,
			final EditPartViewer editPartViewer) {
		try {
			this.actionRegistry = actionRegistry;
			this.outlineActionRegistry = outlineActionRegistry;

			this.add(this.getAction(ActionFactory.DELETE));
			this.add(this.getAction(GroupManageAction.ID));
			this.add(this.getAction(CreateTriggerAction.ID));
			this.add(this.getAction(CreateSequenceAction.ID));
			this.add(this.getAction(CreateIndexAction.ID));
			this.add(this.getAction(CreateTablespaceAction.ID));
			this.add(new Separator());

			MenuManager viewModeMenu = new MenuManager(
					ResourceString.getResourceString("label.outline.view.mode"));
			viewModeMenu.add(this.getAction(ChangeOutlineViewToPhysicalAction.ID));
			viewModeMenu.add(this.getAction(ChangeOutlineViewToLogicalAction.ID));
			viewModeMenu.add(this.getAction(ChangeOutlineViewToBothAction.ID));
			this.add(viewModeMenu);

			MenuManager orderByMenu = new MenuManager(
					ResourceString.getResourceString("label.order.by"));
			orderByMenu.add(this.getAction(ChangeOutlineViewOrderByPhysicalNameAction.ID));
			orderByMenu.add(this.getAction(ChangeOutlineViewOrderByLogicalNameAction.ID));
			this.add(orderByMenu);

			this.addMenuListener(new IMenuListener() {
				public void menuAboutToShow(IMenuManager manager) {
					try {
						refreshEnablement(diagram, editPartViewer);
						refreshCheckStates(diagram);
						manager.update(true);
					} catch (Exception e) {
						ERDiagramActivator.showExceptionDialog(e);
					}
				}
			});
		} catch (Exception e) {
			ERDiagramActivator.showExceptionDialog(e);
		}
	}

	/**
	 * Enables or disables the selection-dependent actions based on the
	 * currently selected outline edit part.
	 */
	private void refreshEnablement(ERDiagram diagram, EditPartViewer editPartViewer) {
		List selectedEditParts = editPartViewer.getSelectedEditParts();

		if (selectedEditParts.isEmpty()) {
			// Nothing selected: disable everything except the view-mode and
			// order-by toggles, which do not depend on the selection.
			for (IContributionItem menuItem : getItems()) {
				String id = menuItem.getId();
				if (id != null && !isSelectionIndependent(id)) {
					enabled(id, false);
				}
			}

		} else {
			EditPart editPart = (EditPart) selectedEditParts.get(0);

			for (Map.Entry<Class<?>, String> entry : ACTION_MAP.entrySet()) {
				String actionId = entry.getValue();

				if (!entry.getKey().isInstance(editPart)) {
					enabled(actionId, false);

				} else if (CreateSequenceAction.ID.equals(actionId)
						&& !DBManagerFactory.getDBManager(diagram).isSupported(
								DBManager.SUPPORT_SEQUENCE)) {
					// Sequences are only offered when the target DB supports them.
					enabled(actionId, false);

				} else {
					enabled(actionId, true);
				}
			}
		}
	}

	/**
	 * Synchronizes the checked state of the view-mode and order-by radio
	 * actions with the current diagram settings.
	 */
	private void refreshCheckStates(ERDiagram diagram) {
		Settings settings = diagram.getDiagramContents().getSettings();

		int viewMode = settings.getOutlineViewMode();
		getAction(ChangeOutlineViewToPhysicalAction.ID).setChecked(
				viewMode == Settings.VIEW_MODE_PHYSICAL);
		getAction(ChangeOutlineViewToLogicalAction.ID).setChecked(
				viewMode == Settings.VIEW_MODE_LOGICAL);
		getAction(ChangeOutlineViewToBothAction.ID).setChecked(
				viewMode != Settings.VIEW_MODE_PHYSICAL
						&& viewMode != Settings.VIEW_MODE_LOGICAL);

		boolean orderByPhysical = settings.getViewOrderBy() == Settings.VIEW_MODE_PHYSICAL;
		getAction(ChangeOutlineViewOrderByPhysicalNameAction.ID).setChecked(orderByPhysical);
		getAction(ChangeOutlineViewOrderByLogicalNameAction.ID).setChecked(!orderByPhysical);
	}

	/**
	 * @return {@code true} for action IDs that stay enabled regardless of the
	 *         current selection (view-mode and order-by toggles).
	 */
	private static boolean isSelectionIndependent(String actionId) {
		return ChangeOutlineViewToPhysicalAction.ID.equals(actionId)
				|| ChangeOutlineViewToLogicalAction.ID.equals(actionId)
				|| ChangeOutlineViewToBothAction.ID.equals(actionId)
				|| ChangeOutlineViewOrderByPhysicalNameAction.ID.equals(actionId)
				|| ChangeOutlineViewOrderByLogicalNameAction.ID.equals(actionId);
	}

	/** Looks up a workbench action by its factory. */
	private IAction getAction(ActionFactory actionFactory) {
		return this.actionRegistry.getAction(actionFactory.getId());
	}

	/**
	 * Looks up an action by ID, falling back to the outline registry.
	 *
	 * @return the action, or {@code null} if neither registry knows the ID.
	 */
	private IAction getAction(String id) {
		IAction action = this.actionRegistry.getAction(id);
		if (action == null) {
			action = this.outlineActionRegistry.getAction(id);
		}
		return action;
	}

	/**
	 * Sets an action's enabled state; unknown IDs are ignored instead of
	 * raising a NullPointerException.
	 */
	private void enabled(String id, boolean enabled) {
		IAction action = getAction(id);
		if (action != null) {
			action.setEnabled(enabled);
		}
	}
}
| |
package org.apache.archiva.webdav;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import junit.framework.TestCase;
import org.apache.archiva.redback.authentication.AuthenticationException;
import org.apache.archiva.redback.policy.AccountLockedException;
import org.apache.archiva.redback.policy.MustChangePasswordException;
import org.apache.archiva.redback.users.User;
import org.apache.jackrabbit.webdav.DavSessionProvider;
import org.apache.jackrabbit.webdav.WebdavRequest;
import org.apache.jackrabbit.webdav.WebdavRequestImpl;
import org.apache.archiva.security.ServletAuthenticator;
import org.apache.archiva.redback.authentication.AuthenticationDataSource;
import org.apache.archiva.redback.authentication.AuthenticationResult;
import org.apache.archiva.redback.authorization.AuthorizationException;
import org.apache.archiva.redback.authorization.UnauthorizedException;
import org.apache.archiva.redback.system.SecuritySession;
import org.apache.archiva.redback.integration.filter.authentication.HttpAuthenticator;
import javax.servlet.AsyncContext;
import javax.servlet.DispatcherType;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import javax.servlet.http.Part;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.security.Principal;
import java.util.Collection;
import java.util.Enumeration;
import java.util.Locale;
import java.util.Map;
import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith( ArchivaBlockJUnit4ClassRunner.class )
public class ArchivaDavSessionProviderTest
extends TestCase
{
private DavSessionProvider sessionProvider;
private WebdavRequest request;
    @Override
    @Before
    public void setUp()
        throws Exception
    {
        super.setUp();
        // Wire the provider with local mock authenticators so no real security
        // back-end is required, and wrap a stubbed servlet request for WebDAV.
        sessionProvider = new ArchivaDavSessionProvider( new ServletAuthenticatorMock(), new HttpAuthenticatorMock() );
        request = new WebdavRequestImpl( new HttpServletRequestMock(), null );
    }
    @Test
    public void testAttachSession()
        throws Exception
    {
        // No DavSession is bound to the request until the provider attaches one.
        assertNull( request.getDavSession() );
        sessionProvider.attachSession( request );
        // attachSession() must have bound a DavSession to the request.
        assertNotNull( request.getDavSession() );
    }
    @Test
    public void testReleaseSession()
        throws Exception
    {
        // Start from a clean request, attach a session, then verify that
        // releaseSession() removes it again.
        assertNull( request.getDavSession() );
        sessionProvider.attachSession( request );
        assertNotNull( request.getDavSession() );

        sessionProvider.releaseSession( request );
        assertNull( request.getDavSession() );
    }
@SuppressWarnings( "unchecked" )
private class HttpServletRequestMock
implements HttpServletRequest
{
@Override
public boolean authenticate( HttpServletResponse httpServletResponse )
throws IOException, ServletException
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public void login( String s, String s1 )
throws ServletException
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public void logout()
throws ServletException
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public Collection<Part> getParts()
throws IOException, ServletException
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public Part getPart( String s )
throws IOException, ServletException
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public ServletContext getServletContext()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public AsyncContext startAsync()
throws IllegalStateException
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public AsyncContext startAsync( ServletRequest servletRequest, ServletResponse servletResponse )
throws IllegalStateException
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public boolean isAsyncStarted()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public boolean isAsyncSupported()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public AsyncContext getAsyncContext()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public DispatcherType getDispatcherType()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public Object getAttribute( String arg0 )
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public Enumeration getAttributeNames()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getCharacterEncoding()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public int getContentLength()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getContentType()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public ServletInputStream getInputStream()
throws IOException
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getLocalAddr()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getLocalName()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public int getLocalPort()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public Locale getLocale()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public Enumeration getLocales()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getParameter( String arg0 )
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public Map getParameterMap()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public Enumeration getParameterNames()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String[] getParameterValues( String arg0 )
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getProtocol()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public BufferedReader getReader()
throws IOException
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getRealPath( String arg0 )
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getRemoteAddr()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getRemoteHost()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public int getRemotePort()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public RequestDispatcher getRequestDispatcher( String arg0 )
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getScheme()
{
return "";
}
@Override
public String getServerName()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public int getServerPort()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public boolean isSecure()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public void removeAttribute( String arg0 )
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public void setAttribute( String arg0, Object arg1 )
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public void setCharacterEncoding( String arg0 )
throws UnsupportedEncodingException
{
throw new UnsupportedOperationException( "Not supported yet." );
}
// HttpServletRequest stubs (continued): getContextPath, getHeader, and
// getRequestURI return fixed canned values; the rest are unimplemented.
@Override
public String getAuthType()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getContextPath()
{
// Canned value: pretend the application is deployed at the root context.
return "/";
}
@Override
public Cookie[] getCookies()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public long getDateHeader( String arg0 )
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getHeader( String arg0 )
{
// Canned value: every header reads as empty rather than null.
return "";
}
@Override
public Enumeration getHeaderNames()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public Enumeration getHeaders( String arg0 )
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public int getIntHeader( String arg0 )
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getMethod()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getPathInfo()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getPathTranslated()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getQueryString()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getRemoteUser()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getRequestURI()
{
// Canned value: all mock requests target the root URI.
return "/";
}
@Override
public StringBuffer getRequestURL()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getRequestedSessionId()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public String getServletPath()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
// HttpServletRequest stubs (continued): session- and security-related
// methods are all unimplemented; the code under test must not reach them.
@Override
public HttpSession getSession( boolean arg0 )
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public HttpSession getSession()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public Principal getUserPrincipal()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public boolean isRequestedSessionIdFromCookie()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public boolean isRequestedSessionIdFromURL()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public boolean isRequestedSessionIdFromUrl()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public boolean isRequestedSessionIdValid()
{
throw new UnsupportedOperationException( "Not supported yet." );
}
@Override
public boolean isUserInRole( String arg0 )
{
throw new UnsupportedOperationException( "Not supported yet." );
}
}
/**
 * Permissive {@link ServletAuthenticator} test double: every request is
 * reported as both authenticated and authorized, so tests can run without
 * a real security backend.
 */
private class ServletAuthenticatorMock
implements ServletAuthenticator
{
@Override
public boolean isAuthenticated( HttpServletRequest arg0, AuthenticationResult arg1 )
throws AuthenticationException, AccountLockedException, MustChangePasswordException
{
// Every request counts as authenticated.
return true;
}
@Override
public boolean isAuthorized( HttpServletRequest request, SecuritySession securitySession, String repositoryId,
String permission )
throws AuthorizationException, UnauthorizedException
{
// Any session may perform any operation on any repository.
return true;
}
@Override
public boolean isAuthorized( String principal, String repoId, String permission )
throws UnauthorizedException
{
// Any principal is granted any permission on any repository.
return true;
}
}
/**
 * {@link HttpAuthenticator} test double: challenges and request-based
 * authentication are no-ops, result lookups always return a fresh default
 * {@link AuthenticationResult}, and the session-related methods simply
 * delegate to the base class implementation.
 */
private class HttpAuthenticatorMock
extends HttpAuthenticator
{
@Override
public void challenge( HttpServletRequest arg0, HttpServletResponse arg1, String arg2,
AuthenticationException arg3 )
throws IOException
{
//Do nothing
}
@Override
public AuthenticationResult getAuthenticationResult( HttpServletRequest arg0, HttpServletResponse arg1 )
throws AuthenticationException, AccountLockedException, MustChangePasswordException
{
// Report success with a default (empty) result.
return new AuthenticationResult();
}
@Override
public AuthenticationResult authenticate( AuthenticationDataSource arg0, HttpSession httpSession )
throws AuthenticationException, AccountLockedException, MustChangePasswordException
{
// Report success with a default (empty) result.
return new AuthenticationResult();
}
@Override
public void authenticate( HttpServletRequest arg0, HttpServletResponse arg1 )
throws AuthenticationException
{
//Do nothing
}
@Override
public SecuritySession getSecuritySession( HttpSession httpSession )
{
// Delegate to the real implementation.
return super.getSecuritySession( httpSession );
}
@Override
public User getSessionUser( HttpSession httpSession )
{
// Delegate to the real implementation.
return super.getSessionUser( httpSession );
}
@Override
public boolean isAlreadyAuthenticated( HttpSession httpSession )
{
// Delegate to the real implementation.
return super.isAlreadyAuthenticated( httpSession );
}
}
}
| |
/*
* Copyright 2006-2016 Edward Smith
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package root.jdbc;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.Properties;
import java.util.logging.Logger;
import root.annotation.Inheritance;
import root.lang.StringExtractor;
import root.log.Log;
import root.pool.PoolObjectFactory;
/**
*
* @author Edward Smith
* @version 0.5
* @since 0.5
*/
@Inheritance
public final class PooledDataSource extends RootDataSource {

    // <><><><><><><><><><><><><>< Private Classes ><><><><><><><><><><><><><>

    /**
     * A {@link PoolObjectFactory} that creates, validates, and destroys the
     * {@link PooledConnection} objects managed by this data source's pool.
     *
     * @author Edward Smith
     * @version 0.5
     * @since 0.5
     */
    private class PooledConnectionFactory implements PoolObjectFactory<PooledConnection> {

        /**
         * Opens a new physical {@link Connection} using the configured driver, URL, and
         * database properties, and wraps it in a {@link PooledConnection}.
         *
         * @return a newly created {@link PooledConnection}
         * @throws DatabaseException if the driver cannot establish a connection
         */
        @Override
        public final PooledConnection create() {
            try {
                final Connection conn = PooledDataSource.this.driver.connect(PooledDataSource.this.url, PooledDataSource.this.dbProperties);
                return new PooledConnection(PooledDataSource.this, conn);
            } catch (final SQLException e) {
                // Include the URL in the message for parity with the error
                // reporting in the PooledDataSource constructor
                throw new DatabaseException("Cannot create a pooled connection [URL={P}]", e, PooledDataSource.this.url);
            }
        }

        /**
         * Destroys the pooled connection when the pool discards it.
         *
         * @param conn The {@link PooledConnection} to destroy
         */
        @Override
        public final void destroy(final PooledConnection conn) {
            conn.destroy();
        }

        /** Returns the fully qualified class name of the pooled object type. */
        @Override
        public final String getObjectClassName() {
            return PooledConnection.class.getName();
        }

        /**
         * Validates a pooled connection before it is handed out.
         *
         * @param conn The {@link PooledConnection} to validate
         * @return {@code true} if the connection is still usable
         */
        @Override
        public final boolean validate(final PooledConnection conn) {
            return conn.isValid(0);
        }

    } // End PooledConnectionFactory

    // <><><><><><><><><><><><><><><> Constants <><><><><><><><><><><><><><><>

    /** Class logger */
    private static final Log log = new Log(PooledDataSource.class);

    // <><><><><><><><><><><><><><><> Attributes <><><><><><><><><><><><><><><>

    /** The URL connection string used to create a JDBC connection to the database */
    private final String url;

    /** The {@link Driver} used to connect to the database with the specified URL */
    private final Driver driver;

    /** The set of {@link Properties} used to connect to the database with the {@link Driver} */
    private final Properties dbProperties;

    // <><><><><><><><><><><><><><>< Constructors ><><><><><><><><><><><><><><>

    /**
     * Creates a {@link PooledDataSource} using the specified JDBC driver, URL connection string, and pool capacity.
     *
     * @param driverName
     *            The fully qualified JDBC driver class name
     * @param url
     *            The URL connection string
     * @param capacity
     *            The pool capacity
     * @throws DatabaseException if the driver class cannot be loaded, or if no registered driver accepts the URL
     */
    public PooledDataSource(final String driverName, final String url, final int capacity) {
        super(capacity);

        log.debug("Creating PooledDataSource from JDBC driver [{P}] [URL={P}]", driverName, url);
        try {
            // Loading the driver class registers it with the DriverManager
            Class.forName(driverName);
            this.driver = DriverManager.getDriver(url);
        } catch (final ClassNotFoundException e) {
            log.error("Cannot load JDBC driver [{P}]", e, driverName);
            throw new DatabaseException("Cannot load JDBC driver [{P}]", e, driverName);
        } catch (final SQLException e) {
            log.error("Cannot get JDBC driver [{P}] from DriverManager with URL [{P}]", e, driverName, url);
            throw new DatabaseException("Cannot get JDBC driver [{P}] from DriverManager with URL [{P}]", e, driverName, url);
        }

        this.url = url;
        this.dbProperties = new Properties();
    }

    /**
     * Creates a {@link PooledDataSource} using the specified JDBC driver, URL connection string, pool capacity, and database properties. The database
     * properties most commonly used are {@code user} and {@code password}, although other vendor-specific database properties may be passed along as
     * well.
     *
     * @param driverName
     *            The fully qualified JDBC driver class name
     * @param url
     *            The URL connection string
     * @param capacity
     *            The pool capacity
     * @param dbProperties
     *            The database properties to use when creating a {@link Connection}
     */
    public PooledDataSource(final String driverName, final String url, final int capacity, final Properties dbProperties) {
        this(driverName, url, capacity);

        this.dbProperties.putAll(dbProperties);
    }

    // <><><><><><><><><><><><><><> Public Methods <><><><><><><><><><><><><><>

    /**
     * Appends a human-readable description of this data source to the extractor.
     *
     * @param extractor The {@link StringExtractor} to append to
     */
    @Override
    public final void extract(final StringExtractor extractor) {
        extractor.append("PooledDataSource [");
        extractor.append("driver=").append(this.driver.getClass().getName());
        extractor.append(", url=").append(this.url);
        extractor.append(", stmt cache size=").append(this.stmtCacheSize);
        extractor.append(", maxIdleTime=").append(this.maxIdleTime);
        extractor.append(", maxWait=").append(this.pool.getMaxWait());
        extractor.append(", poolCapacity=").append(this.pool.getCapacity());
        extractor.append(']');
    }

    /**
     * Not supported by this data source.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public final int getLoginTimeout() throws SQLException {
        throw new UnsupportedOperationException();
    }

    /**
     * Not supported by this data source.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public final PrintWriter getLogWriter() throws SQLException {
        throw new UnsupportedOperationException();
    }

    /** Returns the parent {@link Logger} of the internal class logger. */
    @Override
    public final Logger getParentLogger() throws SQLFeatureNotSupportedException {
        return log.getParent();
    }

    /**
     * Not supported by this data source.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public final void setLoginTimeout(final int seconds) throws SQLException {
        throw new UnsupportedOperationException();
    }

    /**
     * Not supported by this data source.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public final void setLogWriter(final PrintWriter out) throws SQLException {
        throw new UnsupportedOperationException();
    }

    /** Returns a human-readable description of this data source. */
    @Override
    public final String toString() {
        final StringExtractor extractor = new StringExtractor(512);
        this.extract(extractor);
        return extractor.toString();
    }

    // <><><><><><><><><><><><><> Protected Methods <><><><><><><><><><><><><>

    /** Returns the class logger used by the base class. */
    @Override
    protected final Log getLog() {
        return log;
    }

    /** Returns a factory that produces {@link PooledConnection}s for the pool. */
    @Override
    protected final PoolObjectFactory<PooledConnection> getPooledConnectionFactory() {
        return new PooledConnectionFactory();
    }

} // End PooledDataSource
| |
/**
* Copyright 2008 Marvin Herman Froeder
* -->
* <!--
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* -->
*
* <!--
* http://www.apache.org/licenses/LICENSE-2.0
* -->
*
* <!--
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sonatype.flexmojos.generator;
import static java.lang.Thread.currentThread;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.DependencyResolutionRequiredException;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.util.DirectoryScanner;
import org.codehaus.plexus.util.SelectorUtils;
import org.sonatype.flexmojos.generator.GenerationException;
import org.sonatype.flexmojos.generator.GenerationRequest;
import org.sonatype.flexmojos.generator.Generator;
import org.sonatype.flexmojos.generator.GeneratorFactory;
/**
* This goal generate code based on Java classes.
*
* @author Marvin Herman Froeder (velo.br@gmail.com)
* @author edward.yakop@gmail.com
* @goal generate
* @phase generate-sources
* @requiresDependencyResolution test
* @since 3.6
*/
public class SimpleGeneratorMojo
    extends AbstractMojo
{
    /**
     * The maven project.
     *
     * @parameter expression="${project}"
     * @required
     * @readonly
     */
    private MavenProject project;

    /**
     * File to generate as3 file. If not defined assumes all classes must be included
     *
     * @parameter
     */
    private String[] includeJavaClasses;

    /**
     * File to exclude from as3 generation. If not defined, assumes no exclusions
     *
     * @parameter
     */
    private String[] excludeJavaClasses;

    /**
     * Persistent output folder for generated sources (see {@code GenerationRequest#setPersistentOutputFolder}).
     *
     * @parameter default-value="${project.build.sourceDirectory}"
     */
    private File outputDirectory;

    /**
     * Transient output folder for generated sources (see {@code GenerationRequest#setTransientOutputFolder}).
     *
     * @parameter default-value="${project.build.directory}/generated-sources/flexmojos"
     */
    private File baseOutputDirectory;

    /**
     * @parameter default-value="graniteds22" expression="${generatorToUse}"
     */
    private String generatorToUse;

    /**
     * @component role="org.sonatype.flexmojos.generator.GeneratorFactory"
     */
    private GeneratorFactory generatorFactory;

    /**
     * Configurations used by the generator implementation, check generator docs to see which parameters can be used.
     *
     * @parameter
     */
    private Map<String, String> extraOptions;

    /**
     * Templates used by the generator implementation, check generator docs to see which Templates can be used. Example:
     *
     * <pre>
     * &lt;templates&gt;
     *   &lt;base-enum-template&gt;your-template&lt;/base-enum-template&gt;
     * &lt;/templates&gt;
     * </pre>
     *
     * @parameter
     */
    private Map<String, String> templates;

    /**
     * A '=' separated list of Strings, format: packageToTranslate=packageToReplace
     *
     * @parameter
     */
    private String[] translators;

    /**
     * Runs the generation: resolves the classes to process, assembles a
     * {@link GenerationRequest}, and delegates to the configured {@link Generator}.
     *
     * @throws MojoExecutionException if class resolution or generation fails
     */
    public void execute()
        throws MojoExecutionException
    {
        setUp();

        GeneratorLogger logger = new MavenGeneratorLogger( getLog() );

        GenerationRequest request = new GenerationRequest();
        request.setClasses( getFilesToGenerator() );
        request.setClassLoader( this.initializeClassLoader() );
        request.setExtraOptions( extraOptions );
        request.setPersistentOutputFolder( outputDirectory );
        request.setTemplates( templates );
        request.setTransientOutputFolder( baseOutputDirectory );
        request.setTranslators( translators );
        request.setLogger( logger );

        // Run the generator under its own class loader, always restoring the
        // original context class loader afterwards.
        ClassLoader cl = currentThread().getContextClassLoader();
        try
        {
            currentThread().setContextClassLoader( request.getClassLoader() );

            Generator generator = generatorFactory.getGenerator( generatorToUse );
            generator.generate( request );
        }
        catch ( GenerationException e )
        {
            throw new MojoExecutionException( e.getMessage(), e );
        }
        finally
        {
            currentThread().setContextClassLoader( cl );
        }
    }

    /**
     * Applies parameter defaults and registers both output directories as
     * compile source roots, creating them if necessary.
     */
    private void setUp()
        throws MojoExecutionException
    {
        if ( includeJavaClasses == null )
        {
            // By default, include every class
            includeJavaClasses = new String[] { "*" };
        }

        if ( !outputDirectory.exists() )
        {
            outputDirectory.mkdirs();
        }
        String outputPath = outputDirectory.getAbsolutePath();
        if ( !project.getCompileSourceRoots().contains( outputPath ) )
        {
            project.addCompileSourceRoot( outputPath );
        }

        if ( !baseOutputDirectory.exists() )
        {
            baseOutputDirectory.mkdirs();
        }
        String baseOutputPath = baseOutputDirectory.getAbsolutePath();
        if ( !project.getCompileSourceRoots().contains( baseOutputPath ) )
        {
            project.addCompileSourceRoot( baseOutputPath );
        }

        if ( translators == null )
        {
            translators = new String[0];
        }
    }

    /**
     * Resolves the classes to feed into the generator from the project's
     * direct "jar" dependencies: class directories are scanned recursively,
     * jar files are read entry by entry.
     *
     * @return map of fully qualified class name to the file (class file or containing jar)
     * @throws MojoExecutionException if a jar cannot be read
     */
    private final Map<String, File> getFilesToGenerator()
        throws MojoExecutionException
    {
        List<String> classpaths = getDirectDependencies();
        Map<String, File> classes = new HashMap<String, File>();

        for ( String fileName : classpaths )
        {
            File file = new File( fileName ).getAbsoluteFile();
            if ( file.isDirectory() )
            {
                scanClassDirectory( file, classes );
            }
            else
            {
                scanJarFile( file, classes );
            }
        }

        return classes;
    }

    /**
     * Scans a directory tree for .class files and records every class matching
     * the include/exclude patterns.
     */
    private void scanClassDirectory( File dir, Map<String, File> classes )
    {
        DirectoryScanner ds = new DirectoryScanner();
        ds.setBasedir( dir );
        ds.setIncludes( new String[] { "**/*.class" } );
        ds.scan();

        for ( String classFileName : ds.getIncludedFiles() )
        {
            String className = classFileName.replace( File.separatorChar, '.' );
            // Strip the trailing ".class" extension (6 characters)
            className = className.substring( 0, className.length() - 6 );
            if ( isIncluded( className ) )
            {
                classes.put( className, new File( dir, classFileName ) );
            }
        }
    }

    /**
     * Reads a jar file entry by entry and records every class matching the
     * include/exclude patterns. The stream is always closed, even on failure.
     */
    private void scanJarFile( File file, Map<String, File> classes )
        throws MojoExecutionException
    {
        JarInputStream jar = null;
        try
        {
            jar = new JarInputStream( new FileInputStream( file ) );
            JarEntry jarEntry;
            while ( ( jarEntry = jar.getNextJarEntry() ) != null )
            {
                String entryName = jarEntry.getName();
                if ( jarEntry.isDirectory() || !entryName.endsWith( ".class" ) )
                {
                    continue;
                }

                String className = entryName.replace( '/', '.' );
                // Strip the trailing ".class" extension (6 characters)
                className = className.substring( 0, className.length() - 6 );
                if ( isIncluded( className ) )
                {
                    classes.put( className, file );
                }
            }
        }
        catch ( IOException e )
        {
            throw new MojoExecutionException( "Error on classes resolve", e );
        }
        finally
        {
            // The original implementation leaked this stream; close it here
            if ( jar != null )
            {
                try
                {
                    jar.close();
                }
                catch ( IOException e )
                {
                    // Nothing useful to do; all entries were already read
                }
            }
        }
    }

    /**
     * Returns whether the class name matches the include patterns and none of
     * the exclude patterns.
     */
    private boolean isIncluded( String className )
    {
        return matchWildCard( className, includeJavaClasses ) && !matchWildCard( className, excludeJavaClasses );
    }

    /**
     * Returns whether the class name matches any of the given wildcard
     * patterns, either exactly or via Ant-style path matching.
     */
    private boolean matchWildCard( String className, String... wildCards )
    {
        if ( wildCards == null )
        {
            return false;
        }

        for ( String wildCard : wildCards )
        {
            if ( className.equals( wildCard ) )
            {
                return true;
            }

            if ( SelectorUtils.matchPath( wildCard, className ) )
            {
                return true;
            }
        }

        return false;
    }

    /**
     * Builds a class loader over the project's compile classpath (minus the
     * project's own output directory), parented to the current context loader.
     */
    private ClassLoader initializeClassLoader()
        throws MojoExecutionException
    {
        List<String> classpaths = getClasspath();

        try
        {
            List<URL> classpathsUrls = new ArrayList<URL>();

            // add all the jars to the new child realm
            for ( String path : classpaths )
            {
                URL url = new File( path ).toURI().toURL();
                classpathsUrls.add( url );
            }

            return new URLClassLoader( classpathsUrls.toArray( new URL[0] ), currentThread().getContextClassLoader() );
        }
        catch ( MalformedURLException e )
        {
            throw new MojoExecutionException( "Unable to get dependency URL", e );
        }
    }

    /**
     * Returns the compile classpath elements, excluding the project's own
     * build output directory.
     */
    private List<String> getClasspath()
        throws MojoExecutionException
    {
        List<String> classpaths;
        try
        {
            classpaths = project.getCompileClasspathElements();
            classpaths.remove( project.getBuild().getOutputDirectory() );
        }
        catch ( DependencyResolutionRequiredException e )
        {
            throw new MojoExecutionException( "Failed to find dependencies", e );
        }
        return classpaths;
    }

    /**
     * Returns the absolute paths of the project's direct "jar" dependencies.
     * Returns an empty list when the project declares no direct dependencies.
     */
    private List<String> getDirectDependencies()
        throws MojoExecutionException
    {
        List<String> classpaths = new ArrayList<String>();

        Set<Artifact> artifacts = project.getDependencyArtifacts();
        if ( artifacts == null )
        {
            // MavenProject.getDependencyArtifacts() may return null when no
            // direct dependencies were resolved
            return classpaths;
        }

        for ( Artifact artifact : artifacts )
        {
            if ( "jar".equals( artifact.getType() ) )
            {
                classpaths.add( artifact.getFile().getAbsolutePath() );
            }
        }
        return classpaths;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.sql.validate;
import org.apache.calcite.linq4j.Linq4j;
import org.apache.calcite.linq4j.Ord;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.util.ImmutableBitSet;
import org.apache.calcite.util.Litmus;
import org.apache.calcite.util.Pair;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedMultiset;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
import static org.apache.calcite.sql.SqlUtil.stripAs;
/**
* Scope for resolving identifiers within a SELECT statement that has a
* GROUP BY clause.
*
* <p>The same set of identifiers are in scope, but it won't allow access to
* identifiers or expressions which are not group-expressions.
*/
public class AggregatingSelectScope
extends DelegatingScope implements AggregatingScope {
//~ Instance fields --------------------------------------------------------
private final SqlSelect select;
private final boolean distinct;
/** Use while resolving. */
private SqlValidatorUtil.GroupAnalyzer groupAnalyzer;
// Memoized analysis of the grouping structure; resolve() runs at most once,
// on first access. Must be read only after validation has expanded the
// select list.
public final Supplier<Resolved> resolved =
Suppliers.memoize(this::resolve)::get;
//~ Constructors -----------------------------------------------------------
/**
* Creates an AggregatingSelectScope.
*
* @param selectScope Parent scope
* @param select Enclosing SELECT node
* @param distinct Whether SELECT is DISTINCT
*/
AggregatingSelectScope(
SqlValidatorScope selectScope,
SqlSelect select,
boolean distinct) {
// The select scope is the parent in the sense that all columns which
// are available in the select scope are available. Whether they are
// valid as aggregation expressions... now that's a different matter.
super(selectScope);
this.select = select;
this.distinct = distinct;
}
//~ Methods ----------------------------------------------------------------
// Analyzes the GROUP BY clause and computes the flattened grouping sets.
// While this runs, the non-null 'groupAnalyzer' field marks resolution as
// in progress (getGroupExprs checks it); it is always cleared again in the
// finally block.
private Resolved resolve() {
assert groupAnalyzer == null : "resolve already in progress";
groupAnalyzer = new SqlValidatorUtil.GroupAnalyzer();
try {
final ImmutableList.Builder<ImmutableList<ImmutableBitSet>> builder =
ImmutableList.builder();
if (select.getGroup() != null) {
final SqlNodeList groupList = select.getGroup();
for (SqlNode groupExpr : groupList) {
SqlValidatorUtil.analyzeGroupItem(this, groupAnalyzer, builder,
groupExpr);
}
}
// Each GROUP BY item may contribute several bit sets; the cartesian
// product across items, unioned per combination, yields the flat
// grouping sets.
final List<ImmutableBitSet> flatGroupSets = new ArrayList<>();
for (List<ImmutableBitSet> groupSet : Linq4j.product(builder.build())) {
flatGroupSets.add(ImmutableBitSet.union(groupSet));
}
// For GROUP BY (), we need a singleton grouping set.
if (flatGroupSets.isEmpty()) {
flatGroupSets.add(ImmutableBitSet.of());
}
return new Resolved(groupAnalyzer.extraExprs, groupAnalyzer.groupExprs,
flatGroupSets, groupAnalyzer.groupExprProjection);
} finally {
groupAnalyzer = null;
}
}
/**
* Returns the expressions that are in the GROUP BY clause (or the SELECT
* DISTINCT clause, if distinct) and that can therefore be referenced
* without being wrapped in aggregate functions.
*
* <p>The expressions are fully-qualified, and any "*" in select clauses are
* expanded.
*
* @return list of grouping expressions
*/
private Pair<ImmutableList<SqlNode>, ImmutableList<SqlNode>> getGroupExprs() {
if (distinct) {
// Cannot compute this in the constructor: select list has not been
// expanded yet.
assert select.isDistinct();
// Remove the AS operator so the expressions are consistent with
// OrderExpressionExpander.
ImmutableList.Builder<SqlNode> groupExprs = ImmutableList.builder();
final SelectScope selectScope = (SelectScope) parent;
for (SqlNode selectItem : selectScope.getExpandedSelectList()) {
groupExprs.add(stripAs(selectItem));
}
return Pair.of(ImmutableList.of(), groupExprs.build());
} else if (select.getGroup() != null) {
if (groupAnalyzer != null) {
// we are in the middle of resolving
return Pair.of(ImmutableList.of(),
ImmutableList.copyOf(groupAnalyzer.groupExprs));
} else {
final Resolved resolved = this.resolved.get();
return Pair.of(resolved.extraExprList, resolved.groupExprList);
}
} else {
// No GROUP BY and not DISTINCT: no grouping expressions.
return Pair.of(ImmutableList.of(), ImmutableList.of());
}
}
@Override public SqlNode getNode() {
return select;
}
// Makes the type nullable if the node matches a grouping expression that
// some grouping set omits (see Resolved.isNullable).
@Override public RelDataType nullifyType(SqlNode node, RelDataType type) {
final Resolved r = this.resolved.get();
for (Ord<SqlNode> groupExpr : Ord.zip(r.groupExprList)) {
if (groupExpr.e.equalsDeep(node, Litmus.IGNORE)) {
if (r.isNullable(groupExpr.i)) {
return validator.getTypeFactory().createTypeWithNullability(type,
true);
}
}
}
return type;
}
@Override public SqlValidatorScope getOperandScope(SqlCall call) {
if (call.getOperator().isAggregator()) {
// If we're the 'SUM' node in 'select a + sum(b + c) from t
// group by a', then we should validate our arguments in
// the non-aggregating scope, where 'b' and 'c' are valid
// column references.
return parent;
} else {
// Check whether expression is constant within the group.
//
// If not, throws. Example, 'empno' in
// SELECT empno FROM emp GROUP BY deptno
//
// If it perfectly matches an expression in the GROUP BY
// clause, we validate its arguments in the non-aggregating
// scope. Example, 'empno + 1' in
//
// SELECT empno + 1 FROM emp GROUP BY empno + 1
final boolean matches = checkAggregateExpr(call, false);
if (matches) {
return parent;
}
}
return super.getOperandScope(call);
}
@Override public boolean checkAggregateExpr(SqlNode expr, boolean deep) {
// Fully-qualify any identifiers in expr.
if (deep) {
expr = validator.expand(expr, this);
}
// Make sure expression is valid, throws if not.
Pair<ImmutableList<SqlNode>, ImmutableList<SqlNode>> pair = getGroupExprs();
final AggChecker aggChecker =
new AggChecker(validator, this, pair.left, pair.right, distinct);
if (deep) {
expr.accept(aggChecker);
}
// Return whether expression exactly matches one of the group
// expressions.
return aggChecker.isGroupExpr(expr);
}
@Override public void validateExpr(SqlNode expr) {
checkAggregateExpr(expr, true);
}
/** Information about an aggregating scope that can only be determined
* after validation has occurred. Therefore it cannot be populated when
* the scope is created. */
@SuppressWarnings("UnstableApiUsage")
public static class Resolved {
public final ImmutableList<SqlNode> extraExprList;
public final ImmutableList<SqlNode> groupExprList;
public final ImmutableBitSet groupSet;
public final ImmutableSortedMultiset<ImmutableBitSet> groupSets;
public final Map<Integer, Integer> groupExprProjection;
Resolved(List<SqlNode> extraExprList, List<SqlNode> groupExprList,
Iterable<ImmutableBitSet> groupSets,
Map<Integer, Integer> groupExprProjection) {
this.extraExprList = ImmutableList.copyOf(extraExprList);
this.groupExprList = ImmutableList.copyOf(groupExprList);
this.groupSet = ImmutableBitSet.range(groupExprList.size());
this.groupSets = ImmutableSortedMultiset.copyOf(groupSets);
this.groupExprProjection = ImmutableMap.copyOf(groupExprProjection);
}
/** Returns whether a field should be nullable due to grouping sets. */
public boolean isNullable(int i) {
return i < groupExprList.size() && !ImmutableBitSet.allContain(groupSets, i);
}
/** Returns whether a given expression is equal to one of the grouping
* expressions. Determines whether it is valid as an operand to GROUPING. */
public boolean isGroupingExpr(SqlNode operand) {
return lookupGroupingExpr(operand) >= 0;
}
/** Returns the index of the given expression in the group-by list,
* or -1 if it is not a grouping expression. */
public int lookupGroupingExpr(SqlNode operand) {
for (Ord<SqlNode> groupExpr : Ord.zip(groupExprList)) {
if (operand.equalsDeep(groupExpr.e, Litmus.IGNORE)) {
return groupExpr.i;
}
}
return -1;
}
}
}
| |
/*
* Generated by the Jasper component of Apache Tomcat
* Version: JspC/ApacheTomcat8
* Generated at: 2016-11-04 01:18:34 UTC
* Note: The last modified time of this file was set to
* the last modified time of the source file after
* generation to assist with modification tracking.
*/
package org.jivesoftware.openfire.admin;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import org.jivesoftware.openfire.ConnectionManager;
import org.jivesoftware.openfire.SessionManager;
import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.openfire.multiplex.ConnectionMultiplexerManager;
import org.jivesoftware.openfire.session.ConnectionMultiplexerSession;
import org.jivesoftware.util.ParamUtils;
import org.jivesoftware.util.StringUtils;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public final class connection_002dmanagers_002dsettings_jsp extends org.apache.jasper.runtime.HttpJspBase
implements org.apache.jasper.runtime.JspSourceDependent {
private static final javax.servlet.jsp.JspFactory _jspxFactory =
javax.servlet.jsp.JspFactory.getDefaultFactory();
private static java.util.Map<java.lang.String,java.lang.Long> _jspx_dependants;
private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody;
private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey;
private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody;
private javax.el.ExpressionFactory _el_expressionfactory;
private org.apache.tomcat.InstanceManager _jsp_instancemanager;
// NOTE: Jasper-generated servlet code -- do not edit by hand; regenerate
// from the source JSP instead.
public java.util.Map<java.lang.String,java.lang.Long> getDependants() {
return _jspx_dependants;
}
// Servlet lifecycle init: acquire the shared tag-handler pools and the EL
// expression factory once per servlet instance.
public void _jspInit() {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
_005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
_el_expressionfactory = _jspxFactory.getJspApplicationContext(getServletConfig().getServletContext()).getExpressionFactory();
_jsp_instancemanager = org.apache.jasper.runtime.InstanceManagerFactory.getInstanceManager(getServletConfig());
}
// Servlet lifecycle teardown: release the pooled tag handlers.
public void _jspDestroy() {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.release();
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey.release();
_005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.release();
}
public void _jspService(final javax.servlet.http.HttpServletRequest request, final javax.servlet.http.HttpServletResponse response)
throws java.io.IOException, javax.servlet.ServletException {
final javax.servlet.jsp.PageContext pageContext;
javax.servlet.http.HttpSession session = null;
final javax.servlet.ServletContext application;
final javax.servlet.ServletConfig config;
javax.servlet.jsp.JspWriter out = null;
final java.lang.Object page = this;
javax.servlet.jsp.JspWriter _jspx_out = null;
javax.servlet.jsp.PageContext _jspx_page_context = null;
try {
response.setContentType("text/html");
pageContext = _jspxFactory.getPageContext(this, request, response,
"error.jsp", true, 8192, true);
_jspx_page_context = pageContext;
application = pageContext.getServletContext();
config = pageContext.getServletConfig();
session = pageContext.getSession();
out = pageContext.getOut();
_jspx_out = out;
out.write("\n\n\n\n\n\n\n\n\n\n\n");
org.jivesoftware.util.WebManager webManager = null;
webManager = (org.jivesoftware.util.WebManager) _jspx_page_context.getAttribute("webManager", javax.servlet.jsp.PageContext.PAGE_SCOPE);
if (webManager == null){
webManager = new org.jivesoftware.util.WebManager();
_jspx_page_context.setAttribute("webManager", webManager, javax.servlet.jsp.PageContext.PAGE_SCOPE);
}
out.write('\n');
webManager.init(request, response, session, application, out);
out.write("\n\n<html>\n<head>\n <title>\n ");
if (_jspx_meth_fmt_005fmessage_005f0(_jspx_page_context))
return;
out.write("</title>\n <meta name=\"pageID\" content=\"connection-managers-settings\"/>\n </head>\n <body>\n\n");
// Get parameters
boolean update = request.getParameter("update") != null;
boolean managerEnabled = ParamUtils.getBooleanParameter(request,"managerEnabled");
int port = ParamUtils.getIntParameter(request,"port", 0);
String defaultSecret = ParamUtils.getParameter(request,"defaultSecret");
boolean updateSucess = false;
ConnectionManager connectionManager = XMPPServer.getInstance().getConnectionManager();
// Update the session kick policy if requested
Map<String, String> errors = new HashMap<String, String>();
if (update) {
// Validate params
if (managerEnabled) {
if (defaultSecret == null || defaultSecret.trim().length() == 0) {
errors.put("defaultSecret","");
}
if (port <= 0) {
errors.put("port","");
}
}
// If no errors, continue:
if (errors.isEmpty()) {
if (!managerEnabled) {
connectionManager.enableConnectionManagerListener(false);
// Log the event
webManager.logEvent("disabled connection manager settings", null);
}
else {
connectionManager.enableConnectionManagerListener(true);
connectionManager.setConnectionManagerListenerPort(port);
// Get hash value of existing default secret
String existingHashDefaultSecret = "";
if (ConnectionMultiplexerManager.getDefaultSecret() != null) {
existingHashDefaultSecret = StringUtils.hash(ConnectionMultiplexerManager.getDefaultSecret());
}
// Check if the new default secret was changed. If it wasn't changed, then it is the original hashed
// default secret
// NOTE: if the new PLAIN default secret equals the previous HASHED default secret this fails,
// but is unlikely.
if (!existingHashDefaultSecret.equals(defaultSecret)) {
// Hash the new default secret since it was changed
String newHashDefaultSecret = "";
if (defaultSecret != null) {
newHashDefaultSecret = StringUtils.hash(defaultSecret);
}
// Change default secret if hash values are different
if (!existingHashDefaultSecret.equals(newHashDefaultSecret)) {
ConnectionMultiplexerManager.setDefaultSecret(defaultSecret);
}
}
// Log the event
webManager.logEvent("enabled connection manager settings", "port = "+port);
}
updateSucess = true;
}
}
// Set page vars
if (errors.size() == 0) {
managerEnabled = connectionManager.isConnectionManagerListenerEnabled();
port = connectionManager.getConnectionManagerListenerPort();
defaultSecret = ConnectionMultiplexerManager.getDefaultSecret();
}
else {
if (port == 0) {
port = connectionManager.getConnectionManagerListenerPort();
}
if (defaultSecret == null) {
defaultSecret = ConnectionMultiplexerManager.getDefaultSecret();
}
}
out.write("\n\n<p>\n");
if (_jspx_meth_fmt_005fmessage_005f1(_jspx_page_context))
return;
out.write("\n</p>\n\n");
if (!errors.isEmpty()) {
out.write("\n\n <div class=\"jive-error\">\n <table cellpadding=\"0\" cellspacing=\"0\" border=\"0\">\n <tbody>\n <tr>\n <td class=\"jive-icon\"><img src=\"images/error-16x16.gif\" width=\"16\" height=\"16\" border=\"0\" alt=\"\"/></td>\n <td class=\"jive-icon-label\">\n\n ");
if (errors.get("port") != null) {
out.write("\n ");
if (_jspx_meth_fmt_005fmessage_005f2(_jspx_page_context))
return;
out.write("\n ");
} else if (errors.get("defaultSecret") != null) {
out.write("\n ");
if (_jspx_meth_fmt_005fmessage_005f3(_jspx_page_context))
return;
out.write("\n ");
}
out.write("\n </td>\n </tr>\n </tbody>\n </table>\n </div>\n <br>\n\n");
} else if (updateSucess) {
out.write("\n\n <div class=\"jive-success\">\n <table cellpadding=\"0\" cellspacing=\"0\" border=\"0\">\n <tbody>\n <tr><td class=\"jive-icon\"><img src=\"images/success-16x16.gif\" width=\"16\" height=\"16\" border=\"0\" alt=\"\"></td>\n <td class=\"jive-icon-label\">\n ");
if (_jspx_meth_fmt_005fmessage_005f4(_jspx_page_context))
return;
out.write("\n </td></tr>\n </tbody>\n </table>\n </div><br>\n\n");
}
out.write("\n\n<form action=\"connection-managers-settings.jsp\" method=\"post\">\n\n<fieldset>\n <div>\n <table cellpadding=\"3\" cellspacing=\"0\" border=\"0\" width=\"100%\">\n <tbody>\n <tr valign=\"middle\">\n <td width=\"1%\" nowrap>\n <input type=\"radio\" name=\"managerEnabled\" value=\"false\" id=\"rb01\"\n ");
out.print( (!managerEnabled ? "checked" : "") );
out.write(">\n </td>\n <td width=\"99%\">\n <label for=\"rb01\">\n <b>");
if (_jspx_meth_fmt_005fmessage_005f5(_jspx_page_context))
return;
out.write("</b> - ");
if (_jspx_meth_fmt_005fmessage_005f6(_jspx_page_context))
return;
out.write("\n </label>\n </td>\n </tr>\n <tr valign=\"middle\">\n <td width=\"1%\" nowrap>\n <input type=\"radio\" name=\"managerEnabled\" value=\"true\" id=\"rb02\"\n ");
out.print( (managerEnabled ? "checked" : "") );
out.write(">\n </td>\n <td width=\"99%\">\n <label for=\"rb02\">\n <b>");
if (_jspx_meth_fmt_005fmessage_005f7(_jspx_page_context))
return;
out.write("</b> - ");
if (_jspx_meth_fmt_005fmessage_005f8(_jspx_page_context))
return;
out.write("\n </label>\n </td>\n </tr>\n <tr valign=\"top\">\n <td width=\"1%\" nowrap>\n \n </td>\n <td width=\"99%\">\n <table cellpadding=\"3\" cellspacing=\"0\" border=\"0\">\n <tr valign=\"top\">\n <td width=\"1%\" align=\"right\" nowrap class=\"c1\">\n ");
if (_jspx_meth_fmt_005fmessage_005f9(_jspx_page_context))
return;
out.write("\n </td>\n <td width=\"99%\">\n <input type=\"text\" size=\"10\" maxlength=\"50\" name=\"port\"\n value=\"");
out.print( port );
out.write("\">\n </td>\n </tr>\n <tr valign=\"top\">\n <td width=\"1%\" nowrap align=\"right\" class=\"c1\">\n ");
if (_jspx_meth_fmt_005fmessage_005f10(_jspx_page_context))
return;
out.write("\n </td>\n <td width=\"99%\">\n <input type=\"password\" size=\"30\" maxlength=\"150\" name=\"defaultSecret\"\n value=\"");
out.print( ((defaultSecret != null) ? StringUtils.hash(defaultSecret) : "") );
out.write("\">\n </td>\n </tr>\n </table>\n </td>\n </tr>\n </tbody>\n </table>\n </div>\n</fieldset>\n<br>\n\n<input type=\"submit\" name=\"update\" value=\"");
if (_jspx_meth_fmt_005fmessage_005f11(_jspx_page_context))
return;
out.write("\">\n\n</form>\n\n");
if (managerEnabled) {
out.write("\n\n<br>\n\n<style type=\"text/css\">\n.connectionManagers {\n\tmargin-top: 8px;\n\tborder: 1px solid #DCDCDC;\n\tborder-bottom: none;\n\t}\n.connectionManagers tr.head {\n\tbackground-color: #F3F7FA;\n\tborder-bottom: 1px solid red;\n\t}\n.connectionManagers tr.head td {\n\tpadding: 3px 6px 3px 6px;\n\tborder-bottom: 1px solid #DCDCDC;\n\t}\n.connectionManagers tr td {\n\tpadding: 3px;\n\tborder-bottom: 1px solid #DCDCDC;\n\t}\n.connectionManagers tr td img {\n\tmargin: 3px;\n\t}\n</style>\n<b>");
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f12 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f12.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f12.setParent(null);
// /connection-managers-settings.jsp(260,3) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f12.setKey("connection-manager.details.title");
int _jspx_eval_fmt_005fmessage_005f12 = _jspx_th_fmt_005fmessage_005f12.doStartTag();
if (_jspx_eval_fmt_005fmessage_005f12 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
if (_jspx_eval_fmt_005fmessage_005f12 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
out = _jspx_page_context.pushBody();
_jspx_th_fmt_005fmessage_005f12.setBodyContent((javax.servlet.jsp.tagext.BodyContent) out);
_jspx_th_fmt_005fmessage_005f12.doInitBody();
}
do {
out.write("\n ");
// fmt:param
org.apache.taglibs.standard.tag.rt.fmt.ParamTag _jspx_th_fmt_005fparam_005f2 = (org.apache.taglibs.standard.tag.rt.fmt.ParamTag) _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.ParamTag.class);
_jspx_th_fmt_005fparam_005f2.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fparam_005f2.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_fmt_005fmessage_005f12);
// /connection-managers-settings.jsp(261,8) name = value type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fparam_005f2.setValue( XMPPServer.getInstance().getServerInfo().getXMPPDomain() );
int _jspx_eval_fmt_005fparam_005f2 = _jspx_th_fmt_005fparam_005f2.doStartTag();
if (_jspx_th_fmt_005fparam_005f2.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.reuse(_jspx_th_fmt_005fparam_005f2);
return;
}
_005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.reuse(_jspx_th_fmt_005fparam_005f2);
out.write("\n ");
int evalDoAfterBody = _jspx_th_fmt_005fmessage_005f12.doAfterBody();
if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
break;
} while (true);
if (_jspx_eval_fmt_005fmessage_005f12 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
out = _jspx_page_context.popBody();
}
}
if (_jspx_th_fmt_005fmessage_005f12.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey.reuse(_jspx_th_fmt_005fmessage_005f12);
return;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey.reuse(_jspx_th_fmt_005fmessage_005f12);
out.write("\n</b>\n<br>\n<table cellpadding=\"0\" cellspacing=\"0\" border=\"0\" width=\"100%\" class=\"connectionManagers\">\n <tr class=\"head\">\n <td><strong>");
if (_jspx_meth_fmt_005fmessage_005f13(_jspx_page_context))
return;
out.write("</strong></td>\n <td><strong>");
if (_jspx_meth_fmt_005fmessage_005f14(_jspx_page_context))
return;
out.write("</strong></td>\n <td align=\"center\" width=\"15%\"><strong>");
if (_jspx_meth_fmt_005fmessage_005f15(_jspx_page_context))
return;
out.write("</strong></td>\n </tr>\n<tbody>\n");
ConnectionMultiplexerManager multiplexerManager = ConnectionMultiplexerManager.getInstance();
SessionManager sessionManager = SessionManager.getInstance();
Collection<String> connectionManagers = multiplexerManager.getMultiplexers();
if (connectionManagers.isEmpty()) {
out.write("\n <tr>\n <td width=\"100%\" colspan=\"3\" align=\"center\" nowrap>");
if (_jspx_meth_fmt_005fmessage_005f16(_jspx_page_context))
return;
out.write("</td>\n </tr>\n");
} else {
for (String managerName : connectionManagers) {
List<ConnectionMultiplexerSession> sessions = sessionManager.getConnectionMultiplexerSessions(managerName);
if (sessions.isEmpty()) {
continue;
}
String hostAddress = sessions.get(0).getHostAddress();
String hostName = sessions.get(0).getHostName();
out.write("\n<tr>\n <td><img src=\"images/connection-manager_16x16.gif\" width=\"16\" height=\"16\" border=\"0\" alt=\"\" align=\"absmiddle\">");
out.print( managerName);
out.write("</td>\n <td>");
out.print( hostAddress );
out.write(' ');
out.write('/');
out.write(' ');
out.print( hostName );
out.write("</td>\n <td align=\"center\">");
out.print( multiplexerManager.getNumConnectedClients(managerName));
out.write("</td>\n</tr>\n");
}
}
out.write("\n</tbody>\n</table>\n\n\n");
}
out.write("\n\n</body>\n</html>\n");
} catch (java.lang.Throwable t) {
if (!(t instanceof javax.servlet.jsp.SkipPageException)){
out = _jspx_out;
if (out != null && out.getBufferSize() != 0)
try {
if (response.isCommitted()) {
out.flush();
} else {
out.clearBuffer();
}
} catch (java.io.IOException e) {}
if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
else throw new ServletException(t);
}
} finally {
_jspxFactory.releasePageContext(_jspx_page_context);
}
}
private boolean _jspx_meth_fmt_005fmessage_005f0(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:message key="connection-manager.settings.title"/> — bodyless tag; writes the
    // localized page title via the tag handler's own output.
    // Handlers are pooled: acquire, configure, run the lifecycle, release exactly once.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f0 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_005fmessage_005f0.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fmessage_005f0.setParent(null);
    _jspx_th_fmt_005fmessage_005f0.setKey("connection-manager.settings.title");
    _jspx_th_fmt_005fmessage_005f0.doStartTag();
    // true == caller must abort page rendering (SKIP_PAGE). Release the handler on
    // both paths via a single reuse() call instead of duplicating it per branch.
    boolean skipPage = _jspx_th_fmt_005fmessage_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f0);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f1(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
    // <fmt:message key="connection-manager.settings.info"> with two nested <fmt:param>
    // children supplying the opening/closing anchor tags for the info paragraph.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f1 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_005fmessage_005f1.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fmessage_005f1.setParent(null);
    _jspx_th_fmt_005fmessage_005f1.setKey("connection-manager.settings.info");
    int _jspx_eval_fmt_005fmessage_005f1 = _jspx_th_fmt_005fmessage_005f1.doStartTag();
    if (_jspx_eval_fmt_005fmessage_005f1 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
        // BodyTag protocol: buffer the body content unless EVAL_BODY_INCLUDE was requested.
        if (_jspx_eval_fmt_005fmessage_005f1 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
            out = _jspx_page_context.pushBody();
            _jspx_th_fmt_005fmessage_005f1.setBodyContent((javax.servlet.jsp.tagext.BodyContent) out);
            _jspx_th_fmt_005fmessage_005f1.doInitBody();
        }
        do {
            out.write("\n    ");
            // NOTE(review): if a nested param tag signals SKIP_PAGE we return without
            // reuse()/popBody() — pre-existing generated behavior, preserved as-is.
            if (_jspx_meth_fmt_005fparam_005f0(_jspx_th_fmt_005fmessage_005f1, _jspx_page_context))
                return true;
            out.write("\n    ");
            if (_jspx_meth_fmt_005fparam_005f1(_jspx_th_fmt_005fmessage_005f1, _jspx_page_context))
                return true;
            out.write('\n');
            int evalDoAfterBody = _jspx_th_fmt_005fmessage_005f1.doAfterBody();
            if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
                break;
        } while (true);
        if (_jspx_eval_fmt_005fmessage_005f1 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
            out = _jspx_page_context.popBody();
        }
    }
    // Release the pooled handler on both exit paths; true aborts page rendering.
    if (_jspx_th_fmt_005fmessage_005f1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
        _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey.reuse(_jspx_th_fmt_005fmessage_005f1);
        return true;
    }
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey.reuse(_jspx_th_fmt_005fmessage_005f1);
    return false;
}
private boolean _jspx_meth_fmt_005fparam_005f0(javax.servlet.jsp.tagext.JspTag _jspx_th_fmt_005fmessage_005f1, javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:param> — supplies the opening anchor markup as a message argument to the
    // enclosing <fmt:message> (passed in as the parent tag).
    org.apache.taglibs.standard.tag.rt.fmt.ParamTag _jspx_th_fmt_005fparam_005f0 = (org.apache.taglibs.standard.tag.rt.fmt.ParamTag) _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.ParamTag.class);
    _jspx_th_fmt_005fparam_005f0.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fparam_005f0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_fmt_005fmessage_005f1);
    _jspx_th_fmt_005fparam_005f0.setValue("<a href='connection-manager-session-summary.jsp'>");
    _jspx_th_fmt_005fparam_005f0.doStartTag();
    // true == caller must abort page rendering; release the pooled handler exactly once.
    boolean skipPage = _jspx_th_fmt_005fparam_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.reuse(_jspx_th_fmt_005fparam_005f0);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fparam_005f1(javax.servlet.jsp.tagext.JspTag _jspx_th_fmt_005fmessage_005f1, javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:param> — supplies the closing anchor markup as a message argument to the
    // enclosing <fmt:message> (passed in as the parent tag).
    org.apache.taglibs.standard.tag.rt.fmt.ParamTag _jspx_th_fmt_005fparam_005f1 = (org.apache.taglibs.standard.tag.rt.fmt.ParamTag) _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.ParamTag.class);
    _jspx_th_fmt_005fparam_005f1.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fparam_005f1.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_fmt_005fmessage_005f1);
    _jspx_th_fmt_005fparam_005f1.setValue("</a>");
    _jspx_th_fmt_005fparam_005f1.doStartTag();
    // true == caller must abort page rendering; release the pooled handler exactly once.
    boolean skipPage = _jspx_th_fmt_005fparam_005f1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.reuse(_jspx_th_fmt_005fparam_005f1);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f2(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:message key="connection-manager.settings.valid.port"/> — bodyless tag;
    // writes the localized "invalid port" validation message.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f2 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_005fmessage_005f2.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fmessage_005f2.setParent(null);
    _jspx_th_fmt_005fmessage_005f2.setKey("connection-manager.settings.valid.port");
    _jspx_th_fmt_005fmessage_005f2.doStartTag();
    // true == caller must abort page rendering; release the pooled handler exactly once.
    boolean skipPage = _jspx_th_fmt_005fmessage_005f2.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f2);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f3(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:message key="connection-manager.settings.valid.defaultSecret"/> — bodyless
    // tag; writes the localized "invalid default secret" validation message.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f3 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_005fmessage_005f3.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fmessage_005f3.setParent(null);
    _jspx_th_fmt_005fmessage_005f3.setKey("connection-manager.settings.valid.defaultSecret");
    _jspx_th_fmt_005fmessage_005f3.doStartTag();
    // true == caller must abort page rendering; release the pooled handler exactly once.
    boolean skipPage = _jspx_th_fmt_005fmessage_005f3.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f3);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f4(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:message key="connection-manager.settings.confirm.updated"/> — bodyless tag;
    // writes the localized "settings updated" confirmation text.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f4 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_005fmessage_005f4.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fmessage_005f4.setParent(null);
    _jspx_th_fmt_005fmessage_005f4.setKey("connection-manager.settings.confirm.updated");
    _jspx_th_fmt_005fmessage_005f4.doStartTag();
    // true == caller must abort page rendering; release the pooled handler exactly once.
    boolean skipPage = _jspx_th_fmt_005fmessage_005f4.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f4);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f5(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:message key="connection-manager.settings.label_disable"/> — bodyless tag;
    // writes the label for the "disabled" radio option.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f5 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_005fmessage_005f5.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fmessage_005f5.setParent(null);
    _jspx_th_fmt_005fmessage_005f5.setKey("connection-manager.settings.label_disable");
    _jspx_th_fmt_005fmessage_005f5.doStartTag();
    // true == caller must abort page rendering; release the pooled handler exactly once.
    boolean skipPage = _jspx_th_fmt_005fmessage_005f5.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f5);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f6(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:message key="connection-manager.settings.label_disable_info"/> — bodyless
    // tag; writes the descriptive text for the "disabled" radio option.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f6 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_005fmessage_005f6.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fmessage_005f6.setParent(null);
    _jspx_th_fmt_005fmessage_005f6.setKey("connection-manager.settings.label_disable_info");
    _jspx_th_fmt_005fmessage_005f6.doStartTag();
    // true == caller must abort page rendering; release the pooled handler exactly once.
    boolean skipPage = _jspx_th_fmt_005fmessage_005f6.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f6);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f7(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:message key="connection-manager.settings.label_enable"/> — bodyless tag;
    // writes the label for the "enabled" radio option.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f7 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_005fmessage_005f7.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fmessage_005f7.setParent(null);
    _jspx_th_fmt_005fmessage_005f7.setKey("connection-manager.settings.label_enable");
    _jspx_th_fmt_005fmessage_005f7.doStartTag();
    // true == caller must abort page rendering; release the pooled handler exactly once.
    boolean skipPage = _jspx_th_fmt_005fmessage_005f7.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f7);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f8(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:message key="connection-manager.settings.label_enable_info"/> — bodyless
    // tag; writes the descriptive text for the "enabled" radio option.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f8 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_005fmessage_005f8.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fmessage_005f8.setParent(null);
    _jspx_th_fmt_005fmessage_005f8.setKey("connection-manager.settings.label_enable_info");
    _jspx_th_fmt_005fmessage_005f8.doStartTag();
    // true == caller must abort page rendering; release the pooled handler exactly once.
    boolean skipPage = _jspx_th_fmt_005fmessage_005f8.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f8);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f9(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:message key="connection-manager.settings.port"/> — bodyless tag;
    // writes the label for the port input field.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f9 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_005fmessage_005f9.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fmessage_005f9.setParent(null);
    _jspx_th_fmt_005fmessage_005f9.setKey("connection-manager.settings.port");
    _jspx_th_fmt_005fmessage_005f9.doStartTag();
    // true == caller must abort page rendering; release the pooled handler exactly once.
    boolean skipPage = _jspx_th_fmt_005fmessage_005f9.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f9);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f10(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:message key="connection-manager.settings.defaultSecret"/> — bodyless tag;
    // writes the label for the default-secret input field.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f10 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_005fmessage_005f10.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fmessage_005f10.setParent(null);
    _jspx_th_fmt_005fmessage_005f10.setKey("connection-manager.settings.defaultSecret");
    _jspx_th_fmt_005fmessage_005f10.doStartTag();
    // true == caller must abort page rendering; release the pooled handler exactly once.
    boolean skipPage = _jspx_th_fmt_005fmessage_005f10.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f10);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f11(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:message key="global.save_settings"/> — bodyless tag;
    // writes the localized caption for the submit button.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f11 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_005fmessage_005f11.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fmessage_005f11.setParent(null);
    _jspx_th_fmt_005fmessage_005f11.setKey("global.save_settings");
    _jspx_th_fmt_005fmessage_005f11.doStartTag();
    // true == caller must abort page rendering; release the pooled handler exactly once.
    boolean skipPage = _jspx_th_fmt_005fmessage_005f11.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f11);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f13(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:message key="connection-manager.details.name"/> — bodyless tag;
    // writes the "name" column header of the connection-managers table.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f13 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_005fmessage_005f13.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fmessage_005f13.setParent(null);
    _jspx_th_fmt_005fmessage_005f13.setKey("connection-manager.details.name");
    _jspx_th_fmt_005fmessage_005f13.doStartTag();
    // true == caller must abort page rendering; release the pooled handler exactly once.
    boolean skipPage = _jspx_th_fmt_005fmessage_005f13.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f13);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f14(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // <fmt:message key="connection-manager.details.address"/> — bodyless tag;
    // writes the "address" column header of the connection-managers table.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f14 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    _jspx_th_fmt_005fmessage_005f14.setPageContext(_jspx_page_context);
    _jspx_th_fmt_005fmessage_005f14.setParent(null);
    _jspx_th_fmt_005fmessage_005f14.setKey("connection-manager.details.address");
    _jspx_th_fmt_005fmessage_005f14.doStartTag();
    // true == caller must abort page rendering; release the pooled handler exactly once.
    boolean skipPage = _jspx_th_fmt_005fmessage_005f14.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f14);
    return skipPage;
}
/**
 * Renders the body-less {@code <fmt:message key="connection-manager.details.sessions"/>}
 * tag from /connection-managers-settings.jsp line 269.
 *
 * <p>Jasper-generated tag handler: obtains a pooled {@code MessageTag},
 * configures it, runs the start/end lifecycle, and returns it to the pool.
 * The unused {@code pageContext}/{@code out} locals and the discarded
 * {@code doStartTag()} result emitted by Jasper have been removed.
 *
 * @param _jspx_page_context the current JSP page context
 * @return {@code true} if the tag requested SKIP_PAGE and page processing
 *         should stop, {@code false} otherwise
 * @throws java.lang.Throwable any error raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f15(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // fmt:message
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f15 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f15.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f15.setParent(null);
  // /connection-managers-settings.jsp(269,47) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
  _jspx_th_fmt_005fmessage_005f15.setKey("connection-manager.details.sessions");
  // The tag has no body, so doStartTag() is invoked only for its side effects.
  _jspx_th_fmt_005fmessage_005f15.doStartTag();
  if (_jspx_th_fmt_005fmessage_005f15.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f15);
    return true;
  }
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f15);
  return false;
}
/**
 * Renders the body-less
 * {@code <fmt:message key="connection-manager.details.no-managers-connected"/>}
 * tag from /connection-managers-settings.jsp line 279.
 *
 * <p>Jasper-generated tag handler: obtains a pooled {@code MessageTag},
 * configures it, runs the start/end lifecycle, and returns it to the pool.
 * The unused {@code pageContext}/{@code out} locals and the discarded
 * {@code doStartTag()} result emitted by Jasper have been removed.
 *
 * @param _jspx_page_context the current JSP page context
 * @return {@code true} if the tag requested SKIP_PAGE and page processing
 *         should stop, {@code false} otherwise
 * @throws java.lang.Throwable any error raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f16(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // fmt:message
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f16 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f16.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f16.setParent(null);
  // /connection-managers-settings.jsp(279,59) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
  _jspx_th_fmt_005fmessage_005f16.setKey("connection-manager.details.no-managers-connected");
  // The tag has no body, so doStartTag() is invoked only for its side effects.
  _jspx_th_fmt_005fmessage_005f16.doStartTag();
  if (_jspx_th_fmt_005fmessage_005f16.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f16);
    return true;
  }
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f16);
  return false;
}
}
| |
package demo.binea.com.pullrefreshstickyheaderlistview.widget.se.emilsjolander.stickylistheaders.pulltorefresh;
import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.LinearInterpolator;
import android.view.animation.RotateAnimation;
import android.widget.AbsListView;
import android.widget.AbsListView.OnScrollListener;
import android.widget.ImageView;
import android.widget.ListAdapter;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import demo.binea.com.pullrefreshstickyheaderlistview.R;
/**
 * A {@link ListView} that implements the classic "pull to refresh" gesture.
 *
 * <p>A hidden header row is added above the list content. Dragging past the
 * top of the list reveals it; pulling far enough and releasing (or simply
 * tapping the header) starts a refresh and notifies the registered
 * {@link OnRefreshListener}. Call {@link #onRefreshComplete()} when the
 * refresh has finished to restore the normal list state.
 *
 * <p>The header moves through four states: {@link #TAP_TO_REFRESH},
 * {@link #PULL_TO_REFRESH}, {@link #RELEASE_TO_REFRESH} and
 * {@link #REFRESHING}.
 */
public class PullToRefreshListView extends ListView implements OnScrollListener {

    /** Header is idle; tapping it starts a refresh. */
    protected static final int TAP_TO_REFRESH = 1;
    /** User is dragging, but not yet far enough to trigger a refresh. */
    protected static final int PULL_TO_REFRESH = 2;
    /** User has dragged far enough; releasing will trigger a refresh. */
    protected static final int RELEASE_TO_REFRESH = 3;
    /** A refresh is currently in progress. */
    protected static final int REFRESHING = 4;

    protected static final String TAG = "PullToRefreshListView";

    /** Callback invoked when the list should be refreshed. */
    private OnRefreshListener mOnRefreshListener;

    /**
     * Listener that will receive notifications every time the list scrolls.
     */
    private OnScrollListener mOnScrollListener;

    protected LayoutInflater mInflater;

    // Views that make up the pull-to-refresh header.
    private RelativeLayout mRefreshView;
    private TextView mRefreshViewText;
    private ImageView mRefreshViewImage;
    private ProgressBar mRefreshViewProgress;
    private TextView mRefreshViewLastUpdated;

    protected int mCurrentScrollState;
    protected int mRefreshState;

    // Arrow animations: flip when crossing the release threshold, and back.
    private RotateAnimation mFlipAnimation;
    private RotateAnimation mReverseFlipAnimation;

    private int mRefreshViewHeight;
    private int mRefreshOriginalTopPadding;
    private int mLastMotionY;

    // Works around a fling "bounce" that can leave the header visible.
    private boolean mBounceHack;

    public PullToRefreshListView(Context context) {
        super(context);
        init(context);
    }

    public PullToRefreshListView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    public PullToRefreshListView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(context);
    }

    /** Builds the header view and its animations, and wires up scroll handling. */
    private void init(Context context) {
        // Load all of the animations we need in code rather than through XML
        mFlipAnimation = new RotateAnimation(0, -180, RotateAnimation.RELATIVE_TO_SELF, 0.5f,
                RotateAnimation.RELATIVE_TO_SELF, 0.5f);
        mFlipAnimation.setInterpolator(new LinearInterpolator());
        mFlipAnimation.setDuration(250);
        mFlipAnimation.setFillAfter(true);
        mReverseFlipAnimation = new RotateAnimation(-180, 0, RotateAnimation.RELATIVE_TO_SELF, 0.5f,
                RotateAnimation.RELATIVE_TO_SELF, 0.5f);
        mReverseFlipAnimation.setInterpolator(new LinearInterpolator());
        mReverseFlipAnimation.setDuration(250);
        mReverseFlipAnimation.setFillAfter(true);

        mInflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        mRefreshView =
                (RelativeLayout) mInflater.inflate(R.layout.pull_to_refresh_header, this, false);
        mRefreshViewText = (TextView) mRefreshView.findViewById(R.id.pull_to_refresh_text);
        mRefreshViewImage = (ImageView) mRefreshView.findViewById(R.id.pull_to_refresh_image);
        mRefreshViewProgress =
                (ProgressBar) mRefreshView.findViewById(R.id.pull_to_refresh_progress);
        mRefreshViewLastUpdated =
                (TextView) mRefreshView.findViewById(R.id.pull_to_refresh_updated_at);

        mRefreshViewImage.setMinimumHeight(50);
        mRefreshView.setOnClickListener(new OnClickRefreshListener());
        mRefreshOriginalTopPadding = mRefreshView.getPaddingTop();

        mRefreshState = TAP_TO_REFRESH;

        addHeaderView(mRefreshView);

        // This view is its own scroll listener; client listeners are forwarded
        // to from onScroll()/onScrollStateChanged().
        super.setOnScrollListener(this);

        measureView(mRefreshView);
        mRefreshViewHeight = mRefreshView.getMeasuredHeight();
    }

    @Override protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        // Position 0 is the refresh header; start with it scrolled out of view.
        setSelection(1);
    }

    @Override public void setAdapter(ListAdapter adapter) {
        super.setAdapter(adapter);
        // Hide the refresh header whenever a new adapter is installed.
        setSelection(1);
    }

    /**
     * Set the listener that will receive notifications every time the list
     * scrolls.
     *
     * @param l The scroll listener.
     */
    @Override public void setOnScrollListener(OnScrollListener l) {
        mOnScrollListener = l;
    }

    /**
     * Register a callback to be invoked when this list should be refreshed.
     *
     * @param onRefreshListener The callback to run.
     */
    public void setOnRefreshListener(OnRefreshListener onRefreshListener) {
        mOnRefreshListener = onRefreshListener;
    }

    /**
     * Set a text to represent when the list was last updated.
     *
     * @param lastUpdated Last updated at; {@code null} hides the label.
     */
    public void setLastUpdated(CharSequence lastUpdated) {
        if (lastUpdated != null) {
            mRefreshViewLastUpdated.setVisibility(View.VISIBLE);
            mRefreshViewLastUpdated.setText(lastUpdated);
        } else {
            mRefreshViewLastUpdated.setVisibility(View.GONE);
        }
    }

    @Override public boolean onTouchEvent(MotionEvent event) {
        final int y = (int) event.getY();
        mBounceHack = false;

        switch (event.getAction()) {
            case MotionEvent.ACTION_UP:
                // Restore the scroll bar that applyHeaderPadding() disabled.
                if (!isVerticalScrollBarEnabled()) {
                    setVerticalScrollBarEnabled(true);
                }
                if (getFirstVisiblePosition() == 0 && mRefreshState != REFRESHING) {
                    if ((mRefreshView.getBottom() >= mRefreshViewHeight
                            || mRefreshView.getTop() >= 0) && mRefreshState == RELEASE_TO_REFRESH) {
                        // Initiate the refresh
                        mRefreshState = REFRESHING;
                        prepareForRefresh();
                        onRefresh();
                    } else if (mRefreshView.getBottom() < mRefreshViewHeight
                            || mRefreshView.getTop() <= 0) {
                        // Abort refresh and scroll down below the refresh view
                        resetHeader();
                        setSelection(1);
                    }
                }
                break;
            case MotionEvent.ACTION_DOWN:
                mLastMotionY = y;
                break;
            case MotionEvent.ACTION_MOVE:
                applyHeaderPadding(event);
                break;
        }
        return super.onTouchEvent(event);
    }

    /**
     * Adjusts the header's top padding while the user drags, to simulate an
     * elastic pull. Walks the event's batched historical points so fast drags
     * still feel smooth.
     */
    private void applyHeaderPadding(MotionEvent ev) {
        // Original code named this "pointerCount", but it holds the number of
        // batched historical samples, not pointers.
        int historySize = ev.getHistorySize();

        for (int p = 0; p < historySize; p++) {
            if (mRefreshState == RELEASE_TO_REFRESH) {
                // BUG FIX: previously this guard tested isVerticalFadingEdgeEnabled(),
                // which mismatched the re-enable check in onTouchEvent()'s
                // ACTION_UP branch (isVerticalScrollBarEnabled()).
                if (isVerticalScrollBarEnabled()) {
                    setVerticalScrollBarEnabled(false);
                }
                int historicalY = (int) ev.getHistoricalY(p);

                // Calculate the padding to apply, we divide by 1.7 to
                // simulate a more resistant effect during pull.
                int topPadding = (int) (((historicalY - mLastMotionY) - mRefreshViewHeight) / 1.7);

                mRefreshView.setPadding(mRefreshView.getPaddingLeft(), topPadding,
                        mRefreshView.getPaddingRight(), mRefreshView.getPaddingBottom());
            }
        }
    }

    /**
     * Sets the header padding back to original size.
     */
    private void resetHeaderPadding() {
        mRefreshView.setPadding(mRefreshView.getPaddingLeft(), mRefreshOriginalTopPadding,
                mRefreshView.getPaddingRight(), mRefreshView.getPaddingBottom());
    }

    /**
     * Resets the header to the original (TAP_TO_REFRESH) state.
     */
    private void resetHeader() {
        if (mRefreshState != TAP_TO_REFRESH) {
            mRefreshState = TAP_TO_REFRESH;

            resetHeaderPadding();

            // Set refresh view text to the pull label
            mRefreshViewText.setText(R.string.pull_to_refresh_tap_label);
            // Replace refresh drawable with arrow drawable
            mRefreshViewImage.setImageResource(R.mipmap.ic_launcher);
            // Clear the full rotation animation
            mRefreshViewImage.clearAnimation();
            // Hide progress bar and arrow.
            mRefreshViewImage.setVisibility(View.GONE);
            mRefreshViewProgress.setVisibility(View.GONE);
        }
    }

    /**
     * Measures the given child so its height is known before layout; used to
     * determine the header height in init().
     */
    private void measureView(View child) {
        ViewGroup.LayoutParams p = child.getLayoutParams();
        if (p == null) {
            // MATCH_PARENT replaces the deprecated FILL_PARENT (same value).
            p = new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
                    ViewGroup.LayoutParams.WRAP_CONTENT);
        }

        int childWidthSpec = ViewGroup.getChildMeasureSpec(0, 0, p.width);
        int lpHeight = p.height;
        int childHeightSpec;
        if (lpHeight > 0) {
            childHeightSpec = MeasureSpec.makeMeasureSpec(lpHeight, MeasureSpec.EXACTLY);
        } else {
            childHeightSpec = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
        }
        child.measure(childWidthSpec, childHeightSpec);
    }

    @Override public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount,
            int totalItemCount) {
        // When the refresh view is completely visible, change the text to say
        // "Release to refresh..." and flip the arrow drawable.
        if (mCurrentScrollState == SCROLL_STATE_TOUCH_SCROLL && mRefreshState != REFRESHING) {
            if (firstVisibleItem == 0) {
                mRefreshViewImage.setVisibility(View.VISIBLE);
                if ((mRefreshView.getBottom() >= mRefreshViewHeight + 20
                        || mRefreshView.getTop() >= 0) && mRefreshState != RELEASE_TO_REFRESH) {
                    mRefreshViewText.setText(R.string.pull_to_refresh_release_label);
                    mRefreshViewImage.clearAnimation();
                    mRefreshViewImage.startAnimation(mFlipAnimation);
                    mRefreshState = RELEASE_TO_REFRESH;
                } else if (mRefreshView.getBottom() < mRefreshViewHeight + 20
                        && mRefreshState != PULL_TO_REFRESH) {
                    mRefreshViewText.setText(R.string.pull_to_refresh_pull_label);
                    if (mRefreshState != TAP_TO_REFRESH) {
                        mRefreshViewImage.clearAnimation();
                        mRefreshViewImage.startAnimation(mReverseFlipAnimation);
                    }
                    mRefreshState = PULL_TO_REFRESH;
                }
            } else {
                mRefreshViewImage.setVisibility(View.GONE);
                resetHeader();
            }
        } else if (mCurrentScrollState == SCROLL_STATE_FLING
                && firstVisibleItem == 0
                && mRefreshState != REFRESHING) {
            // A fling landed on the header; snap past it.
            setSelection(1);
            mBounceHack = true;
        } else if (mBounceHack && mCurrentScrollState == SCROLL_STATE_FLING) {
            setSelection(1);
        }

        if (mOnScrollListener != null) {
            mOnScrollListener.onScroll(view, firstVisibleItem, visibleItemCount, totalItemCount);
        }
    }

    @Override public void onScrollStateChanged(AbsListView view, int scrollState) {
        mCurrentScrollState = scrollState;

        if (mCurrentScrollState == SCROLL_STATE_IDLE) {
            mBounceHack = false;
        }

        if (mOnScrollListener != null) {
            mOnScrollListener.onScrollStateChanged(view, scrollState);
        }
    }

    /** Puts the header into the REFRESHING visual state (spinner, label). */
    public void prepareForRefresh() {
        resetHeaderPadding();

        mRefreshViewImage.setVisibility(View.GONE);
        // We need this hack, otherwise it will keep the previous drawable.
        mRefreshViewImage.setImageDrawable(null);
        mRefreshViewProgress.setVisibility(View.VISIBLE);

        // Set refresh view text to the refreshing label
        mRefreshViewText.setText(R.string.pull_to_refresh_refreshing_label);

        mRefreshState = REFRESHING;
    }

    /** Notifies the registered {@link OnRefreshListener}, if any. */
    public void onRefresh() {
        Log.d(TAG, "onRefresh");

        if (mOnRefreshListener != null) {
            mOnRefreshListener.onRefresh();
        }
    }

    /**
     * Resets the list to a normal state after a refresh.
     *
     * @param lastUpdated Last updated at.
     */
    public void onRefreshComplete(CharSequence lastUpdated) {
        setLastUpdated(lastUpdated);
        onRefreshComplete();
    }

    /**
     * Resets the list to a normal state after a refresh.
     */
    public void onRefreshComplete() {
        Log.d(TAG, "onRefreshComplete");

        resetHeader();

        // If refresh view is visible when loading completes, scroll down to
        // the next item.
        if (mRefreshView.getBottom() > 0) {
            invalidateViews();
            setSelection(1);
        }
    }

    /**
     * Invoked when the refresh view is clicked on. This is mainly used when
     * there's only a few items in the list and it's not possible to drag the
     * list.
     */
    private class OnClickRefreshListener implements OnClickListener {

        @Override public void onClick(View v) {
            if (mRefreshState != REFRESHING) {
                prepareForRefresh();
                onRefresh();
            }
        }
    }

    /**
     * Interface definition for a callback to be invoked when list should be
     * refreshed.
     */
    public interface OnRefreshListener {
        /**
         * Called when the list should be refreshed.
         * <p>
         * A call to {@link PullToRefreshListView #onRefreshComplete()} is
         * expected to indicate that the refresh has completed.
         */
        public void onRefresh();
    }
}
| |
package com.bumptech.glide.annotation.compiler;
import com.bumptech.glide.annotation.GlideExtension;
import com.bumptech.glide.annotation.GlideOption;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.squareup.javapoet.AnnotationSpec;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.CodeBlock;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.ParameterSpec;
import com.squareup.javapoet.ParameterizedTypeName;
import com.squareup.javapoet.TypeName;
import com.squareup.javapoet.TypeSpec;
import com.squareup.javapoet.TypeVariableName;
import com.squareup.javapoet.WildcardTypeName;
import java.io.File;
import java.util.Collections;
import java.util.List;
import javax.annotation.Nullable;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeMirror;
/**
* Generates a {@link com.bumptech.glide.RequestBuilder} subclass containing all methods from
* the base class, all methods from {@link com.bumptech.glide.request.RequestOptions} and all
* non-override {@link GlideOption} annotated methods in {@link GlideExtension} annotated
* classes.
*
* <p>Generated code looks like this:
* <pre>
* <code>
* public final class GlideRequest<TranscodeType> extends RequestBuilder<TranscodeType> {
* GlideRequest(Class<TranscodeType> transcodeClass, RequestBuilder<?> other) {
* super(transcodeClass, other);
* }
*
* GlideRequest(GlideContext context, RequestManager requestManager,
* Class<TranscodeType> transcodeClass) {
* super(context, requestManager ,transcodeClass);
* }
*
* {@literal @Override}
* protected GlideRequest<File> getDownloadOnlyRequest() {
* return new GlideRequest<>(File.class, this).apply(DOWNLOAD_ONLY_OPTIONS);
* }
*
* /**
* * {@literal @see} GlideOptions#dontAnimate()
* *\/
* public GlideRequest<TranscodeType> dontAnimate() {
* if (getMutableOptions() instanceof GlideOptions) {
* this.requestOptions = ((GlideOptions) getMutableOptions()).dontAnimate();
* } else {
* this.requestOptions = new GlideOptions().apply(this.requestOptions).dontAnimate();
* }
* return this;
* }
*
* /**
* * {@literal @see} RequestOptions#sizeMultiplier(float)
* *\/
* public GlideRequest<TranscodeType> sizeMultiplier(float sizeMultiplier) {
* this.requestOptions = getMutableOptions().sizeMultiplier(sizeMultiplier);
* return this;
* }
*
* ...
* }
* </code>
* </pre>
*/
final class RequestBuilderGenerator {
  // Names of the framework RequestOptions/RequestBuilder classes, referenced
  // by string because this processor runs without the Android classpath.
  private static final String REQUEST_OPTIONS_PACKAGE_NAME = "com.bumptech.glide.request";
  private static final String REQUEST_OPTIONS_SIMPLE_NAME = "RequestOptions";
  private static final String REQUEST_OPTIONS_QUALIFIED_NAME =
      REQUEST_OPTIONS_PACKAGE_NAME + "." + REQUEST_OPTIONS_SIMPLE_NAME;
  private static final String REQUEST_BUILDER_PACKAGE_NAME = "com.bumptech.glide";
  private static final String REQUEST_BUILDER_SIMPLE_NAME = "RequestBuilder";
  static final String REQUEST_BUILDER_QUALIFIED_NAME =
      REQUEST_BUILDER_PACKAGE_NAME + "." + REQUEST_BUILDER_SIMPLE_NAME;
  // Uses package private methods and variables.
  private static final String GENERATED_REQUEST_BUILDER_SIMPLE_NAME = "GlideRequest";
  /**
   * An arbitrary name of the Generic type in the generated RequestBuilder.
   * e.g. RequestBuilder<TranscodeType>
   */
  private static final String TRANSCODE_TYPE_NAME = "TranscodeType";
  /** A set of method names to avoid overriding from RequestOptions. */
  private static final ImmutableSet<String> EXCLUDED_METHODS_FROM_BASE_REQUEST_OPTIONS =
      ImmutableSet.of("clone", "apply", "autoLock", "lock", "autoClone");
  private final ProcessingEnvironment processingEnv;
  private final ProcessorUtil processorUtil;
  // Assigned in generate(); null until then.
  private ClassName generatedRequestBuilderClassName;
  private final TypeVariableName transcodeTypeName;
  // GlideRequest<TranscodeType>; assigned in generate().
  private ParameterizedTypeName generatedRequestBuilderOfTranscodeType;
  private final TypeElement requestOptionsType;
  private final TypeElement requestBuilderType;
  // The options class generated methods delegate to: the generated options
  // class when one exists, otherwise plain RequestOptions. Set in generate().
  private ClassName requestOptionsClassName;
  RequestBuilderGenerator(ProcessingEnvironment processingEnv, ProcessorUtil processorUtil) {
    this.processingEnv = processingEnv;
    this.processorUtil = processorUtil;
    requestBuilderType = processingEnv.getElementUtils()
        .getTypeElement(REQUEST_BUILDER_QUALIFIED_NAME);
    transcodeTypeName = TypeVariableName.get(TRANSCODE_TYPE_NAME);
    requestOptionsType = processingEnv.getElementUtils().getTypeElement(
        REQUEST_OPTIONS_QUALIFIED_NAME);
  }
  /**
   * Builds the generated GlideRequest class.
   *
   * @param generatedCodePackageName package the generated class will live in.
   * @param generatedOptions the generated options class, or null when no
   *     GlideExtension produced one (then plain RequestOptions is used).
   */
  TypeSpec generate(String generatedCodePackageName, @Nullable TypeSpec generatedOptions) {
    generatedRequestBuilderClassName =
        ClassName.get(generatedCodePackageName, GENERATED_REQUEST_BUILDER_SIMPLE_NAME);
    generatedRequestBuilderOfTranscodeType =
        ParameterizedTypeName.get(generatedRequestBuilderClassName, transcodeTypeName);
    if (generatedOptions != null) {
      requestOptionsClassName =
          ClassName.get(generatedCodePackageName, generatedOptions.name);
    } else {
      // NOTE(review): this mixes a constant from RequestOptionsGenerator with
      // one from this class — presumably both resolve to
      // com.bumptech.glide.request.RequestOptions; confirm the two
      // package-name constants agree.
      requestOptionsClassName =
          ClassName.get(
              RequestOptionsGenerator.REQUEST_OPTIONS_PACKAGE_NAME,
              RequestBuilderGenerator.REQUEST_OPTIONS_SIMPLE_NAME);
    }
    ParameterizedTypeName requestBuilderOfTranscodeType =
        ParameterizedTypeName.get(
            ClassName.get(REQUEST_BUILDER_PACKAGE_NAME, REQUEST_BUILDER_SIMPLE_NAME),
            transcodeTypeName);
    return TypeSpec.classBuilder(GENERATED_REQUEST_BUILDER_SIMPLE_NAME)
        .addJavadoc("Contains all public methods from {@link $T}, all options from\n",
            requestBuilderType)
        .addJavadoc("{@link $T} and all generated options from\n", requestOptionsType)
        .addJavadoc("{@link $T} in annotated methods in\n", GlideOption.class)
        .addJavadoc("{@link $T} annotated classes.\n", GlideExtension.class)
        .addJavadoc("\n")
        .addJavadoc("<p>Generated code, do not modify.\n")
        .addJavadoc("\n")
        .addJavadoc("@see $T\n", requestBuilderType)
        .addJavadoc("@see $T\n", requestOptionsType)
        // JavaPoet folds repeated members with the same name into an array:
        // @SuppressWarnings({"unused", "deprecation"}).
        .addAnnotation(
            AnnotationSpec.builder(SuppressWarnings.class)
                .addMember("value", "$S", "unused")
                .addMember("value", "$S", "deprecation")
                .build())
        .addModifiers(Modifier.PUBLIC, Modifier.FINAL)
        .addTypeVariable(transcodeTypeName)
        .superclass(requestBuilderOfTranscodeType)
        .addMethods(generateConstructors())
        .addMethod(generateDownloadOnlyRequestMethod())
        .addMethods(generateGeneratedRequestOptionsEquivalents(generatedOptions))
        .addMethods(generateRequestBuilderOverrides())
        .build();
  }
  /**
   * Generates overrides of all methods in {@link com.bumptech.glide.RequestBuilder} that return
   * {@link com.bumptech.glide.RequestBuilder} so that they return our generated subclass instead.
   */
  private List<MethodSpec> generateRequestBuilderOverrides() {
    // Erasure so the "returns RequestBuilder" check ignores type arguments.
    TypeMirror rawRequestBuilderType =
        processingEnv.getTypeUtils().erasure(requestBuilderType.asType());
    return Lists.transform(
        processorUtil.findInstanceMethodsReturning(requestBuilderType, rawRequestBuilderType),
        new Function<ExecutableElement, MethodSpec>() {
          @Override
          public MethodSpec apply(ExecutableElement input) {
            return generateRequestBuilderOverride(input);
          }
        });
  }
  /**
   * Generates an override of a particular method in {@link com.bumptech.glide.RequestBuilder} that
   * returns {@link com.bumptech.glide.RequestBuilder} so that it returns our generated subclass
   * instead.
   */
  private MethodSpec generateRequestBuilderOverride(ExecutableElement methodToOverride) {
    // We've already verified that this method returns a RequestBuilder and RequestBuilders have
    // exactly one type argument, so this is safe unless those assumptions change.
    TypeMirror typeArgument =
        ((DeclaredType) methodToOverride.getReturnType()).getTypeArguments().get(0);
    ParameterizedTypeName generatedRequestBuilderOfType =
        ParameterizedTypeName.get(generatedRequestBuilderClassName, ClassName.get(typeArgument));
    // Emits: return (GlideRequest<X>) super.<name>(<args...>);
    return MethodSpec.overriding(methodToOverride)
        .returns(generatedRequestBuilderOfType)
        .addCode(CodeBlock.builder()
            .add("return ($T) super.$N(",
                generatedRequestBuilderOfType, methodToOverride.getSimpleName())
            .add(FluentIterable.from(methodToOverride.getParameters())
                .transform(new Function<VariableElement, String>() {
                  @Override
                  public String apply(VariableElement input) {
                    return input.getSimpleName().toString();
                  }
                })
                .join(Joiner.on(", ")))
            .add(");\n")
            .build())
        .build();
  }
  /**
   * Generates methods with equivalent names and arguments to methods annotated with
   * {@link GlideOption} in
   * {@link com.bumptech.glide.annotation.GlideExtension}s that return our generated
   * {@link com.bumptech.glide.RequestBuilder} subclass.
   */
  private List<MethodSpec> generateGeneratedRequestOptionsEquivalents(
      @Nullable final TypeSpec generatedOptions) {
    if (generatedOptions == null) {
      return Collections.emptyList();
    }
    return FluentIterable
        .from(generatedOptions.methodSpecs)
        .filter(new Predicate<MethodSpec>() {
          @Override
          public boolean apply(MethodSpec input) {
            return isUsefulGeneratedRequestOption(input);
          }
        })
        .transform(new Function<MethodSpec, MethodSpec>() {
          @Override
          public MethodSpec apply(MethodSpec input) {
            return generateGeneratedRequestOptionEquivalent(input);
          }
        })
        .toList();
  }
  /**
   * Returns {@code true} if the given {@link MethodSpec} is a useful method to have in our
   * {@link com.bumptech.glide.RequestBuilder} subclass.
   *
   * <p>Only newly generated methods will be included in the generated
   * {@link com.bumptech.glide.request.BaseRequestBuilder} subclass, so we only have to filter out
   * methods that override other methods to avoid duplicates.
   * (NOTE(review): "BaseRequestBuilder" here presumably means the generated
   * options subclass — confirm against the RequestOptionsGenerator output.)
   */
  private boolean isUsefulGeneratedRequestOption(MethodSpec requestOptionMethod) {
    // Keep: public instance methods that return the options type and are not
    // lifecycle/lock helpers explicitly excluded above.
    return
        !EXCLUDED_METHODS_FROM_BASE_REQUEST_OPTIONS.contains(requestOptionMethod.name)
        && requestOptionMethod.hasModifier(Modifier.PUBLIC)
        && !requestOptionMethod.hasModifier(Modifier.STATIC)
        && requestOptionMethod.returnType.toString()
            .equals(requestOptionsClassName.toString());
  }
  /**
   * Generates a particular method with an equivalent name and arguments to the given method
   * from the generated {@link com.bumptech.glide.request.BaseRequestBuilder} subclass.
   */
  private MethodSpec generateGeneratedRequestOptionEquivalent(MethodSpec requestOptionMethod) {
    // Emits: .<name>(<args...>);  — appended after the options receiver below.
    CodeBlock callRequestOptionsMethod = CodeBlock.builder()
        .add(".$N(", requestOptionMethod.name)
        .add(FluentIterable.from(requestOptionMethod.parameters)
            .transform(new Function<ParameterSpec, String>() {
              @Override
              public String apply(ParameterSpec input) {
                return input.name;
              }
            })
            .join(Joiner.on(", ")))
        .add(");\n")
        .build();
    // Two branches in the generated body: reuse the mutable options when they
    // are already of the generated type, otherwise wrap them first.
    return MethodSpec.methodBuilder(requestOptionMethod.name)
        .addJavadoc(
            processorUtil.generateSeeMethodJavadoc(requestOptionsClassName, requestOptionMethod))
        .addModifiers(Modifier.PUBLIC)
        .addTypeVariables(requestOptionMethod.typeVariables)
        .addParameters(requestOptionMethod.parameters)
        .returns(generatedRequestBuilderOfTranscodeType)
        .beginControlFlow(
            "if (getMutableOptions() instanceof $T)", requestOptionsClassName)
        .addCode("this.requestOptions = (($T) getMutableOptions())",
            requestOptionsClassName)
        .addCode(callRequestOptionsMethod)
        .nextControlFlow("else")
        .addCode(CodeBlock.of("this.requestOptions = new $T().apply(this.requestOptions)",
            requestOptionsClassName))
        .addCode(callRequestOptionsMethod)
        .endControlFlow()
        .addStatement("return this")
        .build();
  }
  /**
   * Generates the two package-private constructors of the generated class,
   * mirroring RequestBuilder's constructors.
   */
  private List<MethodSpec> generateConstructors() {
    ParameterizedTypeName classOfTranscodeType =
        ParameterizedTypeName.get(ClassName.get(Class.class), transcodeTypeName);
    TypeName wildcardOfObject = WildcardTypeName.subtypeOf(Object.class);
    ParameterizedTypeName requestBuilderOfWildcardOfObject =
        ParameterizedTypeName.get(ClassName.get(requestBuilderType), wildcardOfObject);
    MethodSpec firstConstructor =
        MethodSpec.constructorBuilder()
            .addParameter(classOfTranscodeType, "transcodeClass")
            .addParameter(requestBuilderOfWildcardOfObject, "other")
            .addStatement("super($N, $N)", "transcodeClass", "other")
            .build();
    ClassName glide = ClassName.get("com.bumptech.glide", "Glide");
    ClassName requestManager = ClassName.get("com.bumptech.glide", "RequestManager");
    MethodSpec secondConstructor =
        MethodSpec.constructorBuilder()
            .addParameter(glide, "glide")
            .addParameter(requestManager, "requestManager")
            .addParameter(classOfTranscodeType, "transcodeClass")
            .addStatement("super($N, $N ,$N)", "glide", "requestManager", "transcodeClass")
            .build();
    return ImmutableList.of(firstConstructor, secondConstructor);
  }
  /**
   * Overrides the protected downloadOnly method in {@link com.bumptech.glide.RequestBuilder} to
   * return our generated subclass instead.
   */
  private MethodSpec generateDownloadOnlyRequestMethod() {
    ParameterizedTypeName generatedRequestBuilderOfFile
        = ParameterizedTypeName.get(generatedRequestBuilderClassName, ClassName.get(File.class));
    // Emits: return new GlideRequest<>(File.class, this).apply(DOWNLOAD_ONLY_OPTIONS);
    return MethodSpec.methodBuilder("getDownloadOnlyRequest")
        .addAnnotation(Override.class)
        .returns(generatedRequestBuilderOfFile)
        .addModifiers(Modifier.PROTECTED)
        .addStatement("return new $T<>($T.class, $N).apply($N)",
            generatedRequestBuilderClassName, File.class, "this",
            "DOWNLOAD_ONLY_OPTIONS")
        .build();
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.rabbitmq.springboot;
import java.util.Map;
import javax.annotation.Generated;
import javax.net.ssl.TrustManager;
import com.rabbitmq.client.ConnectionFactory;
import org.apache.camel.spring.boot.ComponentConfigurationPropertiesCommon;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.NestedConfigurationProperty;
/**
* The rabbitmq component allows you to produce and consume messages from RabbitMQ
* instances.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo")
@ConfigurationProperties(prefix = "camel.component.rabbitmq")
public class RabbitMQComponentConfiguration
extends
ComponentConfigurationPropertiesCommon {
/**
* The hostname of the running rabbitmq instance or cluster.
*/
private String hostname;
/**
* Port number for the host with the running rabbitmq instance or cluster.
*/
private Integer portNumber = 5672;
/**
* Username in case of authenticated access
*/
private String username = "guest";
/**
* Password for authenticated access
*/
private String password = "guest";
/**
* The vhost for the channel
*/
private String vhost = "/";
/**
* If this option is set, camel-rabbitmq will try to create connection based
* on the setting of option addresses. The addresses value is a string which
* looks like server1:12345, server2:12345
*/
private String addresses;
/**
* To use a custom RabbitMQ connection factory. When this option is set, all
* connection options (connectionTimeout, requestedChannelMax...) set on URI
* are not used
*/
@NestedConfigurationProperty
private ConnectionFactory connectionFactory;
/**
* The consumer uses a Thread Pool Executor with a fixed number of threads.
* This setting allows you to set that number of threads.
*/
private Integer threadPoolSize = 10;
/**
* Whether to auto-detect looking up RabbitMQ connection factory from the
* registry. When enabled and a single instance of the connection factory is
* found then it will be used. An explicit connection factory can be
* configured on the component or endpoint level which takes precedence.
*/
private Boolean autoDetectConnectionFactory = true;
/**
* Connection timeout
*/
private Integer connectionTimeout = 60000;
/**
* Connection requested channel max (max number of channels offered)
*/
private Integer requestedChannelMax = 0;
/**
* Connection requested frame max (max size of frame offered)
*/
private Integer requestedFrameMax = 0;
/**
* Connection requested heartbeat (heart-beat in seconds offered)
*/
private Integer requestedHeartbeat = 60;
/**
* Enables connection automatic recovery (uses connection implementation
* that performs automatic recovery when connection shutdown is not
* initiated by the application)
*/
private Boolean automaticRecoveryEnabled;
/**
* Network recovery interval in milliseconds (interval used when recovering
* from network failure)
*/
private Integer networkRecoveryInterval = 5000;
/**
* Enables connection topology recovery (should topology recovery be
* performed)
*/
private Boolean topologyRecoveryEnabled;
/**
* Enables the quality of service on the RabbitMQConsumer side. You need to
* specify the option of prefetchSize, prefetchCount, prefetchGlobal at the
* same time
*/
private Boolean prefetchEnabled = false;
/**
* The maximum amount of content (measured in octets) that the server will
* deliver, 0 if unlimited. You need to specify the option of prefetchSize,
* prefetchCount, prefetchGlobal at the same time
*/
private Integer prefetchSize;
/**
* The maximum number of messages that the server will deliver, 0 if
* unlimited. You need to specify the option of prefetchSize, prefetchCount,
* prefetchGlobal at the same time
*/
private Integer prefetchCount;
/**
     * If the settings should be applied to the entire channel rather than each
     * consumer. You need to specify the options prefetchSize, prefetchCount and
     * prefetchGlobal at the same time.
*/
private Boolean prefetchGlobal = false;
/**
* Get maximum number of opened channel in pool
*/
private Integer channelPoolMaxSize = 10;
/**
* Set the maximum number of milliseconds to wait for a channel from the
* pool
*/
private Long channelPoolMaxWait = 1000L;
/**
* Set timeout for waiting for a reply when using the InOut Exchange Pattern
* (in milliseconds)
*/
private Long requestTimeout = 20000L;
/**
* Set requestTimeoutCheckerInterval for inOut exchange
*/
private Long requestTimeoutCheckerInterval = 1000L;
/**
* When true and an inOut Exchange failed on the consumer side send the
* caused Exception back in the response
*/
private Boolean transferException = false;
/**
* When true, the message will be published with publisher acknowledgements
* turned on
*/
private Boolean publisherAcknowledgements = false;
/**
* The amount of time in milliseconds to wait for a basic.ack response from
* RabbitMQ server
*/
private Long publisherAcknowledgementsTimeout;
/**
* When true, an exception will be thrown when the message cannot be
* delivered (basic.return) and the message is marked as mandatory.
* PublisherAcknowledgement will also be activated in this case. See also
* publisher acknowledgements - When will messages be confirmed.
*/
private Boolean guaranteedDeliveries = false;
/**
* This flag tells the server how to react if the message cannot be routed
* to a queue. If this flag is set, the server will return an unroutable
* message with a Return method. If this flag is zero, the server silently
* drops the message. If the header is present rabbitmq.MANDATORY it will
* override this option.
*/
private Boolean mandatory = false;
/**
* This flag tells the server how to react if the message cannot be routed
* to a queue consumer immediately. If this flag is set, the server will
* return an undeliverable message with a Return method. If this flag is
* zero, the server will queue the message, but with no guarantee that it
* will ever be consumed. If the header is present rabbitmq.IMMEDIATE it
* will override this option.
*/
private Boolean immediate = false;
/**
* Specify arguments for configuring the different RabbitMQ concepts, a
* different prefix is required for each: Exchange: arg.exchange. Queue:
* arg.queue. Binding: arg.binding. For example to declare a queue with
* message ttl argument:
* http://localhost:5672/exchange/queueargs=arg.queue.x-message-ttl=60000
*/
private Map<String, Object> args;
/**
* Connection client properties (client info used in negotiating with the
* server)
*/
private Map<String, Object> clientProperties;
    /**
     * Enables SSL on the connection; accepted values are true, TLS and SSLv3.
     */
private String sslProtocol;
/**
* Configure SSL trust manager, SSL should be enabled for this option to be
* effective
*/
private TrustManager trustManager;
/**
* If messages should be auto acknowledged
*/
private Boolean autoAck = true;
/**
* If it is true, the exchange will be deleted when it is no longer in use
*/
private Boolean autoDelete = true;
/**
* If we are declaring a durable exchange (the exchange will survive a
* server restart)
*/
private Boolean durable = true;
/**
* Exclusive queues may only be accessed by the current connection, and are
* deleted when that connection closes.
*/
private Boolean exclusive = false;
/**
* Passive queues depend on the queue already to be available at RabbitMQ.
*/
private Boolean passive = false;
/**
* If true the producer will not declare and bind a queue. This can be used
* for directing messages via an existing routing key.
*/
private Boolean skipQueueDeclare = false;
/**
* If true the queue will not be bound to the exchange after declaring it
*/
private Boolean skipQueueBind = false;
/**
* This can be used if we need to declare the queue but not the exchange
*/
private Boolean skipExchangeDeclare = false;
/**
* If the option is true, camel declare the exchange and queue name and bind
* them together. If the option is false, camel won't declare the exchange
* and queue name on the server.
*/
private Boolean declare = true;
/**
* The name of the dead letter exchange
*/
private String deadLetterExchange;
/**
* The name of the dead letter queue
*/
private String deadLetterQueue;
/**
* The routing key for the dead letter exchange
*/
private String deadLetterRoutingKey;
/**
* The type of the dead letter exchange
*/
private String deadLetterExchangeType = "direct";
/**
* Whether the component should resolve property placeholders on itself when
* starting. Only properties which are of String type can use property
* placeholders.
*/
private Boolean resolvePropertyPlaceholders = true;
    // ------------------------------------------------------------------
    // Plain getters and setters. Spring Boot binds the
    // camel.component.rabbitmq.* configuration properties declared above
    // through these accessors; they contain no logic of their own.
    // ------------------------------------------------------------------
    public String getHostname() {
        return hostname;
    }
    public void setHostname(String hostname) {
        this.hostname = hostname;
    }
    public Integer getPortNumber() {
        return portNumber;
    }
    public void setPortNumber(Integer portNumber) {
        this.portNumber = portNumber;
    }
    public String getUsername() {
        return username;
    }
    public void setUsername(String username) {
        this.username = username;
    }
    public String getPassword() {
        return password;
    }
    public void setPassword(String password) {
        this.password = password;
    }
    public String getVhost() {
        return vhost;
    }
    public void setVhost(String vhost) {
        this.vhost = vhost;
    }
    public String getAddresses() {
        return addresses;
    }
    public void setAddresses(String addresses) {
        this.addresses = addresses;
    }
    public ConnectionFactory getConnectionFactory() {
        return connectionFactory;
    }
    public void setConnectionFactory(ConnectionFactory connectionFactory) {
        this.connectionFactory = connectionFactory;
    }
    public Integer getThreadPoolSize() {
        return threadPoolSize;
    }
    public void setThreadPoolSize(Integer threadPoolSize) {
        this.threadPoolSize = threadPoolSize;
    }
    public Boolean getAutoDetectConnectionFactory() {
        return autoDetectConnectionFactory;
    }
    public void setAutoDetectConnectionFactory(
            Boolean autoDetectConnectionFactory) {
        this.autoDetectConnectionFactory = autoDetectConnectionFactory;
    }
    public Integer getConnectionTimeout() {
        return connectionTimeout;
    }
    public void setConnectionTimeout(Integer connectionTimeout) {
        this.connectionTimeout = connectionTimeout;
    }
    public Integer getRequestedChannelMax() {
        return requestedChannelMax;
    }
    public void setRequestedChannelMax(Integer requestedChannelMax) {
        this.requestedChannelMax = requestedChannelMax;
    }
    public Integer getRequestedFrameMax() {
        return requestedFrameMax;
    }
    public void setRequestedFrameMax(Integer requestedFrameMax) {
        this.requestedFrameMax = requestedFrameMax;
    }
    public Integer getRequestedHeartbeat() {
        return requestedHeartbeat;
    }
    public void setRequestedHeartbeat(Integer requestedHeartbeat) {
        this.requestedHeartbeat = requestedHeartbeat;
    }
    public Boolean getAutomaticRecoveryEnabled() {
        return automaticRecoveryEnabled;
    }
    public void setAutomaticRecoveryEnabled(Boolean automaticRecoveryEnabled) {
        this.automaticRecoveryEnabled = automaticRecoveryEnabled;
    }
    public Integer getNetworkRecoveryInterval() {
        return networkRecoveryInterval;
    }
    public void setNetworkRecoveryInterval(Integer networkRecoveryInterval) {
        this.networkRecoveryInterval = networkRecoveryInterval;
    }
    public Boolean getTopologyRecoveryEnabled() {
        return topologyRecoveryEnabled;
    }
    public void setTopologyRecoveryEnabled(Boolean topologyRecoveryEnabled) {
        this.topologyRecoveryEnabled = topologyRecoveryEnabled;
    }
    public Boolean getPrefetchEnabled() {
        return prefetchEnabled;
    }
    public void setPrefetchEnabled(Boolean prefetchEnabled) {
        this.prefetchEnabled = prefetchEnabled;
    }
    public Integer getPrefetchSize() {
        return prefetchSize;
    }
    public void setPrefetchSize(Integer prefetchSize) {
        this.prefetchSize = prefetchSize;
    }
    public Integer getPrefetchCount() {
        return prefetchCount;
    }
    public void setPrefetchCount(Integer prefetchCount) {
        this.prefetchCount = prefetchCount;
    }
    public Boolean getPrefetchGlobal() {
        return prefetchGlobal;
    }
    public void setPrefetchGlobal(Boolean prefetchGlobal) {
        this.prefetchGlobal = prefetchGlobal;
    }
    public Integer getChannelPoolMaxSize() {
        return channelPoolMaxSize;
    }
    public void setChannelPoolMaxSize(Integer channelPoolMaxSize) {
        this.channelPoolMaxSize = channelPoolMaxSize;
    }
    public Long getChannelPoolMaxWait() {
        return channelPoolMaxWait;
    }
    public void setChannelPoolMaxWait(Long channelPoolMaxWait) {
        this.channelPoolMaxWait = channelPoolMaxWait;
    }
    public Long getRequestTimeout() {
        return requestTimeout;
    }
    public void setRequestTimeout(Long requestTimeout) {
        this.requestTimeout = requestTimeout;
    }
    public Long getRequestTimeoutCheckerInterval() {
        return requestTimeoutCheckerInterval;
    }
    public void setRequestTimeoutCheckerInterval(
            Long requestTimeoutCheckerInterval) {
        this.requestTimeoutCheckerInterval = requestTimeoutCheckerInterval;
    }
    public Boolean getTransferException() {
        return transferException;
    }
    public void setTransferException(Boolean transferException) {
        this.transferException = transferException;
    }
    public Boolean getPublisherAcknowledgements() {
        return publisherAcknowledgements;
    }
    public void setPublisherAcknowledgements(Boolean publisherAcknowledgements) {
        this.publisherAcknowledgements = publisherAcknowledgements;
    }
    public Long getPublisherAcknowledgementsTimeout() {
        return publisherAcknowledgementsTimeout;
    }
    public void setPublisherAcknowledgementsTimeout(
            Long publisherAcknowledgementsTimeout) {
        this.publisherAcknowledgementsTimeout = publisherAcknowledgementsTimeout;
    }
    public Boolean getGuaranteedDeliveries() {
        return guaranteedDeliveries;
    }
    public void setGuaranteedDeliveries(Boolean guaranteedDeliveries) {
        this.guaranteedDeliveries = guaranteedDeliveries;
    }
    public Boolean getMandatory() {
        return mandatory;
    }
    public void setMandatory(Boolean mandatory) {
        this.mandatory = mandatory;
    }
    public Boolean getImmediate() {
        return immediate;
    }
    public void setImmediate(Boolean immediate) {
        this.immediate = immediate;
    }
    public Map<String, Object> getArgs() {
        return args;
    }
    public void setArgs(Map<String, Object> args) {
        this.args = args;
    }
    public Map<String, Object> getClientProperties() {
        return clientProperties;
    }
    public void setClientProperties(Map<String, Object> clientProperties) {
        this.clientProperties = clientProperties;
    }
    public String getSslProtocol() {
        return sslProtocol;
    }
    public void setSslProtocol(String sslProtocol) {
        this.sslProtocol = sslProtocol;
    }
    public TrustManager getTrustManager() {
        return trustManager;
    }
    public void setTrustManager(TrustManager trustManager) {
        this.trustManager = trustManager;
    }
    public Boolean getAutoAck() {
        return autoAck;
    }
    public void setAutoAck(Boolean autoAck) {
        this.autoAck = autoAck;
    }
    public Boolean getAutoDelete() {
        return autoDelete;
    }
    public void setAutoDelete(Boolean autoDelete) {
        this.autoDelete = autoDelete;
    }
    public Boolean getDurable() {
        return durable;
    }
    public void setDurable(Boolean durable) {
        this.durable = durable;
    }
    public Boolean getExclusive() {
        return exclusive;
    }
    public void setExclusive(Boolean exclusive) {
        this.exclusive = exclusive;
    }
    public Boolean getPassive() {
        return passive;
    }
    public void setPassive(Boolean passive) {
        this.passive = passive;
    }
    public Boolean getSkipQueueDeclare() {
        return skipQueueDeclare;
    }
    public void setSkipQueueDeclare(Boolean skipQueueDeclare) {
        this.skipQueueDeclare = skipQueueDeclare;
    }
    public Boolean getSkipQueueBind() {
        return skipQueueBind;
    }
    public void setSkipQueueBind(Boolean skipQueueBind) {
        this.skipQueueBind = skipQueueBind;
    }
    public Boolean getSkipExchangeDeclare() {
        return skipExchangeDeclare;
    }
    public void setSkipExchangeDeclare(Boolean skipExchangeDeclare) {
        this.skipExchangeDeclare = skipExchangeDeclare;
    }
    public Boolean getDeclare() {
        return declare;
    }
    public void setDeclare(Boolean declare) {
        this.declare = declare;
    }
    public String getDeadLetterExchange() {
        return deadLetterExchange;
    }
    public void setDeadLetterExchange(String deadLetterExchange) {
        this.deadLetterExchange = deadLetterExchange;
    }
    public String getDeadLetterQueue() {
        return deadLetterQueue;
    }
    public void setDeadLetterQueue(String deadLetterQueue) {
        this.deadLetterQueue = deadLetterQueue;
    }
    public String getDeadLetterRoutingKey() {
        return deadLetterRoutingKey;
    }
    public void setDeadLetterRoutingKey(String deadLetterRoutingKey) {
        this.deadLetterRoutingKey = deadLetterRoutingKey;
    }
    public String getDeadLetterExchangeType() {
        return deadLetterExchangeType;
    }
    public void setDeadLetterExchangeType(String deadLetterExchangeType) {
        this.deadLetterExchangeType = deadLetterExchangeType;
    }
    public Boolean getResolvePropertyPlaceholders() {
        return resolvePropertyPlaceholders;
    }
    public void setResolvePropertyPlaceholders(
            Boolean resolvePropertyPlaceholders) {
        this.resolvePropertyPlaceholders = resolvePropertyPlaceholders;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.writer;
import static org.apache.drill.exec.store.parquet.ParquetRecordWriter.DRILL_VERSION_PROPERTY;
import static org.apache.drill.test.TestBuilder.convertToLocalDateTime;
import static org.apache.parquet.format.converter.ParquetMetadataConverter.NO_FILTER;
import static org.apache.parquet.format.converter.ParquetMetadataConverter.SKIP_ROW_GROUPS;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.BINARY;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT32;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64;
import static org.junit.Assert.assertEquals;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.file.Paths;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.calcite.util.Pair;
import org.apache.drill.categories.ParquetTest;
import org.apache.drill.categories.SlowTest;
import org.apache.drill.categories.UnlikelyTest;
import org.apache.drill.common.util.DrillVersionInfo;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.fn.interp.TestConstantFolding;
import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.exec.util.JsonStringArrayList;
import org.apache.drill.test.BaseTestQuery;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.hadoop.ParquetFileReader;
import org.apache.parquet.hadoop.metadata.ParquetMetadata;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType;
import org.joda.time.Period;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
@RunWith(Parameterized.class)
@Category({SlowTest.class, ParquetTest.class})
public class TestParquetWriter extends BaseTestQuery {
@Parameterized.Parameters
public static Collection<Object[]> data() {
return Arrays.asList(new Object[][] { {100} });
}
  @BeforeClass
  public static void setupTestFiles() {
    // Copy the int96 dictionary-change parquet fixtures into the test root so
    // tests can read them through the dfs workspace.
    dirTestWatcher.copyResourceToRoot(Paths.get("parquet", "int96_dict_change"));
  }
  private static FileSystem fs;
  // Map storing a convenient name as well as the cast type necessary
  // to produce it casting from a varchar
  private static final Map<String, String> allTypes = new HashMap<>();
  // Select statement for all supported Drill types, for use in conjunction with
  // the file parquet/alltypes.json in the resources directory
  private static final String allTypesSelection;
  static {
    // Keys are SQL type names (cast targets), values are the column-name
    // suffixes used in parquet/alltypes.json.
    allTypes.put("int", "int");
    allTypes.put("bigint", "bigint");
    allTypes.put("decimal(9, 4)", "decimal9");
    allTypes.put("decimal(18,9)", "decimal18");
    allTypes.put("decimal(28, 14)", "decimal28sparse");
    allTypes.put("decimal(38, 19)", "decimal38sparse");
    allTypes.put("decimal(38, 15)", "vardecimal");
    allTypes.put("date", "date");
    allTypes.put("timestamp", "timestamp");
    allTypes.put("float", "float4");
    allTypes.put("double", "float8");
    allTypes.put("varbinary(65000)", "varbinary");
    // TODO(DRILL-2297)
    // allTypes.put("interval year", "intervalyear");
    allTypes.put("interval day", "intervalday");
    allTypes.put("boolean", "bit");
    allTypes.put("varchar", "varchar");
    allTypes.put("time", "time");
    List<String> allTypeSelectsAndCasts = new ArrayList<>();
    for (String s : allTypes.keySet()) {
      // don't need to cast a varchar, just add the column reference
      if (s.equals("varchar")) {
        allTypeSelectsAndCasts.add(String.format("`%s_col`", allTypes.get(s)));
        continue;
      }
      // NOTE: %S (upper-case conversion) upper-cases the cast target type in the SQL text.
      allTypeSelectsAndCasts.add(String.format("cast(`%s_col` AS %S) `%s_col`", allTypes.get(s), s, allTypes.get(s)));
    }
    allTypesSelection = Joiner.on(",").join(allTypeSelectsAndCasts);
  }
  // Source table exercised by the all-scalar-types round-trip tests.
  private final String allTypesTable = "cp.`parquet/alltypes.json`";
  // Repeat count injected by the Parameterized runner (see data());
  // not referenced by the tests visible in this file — TODO confirm usage elsewhere.
  @Parameterized.Parameter
  public int repeat = 1;
  @BeforeClass
  public static void initFs() throws Exception {
    // Local file system plus decimal support enabled for the whole class.
    fs = getLocalFileSystem();
    alterSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
  }
  @AfterClass
  public static void disableDecimalDataType() {
    // Restore the decimal option altered in initFs().
    resetSessionOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY);
  }
  // Round-trip a single-column JSON file through a parquet CTAS and validate.
  @Test
  public void testSmallFileValueReadWrite() throws Exception {
    String selection = "key";
    String inputTable = "cp.`store/json/intData.json`";
    runTestAndValidate(selection, selection, inputTable, "smallFileTest");
  }
  // Round-trip the full employee.json dataset (select *).
  @Test
  public void testSimple() throws Exception {
    String selection = "*";
    String inputTable = "cp.`employee.json`";
    runTestAndValidate(selection, selection, inputTable, "employee_parquet");
  }
@Test
public void testLargeFooter() throws Exception {
StringBuilder sb = new StringBuilder();
// create a JSON document with a lot of columns
sb.append("{");
final int numCols = 1000;
String[] colNames = new String[numCols];
Object[] values = new Object[numCols];
for (int i = 0; i < numCols - 1; i++) {
sb.append(String.format("\"col_%d\" : 100,", i));
colNames[i] = "col_" + i;
values[i] = 100L;
}
// add one column without a comma after it
sb.append(String.format("\"col_%d\" : 100", numCols - 1));
sb.append("}");
colNames[numCols - 1] = "col_" + (numCols - 1);
values[numCols - 1] = 100L;
String path = "test";
File pathDir = dirTestWatcher.makeRootSubDir(Paths.get(path));
// write it to a file in the temp directory for the test
new TestConstantFolding.SmallFileCreator(pathDir)
.setRecord(sb.toString()).createFiles(1, 1, "json");
test("use dfs.tmp");
test("create table WIDE_PARQUET_TABLE_TestParquetWriter_testLargeFooter as select * from dfs.`%s/smallfile/smallfile.json`", path);
testBuilder()
.sqlQuery("select * from dfs.tmp.WIDE_PARQUET_TABLE_TestParquetWriter_testLargeFooter")
.unOrdered()
.baselineColumns(colNames)
.baselineValues(values)
.build().run();
}
  // Round-trip every supported scalar type with both the flat and the
  // complex ("new") parquet readers.
  @Test
  public void testAllScalarTypes() throws Exception {
    /// read once with the flat reader
    runTestAndValidate(allTypesSelection, "*", allTypesTable, "donuts_json");
    try {
      // read all of the types with the complex reader
      alterSession(ExecConstants.PARQUET_NEW_RECORD_READER, true);
      runTestAndValidate(allTypesSelection, "*", allTypesTable, "donuts_json");
    } finally {
      resetSessionOption(ExecConstants.PARQUET_NEW_RECORD_READER);
    }
  }
  // Same as testAllScalarTypes but with dictionary encoding enabled on the writer.
  @Test
  public void testAllScalarTypesDictionary() throws Exception {
    try {
      alterSession(ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING, true);
      /// read once with the flat reader
      runTestAndValidate(allTypesSelection, "*", allTypesTable, "donuts_json");
      // read all of the types with the complex reader
      alterSession(ExecConstants.PARQUET_NEW_RECORD_READER, true);
      runTestAndValidate(allTypesSelection, "*", allTypesTable, "donuts_json");
    } finally {
      resetSessionOption(ExecConstants.PARQUET_NEW_RECORD_READER);
      resetSessionOption(ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING);
    }
  }
  // Regression test for reading a required dictionary-encoded column.
  @Test
  public void testDictionaryError() throws Exception {
    compareParquetReadersColumnar("*", "cp.`parquet/required_dictionary.parquet`");
    runTestAndValidate("*", "*", "cp.`parquet/required_dictionary.parquet`", "required_dictionary");
  }
  // Write with dictionary encoding enabled and validate the round trip.
  @Test
  public void testDictionaryEncoding() throws Exception {
    String selection = "type";
    String inputTable = "cp.`donuts.json`";
    try {
      alterSession(ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING, true);
      runTestAndValidate(selection, selection, inputTable, "donuts_json");
    } finally {
      resetSessionOption(ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING);
    }
  }
  // Round-trip a document with nested (complex) types.
  @Test
  public void testComplex() throws Exception {
    String selection = "*";
    String inputTable = "cp.`donuts.json`";
    runTestAndValidate(selection, selection, inputTable, "donuts_json");
  }
  // Round-trip a document containing repeated (array) fields.
  @Test
  public void testComplexRepeated() throws Exception {
    String selection = "*";
    String inputTable = "cp.`testRepeatedWrite.json`";
    runTestAndValidate(selection, selection, inputTable, "repeated_json");
  }
  // Regression test for DRILL-929: cast inside a projection written to parquet.
  @Test
  public void testCastProjectBug_Drill_929() throws Exception {
    String selection = "L_ORDERKEY, L_PARTKEY, L_SUPPKEY, L_LINENUMBER, L_QUANTITY, L_EXTENDEDPRICE, L_DISCOUNT, L_TAX, " +
        "L_RETURNFLAG, L_LINESTATUS, L_SHIPDATE, cast(L_COMMITDATE as DATE) as COMMITDATE, cast(L_RECEIPTDATE as DATE) AS RECEIPTDATE, L_SHIPINSTRUCT, L_SHIPMODE, L_COMMENT";
    String validationSelection = "L_ORDERKEY, L_PARTKEY, L_SUPPKEY, L_LINENUMBER, L_QUANTITY, L_EXTENDEDPRICE, L_DISCOUNT, L_TAX, " +
        "L_RETURNFLAG, L_LINESTATUS, L_SHIPDATE,COMMITDATE ,RECEIPTDATE, L_SHIPINSTRUCT, L_SHIPMODE, L_COMMENT";
    String inputTable = "cp.`tpch/lineitem.parquet`";
    runTestAndValidate(selection, validationSelection, inputTable, "drill_929");
  }
  // ---- Round-trip each TPC-H sample table through the parquet writer. ----
  @Test
  public void testTPCHReadWrite1() throws Exception {
    String inputTable = "cp.`tpch/lineitem.parquet`";
    runTestAndValidate("*", "*", inputTable, "lineitem_parquet_all");
  }
  // lineitem with date columns cast to DATE (converted type), dictionary off.
  @Test
  public void testTPCHReadWrite1_date_convertedType() throws Exception {
    try {
      alterSession(ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING, false);
      String selection = "L_ORDERKEY, L_PARTKEY, L_SUPPKEY, L_LINENUMBER, L_QUANTITY, L_EXTENDEDPRICE, L_DISCOUNT, L_TAX, " +
          "L_RETURNFLAG, L_LINESTATUS, L_SHIPDATE, cast(L_COMMITDATE as DATE) as L_COMMITDATE, cast(L_RECEIPTDATE as DATE) AS L_RECEIPTDATE, L_SHIPINSTRUCT, L_SHIPMODE, L_COMMENT";
      String validationSelection = "L_ORDERKEY, L_PARTKEY, L_SUPPKEY, L_LINENUMBER, L_QUANTITY, L_EXTENDEDPRICE, L_DISCOUNT, L_TAX, " +
          "L_RETURNFLAG, L_LINESTATUS, L_SHIPDATE,L_COMMITDATE ,L_RECEIPTDATE, L_SHIPINSTRUCT, L_SHIPMODE, L_COMMENT";
      String inputTable = "cp.`tpch/lineitem.parquet`";
      runTestAndValidate(selection, validationSelection, inputTable, "lineitem_parquet_converted");
    } finally {
      resetSessionOption(ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING);
    }
  }
  @Test
  public void testTPCHReadWrite2() throws Exception {
    String inputTable = "cp.`tpch/customer.parquet`";
    runTestAndValidate("*", "*", inputTable, "customer_parquet");
  }
  @Test
  public void testTPCHReadWrite3() throws Exception {
    String inputTable = "cp.`tpch/nation.parquet`";
    runTestAndValidate("*", "*", inputTable, "nation_parquet");
  }
  @Test
  public void testTPCHReadWrite4() throws Exception {
    String inputTable = "cp.`tpch/orders.parquet`";
    runTestAndValidate("*", "*", inputTable, "orders_parquet");
  }
  @Test
  public void testTPCHReadWrite5() throws Exception {
    String inputTable = "cp.`tpch/part.parquet`";
    runTestAndValidate("*", "*", inputTable, "part_parquet");
  }
  @Test
  public void testTPCHReadWrite6() throws Exception {
    String inputTable = "cp.`tpch/partsupp.parquet`";
    runTestAndValidate("*", "*", inputTable, "partsupp_parquet");
  }
  @Test
  public void testTPCHReadWrite7() throws Exception {
    String inputTable = "cp.`tpch/region.parquet`";
    runTestAndValidate("*", "*", inputTable, "region_parquet");
  }
  @Test
  public void testTPCHReadWrite8() throws Exception {
    String inputTable = "cp.`tpch/supplier.parquet`";
    runTestAndValidate("*", "*", inputTable, "supplier_parquet");
  }
  // Same round trip with dictionary encoding and compression both disabled.
  @Test
  public void testTPCHReadWriteNoDictUncompressed() throws Exception {
    try {
      alterSession(ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING, false);
      alterSession(ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE, "none");
      String inputTable = "cp.`tpch/supplier.parquet`";
      runTestAndValidate("*", "*", inputTable, "supplier_parquet_no_dict_uncompressed");
    } finally {
      resetSessionOption(ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING);
      resetSessionOption(ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE);
    }
  }
  // Same round trip with gzip compression enabled on the writer.
  @Test
  public void testTPCHReadWriteDictGzip() throws Exception {
    try {
      alterSession(ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE, "gzip");
      String inputTable = "cp.`tpch/supplier.parquet`";
      runTestAndValidate("*", "*", inputTable, "supplier_parquet_dict_gzip");
    } finally {
      resetSessionOption(ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE);
    }
  }
  // working to create an exhaustive test of the format for this one. including all convertedTypes
  // will not be supporting interval for Beta as of current schedule
  // Types left out:
  // "TIMESTAMPTZ_col"
  @Test
  public void testRepeated() throws Exception {
    String inputTable = "cp.`parquet/basic_repeated.json`";
    runTestAndValidate("*", "*", inputTable, "basic_repeated");
  }
  // Repeated double columns.
  @Test
  public void testRepeatedDouble() throws Exception {
    String inputTable = "cp.`parquet/repeated_double_data.json`";
    runTestAndValidate("*", "*", inputTable, "repeated_double_parquet");
  }
  // Repeated integer columns.
  @Test
  public void testRepeatedLong() throws Exception {
    String inputTable = "cp.`parquet/repeated_integer_data.json`";
    runTestAndValidate("*", "*", inputTable, "repeated_int_parquet");
  }
  // Repeated boolean columns.
  @Test
  public void testRepeatedBool() throws Exception {
    String inputTable = "cp.`parquet/repeated_bool_data.json`";
    runTestAndValidate("*", "*", inputTable, "repeated_bool_parquet");
  }
  // Nullable columns containing explicit nulls.
  @Test
  public void testNullReadWrite() throws Exception {
    String inputTable = "cp.`parquet/null_test_data.json`";
    runTestAndValidate("*", "*", inputTable, "nullable_test");
  }
  @Ignore("Test file not available")
  @Test
  public void testBitError_Drill_2031() throws Exception {
    compareParquetReadersHyperVector("*", "dfs.`tmp/wide2/0_0_3.parquet`");
  }
@Test
public void testDecimal() throws Exception {
String selection = "cast(salary as decimal(8,2)) as decimal8, cast(salary as decimal(15,2)) as decimal15, " +
"cast(salary as decimal(24,2)) as decimal24, cast(salary as decimal(38,2)) as decimal38";
String validateSelection = "decimal8, decimal15, decimal24, decimal38";
String inputTable = "cp.`employee.json`";
// DRILL-5833: The "old" writer had a decimal bug, but the new one
// did not. The one used was random. Force the test to run both
// the old and new readers.
try {
alterSession(ExecConstants.PARQUET_NEW_RECORD_READER, true);
runTestAndValidate(selection, validateSelection, inputTable, "parquet_decimal");
alterSession(ExecConstants.PARQUET_NEW_RECORD_READER, false);
runTestAndValidate(selection, validateSelection, inputTable, "parquet_decimal");
} finally {
resetSessionOption(ExecConstants.PARQUET_NEW_RECORD_READER);
}
}
  // Shrink the parquet block size so the CTAS produces multiple row groups.
  // (Method name typo "Muliple" is preserved for test-history continuity.)
  @Test
  public void testMulipleRowGroups() throws Exception {
    try {
      alterSession(ExecConstants.PARQUET_BLOCK_SIZE, 1024*1024);
      String selection = "mi";
      String inputTable = "cp.`customer.json`";
      runTestAndValidate(selection, selection, inputTable, "foodmart_customer_parquet");
    } finally {
      resetSessionOption(ExecConstants.PARQUET_BLOCK_SIZE);
    }
  }
  // DATE column round trip.
  @Test
  public void testDate() throws Exception {
    String selection = "cast(hire_date as DATE) as hire_date";
    String validateSelection = "hire_date";
    String inputTable = "cp.`employee.json`";
    runTestAndValidate(selection, validateSelection, inputTable, "foodmart_employee_parquet");
  }
  // BOOLEAN literal columns round trip.
  @Test
  public void testBoolean() throws Exception {
    String selection = "true as x, false as y";
    String validateSelection = "x, y";
    String inputTable = "cp.`tpch/region.parquet`";
    runTestAndValidate(selection, validateSelection, inputTable, "region_boolean_parquet");
  }
  @Test //DRILL-2030
  public void testWriterWithStarAndExp() throws Exception {
    // Star projection combined with a computed column.
    String selection = " *, r_regionkey + 1 r_regionkey2";
    String validateSelection = "r_regionkey, r_name, r_comment, r_regionkey + 1 r_regionkey2";
    String inputTable = "cp.`tpch/region.parquet`";
    runTestAndValidate(selection, validateSelection, inputTable, "region_star_exp");
  }
  @Test // DRILL-2458
  public void testWriterWithStarAndRegluarCol() throws Exception {
    // CTAS with star plus a regular column and an ORDER BY; the written table
    // must match the equivalent direct query, and the table is cleaned up.
    String outputFile = "region_sort";
    String ctasStmt = "create table " + outputFile + " as select *, r_regionkey + 1 as key1 from cp.`tpch/region.parquet` order by r_name";
    String query = "select r_regionkey, r_name, r_comment, r_regionkey +1 as key1 from cp.`tpch/region.parquet` order by r_name";
    String queryFromWriteOut = "select * from " + outputFile;
    try {
      test("use dfs.tmp");
      test(ctasStmt);
      testBuilder()
          .ordered()
          .sqlQuery(queryFromWriteOut)
          .sqlBaselineQuery(query)
          .build().run();
    } finally {
      deleteTableIfExists(outputFile);
    }
  }
public void compareParquetReadersColumnar(String selection, String table) throws Exception {
String query = "select " + selection + " from " + table;
try {
testBuilder()
.ordered()
.sqlQuery(query)
.optionSettingQueriesForTestQuery("alter system set `store.parquet.use_new_reader` = false")
.sqlBaselineQuery(query)
.optionSettingQueriesForBaseline("alter system set `store.parquet.use_new_reader` = true")
.build().run();
} finally {
resetSessionOption(ExecConstants.PARQUET_NEW_RECORD_READER);
}
}
public void compareParquetReadersHyperVector(String selection, String table) throws Exception {
String query = "select " + selection + " from " + table;
try {
testBuilder()
.ordered()
.highPerformanceComparison()
.sqlQuery(query)
.optionSettingQueriesForTestQuery(
"alter system set `store.parquet.use_new_reader` = false")
.sqlBaselineQuery(query)
.optionSettingQueriesForBaseline(
"alter system set `store.parquet.use_new_reader` = true")
.build().run();
} finally {
resetSessionOption(ExecConstants.PARQUET_NEW_RECORD_READER);
}
}
  // ---- The following comparisons depend on large or locally-generated
  // ---- parquet files that are not checked into version control, so they
  // ---- are @Ignore'd and only run manually.
  @Ignore("Binary file too large for version control")
  @Test
  public void testReadVoter() throws Exception {
    compareParquetReadersHyperVector("*", "dfs.`tmp/voter.parquet`");
  }
  @Ignore("Test file not available")
  @Test
  public void testReadSf_100_supplier() throws Exception {
    compareParquetReadersHyperVector("*", "dfs.`tmp/sf100_supplier.parquet`");
  }
  @Ignore("Binary file too large for version control")
  @Test
  public void testParquetRead_checkNulls_NullsFirst() throws Exception {
    compareParquetReadersColumnar("*",
        "dfs.`tmp/parquet_with_nulls_should_sum_100000_nulls_first.parquet`");
  }
  @Ignore("Test file not available")
  @Test
  public void testParquetRead_checkNulls() throws Exception {
    compareParquetReadersColumnar("*", "dfs.`tmp/parquet_with_nulls_should_sum_100000.parquet`");
  }
  @Ignore("Binary file too large for version control")
  @Test
  public void test958_sql() throws Exception {
    compareParquetReadersHyperVector("ss_ext_sales_price", "dfs.`tmp/store_sales`");
  }
  @Ignore("Binary file too large for version control")
  @Test
  public void testReadSf_1_supplier() throws Exception {
    compareParquetReadersHyperVector("*", "dfs.`tmp/orders_part-m-00001.parquet`");
  }
  @Ignore("Binary file too large for version control")
  @Test
  public void test958_sql_all_columns() throws Exception {
    compareParquetReadersHyperVector("*", "dfs.`tmp/store_sales`");
    compareParquetReadersHyperVector("ss_addr_sk, ss_hdemo_sk", "dfs.`tmp/store_sales`");
    // TODO - Drill 1388 - this currently fails, but it is an issue with project, not the reader, pulled out the physical plan
    // removed the unneeded project in the plan and ran it against both readers, they outputs matched
    // compareParquetReadersHyperVector("pig_schema,ss_sold_date_sk,ss_item_sk,ss_cdemo_sk,ss_addr_sk, ss_hdemo_sk",
    // "dfs.`tmp/store_sales`");
  }
  @Ignore("Binary file too large for version control")
  @Test
  public void testDrill_1314() throws Exception {
    compareParquetReadersColumnar("l_partkey ", "dfs.`tmp/drill_1314.parquet`");
  }
  @Ignore("Binary file too large for version control")
  @Test
  public void testDrill_1314_all_columns() throws Exception {
    compareParquetReadersHyperVector("*", "dfs.`tmp/drill_1314.parquet`");
    compareParquetReadersColumnar(
        "l_orderkey,l_partkey,l_suppkey,l_linenumber, l_quantity, l_extendedprice,l_discount,l_tax",
        "dfs.`tmp/drill_1314.parquet`");
  }
@Ignore("Test file not available")
@Test
public void testParquetRead_checkShortNullLists() throws Exception {
compareParquetReadersColumnar("*", "dfs.`tmp/short_null_lists.parquet`");
}
@Ignore("Test file not available")
@Test
public void testParquetRead_checkStartWithNull() throws Exception {
compareParquetReadersColumnar("*", "dfs.`tmp/start_with_null.parquet`");
}
@Ignore("Binary file too large for version control")
@Test
public void testParquetReadWebReturns() throws Exception {
compareParquetReadersColumnar("wr_returning_customer_sk", "dfs.`tmp/web_returns`");
}
@Test
public void testWriteDecimal() throws Exception {
  // CTAS one row with decimal(38,2) and decimal(28,2) casts, then verify
  // both columns read back at the declared scale.
  final String outputTable = "decimal_test";
  try {
    test("use dfs.tmp; " +
        "create table %s as select " +
        "cast('1.2' as decimal(38, 2)) col1, cast('1.2' as decimal(28, 2)) col2 " +
        "from cp.`employee.json` limit 1", outputTable);
    final BigDecimal expected = new BigDecimal("1.20");
    testBuilder()
        .unOrdered()
        .sqlQuery("select col1, col2 from %s ", outputTable)
        .baselineColumns("col1", "col2")
        .baselineValues(expected, expected)
        .go();
  } finally {
    deleteTableIfExists(outputTable);
  }
}
@Test // DRILL-2341
@Category(UnlikelyTest.class)
// CTAS with an explicit column list in the table definition: the written
// schema must use the declared names (id, name, bday), not the SELECT
// expression names.
public void tableSchemaWhenSelectFieldsInDef_SelectFieldsInView() throws Exception {
final String newTblName = "testTableOutputSchema";
try {
test("CREATE TABLE dfs.tmp.%s(id, name, bday) AS SELECT " +
"cast(`employee_id` as integer), " +
"cast(`full_name` as varchar(100)), " +
"cast(`birth_date` as date) " +
"FROM cp.`employee.json` ORDER BY `employee_id` LIMIT 1", newTblName);
testBuilder()
.unOrdered()
.sqlQuery("SELECT * FROM dfs.tmp.`%s`", newTblName)
.baselineColumns("id", "name", "bday")
.baselineValues(1, "Sheri Nowmer", LocalDate.parse("1961-08-26"))
.go();
} finally {
deleteTableIfExists(newTblName);
}
}
/*
* Method tests CTAS with interval data type. We also verify reading back the data to ensure we
* have written the correct type. For every CTAS operation we use both the readers to verify results.
*/
@Test
public void testCTASWithIntervalTypes() throws Exception {
test("use dfs.tmp");
String tableName = "drill_1980_t1";
// test required interval day type
test("create table %s as " +
"select " +
"interval '10 20:30:40.123' day to second col1, " +
"interval '-1000000000 20:12:23.999' day(10) to second col2 " +
"from cp.`employee.json` limit 2", tableName);
// Expected values as Joda Periods: day component plus the time-of-day
// remainder expressed in milliseconds (e.g. 20:30:40.123 -> 73840123 ms).
Period row1Col1 = new Period(0, 0, 0, 10, 0, 0, 0, 73840123);
Period row1Col2 = new Period(0, 0, 0, -1000000000, 0, 0, 0, -72743999);
// both source rows are identical, so row1 values are reused for row2
testParquetReaderHelper(tableName, row1Col1, row1Col2, row1Col1, row1Col2);
tableName = "drill_1980_2";
// test required interval year type
test("create table %s as " +
"select " +
"interval '10-2' year to month col1, " +
"interval '-100-8' year(3) to month col2 " +
"from cp.`employee.json` limit 2", tableName);
// year-month intervals normalize to total months (10y2m -> 122 months)
row1Col1 = new Period(0, 122, 0, 0, 0, 0, 0, 0);
row1Col2 = new Period(0, -1208, 0, 0, 0, 0, 0, 0);
testParquetReaderHelper(tableName, row1Col1, row1Col2, row1Col1, row1Col2);
// test nullable interval year type
tableName = "drill_1980_t3";
test("create table %s as " +
"select " +
"cast (intervalyear_col as interval year) col1," +
"cast(intervalyear_col as interval year) + interval '2' year col2 " +
"from cp.`parquet/alltypes.json` where tinyint_col = 1 or tinyint_col = 2", tableName);
row1Col1 = new Period(0, 12, 0, 0, 0, 0, 0, 0);
row1Col2 = new Period(0, 36, 0, 0, 0, 0, 0, 0);
Period row2Col1 = new Period(0, 24, 0, 0, 0, 0, 0, 0);
Period row2Col2 = new Period(0, 48, 0, 0, 0, 0, 0, 0);
testParquetReaderHelper(tableName, row1Col1, row1Col2, row2Col1, row2Col2);
// test nullable interval day type
tableName = "drill_1980_t4";
test("create table %s as " +
"select " +
"cast(intervalday_col as interval day) col1, " +
"cast(intervalday_col as interval day) + interval '1' day col2 " +
"from cp.`parquet/alltypes.json` where tinyint_col = 1 or tinyint_col = 2", tableName);
row1Col1 = new Period(0, 0, 0, 1, 0, 0, 0, 0);
row1Col2 = new Period(0, 0, 0, 2, 0, 0, 0, 0);
row2Col1 = new Period(0, 0, 0, 2, 0, 0, 0, 0);
row2Col2 = new Period(0, 0, 0, 3, 0, 0, 0, 0);
testParquetReaderHelper(tableName, row1Col1, row1Col2, row2Col1, row2Col2);
}
/**
 * Verifies that {@code select * from tableName} yields exactly the two
 * expected (col1, col2) rows with both the new (vectorized) Parquet reader
 * and the original reader.
 */
private void testParquetReaderHelper(String tableName, Period row1Col1, Period row1Col2,
                                     Period row2Col1, Period row2Col2) throws Exception {
  final String query = String.format("select * from %s", tableName);
  // first pass with the new reader enabled, second with it disabled
  for (boolean useNewReader : new boolean[] {true, false}) {
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .optionSettingQueriesForTestQuery(
            String.format("alter session set `store.parquet.use_new_reader` = %s; ", useNewReader))
        .baselineColumns("col1", "col2")
        .baselineValues(row1Col1, row1Col2)
        .baselineValues(row2Col1, row2Col2)
        .go();
  }
}
/**
 * Best-effort removal of a test table under the dfs test tmp directory.
 * Failures (including a missing filesystem entry) are deliberately ignored
 * so that cleanup never fails the test itself.
 */
private static void deleteTableIfExists(String tableName) {
  try {
    final Path tablePath =
        new Path(dirTestWatcher.getDfsTestTmpDir().getAbsolutePath(), tableName);
    if (fs.exists(tablePath)) {
      fs.delete(tablePath, true);
    }
  } catch (Exception ignored) {
    // cleanup only — swallow intentionally
  }
}
// CTAS round-trip helper: writes `selection` from `inputTable` into a new
// parquet table, compares the written data against the source query, and
// checks that every written file's footer carries the Drill version in its
// key/value metadata. The output table is removed before and after the run.
public void runTestAndValidate(String selection, String validationSelection, String inputTable, String outputFile) throws Exception {
try {
deleteTableIfExists(outputFile);
final String query = String.format("SELECT %s FROM %s", selection, inputTable);
test("use dfs.tmp");
test("CREATE TABLE %s AS %s", outputFile, query);
testBuilder()
.unOrdered()
.sqlQuery(query)
.sqlBaselineQuery("SELECT %s FROM %s", validationSelection, outputFile)
.go();
Configuration hadoopConf = new Configuration();
hadoopConf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);
// NOTE(review): fs is not closed here — presumably relying on Hadoop's
// FileSystem cache; closing it could affect other users of the cached
// instance, hence the @SuppressWarnings.
@SuppressWarnings("resource")
Path output = new Path(dirTestWatcher.getDfsTestTmpDir().getAbsolutePath(), outputFile);
FileSystem fs = output.getFileSystem(hadoopConf);
for (FileStatus file : fs.listStatus(output)) {
// SKIP_ROW_GROUPS: only file-level metadata is needed for this check
ParquetMetadata footer = ParquetFileReader.readFooter(hadoopConf, file, SKIP_ROW_GROUPS);
String version = footer.getFileMetaData().getKeyValueMetaData().get(DRILL_VERSION_PROPERTY);
assertEquals(DrillVersionInfo.getVersion(), version);
}
} finally {
deleteTableIfExists(outputFile);
}
}
/*
Impala encodes timestamp values as int96 fields. Test the reading of an int96 field with two converters:
the first one converts parquet INT96 into drill VARBINARY and the second one (works while
store.parquet.reader.int96_as_timestamp option is enabled) converts parquet INT96 into drill TIMESTAMP.
*/
@Test
public void testImpalaParquetInt96() throws Exception {
// first comparison with the option at its default (VARBINARY conversion)
compareParquetReadersColumnar("field_impala_ts", "cp.`parquet/int96_impala_1.parquet`");
try {
// second comparison with INT96 -> TIMESTAMP conversion enabled
alterSession(ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP, true);
compareParquetReadersColumnar("field_impala_ts", "cp.`parquet/int96_impala_1.parquet`");
} finally {
resetSessionOption(ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP);
}
}
/*
Test the reading of a binary field as drill varbinary where data is in dictionary _and_ non-dictionary encoded pages
*/
@Test
public void testImpalaParquetBinaryAsVarBinary_DictChange() throws Exception {
compareParquetReadersColumnar("field_impala_ts", "cp.`parquet/int96_dict_change.parquet`");
}
/*
Test the reading of a binary field as drill timestamp where data is in dictionary _and_ non-dictionary encoded pages
*/
@Test
public void testImpalaParquetBinaryAsTimeStamp_DictChange() throws Exception {
try {
testBuilder()
.sqlQuery("select min(int96_ts) date_value from dfs.`parquet/int96_dict_change`")
.optionSettingQueriesForTestQuery(
"alter session set `%s` = true", ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP)
.ordered()
.baselineColumns("date_value")
.baselineValues(convertToLocalDateTime("1970-01-01 00:00:01.000"))
.build().run()
/*
Test reading parquet Int96 as TimeStamp and comparing the obtained values with the
old results (reading the same values as VarBinary and converting with the
convert_from TIMESTAMP_IMPALA function)
*/
@Test
public void testImpalaParquetTimestampInt96AsTimeStamp() throws Exception {
try {
// exercise both readers: old reader first, then the new one
alterSession(ExecConstants.PARQUET_NEW_RECORD_READER, false);
compareParquetInt96Converters("field_impala_ts", "cp.`parquet/int96_impala_1.parquet`");
alterSession(ExecConstants.PARQUET_NEW_RECORD_READER, true);
compareParquetInt96Converters("field_impala_ts", "cp.`parquet/int96_impala_1.parquet`");
} finally {
resetSessionOption(ExecConstants.PARQUET_NEW_RECORD_READER);
}
}
/*
Test a file with partitions and an int96 column. (Data generated using Hive)
*/
@Test
public void testImpalaParquetInt96Partitioned() throws Exception {
compareParquetReadersColumnar("timestamp_field", "cp.`parquet/part1/hive_all_types.parquet`");
}
/*
Test the conversion from int96 to impala timestamp with hive data including nulls. Validate against old reader
*/
@Test
public void testHiveParquetTimestampAsInt96_compare() throws Exception {
compareParquetReadersColumnar("convert_from(timestamp_field, 'TIMESTAMP_IMPALA')",
"cp.`parquet/part1/hive_all_types.parquet`");
}
/*
Test the conversion from int96 to impala timestamp with hive data including nulls. Validate against expected values
*/
@Test
public void testHiveParquetTimestampAsInt96_basic() throws Exception {
testBuilder()
.unOrdered()
.sqlQuery("SELECT convert_from(timestamp_field, 'TIMESTAMP_IMPALA') as timestamp_field "
+ "from cp.`parquet/part1/hive_all_types.parquet` ")
.baselineColumns("timestamp_field")
.baselineValues(convertToLocalDateTime("2013-07-06 00:01:00"))
.baselineValues((Object)null)
.go();
}
@Test
@Ignore
public void testSchemaChange() throws Exception {
// Two JSON inputs with disjoint columns force a schema change across files.
// NOTE(review): "target/" + this.getClass() uses Class.toString() in the
// path (e.g. "class com.foo.Bar") — looks intentional enough for a temp
// dir, but worth confirming if this test is ever un-ignored.
File dir = new File("target/" + this.getClass());
if ((!dir.exists() && !dir.mkdirs()) || (dir.exists() && !dir.isDirectory())) {
throw new RuntimeException("can't create dir " + dir);
}
File input1 = new File(dir, "1.json");
File input2 = new File(dir, "2.json");
try (FileWriter fw = new FileWriter(input1)) {
fw.append("{\"a\":\"foo\"}\n");
}
try (FileWriter fw = new FileWriter(input2)) {
fw.append("{\"b\":\"foo\"}\n");
}
test("select * from " + "dfs.`" + dir.getAbsolutePath() + "`");
runTestAndValidate("*", "*", "dfs.`" + dir.getAbsolutePath() + "`", "schema_change_parquet");
}
/*
The following tests check boundary conditions for null values occurring on page boundaries. All files have at least one dictionary
encoded page for all columns
*/
@Test
public void testAllNulls() throws Exception {
compareParquetReadersColumnar(
"c_varchar, c_integer, c_bigint, c_float, c_double, c_date, c_time, c_timestamp, c_boolean",
"cp.`parquet/all_nulls.parquet`");
}
@Test
public void testNoNulls() throws Exception {
compareParquetReadersColumnar(
"c_varchar, c_integer, c_bigint, c_float, c_double, c_date, c_time, c_timestamp, c_boolean",
"cp.`parquet/no_nulls.parquet`");
}
@Test
public void testFirstPageAllNulls() throws Exception {
compareParquetReadersColumnar(
"c_varchar, c_integer, c_bigint, c_float, c_double, c_date, c_time, c_timestamp, c_boolean",
"cp.`parquet/first_page_all_nulls.parquet`");
}
@Test
public void testLastPageAllNulls() throws Exception {
  // Boundary case: the LAST data page consists entirely of nulls.
  // Fix: this previously read first_page_all_nulls.parquet (copy/paste from
  // testFirstPageAllNulls), so the last-page case was never exercised.
  compareParquetReadersColumnar(
      "c_varchar, c_integer, c_bigint, c_float, c_double, c_date, c_time, c_timestamp, c_boolean",
      "cp.`parquet/last_page_all_nulls.parquet`");
}
// Boundary cases: exactly one null located on the first / last data page.
@Test
public void testFirstPageOneNull() throws Exception {
compareParquetReadersColumnar(
"c_varchar, c_integer, c_bigint, c_float, c_double, c_date, c_time, c_timestamp, c_boolean",
"cp.`parquet/first_page_one_null.parquet`");
}
@Test
public void testLastPageOneNull() throws Exception {
compareParquetReadersColumnar(
"c_varchar, c_integer, c_bigint, c_float, c_double, c_date, c_time, c_timestamp, c_boolean",
"cp.`parquet/last_page_one_null.parquet`");
}
/**
 * With int96_as_timestamp enabled, reading the column directly must produce
 * the same ordered values as reading it with the option disabled and
 * converting through convert_from(..., 'TIMESTAMP_IMPALA').
 */
private void compareParquetInt96Converters(String selection, String table) throws Exception {
  final String enableOption = "alter session set `%s` = true";
  final String disableOption = "alter session set `%s` = false";
  try {
    testBuilder()
        .ordered()
        .sqlQuery("select `%1$s` from %2$s order by `%1$s`", selection, table)
        .optionSettingQueriesForTestQuery(
            enableOption, ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP)
        .sqlBaselineQuery("select convert_from(`%1$s`, 'TIMESTAMP_IMPALA') as `%1$s` from %2$s order by `%1$s`",
            selection, table)
        .optionSettingQueriesForBaseline(
            disableOption, ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP)
        .build()
        .run();
  } finally {
    resetSessionOption(ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP);
  }
}
@Ignore ("Used to test decompression in AsyncPageReader. Takes too long.")
@Test
public void testTPCHReadWriteRunRepeated() throws Exception {
// stress loop; `repeat` is a field defined elsewhere in this class
for (int i = 1; i <= repeat; i++) {
testTPCHReadWriteGzip();
testTPCHReadWriteSnappy();
}
}
@Test
public void testTPCHReadWriteGzip() throws Exception {
// CTAS round-trip with the writer's gzip codec
try {
alterSession(ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE, "gzip");
String inputTable = "cp.`tpch/supplier.parquet`";
runTestAndValidate("*", "*", inputTable, "suppkey_parquet_dict_gzip");
} finally {
resetSessionOption(ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE);
}
}
@Test
public void testTPCHReadWriteSnappy() throws Exception {
// CTAS round-trip with the writer's snappy codec
try {
alterSession(ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE, "snappy");
String inputTable = "cp.`supplier_snappy.parquet`";
runTestAndValidate("*", "*", inputTable, "suppkey_parquet_dict_snappy");
} finally {
resetSessionOption(ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE);
}
}
@Test // DRILL-5097
@Category(UnlikelyTest.class)
// Regression test: int96 timestamp values must be read at full width when
// mixed with other columns and null filtering.
public void testInt96TimeStampValueWidth() throws Exception {
try {
testBuilder()
.unOrdered()
.sqlQuery("select c, d from cp.`parquet/data.snappy.parquet` " +
"where `a` is not null and `c` is not null and `d` is not null")
.optionSettingQueriesForTestQuery(
"alter session set `%s` = true", ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP)
.baselineColumns("c", "d")
.baselineValues(LocalDate.parse("2012-12-15"),
convertToLocalDateTime("2016-04-24 20:06:28"))
.baselineValues(LocalDate.parse("2011-07-09"),
convertToLocalDateTime("2015-04-15 22:35:49"))
.build()
.run();
} finally {
resetSessionOption(ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP);
}
}
// The next four tests verify which physical Parquet type the writer picks
// for DECIMAL columns under each combination of
// `store.parquet.writer.use_primitive_types_for_decimals` and
// `store.parquet.writer.logical_type_for_decimals`, then confirm the data
// reads back unchanged. checkTableTypes() inspects the written footer.
@Test
public void testWriteDecimalIntBigIntFixedLen() throws Exception {
// primitives allowed (default): small decimals -> INT32/INT64, overflowing
// precision falls back to FIXED_LEN_BYTE_ARRAY
String tableName = "decimalIntBigIntFixedLen";
try {
alterSession(ExecConstants.PARQUET_WRITER_LOGICAL_TYPE_FOR_DECIMALS, FIXED_LEN_BYTE_ARRAY.name());
test(
"create table dfs.tmp.%s as\n" +
"select cast('123456.789' as decimal(9, 3)) as decInt,\n" +
"cast('123456.789123456789' as decimal(18, 12)) as decBigInt,\n" +
"cast('123456.789123456789' as decimal(19, 12)) as fixedLen", tableName);
checkTableTypes(tableName,
ImmutableList.of(
Pair.of("decInt", INT32),
Pair.of("decBigInt", INT64),
Pair.of("fixedLen", FIXED_LEN_BYTE_ARRAY)),
true);
testBuilder()
.sqlQuery("select * from dfs.tmp.%s", tableName)
.unOrdered()
.baselineColumns("decInt", "decBigInt", "fixedLen")
.baselineValues(new BigDecimal("123456.789"),
new BigDecimal("123456.789123456789"),
new BigDecimal("123456.789123456789"))
.go();
} finally {
resetSessionOption(ExecConstants.PARQUET_WRITER_USE_PRIMITIVE_TYPES_FOR_DECIMALS);
resetSessionOption(ExecConstants.PARQUET_WRITER_LOGICAL_TYPE_FOR_DECIMALS);
test("drop table if exists dfs.tmp.%s", tableName);
}
}
@Test
public void testWriteDecimalIntBigIntBinary() throws Exception {
// primitives allowed, fallback logical type BINARY
String tableName = "decimalIntBigIntBinary";
try {
alterSession(ExecConstants.PARQUET_WRITER_USE_PRIMITIVE_TYPES_FOR_DECIMALS, true);
alterSession(ExecConstants.PARQUET_WRITER_LOGICAL_TYPE_FOR_DECIMALS, BINARY.name());
test(
"create table dfs.tmp.%s as\n" +
"select cast('123456.789' as decimal(9, 3)) as decInt,\n" +
"cast('123456.789123456789' as decimal(18, 12)) as decBigInt,\n" +
"cast('123456.789123456789' as decimal(19, 12)) as binCol", tableName);
checkTableTypes(tableName,
ImmutableList.of(
Pair.of("decInt", INT32),
Pair.of("decBigInt", INT64),
Pair.of("binCol", BINARY)),
true);
testBuilder()
.sqlQuery("select * from dfs.tmp.%s", tableName)
.unOrdered()
.baselineColumns("decInt", "decBigInt", "binCol")
.baselineValues(new BigDecimal("123456.789"),
new BigDecimal("123456.789123456789"),
new BigDecimal("123456.789123456789"))
.go();
} finally {
resetSessionOption(ExecConstants.PARQUET_WRITER_USE_PRIMITIVE_TYPES_FOR_DECIMALS);
resetSessionOption(ExecConstants.PARQUET_WRITER_LOGICAL_TYPE_FOR_DECIMALS);
test("drop table if exists dfs.tmp.%s", tableName);
}
}
@Test
public void testWriteDecimalFixedLenOnly() throws Exception {
// primitives disabled: every decimal is written as FIXED_LEN_BYTE_ARRAY
String tableName = "decimalFixedLenOnly";
try {
alterSession(ExecConstants.PARQUET_WRITER_USE_PRIMITIVE_TYPES_FOR_DECIMALS, false);
alterSession(ExecConstants.PARQUET_WRITER_LOGICAL_TYPE_FOR_DECIMALS, FIXED_LEN_BYTE_ARRAY.name());
test(
"create table dfs.tmp.%s as\n" +
"select cast('123456.789' as decimal(9, 3)) as decInt,\n" +
"cast('123456.789123456789' as decimal(18, 12)) as decBigInt,\n" +
"cast('123456.789123456789' as decimal(19, 12)) as fixedLen", tableName);
checkTableTypes(tableName,
ImmutableList.of(
Pair.of("decInt", FIXED_LEN_BYTE_ARRAY),
Pair.of("decBigInt", FIXED_LEN_BYTE_ARRAY),
Pair.of("fixedLen", FIXED_LEN_BYTE_ARRAY)),
true);
testBuilder()
.sqlQuery("select * from dfs.tmp.%s", tableName)
.unOrdered()
.baselineColumns("decInt", "decBigInt", "fixedLen")
.baselineValues(new BigDecimal("123456.789"),
new BigDecimal("123456.789123456789"),
new BigDecimal("123456.789123456789"))
.go();
} finally {
resetSessionOption(ExecConstants.PARQUET_WRITER_USE_PRIMITIVE_TYPES_FOR_DECIMALS);
resetSessionOption(ExecConstants.PARQUET_WRITER_LOGICAL_TYPE_FOR_DECIMALS);
test("drop table if exists dfs.tmp.%s", tableName);
}
}
@Test
public void testWriteDecimalBinaryOnly() throws Exception {
// primitives disabled: every decimal is written as BINARY
String tableName = "decimalBinaryOnly";
try {
alterSession(ExecConstants.PARQUET_WRITER_USE_PRIMITIVE_TYPES_FOR_DECIMALS, false);
alterSession(ExecConstants.PARQUET_WRITER_LOGICAL_TYPE_FOR_DECIMALS, BINARY.name());
test(
"create table dfs.tmp.%s as\n" +
"select cast('123456.789' as decimal(9, 3)) as decInt,\n" +
"cast('123456.789123456789' as decimal(18, 12)) as decBigInt,\n" +
"cast('123456.789123456789' as decimal(19, 12)) as binCol", tableName);
checkTableTypes(tableName,
ImmutableList.of(
Pair.of("decInt", BINARY),
Pair.of("decBigInt", BINARY),
Pair.of("binCol", BINARY)),
true);
testBuilder()
.sqlQuery("select * from dfs.tmp.%s", tableName)
.unOrdered()
.baselineColumns("decInt", "decBigInt", "binCol")
.baselineValues(new BigDecimal("123456.789"),
new BigDecimal("123456.789123456789"),
new BigDecimal("123456.789123456789"))
.go();
} finally {
resetSessionOption(ExecConstants.PARQUET_WRITER_USE_PRIMITIVE_TYPES_FOR_DECIMALS);
resetSessionOption(ExecConstants.PARQUET_WRITER_LOGICAL_TYPE_FOR_DECIMALS);
test("drop table if exists dfs.tmp.%s", tableName);
}
}
// Same physical-type matrix as above, but for REPEATED decimal columns read
// from a pre-built parquet fixture with int32/int64/fixed-len/binary decimal
// lists. Expected values are the three-element lists built below.
@Test
public void testWriteDecimalIntBigIntRepeated() throws Exception {
String tableName = "decimalIntBigIntRepeated";
JsonStringArrayList<BigDecimal> ints = new JsonStringArrayList<>();
ints.add(new BigDecimal("999999.999"));
ints.add(new BigDecimal("-999999.999"));
ints.add(new BigDecimal("0.000"));
JsonStringArrayList<BigDecimal> longs = new JsonStringArrayList<>();
longs.add(new BigDecimal("999999999.999999999"));
longs.add(new BigDecimal("-999999999.999999999"));
longs.add(new BigDecimal("0.000000000"));
JsonStringArrayList<BigDecimal> fixedLen = new JsonStringArrayList<>();
fixedLen.add(new BigDecimal("999999999999.999999"));
fixedLen.add(new BigDecimal("-999999999999.999999"));
fixedLen.add(new BigDecimal("0.000000"));
try {
alterSession(ExecConstants.PARQUET_WRITER_USE_PRIMITIVE_TYPES_FOR_DECIMALS, true);
alterSession(ExecConstants.PARQUET_WRITER_LOGICAL_TYPE_FOR_DECIMALS, FIXED_LEN_BYTE_ARRAY.name());
test(
"create table dfs.tmp.%s as\n" +
"select * from cp.`parquet/repeatedIntLondFixedLenBinaryDecimal.parquet`", tableName);
// with primitives enabled, all four source columns fit INT32/INT64
checkTableTypes(tableName,
ImmutableList.of(
Pair.of("decimal_int32", INT32),
Pair.of("decimal_int64", INT64),
Pair.of("decimal_fixedLen", INT64),
Pair.of("decimal_binary", INT64)),
true);
testBuilder()
.sqlQuery("select * from dfs.tmp.%s", tableName)
.unOrdered()
.baselineColumns("decimal_int32", "decimal_int64", "decimal_fixedLen", "decimal_binary")
.baselineValues(ints, longs, fixedLen, fixedLen)
.go();
} finally {
resetSessionOption(ExecConstants.PARQUET_WRITER_USE_PRIMITIVE_TYPES_FOR_DECIMALS);
resetSessionOption(ExecConstants.PARQUET_WRITER_LOGICAL_TYPE_FOR_DECIMALS);
test("drop table if exists dfs.tmp.%s", tableName);
}
}
@Test
public void testWriteDecimalFixedLenRepeated() throws Exception {
String tableName = "decimalFixedLenRepeated";
JsonStringArrayList<BigDecimal> ints = new JsonStringArrayList<>();
ints.add(new BigDecimal("999999.999"));
ints.add(new BigDecimal("-999999.999"));
ints.add(new BigDecimal("0.000"));
JsonStringArrayList<BigDecimal> longs = new JsonStringArrayList<>();
longs.add(new BigDecimal("999999999.999999999"));
longs.add(new BigDecimal("-999999999.999999999"));
longs.add(new BigDecimal("0.000000000"));
JsonStringArrayList<BigDecimal> fixedLen = new JsonStringArrayList<>();
fixedLen.add(new BigDecimal("999999999999.999999"));
fixedLen.add(new BigDecimal("-999999999999.999999"));
fixedLen.add(new BigDecimal("0.000000"));
try {
alterSession(ExecConstants.PARQUET_WRITER_USE_PRIMITIVE_TYPES_FOR_DECIMALS, false);
alterSession(ExecConstants.PARQUET_WRITER_LOGICAL_TYPE_FOR_DECIMALS, FIXED_LEN_BYTE_ARRAY.name());
test(
"create table dfs.tmp.%s as\n" +
"select * from cp.`parquet/repeatedIntLondFixedLenBinaryDecimal.parquet`", tableName);
checkTableTypes(tableName,
ImmutableList.of(
Pair.of("decimal_int32", FIXED_LEN_BYTE_ARRAY),
Pair.of("decimal_int64", FIXED_LEN_BYTE_ARRAY),
Pair.of("decimal_fixedLen", FIXED_LEN_BYTE_ARRAY),
Pair.of("decimal_binary", FIXED_LEN_BYTE_ARRAY)),
true);
testBuilder()
.sqlQuery("select * from dfs.tmp.%s", tableName)
.unOrdered()
.baselineColumns("decimal_int32", "decimal_int64", "decimal_fixedLen", "decimal_binary")
.baselineValues(ints, longs, fixedLen, fixedLen)
.go();
} finally {
resetSessionOption(ExecConstants.PARQUET_WRITER_USE_PRIMITIVE_TYPES_FOR_DECIMALS);
resetSessionOption(ExecConstants.PARQUET_WRITER_LOGICAL_TYPE_FOR_DECIMALS);
test("drop table if exists dfs.tmp.%s", tableName);
}
}
@Test
public void testWriteDecimalBinaryRepeated() throws Exception {
String tableName = "decimalBinaryRepeated";
JsonStringArrayList<BigDecimal> ints = new JsonStringArrayList<>();
ints.add(new BigDecimal("999999.999"));
ints.add(new BigDecimal("-999999.999"));
ints.add(new BigDecimal("0.000"));
JsonStringArrayList<BigDecimal> longs = new JsonStringArrayList<>();
longs.add(new BigDecimal("999999999.999999999"));
longs.add(new BigDecimal("-999999999.999999999"));
longs.add(new BigDecimal("0.000000000"));
JsonStringArrayList<BigDecimal> fixedLen = new JsonStringArrayList<>();
fixedLen.add(new BigDecimal("999999999999.999999"));
fixedLen.add(new BigDecimal("-999999999999.999999"));
fixedLen.add(new BigDecimal("0.000000"));
try {
alterSession(ExecConstants.PARQUET_WRITER_USE_PRIMITIVE_TYPES_FOR_DECIMALS, false);
alterSession(ExecConstants.PARQUET_WRITER_LOGICAL_TYPE_FOR_DECIMALS, BINARY.name());
test(
"create table dfs.tmp.%s as\n" +
"select * from cp.`parquet/repeatedIntLondFixedLenBinaryDecimal.parquet`", tableName);
checkTableTypes(tableName,
ImmutableList.of(
Pair.of("decimal_int32", BINARY),
Pair.of("decimal_int64", BINARY),
Pair.of("decimal_fixedLen", BINARY),
Pair.of("decimal_binary", BINARY)),
true);
testBuilder()
.sqlQuery("select * from dfs.tmp.%s", tableName)
.unOrdered()
.baselineColumns("decimal_int32", "decimal_int64", "decimal_fixedLen", "decimal_binary")
.baselineValues(ints, longs, fixedLen, fixedLen)
.go();
} finally {
resetSessionOption(ExecConstants.PARQUET_WRITER_USE_PRIMITIVE_TYPES_FOR_DECIMALS);
resetSessionOption(ExecConstants.PARQUET_WRITER_LOGICAL_TYPE_FOR_DECIMALS);
test("drop table if exists dfs.tmp.%s", tableName);
}
}
/**
* Checks that the specified parquet table contains the specified columns with the specified
* physical types, by reading the schema from the footer of the first written file (0_0_0.parquet).
*
* @param tableName name of the table that should be checked.
* @param columnsToCheck pairs of column name and expected primitive type.
* @param isDecimalType whether the checked columns should carry the DECIMAL original-type annotation.
* @throws IOException If the table file was not found.
*/
private void checkTableTypes(String tableName,
List<Pair<String, PrimitiveType.PrimitiveTypeName>> columnsToCheck,
boolean isDecimalType) throws IOException {
MessageType schema = ParquetFileReader.readFooter(
new Configuration(),
new Path(Paths.get(dirTestWatcher.getDfsTestTmpDir().getPath(), tableName, "0_0_0.parquet").toUri().getPath()),
NO_FILTER).getFileMetaData().getSchema();
for (Pair<String, PrimitiveType.PrimitiveTypeName> nameType : columnsToCheck) {
// physical type must match expectation
assertEquals(
String.format("Table %s does not contain column %s with type %s",
tableName, nameType.getKey(), nameType.getValue()),
nameType.getValue(),
schema.getType(nameType.getKey()).asPrimitiveType().getPrimitiveTypeName());
// DECIMAL annotation must be present exactly when expected
assertEquals(
String.format("Table %s %s column %s with DECIMAL type", tableName,
isDecimalType ? "does not contain" : "contains unexpected", nameType.getKey()),
isDecimalType, schema.getType(nameType.getKey()).getOriginalType() == OriginalType.DECIMAL);
}
}
}
| |
package com.planet_ink.coffee_mud.Abilities.Common;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2010 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings("unchecked")
public class Weaving extends EnhancedCraftingSkill implements ItemCraftor, MendingSkill
{
// Skill identity and command triggers.
public String ID() { return "Weaving"; }
public String name(){ return "Weaving";}
private static final String[] triggerStrings = {"WEAVING","WEAVE"};
public String[] triggerStrings(){return triggerStrings;}
// Raw materials this craft can consume.
public String supportedResourceString(){return "WHEAT|VINE|SEAWEED|HEMP|SILK|COTTON";}
// Column layout of each recipe row in the recipe data file.
public String parametersFormat(){ return
"ITEM_NAME\tITEM_LEVEL\tBUILD_TIME_TICKS\tAMOUNT_MATERIAL_REQUIRED\tITEM_BASE_VALUE\t"
+"ITEM_CLASS_ID\tWEAPON_CLASS||CODED_WEAR_LOCATION||RIDE_BASIS||LID_LOCK\t"
+"CONTAINER_CAPACITY||WEAPON_HANDS_REQUIRED\tBASE_ARMOR_AMOUNT||BASE_DAMAGE\t"
+"CONTAINER_TYPE\tCODED_SPELL_LIST";}
// Indexes into a recipe row (must stay in sync with parametersFormat above).
protected static final int RCP_FINALNAME=0;
protected static final int RCP_LEVEL=1;
protected static final int RCP_TICKS=2;
protected static final int RCP_WOOD=3;
protected static final int RCP_VALUE=4;
protected static final int RCP_CLASSTYPE=5;
protected static final int RCP_MISCTYPE=6;
protected static final int RCP_CAPACITY=7;
protected static final int RCP_ARMORDMG=8;
protected static final int RCP_CONTAINMASK=9;
protected static final int RCP_SPELL=10;
// Optional key item produced alongside a lockable container.
protected Item key=null;
public Weaving()
{
super();
// start with no work in progress and all outcome flags cleared
// (building/mending/refitting/messedUp are inherited state)
building = null;
mending = false;
refitting = false;
messedUp = false;
}
public boolean tick(Tickable ticking, int tickID)
{
    // If the work-in-progress item has vanished while this skill is still
    // affecting a MOB, end the skill early; then defer to the normal tick.
    // (instanceof is null-safe, so no separate null check is needed.)
    if((tickID==Tickable.TICKID_MOB)
    &&(affected instanceof MOB)
    &&(building==null))
        unInvoke();
    return super.tick(ticking,tickID);
}
// Recipe data file for this craft.
public String parametersFile(){ return "weaving.txt";}
// Loads and caches the recipe rows from the parameters file.
protected Vector loadRecipes(){return super.loadRecipes(parametersFile());}
// Called when the skill effect ends: resolves the outcome of the weaving
// session (success, failure, mend, or refit) before clearing state.
public void unInvoke()
{
if(canBeUninvoked())
{
if((affected!=null)&&(affected instanceof MOB))
{
MOB mob=(MOB)affected;
// only resolve an outcome if work was in progress and not aborted
if((building!=null)&&(!aborted))
{
if(messedUp)
{
// failure messages differ by activity
if(mending)
messedUpCrafting(mob);
else
if(refitting)
commonEmote(mob,"<S-NAME> mess(es) up refitting "+building.name()+".");
else
commonEmote(mob,"<S-NAME> mess(es) up weaving "+building.name()+".");
}
else
{
if(mending)
// successful mend restores the item to full condition
building.setUsesRemaining(100);
else
if(refitting)
{
// height 0 makes the item fit its new wearer
building.baseEnvStats().setHeight(0);
building.recoverEnvStats();
}
else
{
// successful creation: award the item (and its key, if any)
dropAWinner(mob,building);
if(key!=null)
{
dropAWinner(mob,key);
key.setContainer(building);
}
}
}
}
// clear session state regardless of outcome
building=null;
key=null;
mending=false;
}
}
super.unInvoke();
}
// Quietly probes mendability without a MOB.
public boolean supportsMending(Environmental E){ return canMend(null,E,true);}
// An item is mendable by this skill only if the generic checks pass and it
// is made of one of the weavable fiber materials.
protected boolean canMend(MOB mob, Environmental E, boolean quiet)
{
    if(!super.canMend(mob,E,quiet))
        return false;
    final Item item=(Item)E;
    switch(item.material())
    {
    case RawMaterial.RESOURCE_COTTON:
    case RawMaterial.RESOURCE_SILK:
    case RawMaterial.RESOURCE_HEMP:
    case RawMaterial.RESOURCE_VINE:
    case RawMaterial.RESOURCE_WHEAT:
    case RawMaterial.RESOURCE_SEAWEED:
        return true;
    }
    if(!quiet)
        commonTell(mob,"That's not made of any sort of weavable material. It can't be mended.");
    return false;
}
/**
 * Entry point for the weave skill.  Dispatches on the first argument to
 * the sub-commands shown in the usage message: "list", "scan",
 * "mend &lt;item&gt;", "refit &lt;item&gt;", "bundle", or — by default — weaving a
 * new item from a named recipe.
 *
 * @param mob         the crafter
 * @param commands    user arguments; in auto-generate mode also receives the produced item(s)
 * @param givenTarget explicit target, or this skill itself for auto-generation
 * @param auto        true when invoked internally rather than typed by a player
 * @param asLevel     level override, or 0 for the mob's own level
 * @return true if the crafting session started (or the item was auto-generated)
 */
public boolean invoke(MOB mob, Vector commands, Environmental givenTarget, boolean auto, int asLevel)
{
	int autoGenerate=0;
	// Internal callers may pass an Integer as the first element to request
	// silent item generation instead of a timed crafting session.
	if((auto)&&(givenTarget==this)&&(commands.size()>0)&&(commands.firstElement() instanceof Integer))
	{
		autoGenerate=((Integer)commands.firstElement()).intValue();
		commands.removeElementAt(0);
		givenTarget=null;
	}
	DVector enhancedTypes=enhancedTypes(mob,commands);
	randomRecipeFix(mob,addRecipes(mob,loadRecipes()),commands,autoGenerate);
	if(commands.size()==0)
	{
		commonTell(mob,"Weave what? Enter \"weave list\" for a list, \"weave refit <item>\" to resize, \"weave scan\", or \"weave mend <item>\".");
		return false;
	}
	// "bundle" delegates to the generic resource-bundling behavior.
	if((!auto)
	&&(commands.size()>0)
	&&(((String)commands.firstElement()).equalsIgnoreCase("bundle")))
	{
		bundling=true;
		if(super.invoke(mob,commands,givenTarget,auto,asLevel))
			return super.bundle(mob,commands);
		return false;
	}
	Vector recipes=addRecipes(mob,loadRecipes());
	String str=(String)commands.elementAt(0);
	bundling=false;
	String startStr=null;
	int duration=4;
	// "list [mask]": print a 3-column table of recipes the mob is high enough level for.
	if(str.equalsIgnoreCase("list"))
	{
		String mask=CMParms.combine(commands,1);
		StringBuffer buf=new StringBuffer("");
		int toggler=1;
		int toggleTop=3;
		for(int r=0;r<toggleTop;r++)
			buf.append(CMStrings.padRight("Item",10)+" "+CMStrings.padRight("Lvl",3)+" "+CMStrings.padRight("Material",10)+" ");
		buf.append("\n\r");
		for(int r=0;r<recipes.size();r++)
		{
			Vector V=(Vector)recipes.elementAt(r);
			if(V.size()>0)
			{
				String item=replacePercent((String)V.elementAt(RCP_FINALNAME),"");
				int level=CMath.s_int((String)V.elementAt(RCP_LEVEL));
				int wood=CMath.s_int((String)V.elementAt(RCP_WOOD));
				wood=adjustWoodRequired(wood,mob);
				if((level<=xlevel(mob))
				&&((mask==null)||(mask.length()==0)||mask.equalsIgnoreCase("all")||CMLib.english().containsString(item,mask)))
				{
					buf.append(CMStrings.padRight(item,10)+" "+CMStrings.padRight(""+level,3)+" "+CMStrings.padRight(""+wood,10)+((toggler!=toggleTop)?" ":"\n\r"));
					if(++toggler>toggleTop) toggler=1;
				}
			}
		}
		if(toggler!=1) buf.append("\n\r");
		commonTell(mob,buf.toString());
		enhanceList(mob);
		return true;
	}
	if(str.equalsIgnoreCase("scan"))
		return publicScan(mob,commands);
	else
	// "mend <item>": repair a damaged weavable item.
	if(str.equalsIgnoreCase("mend"))
	{
		building=null;
		mending=false;
		messedUp=false;
		key=null;
		Vector newCommands=CMParms.parse(CMParms.combine(commands,1));
		building=getTarget(mob,mob.location(),givenTarget,newCommands,Wearable.FILTER_UNWORNONLY);
		if(!canMend(mob,building,false)) return false;
		mending=true;
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;
		startStr="<S-NAME> start(s) mending "+building.name()+".";
		displayText="You are mending "+building.name();
		verb="mending "+building.name();
	}
	else
	// "refit <item>": resize a woven armor piece back to fitting (height 0).
	if(str.equalsIgnoreCase("refit"))
	{
		building=null;
		mending=false;
		key=null;
		refitting=false;
		messedUp=false;
		Vector newCommands=CMParms.parse(CMParms.combine(commands,1));
		building=getTarget(mob,mob.location(),givenTarget,newCommands,Wearable.FILTER_UNWORNONLY);
		if(building==null) return false;
		if((building.material()!=RawMaterial.RESOURCE_COTTON)
		&&(building.material()!=RawMaterial.RESOURCE_SILK)
		&&(building.material()!=RawMaterial.RESOURCE_HEMP)
		&&(building.material()!=RawMaterial.RESOURCE_VINE)
		&&(building.material()!=RawMaterial.RESOURCE_WHEAT)
		&&(building.material()!=RawMaterial.RESOURCE_SEAWEED))
		{
			commonTell(mob,"That's not made of any sort of weavable material. It can't be refitted.");
			return false;
		}
		if(!(building instanceof Armor))
		{
			commonTell(mob,"You don't know how to refit that sort of thing.");
			return false;
		}
		if(building.envStats().height()==0)
		{
			commonTell(mob,building.name()+" is already the right size.");
			return false;
		}
		refitting=true;
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;
		startStr="<S-NAME> start(s) refitting "+building.name()+".";
		displayText="You are refitting "+building.name();
		verb="refitting "+building.name();
	}
	else
	// Default: weave a brand new item from the named recipe.
	{
		building=null;
		mending=false;
		messedUp=false;
		refitting=false;
		aborted=false;
		key=null;
		int amount=-1;
		// A trailing number is an explicit resource amount, not part of the name.
		if((commands.size()>1)&&(CMath.isNumber((String)commands.lastElement())))
		{
			amount=CMath.s_int((String)commands.lastElement());
			commands.removeElementAt(commands.size()-1);
		}
		String recipeName=CMParms.combine(commands,0);
		Vector foundRecipe=null;
		Vector matches=matchingRecipeNames(recipes,recipeName,true);
		// Pick the first matching recipe the mob is high enough level to use.
		for(int r=0;r<matches.size();r++)
		{
			Vector V=(Vector)matches.elementAt(r);
			if(V.size()>0)
			{
				int level=CMath.s_int((String)V.elementAt(RCP_LEVEL));
				if(level<=xlevel(mob))
				{
					foundRecipe=V;
					break;
				}
			}
		}
		if(foundRecipe==null)
		{
			commonTell(mob,"You don't know how to weave a '"+recipeName+"'. Try \"weave list\" for a list.");
			return false;
		}
		int woodRequired=CMath.s_int((String)foundRecipe.elementAt(RCP_WOOD));
		woodRequired=adjustWoodRequired(woodRequired,mob);
		if(amount>woodRequired) woodRequired=amount;
		// The raw materials acceptable for weaving.
		int[] pm={RawMaterial.RESOURCE_COTTON,
			RawMaterial.RESOURCE_SILK,
			RawMaterial.RESOURCE_HEMP,
			RawMaterial.RESOURCE_VINE,
			RawMaterial.RESOURCE_WHEAT,
			RawMaterial.RESOURCE_SEAWEED};
		String misctype=(String)foundRecipe.elementAt(RCP_MISCTYPE);
		String spell=(foundRecipe.size()>RCP_SPELL)?((String)foundRecipe.elementAt(RCP_SPELL)).trim():"";
		bundling=spell.equalsIgnoreCase("BUNDLE")||misctype.equalsIgnoreCase("BUNDLE");
		// Locate (and later consume) the required raw resources in the room.
		int[][] data=fetchFoundResourceData(mob,
			woodRequired,"weavable material",pm,
			0,null,null,
			false,
			autoGenerate,
			enhancedTypes);
		if(data==null) return false;
		woodRequired=data[0][FOUND_AMT];
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;
		int lostValue=autoGenerate>0?0:
			CMLib.materials().destroyResources(mob.location(),woodRequired,data[0][FOUND_CODE],0,null);
		building=CMClass.getItem((String)foundRecipe.elementAt(RCP_CLASSTYPE));
		if(building==null)
		{
			commonTell(mob,"There's no such thing as a "+foundRecipe.elementAt(RCP_CLASSTYPE)+"!!!");
			return false;
		}
		duration=getDuration(CMath.s_int((String)foundRecipe.elementAt(RCP_TICKS)),mob,CMath.s_int((String)foundRecipe.elementAt(RCP_LEVEL)),4);
		// Name the product after the recipe, substituting the actual material used.
		String itemName=replacePercent((String)foundRecipe.elementAt(RCP_FINALNAME),RawMaterial.CODES.NAME(data[0][FOUND_CODE])).toLowerCase();
		if(bundling)
			itemName="a "+woodRequired+"# "+itemName;
		else
		if(itemName.endsWith("s"))
			itemName="some "+itemName;
		else
			itemName=CMLib.english().startWithAorAn(itemName);
		building.setName(itemName);
		startStr="<S-NAME> start(s) weaving "+building.name()+".";
		displayText="You are weaving "+building.name();
		verb="weaving "+building.name();
		building.setDisplayText(itemName+" lies here");
		building.setDescription(itemName+". ");
		building.baseEnvStats().setWeight(woodRequired/2);
		building.setBaseValue(CMath.s_int((String)foundRecipe.elementAt(RCP_VALUE)));
		building.setMaterial(data[0][FOUND_CODE]);
		building.baseEnvStats().setLevel(CMath.s_int((String)foundRecipe.elementAt(RCP_LEVEL)));
		building.setSecretIdentity("This is the work of "+mob.Name()+".");
		int capacity=CMath.s_int((String)foundRecipe.elementAt(RCP_CAPACITY));
		int canContain=CMath.s_int((String)foundRecipe.elementAt(RCP_CONTAINMASK));
		int armordmg=CMath.s_int((String)foundRecipe.elementAt(RCP_ARMORDMG));
		if(bundling)
		{
			// Bundles carry the full value and weight of the consumed resources.
			building.setBaseValue(lostValue);
			building.baseEnvStats().setWeight(woodRequired);
		}
		addSpells(building,spell);
		// Specialize the product by its item type.
		if(building instanceof Weapon)
		{
			((Weapon)building).setWeaponClassification(Weapon.CLASS_FLAILED);
			for(int cl=0;cl<Weapon.CLASS_DESCS.length;cl++)
			{
				if(misctype.equalsIgnoreCase(Weapon.CLASS_DESCS[cl]))
					((Weapon)building).setWeaponClassification(cl);
			}
			building.baseEnvStats().setDamage(armordmg);
			((Weapon)building).setRawProperLocationBitmap(Wearable.WORN_WIELD|Wearable.WORN_HELD);
			((Weapon)building).setRawLogicalAnd((capacity>1));
		}
		key=null;
		if(building instanceof Armor)
		{
			if(capacity>0)
			{
				((Armor)building).setCapacity(capacity+woodRequired);
				((Armor)building).setContainTypes(canContain);
			}
			((Armor)building).baseEnvStats().setArmor(0);
			if(armordmg!=0)
				((Armor)building).baseEnvStats().setArmor(armordmg+(abilityCode()-1));
			setWearLocation(building,misctype,0);
		}
		else
		if(building instanceof Container)
		{
			if(capacity>0)
			{
				((Container)building).setCapacity(capacity+woodRequired);
				((Container)building).setContainTypes(canContain);
			}
			if(misctype.equalsIgnoreCase("LID"))
				((Container)building).setLidsNLocks(true,false,false,false);
			else
			if(misctype.equalsIgnoreCase("LOCK"))
			{
				// Lockable container: generate a matching key with a random key name.
				((Container)building).setLidsNLocks(true,false,true,false);
				((Container)building).setKeyName(Double.toString(Math.random()));
				key=CMClass.getItem("GenKey");
				((Key)key).setKey(((Container)building).keyName());
				key.setName("a key");
				key.setDisplayText("a small key sits here");
				key.setDescription("looks like a key to "+building.name());
				key.recoverEnvStats();
				key.text();
			}
		}
		if(building instanceof Rideable)
		{
			setRideBasis((Rideable)building,misctype);
		}
		building.recoverEnvStats();
		building.text();
		building.recoverEnvStats();
	}
	messedUp=!proficiencyCheck(mob,0,auto);
	// Bundling never fails and takes a single tick.
	if(bundling)
	{
		messedUp=false;
		duration=1;
		verb="bundling "+RawMaterial.CODES.NAME(building.material()).toLowerCase();
		startStr="<S-NAME> start(s) "+verb+".";
		displayText="You are "+verb;
	}
	// Auto-generate mode: hand the product(s) back through the commands vector.
	if(autoGenerate>0)
	{
		if(key!=null) commands.addElement(key);
		commands.addElement(building);
		return true;
	}
	// Announce the start of work and attach the timed crafting effect.
	CMMsg msg=CMClass.getMsg(mob,building,this,CMMsg.MSG_NOISYMOVEMENT,startStr);
	if(mob.location().okMessage(mob,msg))
	{
		mob.location().send(mob,msg);
		building=(Item)msg.target();
		beneficialAffect(mob,mob,asLevel,duration);
		enhanceItem(mob,building,enhancedTypes);
	}
	else
	if(bundling)
	{
		messedUp=false;
		aborted=false;
		unInvoke();
	}
	return true;
}
}
| |
/*
* Copyright 2007-2021 The jdeb developers.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vafer.jdeb;
import org.apache.commons.compress.archivers.ar.ArArchiveEntry;
import org.apache.commons.compress.archivers.ar.ArArchiveOutputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.bouncycastle.bcpg.HashAlgorithmTags;
import org.bouncycastle.crypto.digests.MD5Digest;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.openpgp.PGPSignature;
import org.bouncycastle.openpgp.PGPSignatureGenerator;
import org.bouncycastle.openpgp.operator.bc.BcPGPContentSignerBuilder;
import org.bouncycastle.util.encoders.Hex;
import org.vafer.jdeb.changes.ChangeSet;
import org.vafer.jdeb.changes.ChangesProvider;
import org.vafer.jdeb.changes.TextfileChangesProvider;
import org.vafer.jdeb.debian.BinaryPackageControlFile;
import org.vafer.jdeb.debian.ChangesFile;
import org.vafer.jdeb.signing.PGPSigner;
import org.vafer.jdeb.utils.PGPSignatureOutputStream;
import org.vafer.jdeb.utils.Utils;
import org.vafer.jdeb.utils.VariableResolver;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.Security;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
/**
* A generic class for creating Debian archives. Even supports signed changes
* files.
*/
public class DebMaker {
/** Unix mode for generated ar entries: 33188 == 0100644 (regular file, rw-r--r--) */
private static final int DEFAULT_MODE = 33188;
/** A console to output log message with */
private Console console;
/** The Debian package produced */
private File deb;
/** The directory containing the control files to build the package */
private File control;
/** The name of the package. Default value if not specified in the control file */
private String packageName;
/** The section of the package. Default value if not specified in the control file */
private String section = "java";
/** The dependencies of the package. */
private String depends;
/** The description of the package. Default value if not specified in the control file */
private String description;
/** The homepage of the application. Default value if not specified in the control file */
private String homepage;
/** The file containing the PGP keys */
private File keyring;
/** The key to use in the keyring */
private String key;
/** The passphrase for the key to sign the changes file */
private String passphrase;
/** The file to read the changes from */
private File changesIn;
/** The file where to write the changes to */
private File changesOut;
/** The file where to write the changes of the changes input to */
private File changesSave;
/** The compression method used for the data file (none, gzip, bzip2 or xz) */
private String compression = "gzip";
/** Whether to sign the package that is created */
private boolean signPackage;
/** Whether to sign the changes file that is created */
private boolean signChanges;
/** Defines which utility is used to verify the signed package */
private String signMethod;
/** Defines the role to sign with */
private String signRole;
/** Defines the longFileMode of the tar file that is built */
private String tarLongFileMode;
/** Defines the bigNumberMode of the tar file that is built */
private String tarBigNumberMode;
/** Fixed entry timestamp (ms since epoch) for reproducible builds; null = current time behavior */
private Long outputTimestampMs;
/** Resolves variables when expanding the control files */
private VariableResolver variableResolver;
/** Opening token for variable substitution in control files */
private String openReplaceToken;
/** Closing token for variable substitution in control files */
private String closeReplaceToken;
/** Producers for the payload (data.tar) entries */
private final Collection<DataProducer> dataProducers = new ArrayList<>();
/** Producers for entries to be listed in the conffiles control file */
private final Collection<DataProducer> conffilesProducers = new ArrayList<>();
/** Digest algorithm name used for signing (see getDigestCode) */
private String digest = "SHA256";
/**
 * Creates a package builder.
 *
 * @param console           sink for log output
 * @param dataProducers     producers of the payload entries (may be null for none)
 * @param conffileProducers producers of the conffile entries (may be null for none)
 */
public DebMaker(Console console, Collection<DataProducer> dataProducers, Collection<DataProducer> conffileProducers) {
    this.console = console;
    // Copy into our own collections; callers pass null to mean "none".
    if (dataProducers != null) {
        this.dataProducers.addAll(dataProducers);
    }
    if (conffileProducers != null) {
        this.conffilesProducers.addAll(conffileProducers);
    }
    // PGP signing relies on the BouncyCastle JCE provider being registered.
    Security.addProvider(new BouncyCastleProvider());
}
// --- Bean-style configuration setters used by the Ant task / Maven plugin ---

/** Sets the .deb file to produce. */
public void setDeb(File deb) {
    this.deb = deb;
}
/** Sets the directory containing the Debian control files. */
public void setControl(File control) {
    this.control = control;
}
/** Fallback package name if the control file has no Package field. */
public void setPackage(String packageName) {
    this.packageName = packageName;
}
/** Fallback section if the control file has no Section field. */
public void setSection(String section) {
    this.section = section;
}
/** Fallback Depends value if the control file has no Depends field. */
public void setDepends(String depends) {
    this.depends = depends;
}
/** Fallback description if the control file has no Description field. */
public void setDescription(String description) {
    this.description = description;
}
/** Fallback homepage if the control file has no Homepage field. */
public void setHomepage(String homepage) {
    this.homepage = homepage;
}
/** Sets the changes input file to read from. */
public void setChangesIn(File changes) {
    this.changesIn = changes;
}
/** Sets the .changes output file to write. */
public void setChangesOut(File changes) {
    this.changesOut = changes;
}
/** Sets the file where the remaining changes input is saved back. */
public void setChangesSave(File changes) {
    this.changesSave = changes;
}
/** Whether to sign the package (implies signing the changes file too). */
public void setSignPackage(boolean signPackage) {
    this.signPackage = signPackage;
}
/** Whether to sign the generated changes file. */
public void setSignChanges(boolean signChanges) {
    this.signChanges = signChanges;
}
/** Verification utility the signature targets ("dpkg-sig" or debsig-verify default). */
public void setSignMethod(String signMethod) {
    this.signMethod = signMethod;
}
/** Signing role; defaults to "origin" when unset. */
public void setSignRole(String signRole) {
    this.signRole = signRole;
}
/** Sets the PGP keyring file. */
public void setKeyring(File keyring) {
    this.keyring = keyring;
}
/** Sets the key id to use from the keyring. */
public void setKey(String key) {
    this.key = key;
}
/** Sets the passphrase for the signing key. */
public void setPassphrase(String passphrase) {
    this.passphrase = passphrase;
}
/** Sets the data compression method: none, gzip, bzip2 or xz. */
public void setCompression(String compression) {
    this.compression = compression;
}
/** Sets the variable resolver used when expanding control files. */
public void setResolver(VariableResolver variableResolver) {
    this.variableResolver = variableResolver;
}
/**
 * A destination is writable when it does not exist yet, or when it is a
 * plain file that we are allowed to overwrite.
 */
private boolean isWritableFile(File file) {
    if (!file.exists()) {
        return true;
    }
    return file.isFile() && file.canWrite();
}
/** @return the digest algorithm name used for signing (e.g. "SHA256") */
public String getDigest() {
    return digest;
}
/** Sets the digest algorithm name; must be accepted by {@code getDigestCode}. */
public void setDigest(String digest) {
    this.digest = digest;
}
/** Sets the tar longFileMode for the data archive. */
public void setTarLongFileMode(String tarLongFileMode) {
    this.tarLongFileMode = tarLongFileMode;
}
/** Sets the tar bigNumberMode for the data archive. */
public void setTarBigNumberMode(String tarBigNumberMode) {
    this.tarBigNumberMode = tarBigNumberMode;
}
/** Fixed archive-entry timestamp in ms since epoch, for reproducible builds. */
public void setOutputTimestampMs(Long outputTimestampMs) {
    this.outputTimestampMs = outputTimestampMs;
}
/**
 * Validates the configured input parameters, failing fast with a
 * descriptive {@link PackagingException} on the first problem found.
 */
public void validate() throws PackagingException {
    if (control == null || !control.isDirectory()) {
        throw new PackagingException("The 'control' attribute doesn't point to a directory. " + control);
    }
    if (changesIn == null) {
        // Output settings only make sense together with a changes input.
        if (changesOut != null || changesSave != null) {
            throw new PackagingException("The 'changesOut' or 'changesSave' settings may only be used when there is a 'changesIn' specified.");
        }
    } else {
        if (changesIn.exists() && (!changesIn.isFile() || !changesIn.canRead())) {
            throw new PackagingException("The 'changesIn' setting needs to point to a readable file. " + changesIn + " was not found/readable.");
        }
        if (changesOut != null && !isWritableFile(changesOut)) {
            throw new PackagingException("Cannot write the output for 'changesOut' to " + changesOut);
        }
        if (changesSave != null && !isWritableFile(changesSave)) {
            throw new PackagingException("Cannot write the output for 'changesSave' to " + changesSave);
        }
    }
    if (Compression.toEnum(compression) == null) {
        throw new PackagingException("The compression method '" + compression + "' is not supported (expected 'none', 'gzip', 'bzip2' or 'xz')");
    }
    if (deb == null) {
        throw new PackagingException("You need to specify where the deb file is supposed to be created.");
    }
    // Rejects unsupported digest names before any real work starts.
    getDigestCode(digest);
}
/**
 * Maps a digest algorithm name to the corresponding OpenPGP hash
 * algorithm tag.
 *
 * @param digestName algorithm name such as "SHA256", "SHA1" or "MD5"
 * @return the matching {@code HashAlgorithmTags} constant
 * @throws PackagingException if the name is null or not a supported algorithm
 */
static int getDigestCode(String digestName) throws PackagingException {
    // Guard first: switch-on-String would NPE, while the original if-chain
    // treated null as "unknown".
    if (digestName == null) {
        throw new PackagingException("unknown hash algorithm tag in digestName: " + digestName);
    }
    switch (digestName) {
        case "SHA1":      return HashAlgorithmTags.SHA1;
        case "MD2":       return HashAlgorithmTags.MD2;
        case "MD5":       return HashAlgorithmTags.MD5;
        case "RIPEMD160": return HashAlgorithmTags.RIPEMD160;
        case "SHA256":    return HashAlgorithmTags.SHA256;
        case "SHA384":    return HashAlgorithmTags.SHA384;
        case "SHA512":    return HashAlgorithmTags.SHA512;
        case "SHA224":    return HashAlgorithmTags.SHA224;
        default:
            throw new PackagingException("unknown hash algorithm tag in digestName: " + digestName);
    }
}
/**
 * Builds the Debian package (optionally PGP-signed) and then the
 * accompanying .changes file.
 *
 * @throws PackagingException if creating the package or the changes file fails
 */
public void makeDeb() throws PackagingException {
    BinaryPackageControlFile packageControlFile;
    try {
        console.info("Creating debian package: " + deb);
        // If we should sign the package
        if (signPackage) {
            // Missing inputs are only warned about here; the PGPSigner
            // constructor below is what actually fails on bad input.
            if (keyring == null || !keyring.exists()) {
                console.warn("Signing requested, but no keyring supplied");
            }
            if (key == null) {
                console.warn("Signing requested, but no key supplied");
            }
            if (passphrase == null) {
                console.warn("Signing requested, but no passphrase supplied");
            }
            PGPSigner signer;
            try (FileInputStream keyRingInput = new FileInputStream(keyring)) {
                signer = new PGPSigner(keyRingInput, key, passphrase, getDigestCode(digest));
            }
            PGPSignatureGenerator signatureGenerator = new PGPSignatureGenerator(new BcPGPContentSignerBuilder(signer.getSecretKey().getPublicKey().getAlgorithm(), getDigestCode(digest)));
            signatureGenerator.init(PGPSignature.BINARY_DOCUMENT, signer.getPrivateKey());
            packageControlFile = createSignedDeb(Compression.toEnum(compression), signatureGenerator, signer);
        } else {
            packageControlFile = createDeb(Compression.toEnum(compression));
        }
    } catch (Exception e) {
        throw new PackagingException("Failed to create debian package " + deb, e);
    }
    makeChangesFiles(packageControlFile);
}
/**
 * Writes the .changes file for the produced package, optionally
 * clear-signing it, and saves the remaining changes input back if
 * requested.  Streams are now managed with try-with-resources; the old
 * code leaked the keyring input stream and the changesSave output stream.
 *
 * @param packageControlFile control data of the package just built
 * @throws PackagingException if the changes file cannot be created or saved
 */
private void makeChangesFiles(final BinaryPackageControlFile packageControlFile) throws PackagingException {
    if (changesOut == null) {
        // Default: next to the .deb, with a .changes extension.
        changesOut = new File(deb.getParentFile(), FilenameUtils.getBaseName(deb.getName()) + ".changes");
    }
    console.info("Creating changes file: " + changesOut);
    ChangesProvider changesProvider;
    try (FileOutputStream out = new FileOutputStream(changesOut)) {
        if (changesIn != null && changesIn.exists()) {
            // read the changes from a textfile provider
            // NOTE(review): assumes TextfileChangesProvider takes ownership of
            // this stream and closes it — confirm, otherwise it leaks too.
            changesProvider = new TextfileChangesProvider(new FileInputStream(changesIn), packageControlFile);
        } else {
            // create an empty changelog
            changesProvider = new ChangesProvider() {
                public ChangeSet[] getChangesSets() {
                    return new ChangeSet[] {
                        new ChangeSet(packageControlFile.get("Package"),
                            packageControlFile.get("Version"),
                            new Date(),
                            packageControlFile.get("Distribution"),
                            packageControlFile.get("Urgency"),
                            packageControlFile.get("Maintainer"),
                            new String[0])
                    };
                }
            };
        }
        ChangesFileBuilder builder = new ChangesFileBuilder();
        ChangesFile changesFile = builder.createChanges(packageControlFile, deb, changesProvider);
        //(signChanges || signPackage) - for backward compatibility. signPackage is signing both changes and deb.
        if ((signChanges || signPackage) && keyring != null && key != null && passphrase != null) {
            console.info("Signing the changes file with the key " + key);
            try (FileInputStream keyRingInput = new FileInputStream(keyring)) {
                PGPSigner signer = new PGPSigner(keyRingInput, key, passphrase, getDigestCode(digest));
                signer.clearSign(changesFile.toString(), out);
            }
        } else {
            out.write(changesFile.toString().getBytes(StandardCharsets.UTF_8));
        }
        out.flush();
    } catch (Exception e) {
        throw new PackagingException("Failed to create the Debian changes file " + changesOut, e);
    }
    // Only a textfile provider carries state worth saving back.
    if (changesSave == null || !(changesProvider instanceof TextfileChangesProvider)) {
        return;
    }
    console.info("Saving changes to file: " + changesSave);
    try (FileOutputStream saveOut = new FileOutputStream(changesSave)) {
        ((TextfileChangesProvider) changesProvider).save(saveOut);
    } catch (Exception e) {
        throw new PackagingException("Failed to save debian changes file " + changesSave, e);
    }
}
/**
 * Collects the absolute paths of all entries produced by the given
 * conffile producers.  Collection is best-effort: a failing producer is
 * logged (previously it was swallowed silently) and the paths gathered so
 * far are returned.
 *
 * @param producers the conffile producers, may be null or empty
 * @return absolute conffile paths, never null
 */
private List<String> populateConffiles(Collection<DataProducer> producers) {
    final List<String> result = new ArrayList<>();
    if (producers == null || producers.isEmpty()) {
        return result;
    }
    final DataConsumer receiver = new DataConsumer() {
        public void onEachFile(InputStream input, TarArchiveEntry entry) {
            String tempConffileItem = entry.getName();
            // Make sure the conffile path is absolute
            if (tempConffileItem.startsWith(".")) {
                tempConffileItem = tempConffileItem.substring(1);
            }
            if (!tempConffileItem.startsWith("/")) {
                tempConffileItem = "/" + tempConffileItem;
            }
            console.info("Adding conffile: " + tempConffileItem);
            result.add(tempConffileItem);
        }
        public void onEachLink(TarArchiveEntry entry) {
        }
        public void onEachDir(TarArchiveEntry tarArchiveEntry) {
        }
    };
    try {
        for (DataProducer data : producers) {
            data.produce(receiver);
        }
    } catch (Exception e) {
        // Best-effort: report the failure instead of hiding it, but keep
        // whatever conffiles were collected before it occurred.
        console.warn("Failed to collect conffiles: " + e);
    }
    return result;
}
/**
 * Create the debian archive from the provided control files and data
 * producers, without signing.
 *
 * @param compression the compression method used for the data file
 * @return the binary package control file of the created archive
 * @throws PackagingException on any packaging failure
 */
public BinaryPackageControlFile createDeb(Compression compression) throws PackagingException {
    return createSignedDeb(compression, null, null);
}
/**
 * Create the debian archive from the provided control files and data
 * producers, optionally embedding a PGP signature member.
 *
 * @param compression the compression method used for the data file (gzip, bzip2 or anything else for no compression)
 * @param signatureGenerator the signature generator, or null for an unsigned package
 * @param signer used in the dpkg-sig mode to clear-sign the file manifest
 *
 * @return the binary package control file describing the created package
 * @throws PackagingException on any packaging failure
 */
public BinaryPackageControlFile createSignedDeb(Compression compression, final PGPSignatureGenerator signatureGenerator, PGPSigner signer ) throws PackagingException {
    File tempData = null;
    File tempControl = null;
    try {
        tempData = File.createTempFile("deb", "data");
        tempControl = File.createTempFile("deb", "control");
        console.debug("Building data");
        DataBuilder dataBuilder = new DataBuilder(console, outputTimestampMs);
        StringBuilder md5s = new StringBuilder();
        TarOptions options = new TarOptions()
            .compression(compression)
            .longFileMode(tarLongFileMode)
            .bigNumberMode(tarBigNumberMode);
        BigInteger size = dataBuilder.buildData(dataProducers, tempData, md5s, options);
        console.info("Building conffiles");
        List<String> tempConffiles = populateConffiles(conffilesProducers);
        console.debug("Building control");
        ControlBuilder controlBuilder = new ControlBuilder(console, variableResolver, openReplaceToken, closeReplaceToken, outputTimestampMs);
        BinaryPackageControlFile packageControlFile = controlBuilder.createPackageControlFile(new File(control, "control"), size);
        // Fall back to the values configured on this builder for every field
        // the control file itself does not define.
        if (packageControlFile.get("Package") == null) {
            packageControlFile.set("Package", packageName);
        }
        if (packageControlFile.get("Section") == null) {
            packageControlFile.set("Section", section);
        }
        if (packageControlFile.get("Description") == null) {
            packageControlFile.set("Description", description);
        }
        if (packageControlFile.get("Depends") == null) {
            // Only add a depends entry to the control file if the field in this object has actually been set
            if (depends != null && depends.length() > 0) {
                packageControlFile.set("Depends", depends);
            }
        }
        if (packageControlFile.get("Homepage") == null) {
            packageControlFile.set("Homepage", homepage);
        }
        controlBuilder.buildControl(packageControlFile, control.listFiles(), tempConffiles , md5s, tempControl);
        if (!packageControlFile.isValid()) {
            throw new PackagingException("Control file fields are invalid " + packageControlFile.invalidFields() +
                ". The following fields are mandatory: " + packageControlFile.getMandatoryFields() +
                ". Please check your pom.xml/build.xml and your control file.");
        }
        deb.getParentFile().mkdirs();
        // try-with-resources: the ar stream previously leaked (and the .deb
        // stayed half-written and open) when any step below threw.
        try (ArArchiveOutputStream ar = new ArArchiveOutputStream(new FileOutputStream(deb))) {
            String binaryName = "debian-binary";
            String binaryContent = "2.0\n";
            String controlName = "control.tar.gz";
            String dataName = "data.tar" + compression.getExtension();
            addTo(ar, binaryName, binaryContent);
            addTo(ar, controlName, tempControl);
            addTo(ar, dataName, tempData);
            if (signatureGenerator != null) {
                console.info("Signing package with key " + key);
                if(signRole == null) {
                    signRole = "origin";
                }
                // Use debsig-verify as default
                if (!"dpkg-sig".equals(signMethod)) {
                    // Sign file to verify with debsig-verify
                    PGPSignatureOutputStream sigStream = new PGPSignatureOutputStream(signatureGenerator);
                    addTo(sigStream, binaryContent);
                    addTo(sigStream, tempControl);
                    addTo(sigStream, tempData);
                    addTo(ar, "_gpg" + signRole, sigStream.generateASCIISignature());
                } else {
                    // Sign file to verify with dpkg-sig --verify
                    final String outputStr =
                        "Version: 4\n" +
                        "Signer: \n" +
                        "Date: " + new SimpleDateFormat("EEE MMM dd HH:mm:ss yyyy", Locale.ENGLISH).format(new Date()) + "\n" +
                        "Role: " + signRole +"\n" +
                        "Files: \n" +
                        addFile(binaryName, binaryContent) +
                        addFile(controlName, tempControl) +
                        addFile(dataName, tempData);
                    ByteArrayOutputStream message = new ByteArrayOutputStream();
                    signer.clearSign(outputStr, message);
                    addTo(ar, "_gpg" + signRole, message.toString());
                }
            }
        }
        return packageControlFile;
    } catch (Exception e) {
        throw new PackagingException("Could not create deb package", e);
    } finally {
        // Best-effort cleanup of the temporary tarballs.
        if (tempData != null) {
            if (!tempData.delete()) {
                console.warn("Could not delete the temporary file " + tempData);
            }
        }
        if (tempControl != null) {
            if (!tempControl.delete()) {
                console.warn("Could not delete the temporary file " + tempControl);
            }
        }
    }
}
/** Builds a dpkg-sig "Files:" manifest line for in-memory content. */
private String addFile(String name, String input){
    return addLine(md5Hash(input), sha1Hash(input), input.length(), name);
}
/** Builds a dpkg-sig "Files:" manifest line for a file on disk. */
private String addFile(String name, File input){
    return addLine(md5Hash(input), sha1Hash(input), input.length(), name);
}
/** Formats one manifest line: tab, md5, sha1, size and name, newline-terminated. */
private String addLine(String md5, String sha1, long size, String name){
    return String.format("\t%s %s %d %s\n", md5, sha1, size, name);
}
/**
 * Hex-encoded MD5 of a string.  Hashes the UTF-8 bytes so the result no
 * longer depends on the platform default charset.
 */
private String md5Hash(String input){
    return md5Hash(input.getBytes(StandardCharsets.UTF_8));
}
/**
 * Hex-encoded MD5 of a file's contents.
 *
 * @return the digest, or null if the file cannot be read (best-effort,
 *         matching the historical behavior; now logged instead of dumped
 *         to stderr)
 */
private String md5Hash(File input){
    try {
        return md5Hash(FileUtils.readFileToByteArray(input));
    } catch (IOException e) {
        console.warn("Could not read " + input + " for md5 hashing: " + e);
    }
    return null;
}
/** Hex-encoded MD5 of a byte buffer (BouncyCastle lightweight API). */
private String md5Hash(byte[] input){
    final MD5Digest md5 = new MD5Digest();
    // Feed the whole buffer, then finalize into a correctly-sized output array.
    md5.update(input, 0, input.length);
    final byte[] hashed = new byte[md5.getDigestSize()];
    md5.doFinal(hashed, 0);
    return new String(Hex.encode(hashed));
}
/**
 * Hex-encoded digest of a string (algorithm taken from the configured
 * {@code digest} field — see sha1Hash(byte[])).  Hashes the UTF-8 bytes so
 * the result no longer depends on the platform default charset.
 */
private String sha1Hash(String input){
    return sha1Hash(input.getBytes(StandardCharsets.UTF_8));
}
/**
 * Hex-encoded digest of a file's contents.
 *
 * @return the digest, or null if the file cannot be read (best-effort,
 *         matching the historical behavior; now logged instead of dumped
 *         to stderr)
 */
private String sha1Hash(File input){
    try {
        return sha1Hash(FileUtils.readFileToByteArray(input));
    } catch (IOException e) {
        console.warn("Could not read " + input + " for hashing: " + e);
    }
    return null;
}
// NOTE(review): despite the name, this hashes with the CONFIGURED algorithm
// (the `digest` field, default "SHA256"), not SHA-1.  The dpkg-sig manifest
// built in createSignedDeb() labels this value as a sha1 column — confirm
// whether the configurable algorithm here is intentional.
private String sha1Hash(byte[] input){
    try
    {
        // `digest` is the algorithm name (e.g. "SHA256"); the local `digest`
        // below shadows it with the raw hash bytes.
        MessageDigest hash = MessageDigest.getInstance(digest);
        hash.update(input);
        //proceed ....
        byte[] digest = hash.digest();
        return new String(Hex.encode(digest));
    }
    catch (NoSuchAlgorithmException e)
    {
        System.err.println("No such algorithm");
        e.printStackTrace();
    }
    // Best-effort: null signals that hashing failed.
    return null;
}
/**
 * Adds a string member to the ar archive.  Encodes as UTF-8 explicitly so
 * the archive bytes do not depend on the platform default charset.
 */
private void addTo(ArArchiveOutputStream pOutput, String pName, String pContent) throws IOException {
    final byte[] content = pContent.getBytes(StandardCharsets.UTF_8);
    ArArchiveEntry archiveEntry = createArArchiveEntry(pName, content.length);
    pOutput.putArchiveEntry(archiveEntry);
    pOutput.write(content);
    pOutput.closeArchiveEntry();
}
/** Adds a file member to the ar archive, streaming its contents. */
private void addTo(ArArchiveOutputStream pOutput, String pName, File pContent) throws IOException {
    ArArchiveEntry archiveEntry = createArArchiveEntry(pName, pContent.length());
    pOutput.putArchiveEntry(archiveEntry);
    try (InputStream input = new FileInputStream(pContent)) {
        Utils.copy(input, pOutput);
    }
    pOutput.closeArchiveEntry();
}
/** Feeds string content (as UTF-8 bytes) into the signature stream. */
private void addTo(final PGPSignatureOutputStream pOutput, final String pContent) throws IOException {
    final byte[] content = pContent.getBytes(StandardCharsets.UTF_8);
    pOutput.write(content);
}
/** Feeds a file's contents into the signature stream. */
private void addTo(final PGPSignatureOutputStream pOutput, final File pContent) throws IOException {
    try (InputStream input = new FileInputStream(pContent)) {
        Utils.copy(input, pOutput);
    }
}
/** Opening delimiter for variable substitution in the control files. */
public void setOpenReplaceToken(String openReplaceToken) {
    this.openReplaceToken = openReplaceToken;
}
/** Closing delimiter for variable substitution in the control files. */
public void setCloseReplaceToken(String closeReplaceToken) {
    this.closeReplaceToken = closeReplaceToken;
}
/**
 * Creates an ar entry, pinning uid/gid 0, mode 0100644 and a fixed
 * mtime when a reproducible-build timestamp is configured.
 */
private ArArchiveEntry createArArchiveEntry(String pName, long contentLength) {
    if (outputTimestampMs == null) {
        return new ArArchiveEntry(pName, contentLength);
    }
    final long mtimeSeconds = outputTimestampMs / 1000L;
    return new ArArchiveEntry(pName, contentLength, 0, 0, DEFAULT_MODE, mtimeSeconds);
}
}
| |
/*
* Copyright (C) 2009 Dimagi Inc., UNICEF
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
/**
*
*/
package org.rapidsms.java.test;
import java.util.Vector;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import junit.framework.TestCase;
/**
* @author dmyung
* @created Jan 16, 2009
*/
public class JavaParsingTests extends TestCase {
// SMS-style messages the keyword parsers are expected to accept.
String[] goodMessages = { "bednets nyc 100 30 80", "bednets lax 1,500, 750 1000",
        "nutrition 12345 20 100 0.6 5 y n", "nutrition 55446 20kg 100cm 60% 5cm yes no",
        "nutrition 55446 21 kg 100cm 60% 5cm yes no", "nutrition 55446 2 kg 100 m 3/4 5cm yes no" };
// Ambiguous / partially malformed messages: parseable prefix, dubious payload.
String[] problematic = { "bednets nyc 100 30", "bednets lax 1,500", "bednets", "bednets 100 200 300",
        "bednets 100 200 300 400", "bednets BOS 100 200 300 12321", "bednets 100 BOS 100 200 120",
        "bednets 100 BOS 100 200 120 51231", "bednetsBOS 100 200 120 51231",
        "nutrition asdfsadf 12345 20 100 0.6 5 y n", "nutrition 55446 20kg 100cm 60% 5cm yes no",
        "nutrition 55446 20kg 60% 5cm yes no", "nutrition 55446 21 100cm 60% 5cm yes no",
        "nutrition 55446 2 kg 100 m 5cm yes no"
};
// Garbage messages that should not match any parser.
String[] badMessages = { "bednwafasd asd2 12983 klasd12 ajds", "nutritiasndfqwer asd2 12983 klasd12 ajds",
        "aklsjdfl234", "bedntestgklajsdljwler", "nutritionaslkdfklwer" };
// Token-class regexes, populated by setUp() in BOOL..WORD index order.
Vector<String> regexes;
/*
 * (non-Javadoc)
 *
 * @see junit.framework.TestCase#tearDown()
 */
@Override
protected void tearDown() throws Exception {
    super.tearDown();
    // Empty the shared regex list so state never leaks between tests.
    regexes.clear();
}
private final int BOOL = 0;
private final int HEIGHT = 1;
private final int RATIO = 2;
private final int LENGTH = 3;
private final int WEIGHT = 4;
private final int NUMBER = 5;
private final int WORD = 6;
/*
* (non-Javadoc)
*
* @see junit.framework.TestCase#setUp()
*/
@Override
protected void setUp() throws Exception {
// TODO Auto-generated method stub
super.setUp();
regexes = new Vector<String>();
regexes.add("^(t|f|true|false|y|no|yes|n|n0)(\\s|$)"); // bool
regexes.add("^(\\d+)(\\s*(cm|mm|m|meter|meters))($|\\s)"); // height
regexes.add("^(\\d+\\:\\d+)|(\\d+\\/\\d+)|(\\d+\\s*%)|(\\d+\\s*pct)");// ratio
regexes.add("^(\\d+)(\\s*(cm|m))($|\\s)"); // length
regexes.add("^(\\d+)(\\s*(kg|kilo|kilos))($|\\s)"); // weight
regexes.add("^(\\d+)($|\\s)"); // number
regexes.add("^([A-Za-z]+)($|\\s)");// word
// System.out.println("Setting up regexes " + regexes.size());
}
public void testLenghts() {
// System.out.println("Begin test: Lengths");
//
//
// doParse("goodMessages", goodMessages, regexes.get(LENGTH));
// doParse("problematic", problematic, regexes.get(LENGTH));
// doParse("badMessages", badMessages, regexes.get(LENGTH));
}
public void testNumeric() {
// System.out
// .println("\n\n#########################Begin test: Numeric########################################");
//
// doParse("goodMessages", goodMessages, regexes.get(this.NUMBER));
// doParse("problematic", problematic, regexes.get(NUMBER));
// doParse("badMessages", badMessages, regexes.get(NUMBER));
}
public void testSubtractive() {
System.out
.println("######################################Begin Subtractive test##################################");
String[] messages = goodMessages;
for (int i = 0; i < messages.length; i++) {
String messageToParse = messages[i];
String parsedMessage = "";
System.out.println("\n\n\n\t*** Begin subtractive parse: " + messageToParse);
while (true) {
String justParsedMessage = subtractiveParse(messageToParse, regexes.get(this.WEIGHT));
if (justParsedMessage.equals(messageToParse)) {
break;
} else {
messageToParse = justParsedMessage + "";
System.out.println("\tIterate subtractive parse: " + messageToParse);
}
}
System.out.println("\t*** End subtractive parse for message: " + i);
}
}
public String subtractiveParse(String fragmentToParse, String regex) {
Pattern mPattern;
mPattern = Pattern.compile(regex);
Matcher matcher = mPattern.matcher(fragmentToParse);
System.out.println("\tMatching regex: " + regex);
boolean isMatched = matcher.find();
int maxSize = -1;
int maxGroup = -1;
int minstart = 0;
int maxend = 0;
if (isMatched) {
// while (isMatched) {
for (int q = 0; q < matcher.groupCount(); q++) {
int currsize = matcher.group(q).length(); // matcher.end(q)
// -
// matcher.start(q);
if (currsize > maxSize) {
maxGroup = q;
maxSize = currsize;
}
}
// isMatched = matcher.find();
// }
minstart = matcher.start(maxGroup);
maxend = matcher.end(maxGroup);
System.out.println(matcher.group(maxGroup));
} else {
// System.out.println("\t\tNo match!");
}
if (minstart < maxend) {
System.out.println("\t\tFragmenting: " + minstart + "-" + maxend);
String parsed = fragmentToParse.substring(minstart, maxend);
if (parsed.charAt(parsed.length() - 1) == ' ') {
parsed.trim();
System.out.println("trim!");
maxend = maxend - 1;
}
System.out.println("\t\tMatched fragment: ##" + parsed + "##");
if (minstart == 0) {
minstart = 1;
}
String ret = fragmentToParse.subSequence(0, minstart) + fragmentToParse.substring(maxend);
return ret;
} else {
return fragmentToParse;
}
}
private void doParse(String testname, String[] messages, String regex) {
System.out.println("############### Do Parse: " + testname + " ###################\n\n");
for (int i = 0; i < messages.length; i++) {
System.out.println("Matching message: " + messages[i]);
Pattern mPattern;
mPattern = Pattern.compile(regex);
Matcher matcher = mPattern.matcher(messages[i]);
System.out.println("\tMatching regex: " + regex);
boolean isMatched = matcher.find();
if (isMatched) {
while (isMatched) {
System.out.println("\t\t**********************");
for (int q = 0; q < matcher.groupCount(); q++) {
System.out.println("\t\tRegion:" + matcher.start(q) + "-" + matcher.end(q));
System.out.println("\t\t\tgroup: " + q + " ##" + matcher.group(q) + "##");
}
System.out.println("\t\t**********************");
isMatched = matcher.find();
}
} else {
System.out.println("\t\tNo match!");
}
}
}
}
| |
/*
Derby - Class org.apache.derby.impl.store.raw.xact.TransactionTableEntry
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.store.raw.xact;
import org.apache.derby.shared.common.sanity.SanityManager;
import org.apache.derby.iapi.services.io.Formatable;
import org.apache.derby.iapi.services.io.FormatIdUtil;
import org.apache.derby.iapi.services.io.StoredFormatIds;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;
import org.apache.derby.iapi.sql.conn.StatementContext;
import org.apache.derby.iapi.store.access.TransactionInfo;
import org.apache.derby.iapi.store.raw.GlobalTransactionId;
import org.apache.derby.iapi.store.raw.xact.TransactionId;
import org.apache.derby.iapi.store.raw.log.LogInstant;
import java.io.ObjectOutput;
import java.io.ObjectInput;
import java.io.IOException;
/**
Transaction table entry is used to store all relevant information of a
transaction into the transaction table for the use of checkpoint, recovery,
Transaction management during Quiesce state, and for dumping transaction table. Only works
with the following classes: TransactionTable, XactFactory, Xact
<BR>
During run time, whenever any transaction is started, it is put into the
transaction table. Whenever any transaction is closed, it is removed from
the transaction table.
*/
public class TransactionTableEntry implements Formatable, TransactionInfo, Cloneable
{
    // These fields are only populated if this TTE has been read in from the
    // log during recovery. Otherwise they are fetched on demand from the
    // live transaction object myxact.
    private TransactionId       xid;
    private GlobalTransactionId gid;
    private LogInstant          firstLog;
    private LogInstant          lastLog;

    // This field is always present - it is 0 for a read only transaction.
    // It is a copy of the status from the Xact (the copy is necessary as
    // during recovery the Xact is shared by all transaction table entries
    // during redo and undo).
    private int transactionStatus;

    // Fields useful for returning transaction information if read from the
    // transaction log during recovery.
    private transient Xact    myxact;
    private transient boolean update;
    private transient boolean recovery;      // is this a transaction read
                                             // from the log during recovery?
    private transient boolean needExclusion; // in a quiesce state, this
                                             // transaction needs to be
                                             // barred from activation
                                             // during quiesce state
    private boolean           isClone;       // am I a clone made for the
                                             // TransactionVTI?

    // Lazily resolved in getlcc(); only valid on clones.
    private transient LanguageConnectionContext lcc;

    /* package */
    // entry attribute bit masks
    static final int UPDATE   = 0x1;
    static final int RECOVERY = 0x2;
    static final int EXCLUDE  = 0x4;

    /**
     * Create a transaction table entry for the given transaction.
     *
     * @param xact      the live transaction object
     * @param tid       the transaction id (never null)
     * @param status    transaction status copied from the Xact
     * @param attribute bitwise OR of UPDATE, RECOVERY, EXCLUDE
     */
    TransactionTableEntry(
    Xact            xact,
    TransactionId   tid,
    int             status,
    int             attribute)
    {
        myxact            = xact;
        xid               = tid;
        transactionStatus = status;

        update        = (attribute & UPDATE)   != 0;
        needExclusion = (attribute & EXCLUDE)  != 0;
        recovery      = (attribute & RECOVERY) != 0;

        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(tid != null, "tid is null");

            if (update && xact.getFirstLogInstant() == null)
            {
                SanityManager.THROWASSERT(
                    "update transaction has firstLog = null");
            }
        }

        // Normally we don't need to remember the gid, firstLog and lastLog
        // because myxact will have the same information. However, in
        // recovery there is only one transaction taking on different
        // identities as the log is replayed, so each transaction table entry
        // must keep its own identity and not rely on myxact. These recovery
        // transactions are materialized in the transaction table via a
        // readObject in the checkpoint log record, or are added by
        // addUpdateTransaction when the log is scanned.
        if (recovery)
        {
            if (SanityManager.DEBUG)
            {
                SanityManager.ASSERT(update, "recovery but not update");

                if (tid != xact.getId())
                {
                    SanityManager.THROWASSERT(
                        "adding a update transaction during recovery " +
                        " but the tids doesn't match" +
                        tid + " " + xact.getId());
                }
            }

            // make a copy of everything
            gid      = xact.getGlobalId();
            firstLog = xact.getFirstLogInstant();
            lastLog  = xact.getLastLogInstant();
        }
    }

    /*
     * Formatable methods
     */

    /** No-arg constructor required by Formatable (readExternal fills it in). */
    public TransactionTableEntry()
    { }

    public void writeExternal(ObjectOutput out) throws IOException
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(!recovery, "writing out a recovery transaction");
            SanityManager.ASSERT(update, "writing out read only transaction");
            SanityManager.ASSERT(myxact.getFirstLogInstant() != null,
                                 "myxact.getFirstLogInstant is null");
            SanityManager.ASSERT(!isClone, "cannot write out a clone");
        }

        // Why is it safe to access first and last log instant in myxact while
        // this is happening? Because we only write out update transactions
        // during run time. When a read only transaction becomes an update
        // transaction, or when an update transaction commits, the beginXact
        // and endXact log record's doMe method will try to change the
        // transaction table entry's state to update and non-update
        // respectively. That change needs to go thru the transaction table
        // which is mutually exclusive to writing out the transaction table.
        // Since we are only looking at update transactions and it is "stuck"
        // in update state in the middle of a TransactionTable.writeExternal
        // call, all the fields we access in myxact are stable (actually the
        // xid is also stable but we already have it).
        out.writeObject(xid);
        out.writeObject(myxact.getGlobalId());
        out.writeObject(myxact.getFirstLogInstant());
        out.writeObject(myxact.getLastLogInstant());
        out.writeInt(transactionStatus);
    }

    public void readExternal(ObjectInput in)
        throws ClassNotFoundException, IOException
    {
        // The only time a transaction table entry is written out is to the
        // log, so this must be read in during recovery.
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(!isClone, "cannot write out a clone");

        xid      = (TransactionId)in.readObject();
        gid      = (GlobalTransactionId)in.readObject();
        firstLog = (LogInstant)in.readObject();
        lastLog  = (LogInstant)in.readObject();
        transactionStatus = in.readInt();

        // An entry read from the log is by definition an update transaction
        // being recovered, and must be excluded during quiesce.
        update        = true;
        recovery      = true;
        needExclusion = true;

        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(xid != null, "read in transaction table entry with null id");
            // Fixed assert message: this fires when firstLog IS null.
            SanityManager.ASSERT(firstLog != null, "read in transaction table entry with null firstLog");
        }
    }

    // Set my transaction instance variable for a recovery transaction.
    // RESOLVE (mikem) - prepared transactions now call setXact() when they
    // are not in recovery, so no recovery assertion is possible here.
    void setXact(Xact xact)
    {
        myxact = xact;
    }

    /**
     Return my format identifier.
     */
    public int getTypeFormatId() {
        return StoredFormatIds.RAW_STORE_TRANSACTION_TABLE_ENTRY;
    }

    public String toString()
    {
        if (SanityManager.DEBUG)
        {
            // StringBuilder: local, single-threaded use - no need for the
            // synchronized StringBuffer.
            StringBuilder str = new StringBuilder(500).
                append("Xid=").append(getXid()).
                append(" gid=").append(getGid()).
                append(" firstLog=").append(getFirstLog()).
                append(" lastLog=").append(getLastLog()).
                append(" transactionStatus=").append(transactionStatus).
                append(" myxact=").append(myxact).
                append(" update=").append(update).
                append(" recovery=").append(recovery).
                append(" prepare=").append(isPrepared()).
                append(" needExclusion=").append(needExclusion).
                append("\n");
            return str.toString();
        }
        else
            return null;
    }

    /**
     * Refresh the update attribute of this entry. Only the UPDATE bit of
     * {@code attribute} is consulted here; the {@code status} parameter is
     * unused by this method but kept for signature compatibility.
     */
    void updateTransactionStatus(Xact xact, int status, int attribute)
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(myxact == xact,
                                 "update transaction status for wrong xact");
            SanityManager.ASSERT(!isClone, "cannot change a clone");
        }

        this.update = (attribute & UPDATE) != 0;
    }

    /** Downgrade this entry to a read-only transaction. */
    void removeUpdateTransaction()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(!isClone, "cannot change a clone");

        this.update = false;
        transactionStatus = 0;
    }

    /** Clear the recovery state once recovery no longer owns this entry. */
    void unsetRecoveryStatus()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(!isClone, "cannot change a clone");

        // RESOLVE (mikem) - this is kind of ugly. move to a better place?
        firstLog = null;
        this.recovery = false;
    }

    /** Mark this transaction as prepared (two-phase commit). */
    void prepareTransaction()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(!isClone, "cannot change a clone");

        transactionStatus |= Xact.END_PREPARED;
    }

    /**************************************************************************
     * get instance variables
     **************************************************************************
     */
    TransactionId getXid()
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(xid != null, "TTE with null xid");
            SanityManager.ASSERT(!isClone, "cannot call method with a clone");
        }

        return xid;
    }

    public final GlobalTransactionId getGid()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(!isClone, "cannot call method with a clone");

        // Prefer the snapshot taken during recovery, else ask the live xact.
        if (gid != null)
            return gid;

        if (myxact != null)
            return myxact.getGlobalId();

        return null;
    }

    LogInstant getFirstLog()
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(!isClone, "cannot call method with a clone");

            if (recovery)
            {
                SanityManager.ASSERT(
                    firstLog != null,
                    "a recovery transaction with a null firstLog");
            }
            else
            {
                SanityManager.ASSERT(
                    firstLog == null,
                    "a normal transaction with a non-null firstLog" +
                    "myxact.getFirstLogInstant() = " + myxact.getFirstLogInstant());
            }
        }

        if (firstLog != null)
            return firstLog;

        if (myxact != null)
            return myxact.getFirstLogInstant();

        return null;
    }

    LogInstant getLastLog()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(!isClone, "cannot call method with a clone");

        if (lastLog != null)
            return lastLog;

        if (myxact != null)
            return myxact.getLastLogInstant();

        return null;
    }

    public final Xact getXact()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(!isClone, "cannot call method with a clone");

        return myxact;
    }

    int getTransactionStatus()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(!isClone, "cannot call method with a clone");

        return transactionStatus;
    }

    boolean isUpdate()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(!isClone, "cannot call method with a clone");

        return update;
    }

    boolean isRecovery()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(!isClone, "cannot call method with a clone");

        return recovery;
    }

    boolean isPrepared()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(!isClone, "cannot call method with a clone");

        return((transactionStatus & Xact.END_PREPARED) != 0);
    }

    public boolean needExclusion()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(!isClone, "cannot call method with a clone");

        return needExclusion;
    }

    /**
     Methods of TransactionInfo - only called on clones (see clone()).
     */
    public String getTransactionIdString()
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(
                !recovery, "trying to display recovery transaction");
            SanityManager.ASSERT(myxact != null, "my xact is null");
            SanityManager.ASSERT(isClone, "Should only call method on a clone");
        }

        TransactionId t = myxact.getIdNoCheck();
        return (t == null) ? "CLOSED" : t.toString();
    }

    public String getGlobalTransactionIdString()
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(
                !recovery, "trying to display recovery transaction");
            SanityManager.ASSERT(myxact != null, "my xact is null");
            SanityManager.ASSERT(isClone, "Should only call method on a clone");
        }

        // Renamed local (was "gid") to stop shadowing the field of the same
        // name; note this intentionally asks the live xact, not the field.
        GlobalTransactionId globalId = myxact.getGlobalId();
        return (globalId == null) ? null : globalId.toString();
    }

    public String getUsernameString()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(isClone, "Should only call method on a clone");

        getlcc();
        return (lcc == null) ? null : lcc.getSessionUserId();
    }

    public String getTransactionTypeString()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(isClone, "Should only call method on a clone");

        if (myxact == null)
            return null;
        else if (myxact.getTransName() != null)
            return myxact.getTransName();
        else
            return myxact.getContextId();
    }

    public String getTransactionStatusString()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(isClone, "Should only call method on a clone");

        return (myxact == null) ? null : myxact.getState();
    }

    public String getStatementTextString()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(isClone, "Should only call method on a clone");

        getlcc();
        if (lcc != null)
        {
            StatementContext sc = lcc.getStatementContext();
            if (sc != null)
                return sc.getStatementText() ;
        }
        return null;
    }

    public String getFirstLogInstantString()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(isClone, "Should only call method on a clone");

        LogInstant logInstant =
            (myxact == null) ? null : myxact.getFirstLogInstant();

        return (logInstant == null) ? null : logInstant.toString();
    }

    // Lazily resolve and cache the language connection context of myxact.
    private void getlcc()
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(isClone, "Should only call method on a clone");

        if (lcc == null && myxact != null && myxact.xc != null)
        {
            XactContext xc = myxact.xc;
            lcc = (LanguageConnectionContext)
                xc.getContextManager().getContext(
                    LanguageConnectionContext.CONTEXT_ID);
        }
    }

    /**
     Cloneable - clones are handed to the TransactionVTI; most accessors
     assert on isClone to keep the two roles apart.
     */
    protected Object clone()
    {
        try
        {
            Object c = super.clone();
            ((TransactionTableEntry)c).isClone = true;

            return c;
        }
        catch (CloneNotSupportedException e)
        {
            // this should not happen, we are cloneable
            if (SanityManager.DEBUG)
            {
                SanityManager.THROWASSERT(
                    "TransactionTableEntry cloneable but throws " +
                    "CloneNotSupportedException", e);
            }
            return null;
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.Request;
import com.amazonaws.services.ec2.model.transform.ModifyInstanceCreditSpecificationRequestMarshaller;
/**
*
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ModifyInstanceCreditSpecificationRequest extends AmazonWebServiceRequest implements Serializable, Cloneable,
        DryRunSupportedRequest<ModifyInstanceCreditSpecificationRequest> {

    /**
     * <p>
     * A unique, case-sensitive token that you provide to ensure idempotency of your modification request. For more
     * information, see <a
     * href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring
     * Idempotency</a>.
     * </p>
     */
    private String clientToken;

    /**
     * <p>
     * Information about the credit option for CPU usage.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<InstanceCreditSpecificationRequest> instanceCreditSpecifications;

    /**
     * Sets the idempotency token for this modification request.
     *
     * @param clientToken
     *        A unique, case-sensitive token that you provide to ensure idempotency of your modification request.
     */
    public void setClientToken(String clientToken) {
        this.clientToken = clientToken;
    }

    /**
     * Returns the idempotency token for this modification request.
     *
     * @return the unique, case-sensitive idempotency token, or {@code null} if none was set.
     */
    public String getClientToken() {
        return this.clientToken;
    }

    /**
     * Fluent variant of {@link #setClientToken(String)}.
     *
     * @param clientToken
     *        A unique, case-sensitive token that you provide to ensure idempotency of your modification request.
     * @return this request, for call chaining.
     */
    public ModifyInstanceCreditSpecificationRequest withClientToken(String clientToken) {
        setClientToken(clientToken);
        return this;
    }

    /**
     * Returns the credit-option entries, lazily creating an empty backing list on first access (never {@code null}).
     *
     * @return information about the credit option for CPU usage.
     */
    public java.util.List<InstanceCreditSpecificationRequest> getInstanceCreditSpecifications() {
        if (instanceCreditSpecifications == null) {
            instanceCreditSpecifications = new com.amazonaws.internal.SdkInternalList<InstanceCreditSpecificationRequest>();
        }
        return instanceCreditSpecifications;
    }

    /**
     * Replaces the credit-option entries with a copy of {@code instanceCreditSpecifications} ({@code null} clears).
     *
     * @param instanceCreditSpecifications
     *        information about the credit option for CPU usage.
     */
    public void setInstanceCreditSpecifications(java.util.Collection<InstanceCreditSpecificationRequest> instanceCreditSpecifications) {
        this.instanceCreditSpecifications = (instanceCreditSpecifications == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<InstanceCreditSpecificationRequest>(instanceCreditSpecifications);
    }

    /**
     * Appends the given entries to the existing list (if any). Use
     * {@link #setInstanceCreditSpecifications(java.util.Collection)} or
     * {@link #withInstanceCreditSpecifications(java.util.Collection)} to replace the existing values instead.
     *
     * @param instanceCreditSpecifications
     *        information about the credit option for CPU usage.
     * @return this request, for call chaining.
     */
    public ModifyInstanceCreditSpecificationRequest withInstanceCreditSpecifications(InstanceCreditSpecificationRequest... instanceCreditSpecifications) {
        if (this.instanceCreditSpecifications == null) {
            setInstanceCreditSpecifications(new com.amazonaws.internal.SdkInternalList<InstanceCreditSpecificationRequest>(instanceCreditSpecifications.length));
        }
        java.util.Collections.addAll(this.instanceCreditSpecifications, instanceCreditSpecifications);
        return this;
    }

    /**
     * Replaces the credit-option entries (fluent variant of
     * {@link #setInstanceCreditSpecifications(java.util.Collection)}).
     *
     * @param instanceCreditSpecifications
     *        information about the credit option for CPU usage.
     * @return this request, for call chaining.
     */
    public ModifyInstanceCreditSpecificationRequest withInstanceCreditSpecifications(
            java.util.Collection<InstanceCreditSpecificationRequest> instanceCreditSpecifications) {
        setInstanceCreditSpecifications(instanceCreditSpecifications);
        return this;
    }

    /**
     * This method is intended for internal use only. Returns the marshaled request configured with additional
     * parameters to enable operation dry-run.
     */
    @Override
    public Request<ModifyInstanceCreditSpecificationRequest> getDryRunRequest() {
        Request<ModifyInstanceCreditSpecificationRequest> marshaled = new ModifyInstanceCreditSpecificationRequestMarshaller().marshall(this);
        marshaled.addParameter("DryRun", Boolean.toString(true));
        return marshaled;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getClientToken() != null) {
            sb.append("ClientToken: ").append(getClientToken()).append(",");
        }
        if (getInstanceCreditSpecifications() != null) {
            sb.append("InstanceCreditSpecifications: ").append(getInstanceCreditSpecifications());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ModifyInstanceCreditSpecificationRequest)) {
            return false;
        }
        ModifyInstanceCreditSpecificationRequest that = (ModifyInstanceCreditSpecificationRequest) obj;
        return java.util.Objects.equals(getClientToken(), that.getClientToken())
                && java.util.Objects.equals(getInstanceCreditSpecifications(), that.getInstanceCreditSpecifications());
    }

    @Override
    public int hashCode() {
        // Objects.hash folds with the same 31-based formula as the previous
        // hand-written implementation, so hash values are unchanged.
        return java.util.Objects.hash(getClientToken(), getInstanceCreditSpecifications());
    }

    @Override
    public ModifyInstanceCreditSpecificationRequest clone() {
        return (ModifyInstanceCreditSpecificationRequest) super.clone();
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.editor.impl;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.impl.ApplicationInfoImpl;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.PrioritizedDocumentListener;
import com.intellij.openapi.editor.ex.RangeMarkerEx;
import com.intellij.openapi.util.Getter;
import com.intellij.util.DocumentEventUtil;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
class RangeMarkerTree<T extends RangeMarkerEx> extends IntervalTreeImpl<T> implements PrioritizedDocumentListener {
  RangeMarkerTree(@NotNull Document document) {
    // Subscribe to document changes so the tree can shift/invalidate markers.
    document.addDocumentListener(this);
  }
  // A tree not attached to any document; no change events are delivered.
  RangeMarkerTree() {
  }
  /** Listener ordering among document listeners (lower runs earlier). */
  @Override
  public int getPriority() {
    return EditorDocumentPriorities.RANGE_MARKER; // Need to make sure we invalidate all the stuff before someone (like LineStatusTracker) starts to modify highlights.
  }
@Override
protected int compareEqualStartIntervals(@NotNull IntervalTreeImpl.IntervalNode<T> i1, @NotNull IntervalTreeImpl.IntervalNode<T> i2) {
RMNode<?> o1 = (RMNode<?>)i1;
RMNode<?> o2 = (RMNode<?>)i2;
boolean greedyL1 = o1.isGreedyToLeft();
boolean greedyL2 = o2.isGreedyToLeft();
if (greedyL1 != greedyL2) return greedyL1 ? -1 : 1;
int o1Length = o1.intervalEnd() - o1.intervalStart();
int o2Length = o2.intervalEnd() - o2.intervalStart();
int d = o1Length - o2Length;
if (d != 0) return d;
boolean greedyR1 = o1.isGreedyToRight();
boolean greedyR2 = o2.isGreedyToRight();
if (greedyR1 != greedyR2) return greedyR1 ? -1 : 1;
boolean stickyR1 = o1.isStickingToRight();
boolean stickyR2 = o2.isStickingToRight();
if (stickyR1 != stickyR2) return stickyR1 ? -1 : 1;
return 0;
}
  /** Detaches this tree from {@code document}; no further change events arrive. */
  void dispose(@NotNull Document document) {
    document.removeDocumentListener(this);
  }
private static final int DUPLICATE_LIMIT = 30; // assertion: no more than DUPLICATE_LIMIT range markers are allowed to be registered at given (start, end)
  /**
   * Adds a marker to the tree (revalidating it first). In DEBUG unit-test
   * runs, logs a warning when more than DUPLICATE_LIMIT markers are alive at
   * the same (start, end) pair.
   */
  @NotNull
  @Override
  public RMNode<T> addInterval(@NotNull T interval, int start, int end,
                               boolean greedyToLeft, boolean greedyToRight, boolean stickingToRight, int layer) {
    ((RangeMarkerImpl)interval).setValid(true);
    RMNode<T> node = (RMNode<T>)super.addInterval(interval, start, end, greedyToLeft, greedyToRight, stickingToRight, layer);
    if (DEBUG && node.intervals.size() > DUPLICATE_LIMIT && !ApplicationInfoImpl.isInStressTest() && ApplicationManager.getApplication().isUnitTestMode()) {
      // Read lock: errMsg walks the node's key list, which must not change.
      l.readLock().lock();
      try {
        String msg = errMsg(node);
        if (msg != null) {
          LOG.warn(msg);
        }
      }
      finally {
        l.readLock().unlock();
      }
    }
    return node;
  }
private String errMsg(@NotNull RMNode<T> node) {
System.gc();
final AtomicInteger alive = new AtomicInteger();
node.processAliveKeys(t -> {
alive.incrementAndGet();
return true;
});
if (alive.get() > DUPLICATE_LIMIT) {
return "Too many range markers (" + alive + ") registered for interval "+node;
}
return null;
}
  /** Factory override: this tree stores {@link RMNode}s. */
  @NotNull
  @Override
  protected RMNode<T> createNewNode(@NotNull T key, int start, int end,
                                    boolean greedyToLeft, boolean greedyToRight, boolean stickingToRight, int layer) {
    return new RMNode<>(this, key, start, end, greedyToLeft, greedyToRight, stickingToRight);
  }
  /** The node is cached on the marker itself, so no tree search is needed. */
  @Override
  protected RMNode<T> lookupNode(@NotNull T key) {
    //noinspection unchecked
    return (RMNode<T>)((RangeMarkerImpl)key).myNode;
  }
  /** Stores the node back-reference on the marker (counterpart of lookupNode). */
  @Override
  protected void setNode(@NotNull T key, IntervalNode<T> intervalNode) {
    ((RangeMarkerImpl)key).myNode = (RMNode<RangeMarkerEx>)intervalNode;
  }
  /**
   * Tree node for one interval; greedy/sticky behavior is packed into the
   * node's flag byte inherited from the superclass.
   */
  static class RMNode<T extends RangeMarkerEx> extends IntervalTreeImpl.IntervalNode<T> {
    // Flag bits continue the sequence started by VALID_FLAG in the superclass.
    private static final byte EXPAND_TO_LEFT_FLAG = VALID_FLAG<<1;
    private static final byte EXPAND_TO_RIGHT_FLAG = EXPAND_TO_LEFT_FLAG<<1;
    static final byte STICK_TO_RIGHT_FLAG = EXPAND_TO_RIGHT_FLAG<<1;

    RMNode(@NotNull RangeMarkerTree<T> rangeMarkerTree,
           @NotNull T key,
           int start,
           int end,
           boolean greedyToLeft,
           boolean greedyToRight,
           boolean stickingToRight) {
      super(rangeMarkerTree, key, start, end);
      setFlag(EXPAND_TO_LEFT_FLAG, greedyToLeft);
      setFlag(EXPAND_TO_RIGHT_FLAG, greedyToRight);
      setFlag(STICK_TO_RIGHT_FLAG, stickingToRight);
    }

    boolean isGreedyToLeft() {
      return isFlagSet(EXPAND_TO_LEFT_FLAG);
    }

    boolean isGreedyToRight() {
      return isFlagSet(EXPAND_TO_RIGHT_FLAG);
    }

    boolean isStickingToRight() {
      return isFlagSet(STICK_TO_RIGHT_FLAG);
    }

    // Hook for subclasses; invoked when the node is invalidated during an
    // update (see updateAffectedNodes).
    void onRemoved() {}

    @Override
    public String toString() {
      // "[" / "]" denote greedy (inclusive) ends, "(" / ")" non-greedy.
      return (isGreedyToLeft() ? "[" : "(") + intervalStart() + "," + intervalEnd() + (isGreedyToRight() ? "]" : ")");
    }
  }
@Override
public void documentChanged(@NotNull DocumentEvent e) {
try {
l.writeLock().lock();
if (size() != 0) {
updateMarkersOnChange(e);
if (DocumentEventUtil.isMoveInsertion(e)) {
reTargetMarkersOnChange(e);
}
IntervalNode<T> root = getRoot();
assert root == null || root.maxEnd + root.delta <= e.getDocument().getTextLength() : "Root: "+root+"; root.maxEnd="+root.maxEnd+"; root.delta="+root.delta+"; e.getDocument().getTextLength()="+e.getDocument().getTextLength()+"; event: "+e;
}
}
finally {
l.writeLock().unlock();
}
}
/**
 * Collects the nodes whose intervals intersect the changed region [start, start+oldLength],
 * shifting untouched subtrees wholesale via their delta fields, then rebuilds the
 * affected nodes' positions in updateAffectedNodes.
 */
private void updateMarkersOnChange(@NotNull DocumentEvent e) {
  checkMax(true);
  incModCount();
  List<IntervalNode<T>> affected = new SmartList<>();
  final int start = e.getOffset();
  final int oldLength = e.getOldLength();
  final int newLength = e.getNewLength();
  collectAffectedMarkersAndShiftSubtrees(getRoot(), start, start + oldLength, newLength - oldLength, affected);
  checkMax(false);
  if (!affected.isEmpty()) {
    // reTargetShift == 0 means a plain edit (markers recompute via documentChanged).
    updateAffectedNodes(e, 0, affected);
  }
}
/**
 * Removes every affected node from the tree, shifts it (by reTargetShift for move events,
 * or lets markers recompute themselves for plain edits), and re-inserts survivors.
 * The two phases are deliberately separate: all removals complete before any
 * marker callback or re-insertion runs, so the tree is never seen half-updated.
 */
private void updateAffectedNodes(@NotNull DocumentEvent e, int reTargetShift,
                                 @NotNull List<IntervalNode<T>> affected) {
  // reverse direction to visit leaves first - it's cheaper to compute maxEndOf for them first
  for (int i = affected.size() - 1; i >= 0; i--) {
    IntervalNode<T> node = affected.get(i);
    // assumption: interval.getEndOffset() will never be accessed during remove()
    int startOffset = node.intervalStart();
    int endOffset = node.intervalEnd();
    removeNode(node);
    checkMax(false);
    node.setParent(null);
    node.setLeft(null);
    node.setRight(null);
    node.setValid(true);
    if (reTargetShift == 0) {
      // we can do it because all the deltas up from the root to this node were cleared in the collectAffectedMarkersAndShiftSubtrees
      node.clearDelta();
      assert node.intervalStart() == startOffset;
      assert node.intervalEnd() == endOffset;
    }
    else {
      node.changeDelta(reTargetShift);
      pushDelta(node);
    }
  }
  checkMax(true);
  // Phase 2: let each surviving marker update its range, then re-insert it.
  for (IntervalNode<T> node : affected) {
    RangeMarkerImpl marker = getNodeMarker(node);
    if (marker == null) continue; // node remains removed from the tree
    if (reTargetShift == 0) {
      marker.documentChanged(e);
    }
    else {
      marker.onReTarget(e);
    }
    if (marker.isValid()) {
      findOrInsertWithIntervals(node);
    }
    else {
      // Marker invalidated itself during the update; keep the node out of the tree.
      node.setValid(false);
      ((RMNode<?>)node).onRemoved();
    }
  }
  checkMax(true);
}
/**
 * Returns the first still-valid marker stored in the node, or null if none remain.
 * Iterates backwards because invalid/garbage-collected entries are removed in place.
 */
@Nullable
private static <T extends RangeMarkerEx> RangeMarkerImpl getNodeMarker(@NotNull IntervalNode<T> node) {
  List<Getter<T>> keys = node.intervals;
  for (int i = keys.size() - 1; i >= 0; i--) {
    Getter<T> key = keys.get(i);
    RangeMarkerImpl marker = (RangeMarkerImpl)key.get();
    if (marker != null) {
      if (marker.isValid()) return marker;
      // marker can become invalid on its own, e.g. FoldRegion
      node.removeIntervalInternal(i);
    }
  }
  return null;
}
// Re-inserts the node; if an equal-range node already exists in the tree,
// folds this node's intervals into it instead of keeping a duplicate.
private void findOrInsertWithIntervals(IntervalNode<T> node) {
  IntervalNode<T> existing = findOrInsert(node);
  if (existing == node) {
    return; // inserted as a fresh node, nothing to merge
  }
  // Two ranges became one: move our intervals onto the pre-existing node.
  existing.addIntervalsFrom(node);
}
// returns true if all deltas involved are still 0
/**
 * Walks the tree collecting nodes whose intervals intersect [start, end] into
 * {@code affected} (marking them invalid), while subtrees entirely to the right of
 * the change are shifted lazily via delta fields instead of being visited node-by-node.
 * Dead nodes (all markers garbage collected) are collected regardless of position.
 */
boolean collectAffectedMarkersAndShiftSubtrees(@Nullable IntervalNode<T> root,
                                               int start, int end, int lengthDelta,
                                               @NotNull List<? super IntervalNode<T>> affected) {
  if (root == null) return true;
  boolean norm = pushDelta(root);
  int maxEnd = root.maxEnd;
  assert root.isValid();
  boolean hasAliveKeys = root.hasAliveKey(false);
  if (!hasAliveKeys) {
    // marker was garbage collected
    affected.add(root);
  }
  if (start > maxEnd) {
    // no need to bother
  }
  else if (end < root.intervalStart()) {
    // shift entire subtree
    int newD = root.changeDelta(lengthDelta);
    norm &= newD == 0;
    IntervalNode<T> left = root.getLeft();
    if (left != null) {
      // Compensate: the left child may still intersect the change, so undo the shift there.
      int newL = left.changeDelta(-lengthDelta);
      norm &= newL == 0;
    }
    norm &= pushDelta(root);
    norm &= collectAffectedMarkersAndShiftSubtrees(left, start, end, lengthDelta, affected);
    correctMax(root, 0);
  }
  else {
    if (start <= root.intervalEnd()) {
      // unlucky enough so that change affects the interval
      if (hasAliveKeys) affected.add(root); // otherwise we've already added it
      root.setValid(false); //make invisible
    }
    norm &= collectAffectedMarkersAndShiftSubtrees(root.getLeft(), start, end, lengthDelta, affected);
    norm &= collectAffectedMarkersAndShiftSubtrees(root.getRight(), start, end, lengthDelta, affected);
    correctMax(root, 0);
  }
  return norm;
}
// All intervals contained in (e.getMoveOffset(), e.getMoveOffset() + e.getNewLength())
// will be shifted by (e.getOffset() - e.getMoveOffset()).
// That's what happens when you "move" text in document, e.g. ctrl-shift-up/down the selection.
private void reTargetMarkersOnChange(@NotNull DocumentEvent e) {
  checkMax(true);
  List<IntervalNode<T>> affected = new SmartList<>();
  int moveStart = e.getMoveOffset();
  int moveEnd = moveStart + e.getNewLength();
  collectNodesToRetarget(getRoot(), moveStart, moveEnd, affected);
  if (!affected.isEmpty()) {
    // Non-zero shift tells updateAffectedNodes to relocate rather than recompute.
    updateAffectedNodes(e, e.getOffset() - e.getMoveOffset(), affected);
  }
}
/**
 * In-order walk collecting nodes whose intervals lie entirely inside [start, end]
 * (the moved region). Pushes pending deltas down as it descends so interval
 * coordinates are absolute when compared.
 */
private void collectNodesToRetarget(@Nullable IntervalNode<T> root,
                                    int start, int end,
                                    @NotNull List<? super IntervalNode<T>> affected) {
  if (root == null) return;
  pushDelta(root);
  int maxEnd = root.maxEnd;
  assert root.isValid();
  if (start > maxEnd) {
    // no need to bother
    return;
  }
  collectNodesToRetarget(root.getLeft(), start, end, affected);
  if (start <= root.intervalStart() && root.intervalEnd() <= end) {
    affected.add(root);
  }
  if (end < root.intervalStart()) {
    // Everything to the right starts even later; prune the right subtree.
    return;
  }
  collectNodesToRetarget(root.getRight(), start, end, affected);
}
}
| |
/**
* Copyright 2005-2015 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.kubernetes.api;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.fabric8.kubernetes.api.extensions.Templates;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.fabric8.kubernetes.api.model.KubernetesList;
import io.fabric8.kubernetes.api.model.Namespace;
import io.fabric8.kubernetes.api.model.ObjectMeta;
import io.fabric8.kubernetes.api.model.PersistentVolumeClaim;
import io.fabric8.kubernetes.api.model.Pod;
import io.fabric8.kubernetes.api.model.PodSpec;
import io.fabric8.kubernetes.api.model.PodTemplateSpec;
import io.fabric8.kubernetes.api.model.ReplicationController;
import io.fabric8.kubernetes.api.model.ReplicationControllerSpec;
import io.fabric8.kubernetes.api.model.Secret;
import io.fabric8.kubernetes.api.model.SecretVolumeSource;
import io.fabric8.kubernetes.api.model.Service;
import io.fabric8.kubernetes.api.model.ServiceAccount;
import io.fabric8.kubernetes.api.model.Volume;
import io.fabric8.kubernetes.client.DefaultKubernetesClient;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.dsl.ClientOperation;
import io.fabric8.kubernetes.client.dsl.ClientResource;
import io.fabric8.openshift.api.model.BuildConfig;
import io.fabric8.openshift.api.model.DeploymentConfig;
import io.fabric8.openshift.api.model.ImageStream;
import io.fabric8.openshift.api.model.OAuthClient;
import io.fabric8.openshift.api.model.Route;
import io.fabric8.openshift.api.model.Template;
import io.fabric8.openshift.client.OpenShiftClient;
import io.fabric8.utils.Files;
import io.fabric8.utils.IOHelpers;
import io.fabric8.utils.Objects;
import io.fabric8.utils.Strings;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import static io.fabric8.kubernetes.api.KubernetesHelper.getKind;
import static io.fabric8.kubernetes.api.KubernetesHelper.getName;
import static io.fabric8.kubernetes.api.KubernetesHelper.getObjectId;
import static io.fabric8.kubernetes.api.KubernetesHelper.getOrCreateMetadata;
import static io.fabric8.kubernetes.api.KubernetesHelper.loadJson;
import static io.fabric8.kubernetes.api.KubernetesHelper.summaryText;
import static io.fabric8.kubernetes.api.KubernetesHelper.toItemList;
/**
* Applies DTOs to the current Kubernetes master
*/
public class Controller {
private static final transient Logger LOG = LoggerFactory.getLogger(Controller.class);

private final KubernetesClient kubernetesClient;
// When true, apply failures are presumably rethrown from onApplyError() rather than only logged — confirm in onApplyError.
private boolean throwExceptionOnError = true;
// When false, entities missing from the cluster are never created, only existing ones updated.
private boolean allowCreate = true;
// When true, changed entities are deleted and recreated instead of replaced in place.
private boolean recreateMode;
// When true, only Service entities are processed; everything else is skipped.
private boolean servicesOnlyMode;
// When true, Service entities are skipped entirely.
private boolean ignoreServiceMode;
// OAuthClients are cluster-wide; by default an already-running one is left untouched.
private boolean ignoreRunningOAuthClients = true;
private boolean rollingUpgrade;
// When true, OpenShift templates are expanded client-side instead of being installed on the server.
private boolean processTemplatesLocally;
// If non-null, the JSON of each created/updated entity is also written into this directory.
private File logJsonDir;
private File basedir;
private boolean failOnMissingParameterValue;
private boolean supportOAuthClients;
private boolean deletePodsOnReplicationControllerUpdate = true;
// NOTE(review): field name is misspelled ("namesapce"); renaming would break accessors outside this chunk, so left as-is.
private String namesapce = KubernetesHelper.defaultNamespace();
private boolean requireSecretsCreatedBeforeReplicationControllers;
private boolean rollingUpgradePreserveScale = true;
/** Creates a controller using a client built from the default configuration. */
public Controller() {
    this(new DefaultKubernetesClient());
}

/**
 * Creates a controller that applies entities through the supplied client.
 *
 * @param kubernetesClient client used for all REST operations
 */
public Controller(KubernetesClient kubernetesClient) {
    this.kubernetesClient = kubernetesClient;
}
/**
 * Applies the manifest in the given file, dispatching on its extension.
 * Only "yaml" and "json" (case-insensitive) are supported.
 *
 * @throws IllegalArgumentException for any other extension
 */
public String apply(File file) throws Exception {
    String ext = Files.getFileExtension(file);
    if ("yaml".equalsIgnoreCase(ext)) {
        return applyYaml(file);
    }
    if ("json".equalsIgnoreCase(ext)) {
        return applyJson(file);
    }
    throw new IllegalArgumentException("Unknown file type " + ext);
}
/**
 * Applies the given JSON to the underlying REST APIs in a single operation without needing to explicitly parse first.
 */
public String applyJson(byte[] json) throws Exception {
    // Parse and apply in one step; the source name is fixed for direct REST input.
    apply(loadJson(json), "REST call");
    return "";
}
/**
 * Applies the given JSON to the underlying REST APIs in a single operation without needing to explicitly parse first.
 */
public String applyJson(String json) throws Exception {
    // Parse and apply in one step; the source name is fixed for direct REST input.
    apply(loadJson(json), "REST call");
    return "";
}
/**
 * Applies the given JSON to the underlying REST APIs in a single operation without needing to explicitly parse first.
 */
public String applyJson(File json) throws Exception {
    // Parse and apply in one step; the source name is fixed for direct REST input.
    apply(loadJson(json), "REST call");
    return "";
}
/**
 * Applies the given YAML to the underlying REST APIs in a single operation without needing to explicitly parse first.
 */
public String applyYaml(String yaml) throws Exception {
    // YAML is bridged through JSON so the regular JSON apply path can be reused.
    Object dto = loadJson(convertYamlToJson(yaml));
    apply(dto, "REST call");
    return "";
}
/**
 * Applies the given YAML to the underlying REST APIs in a single operation without needing to explicitly parse first.
 */
public String applyYaml(File yaml) throws Exception {
    // YAML is bridged through JSON so the regular JSON apply path can be reused.
    Object dto = loadJson(convertYamlToJson(yaml));
    apply(dto, "REST call");
    return "";
}
/**
 * Converts a YAML document into its JSON string representation by loading it
 * into a map and serializing the map as a JSON object.
 */
@SuppressWarnings("unchecked")
private String convertYamlToJson(String yamlString) throws FileNotFoundException {
    Map<String, Object> map = (Map<String, Object>) new Yaml().load(yamlString);
    return new JSONObject(map).toString();
}
/**
 * Converts the YAML file into its JSON string representation.
 * <p>
 * Fix: the original leaked the FileInputStream; it is now closed in a finally
 * block. (Plain try-with-resources is not possible without widening the declared
 * FileNotFoundException, since close() throws IOException.)
 */
@SuppressWarnings("unchecked")
private String convertYamlToJson(File yamlFile) throws FileNotFoundException {
    Yaml yaml = new Yaml();
    FileInputStream fstream = new FileInputStream(yamlFile);
    try {
        Map<String, Object> map = (Map<String, Object>) yaml.load(fstream);
        JSONObject jsonObject = new JSONObject(map);
        return jsonObject.toString();
    } finally {
        try {
            fstream.close();
        } catch (IOException e) {
            // Best-effort close; the conversion result is already computed.
            LOG.warn("Failed to close " + yamlFile + ". " + e, e);
        }
    }
}
/**
 * Applies the given JSON to the underlying REST APIs in a single operation without needing to explicitly parse first.
 */
public String applyJson(InputStream json) throws Exception {
    // Parse and apply in one step; the source name is fixed for direct REST input.
    apply(loadJson(json), "REST call");
    return "";
}
/**
 * Applies the given DTOs onto the Kubernetes master
 */
public void apply(Object dto, String sourceName) throws Exception {
    if (dto instanceof List) {
        // Recurse into plain lists, guarding against a list that contains itself.
        for (Object element : (List) dto) {
            if (element == dto) {
                LOG.warn("Found recursive nested object for " + dto + " of class: " + dto.getClass().getName());
            } else {
                apply(element, sourceName);
            }
        }
        return;
    }
    if (dto instanceof KubernetesList) {
        applyList((KubernetesList) dto, sourceName);
    } else if (dto != null) {
        applyEntity(dto, sourceName);
    }
}
/**
 * Applies the given DTOs onto the Kubernetes master
 *
 * Dispatches a single entity to the type-specific apply method; unknown
 * types are rejected rather than silently ignored.
 *
 * @throws IllegalArgumentException if the entity type is not supported
 */
public void applyEntity(Object dto, String sourceName) throws Exception {
    if (dto instanceof Pod) {
        applyPod((Pod) dto, sourceName);
    } else if (dto instanceof ReplicationController) {
        applyReplicationController((ReplicationController) dto, sourceName);
    } else if (dto instanceof Service) {
        applyService((Service) dto, sourceName);
    } else if (dto instanceof Namespace) {
        applyNamespace((Namespace) dto);
    } else if (dto instanceof Route) {
        applyRoute((Route) dto, sourceName);
    } else if (dto instanceof BuildConfig) {
        applyBuildConfig((BuildConfig) dto, sourceName);
    } else if (dto instanceof DeploymentConfig) {
        applyDeploymentConfig((DeploymentConfig) dto, sourceName);
    } else if (dto instanceof ImageStream) {
        applyImageStream((ImageStream) dto, sourceName);
    } else if (dto instanceof OAuthClient) {
        applyOAuthClient((OAuthClient) dto, sourceName);
    } else if (dto instanceof PersistentVolumeClaim) {
        applyResource((PersistentVolumeClaim) dto, sourceName, kubernetesClient.persistentVolumeClaims());
    } else if (dto instanceof Template) {
        applyTemplate((Template) dto, sourceName);
    } else if (dto instanceof ServiceAccount) {
        applyServiceAccount((ServiceAccount) dto, sourceName);
    } else if (dto instanceof Secret) {
        applySecret((Secret) dto, sourceName);
    } else {
        throw new IllegalArgumentException("Unknown entity type " + dto);
    }
}
/**
 * Creates or updates an OAuthClient, but only when {@code supportOAuthClients} is
 * enabled. OAuthClients are shared across namespaces, so by default a client that
 * is already running is left untouched (see {@code ignoreRunningOAuthClients}).
 */
public void applyOAuthClient(OAuthClient entity, String sourceName) {
    OpenShiftClient openShiftClient = kubernetesClient.adapt(OpenShiftClient.class);
    if (supportOAuthClients) {
        String id = getName(entity);
        Objects.notNull(id, "No name for " + entity + " " + sourceName);
        if (isServicesOnlyMode()) {
            LOG.debug("Only processing Services right now so ignoring OAuthClient: " + id);
            return;
        }
        OAuthClient old = openShiftClient.oAuthClients().withName(id).get();
        if (isRunning(old)) {
            if (isIgnoreRunningOAuthClients()) {
                LOG.info("Not updating the OAuthClient which are shared across namespaces as its already running");
                return;
            }
            if (UserConfigurationCompare.configEqual(entity, old)) {
                LOG.info("OAuthClient has not changed so not doing anything");
            } else {
                if (isRecreateMode()) {
                    // Delete-and-recreate rather than in-place replace.
                    openShiftClient.oAuthClients().withName(id).delete();
                    doCreateOAuthClient(entity, sourceName);
                } else {
                    try {
                        Object answer = openShiftClient.oAuthClients().withName(id).replace(entity);
                        LOG.info("Updated OAuthClient result: " + answer);
                    } catch (Exception e) {
                        onApplyError("Failed to update OAuthClient from " + sourceName + ". " + e + ". " + entity, e);
                    }
                }
            }
        } else {
            if (!isAllowCreate()) {
                LOG.warn("Creation disabled so not creating an OAuthClient from " + sourceName + " name " + getName(entity));
            } else {
                doCreateOAuthClient(entity, sourceName);
            }
        }
    }
}
/**
 * Creates the given OAuthClient on the cluster, delegating any failure to
 * {@code onApplyError}.
 * <p>
 * Fix: removed the local {@code result} variable which was assigned but never read.
 */
protected void doCreateOAuthClient(OAuthClient entity, String sourceName) {
    try {
        kubernetesClient.adapt(OpenShiftClient.class).oAuthClients().create(entity);
    } catch (Exception e) {
        onApplyError("Failed to create OAuthClient from " + sourceName + ". " + e + ". " + entity, e);
    }
}
/**
 * Creates/updates the template and processes it returning the processed DTOs
 *
 * Installs the template on the server (unless local processing is configured)
 * and then expands its parameters, returning the expanded object list.
 */
public Object applyTemplate(Template entity, String sourceName) throws Exception {
    installTemplate(entity, sourceName);
    return processTemplate(entity, sourceName);
}
/**
 * Installs the template into the namespace without processing it
 *
 * No-op when templates are processed locally. Note that updates are currently
 * always performed as delete-and-recreate: recreateMode is forced to true below,
 * so the replace() branch is effectively dead until that TODO is resolved.
 */
public void installTemplate(Template entity, String sourceName) {
    OpenShiftClient openShiftClient = kubernetesClient.adapt(OpenShiftClient.class);
    if (!isProcessTemplatesLocally()) {
        String namespace = getNamespace();
        String id = getName(entity);
        Objects.notNull(id, "No name for " + entity + " " + sourceName);
        Template old = openShiftClient.templates().inNamespace(namespace).withName(id).get();
        if (isRunning(old)) {
            if (UserConfigurationCompare.configEqual(entity, old)) {
                LOG.info("Template has not changed so not doing anything");
            } else {
                boolean recreateMode = isRecreateMode();
                // TODO seems you can't update templates right now
                recreateMode = true;
                if (recreateMode) {
                    openShiftClient.templates().inNamespace(namespace).withName(id).delete();
                    doCreateTemplate(entity, namespace, sourceName);
                } else {
                    LOG.info("Updating a Template from " + sourceName);
                    try {
                        Object answer = openShiftClient.templates().inNamespace(namespace).withName(id).replace(entity);
                        LOG.info("Updated Template: " + answer);
                    } catch (Exception e) {
                        onApplyError("Failed to update Template from " + sourceName + ". " + e + ". " + entity, e);
                    }
                }
            }
        } else {
            if (!isAllowCreate()) {
                LOG.warn("Creation disabled so not creating a Template from " + sourceName + " namespace " + namespace + " name " + getName(entity));
            } else {
                doCreateTemplate(entity, namespace, sourceName);
            }
        }
    }
}
/**
 * Creates the given Template in the namespace, logging the generated entity on
 * success and delegating failures to {@code onApplyError}.
 * <p>
 * Fix: the failure message previously read "Failed to Template entity from",
 * dropping the verb; corrected to "Failed to create Template from".
 */
protected void doCreateTemplate(Template entity, String namespace, String sourceName) {
    LOG.info("Creating a Template from " + sourceName + " namespace " + namespace + " name " + getName(entity));
    try {
        Object answer = kubernetesClient.adapt(OpenShiftClient.class).templates().inNamespace(namespace).create(entity);
        logGeneratedEntity("Created Template: ", namespace, entity, answer);
    } catch (Exception e) {
        onApplyError("Failed to create Template from " + sourceName + ". " + e + ". " + entity, e);
    }
}
/**
 * Creates/updates a service account and processes it returning the processed DTOs
 *
 * Follows the standard apply flow: skip in services-only mode; if the account is
 * running and unchanged do nothing; otherwise recreate or replace depending on
 * {@code recreateMode}; if absent, create it (when creation is allowed).
 */
public void applyServiceAccount(ServiceAccount serviceAccount, String sourceName) throws Exception {
    String namespace = getNamespace();
    String id = getName(serviceAccount);
    Objects.notNull(id, "No name for " + serviceAccount + " " + sourceName);
    if (isServicesOnlyMode()) {
        LOG.debug("Only processing Services right now so ignoring ServiceAccount: " + id);
        return;
    }
    ServiceAccount old = kubernetesClient.serviceAccounts().inNamespace(namespace).withName(id).get();
    if (isRunning(old)) {
        if (UserConfigurationCompare.configEqual(serviceAccount, old)) {
            LOG.info("ServiceAccount has not changed so not doing anything");
        } else {
            if (isRecreateMode()) {
                kubernetesClient.serviceAccounts().inNamespace(namespace).withName(id).delete();
                doCreateServiceAccount(serviceAccount, namespace, sourceName);
            } else {
                LOG.info("Updating a ServiceAccount from " + sourceName);
                try {
                    Object answer = kubernetesClient.serviceAccounts().inNamespace(namespace).withName(id).replace(serviceAccount);
                    logGeneratedEntity("Updated ServiceAccount: ", namespace, serviceAccount, answer);
                } catch (Exception e) {
                    onApplyError("Failed to update ServiceAccount from " + sourceName + ". " + e + ". " + serviceAccount, e);
                }
            }
        }
    } else {
        if (!isAllowCreate()) {
            LOG.warn("Creation disabled so not creating a ServiceAccount from " + sourceName + " namespace " + namespace + " name " + getName(serviceAccount));
        } else {
            doCreateServiceAccount(serviceAccount, namespace, sourceName);
        }
    }
}
/**
 * Creates the given ServiceAccount, falling back to the controller's default
 * namespace when the supplied one is blank. Failures go through onApplyError.
 */
protected void doCreateServiceAccount(ServiceAccount serviceAccount, String namespace, String sourceName) {
    LOG.info("Creating a ServiceAccount from " + sourceName + " namespace " + namespace + " name " + getName(serviceAccount));
    try {
        String targetNamespace = Strings.isNotBlank(namespace) ? namespace : getNamespace();
        Object answer = kubernetesClient.serviceAccounts().inNamespace(targetNamespace).create(serviceAccount);
        logGeneratedEntity("Created ServiceAccount: ", namespace, serviceAccount, answer);
    } catch (Exception e) {
        onApplyError("Failed to create ServiceAccount from " + sourceName + ". " + e + ". " + serviceAccount, e);
    }
}
/**
 * Creates or updates a Secret following the standard apply flow: skip in
 * services-only mode; unchanged secrets are left alone; changed ones are either
 * recreated or replaced; missing ones are created when creation is allowed.
 */
public void applySecret(Secret secret, String sourceName) throws Exception {
    String namespace = getNamespace(secret);
    String id = getName(secret);
    Objects.notNull(id, "No name for " + secret + " " + sourceName);
    if (isServicesOnlyMode()) {
        LOG.debug("Only processing Services right now so ignoring Secrets: " + id);
        return;
    }
    Secret old = kubernetesClient.secrets().inNamespace(namespace).withName(id).get();
    if (!isRunning(old)) {
        // Secret does not exist yet.
        if (isAllowCreate()) {
            doCreateSecret(secret, namespace, sourceName);
        } else {
            LOG.warn("Creation disabled so not creating a Secret from " + sourceName + " namespace " + namespace + " name " + getName(secret));
        }
        return;
    }
    if (UserConfigurationCompare.configEqual(secret, old)) {
        LOG.info("Secret has not changed so not doing anything");
        return;
    }
    if (isRecreateMode()) {
        kubernetesClient.secrets().inNamespace(namespace).withName(id).delete();
        doCreateSecret(secret, namespace, sourceName);
        return;
    }
    LOG.info("Updating a Secret from " + sourceName);
    try {
        Object answer = kubernetesClient.secrets().inNamespace(namespace).withName(id).replace(secret);
        logGeneratedEntity("Updated Secret:", namespace, secret, answer);
    } catch (Exception e) {
        onApplyError("Failed to update secret from " + sourceName + ". " + e + ". " + secret, e);
    }
}
/**
 * Creates the given Secret, falling back to the controller's default namespace
 * when the supplied one is blank. Failures go through onApplyError.
 */
protected void doCreateSecret(Secret secret, String namespace, String sourceName) {
    LOG.info("Creating a Secret from " + sourceName + " namespace " + namespace + " name " + getName(secret));
    try {
        String targetNamespace = Strings.isNotBlank(namespace) ? namespace : getNamespace();
        Object answer = kubernetesClient.secrets().inNamespace(targetNamespace).create(secret);
        logGeneratedEntity("Created Secret: ", namespace, secret, answer);
    } catch (Exception e) {
        onApplyError("Failed to create Secret from " + sourceName + ". " + e + ". " + secret, e);
    }
}
/**
 * Logs the applied entity. When {@code logJsonDir} is set, additionally writes the
 * entity's JSON into {@code logJsonDir/<namespace>/<kind>-<name>[-N].json}, picking
 * the first non-existing numeric suffix to avoid overwriting earlier runs, and logs
 * the file location (relative to {@code basedir} when possible) instead of the raw result.
 */
protected void logGeneratedEntity(String message, String namespace, HasMetadata entity, Object result) {
    if (logJsonDir != null) {
        File namespaceDir = new File(logJsonDir, namespace);
        namespaceDir.mkdirs();
        String kind = getKind(entity);
        String name = KubernetesHelper.getName(entity);
        if (Strings.isNotBlank(kind)) {
            name = kind.toLowerCase() + "-" + name;
        }
        if (Strings.isNullOrBlank(name)) {
            LOG.warn("No name for the entity " + entity);
        } else {
            String fileName = name + ".json";
            File file = new File(namespaceDir, fileName);
            if (file.exists()) {
                // Find the first free "-N" suffix so earlier dumps are preserved.
                int idx = 1;
                while (true) {
                    fileName = name + "-" + idx++ + ".json";
                    file = new File(namespaceDir, fileName);
                    if (!file.exists()) {
                        break;
                    }
                }
            }
            String text;
            if (result instanceof String) {
                text = result.toString();
            } else {
                try {
                    text = KubernetesHelper.toJson(result);
                } catch (JsonProcessingException e) {
                    // Fall back to toString() so something still gets written.
                    LOG.warn("Cannot convert " + result + " to JSON: " + e, e);
                    if (result != null) {
                        text = result.toString();
                    } else {
                        text = "null";
                    }
                }
            }
            try {
                IOHelpers.writeFully(file, text);
                Object fileLocation = file;
                if (basedir != null) {
                    String path = Files.getRelativePath(basedir, file);
                    if (path != null) {
                        fileLocation = Strings.stripPrefix(path, "/");
                    }
                }
                LOG.info(message + fileLocation);
            } catch (IOException e) {
                LOG.warn("Failed to write to file " + file + ". " + e, e);
            }
            return;
        }
    }
    LOG.info(message + result);
}
/**
 * Expands the template's parameters client-side and returns the resulting objects,
 * or null if processing fails (the failure is routed through {@code onApplyError}).
 * <p>
 * Cleanup: removed the large commented-out server-side processing block; template
 * processing is done locally ("in the client side") by design.
 */
public Object processTemplate(Template entity, String sourceName) {
    try {
        return Templates.processTemplatesLocally(entity, failOnMissingParameterValue);
    } catch (IOException e) {
        onApplyError("Failed to process template " + sourceName + ". " + e + ". " + entity, e);
        return null;
    }
}
/**
 * Logs a one-line summary per resource: a Template is summarized and recursed
 * into via its objects; anything else is flattened with toItemList and each item
 * summarized by kind and id.
 */
protected void printSummary(Object kubeResource) throws IOException {
    if (kubeResource != null) {
        LOG.debug("  " + kubeResource.getClass().getSimpleName() + " " + kubeResource);
    }
    if (kubeResource instanceof Template) {
        Template template = (Template) kubeResource;
        String id = getName(template);
        LOG.info("  Template " + id + " " + summaryText(template));
        printSummary(template.getObjects());
        return;
    }
    List<HasMetadata> list = toItemList(kubeResource);
    for (HasMetadata object : list) {
        if (object != null) {
            // NOTE(review): object is typed HasMetadata yet compared/tested against List;
            // both branches below look unreachable unless an item implements List — confirm intent.
            if (object == list) {
                LOG.debug("Ignoring recursive list " + list);
                continue;
            } else if (object instanceof List) {
                printSummary(object);
            } else {
                String kind = object.getClass().getSimpleName();
                String id = getObjectId(object);
                LOG.info("    " + kind + " " + id + " " + summaryText(object));
            }
        }
    }
}
/**
 * Creates the Route if it does not already exist. Existing routes are never
 * updated by this method. The namespace falls back to the controller default
 * when the entity does not carry one.
 */
public void applyRoute(Route entity, String sourceName) {
    OpenShiftClient openShiftClient = kubernetesClient.adapt(OpenShiftClient.class);
    String id = getName(entity);
    Objects.notNull(id, "No name for " + entity + " " + sourceName);
    String namespace = KubernetesHelper.getNamespace(entity);
    if (Strings.isNullOrBlank(namespace)) {
        namespace = getNamespace();
    }
    Route route = openShiftClient.routes().inNamespace(namespace).withName(id).get();
    if (route != null) {
        return; // already present; nothing to do
    }
    try {
        LOG.info("Creating Route " + namespace + ":" + id + " " + KubernetesHelper.summaryText(entity));
        openShiftClient.routes().inNamespace(namespace).create(entity);
    } catch (Exception e) {
        onApplyError("Failed to create Route from " + sourceName + ". " + e + ". " + entity, e);
    }
}
/**
 * Creates or updates a BuildConfig, ensuring its namespace exists first.
 * Standard apply flow: unchanged configs are left alone; changed ones are
 * recreated or replaced depending on recreateMode; missing ones are created
 * when creation is allowed.
 */
public void applyBuildConfig(BuildConfig entity, String sourceName) {
    String id = getName(entity);
    OpenShiftClient openShiftClient = kubernetesClient.adapt(OpenShiftClient.class);
    Objects.notNull(id, "No name for " + entity + " " + sourceName);
    String namespace = KubernetesHelper.getNamespace(entity);
    if (Strings.isNullOrBlank(namespace)) {
        namespace = getNamespace();
    }
    applyNamespace(namespace);
    BuildConfig old = openShiftClient.buildConfigs().inNamespace(namespace).withName(id).get();
    if (isRunning(old)) {
        if (UserConfigurationCompare.configEqual(entity, old)) {
            LOG.info("BuildConfig has not changed so not doing anything");
        } else {
            if (isRecreateMode()) {
                LOG.info("Deleting BuildConfig: " + id);
                openShiftClient.buildConfigs().inNamespace(namespace).withName(id).delete();
                doCreateBuildConfig(entity, namespace, sourceName);
            } else {
                LOG.info("Updating BuildConfig from " + sourceName);
                try {
                    // Carry over the server's resourceVersion so the replace targets the
                    // observed revision (presumably for optimistic concurrency — confirm
                    // whether replace() actually honours it here).
                    String resourceVersion = KubernetesHelper.getResourceVersion(old);
                    ObjectMeta metadata = KubernetesHelper.getOrCreateMetadata(entity);
                    metadata.setNamespace(namespace);
                    metadata.setResourceVersion(resourceVersion);
                    Object answer = openShiftClient.buildConfigs().inNamespace(namespace).withName(id).replace(entity);
                    logGeneratedEntity("Updated BuildConfig: ", namespace, entity, answer);
                } catch (Exception e) {
                    onApplyError("Failed to update BuildConfig from " + sourceName + ". " + e + ". " + entity, e);
                }
            }
        }
    } else {
        if (!isAllowCreate()) {
            LOG.warn("Creation disabled so not creating BuildConfig from " + sourceName + " namespace " + namespace + " name " + getName(entity));
        } else {
            doCreateBuildConfig(entity, namespace, sourceName);
        }
    }
}
/**
 * Creates the given BuildConfig in the namespace; failures are routed to onApplyError.
 */
public void doCreateBuildConfig(BuildConfig entity, String namespace, String sourceName) {
    OpenShiftClient openShiftClient = kubernetesClient.adapt(OpenShiftClient.class);
    try {
        openShiftClient.buildConfigs().inNamespace(namespace).create(entity);
    } catch (Exception e) {
        onApplyError("Failed to create BuildConfig from " + sourceName + ". " + e, e);
    }
}
/**
 * Creates the given DeploymentConfig in the controller's namespace.
 * Note: create-only — no update/replace path exists for this type here.
 */
public void applyDeploymentConfig(DeploymentConfig entity, String sourceName) {
    OpenShiftClient openShiftClient = kubernetesClient.adapt(OpenShiftClient.class);
    try {
        openShiftClient.deploymentConfigs().inNamespace(getNamespace()).create(entity);
    } catch (Exception e) {
        onApplyError("Failed to create DeploymentConfig from " + sourceName + ". " + e, e);
    }
}
/**
 * Creates the given ImageStream in the controller's namespace (create-only).
 * <p>
 * Fix: the error message previously said "Failed to create BuildConfig"
 * (copy/paste from doCreateBuildConfig); corrected to name ImageStream.
 */
public void applyImageStream(ImageStream entity, String sourceName) {
    try {
        kubernetesClient.adapt(OpenShiftClient.class).imageStreams().inNamespace(getNamespace()).create(entity);
    } catch (Exception e) {
        onApplyError("Failed to create ImageStream from " + sourceName + ". " + e, e);
    }
}
/**
 * Applies every item of the KubernetesList individually via applyEntity.
 * A null item list is treated as empty.
 */
public void applyList(KubernetesList list, String sourceName) throws Exception {
    List<HasMetadata> entities = list.getItems();
    if (entities == null) {
        return;
    }
    for (HasMetadata entity : entities) {
        applyEntity(entity, sourceName);
    }
}
/**
 * Creates or updates a Service following the standard apply flow; skipped
 * entirely when {@code ignoreServiceMode} is enabled.
 */
public void applyService(Service service, String sourceName) throws Exception {
    String namespace = getNamespace();
    String id = getName(service);
    Objects.notNull(id, "No name for " + service + " " + sourceName);
    if (isIgnoreServiceMode()) {
        LOG.debug("Ignoring Service: " + namespace + ":" + id);
        return;
    }
    Service old = kubernetesClient.services().inNamespace(namespace).withName(id).get();
    if (isRunning(old)) {
        if (UserConfigurationCompare.configEqual(service, old)) {
            LOG.info("Service has not changed so not doing anything");
        } else {
            if (isRecreateMode()) {
                LOG.info("Deleting Service: " + id);
                kubernetesClient.services().inNamespace(namespace).withName(id).delete();
                doCreateService(service, namespace, sourceName);
            } else {
                LOG.info("Updating a Service from " + sourceName);
                try {
                    Object answer = kubernetesClient.services().inNamespace(namespace).withName(id).replace(service);
                    logGeneratedEntity("Updated Service: ", namespace, service, answer);
                } catch (Exception e) {
                    onApplyError("Failed to update Service from " + sourceName + ". " + e + ". " + service, e);
                }
            }
        }
    } else {
        if (!isAllowCreate()) {
            LOG.warn("Creation disabled so not creating a Service from " + sourceName + " namespace " + namespace + " name " + getName(service));
        } else {
            doCreateService(service, namespace, sourceName);
        }
    }
}
/**
 * Generic create-or-update for any resource type exposed through a fabric8
 * ClientOperation (e.g. PersistentVolumeClaims). Mirrors the type-specific
 * apply methods: skip in services-only mode, leave unchanged resources alone,
 * recreate or replace changed ones, create missing ones when allowed.
 *
 * @param resources the typed DSL entry point used for all REST calls
 */
public <T extends HasMetadata,L,D> void applyResource(T resource, String sourceName, ClientOperation<T, L, D, ClientResource<T, D>> resources) throws Exception {
    String namespace = getNamespace();
    String id = getName(resource);
    String kind = getKind(resource);
    Objects.notNull(id, "No name for " + resource + " " + sourceName);
    if (isServicesOnlyMode()) {
        LOG.debug("Ignoring " + kind + ": " + namespace + ":" + id);
        return;
    }
    T old = resources.inNamespace(namespace).withName(id).get();
    if (isRunning(old)) {
        if (UserConfigurationCompare.configEqual(resource, old)) {
            LOG.info(kind + " has not changed so not doing anything");
        } else {
            if (isRecreateMode()) {
                LOG.info("Deleting " + kind + ": " + id);
                resources.inNamespace(namespace).withName(id).delete();
                doCreateResource(resource, namespace, sourceName, resources);
            } else {
                LOG.info("Updating " + kind + " from " + sourceName);
                try {
                    Object answer = resources.inNamespace(namespace).withName(id).replace(resource);
                    logGeneratedEntity("Updated " + kind + ": ", namespace, resource, answer);
                } catch (Exception e) {
                    onApplyError("Failed to update " + kind + " from " + sourceName + ". " + e + ". " + resource, e);
                }
            }
        }
    } else {
        if (!isAllowCreate()) {
            LOG.warn("Creation disabled so not creating a " + kind + " from " + sourceName + " namespace " + namespace + " name " + getName(resource));
        } else {
            doCreateResource(resource, namespace, sourceName, resources);
        }
    }
}
/**
 * Creates the resource through the supplied typed DSL, falling back to the
 * controller's default namespace when the supplied one is blank.
 */
protected <T extends HasMetadata, L, D> void doCreateResource(T resource, String namespace, String sourceName, ClientOperation<T, L, D, ClientResource<T, D>> resources) throws Exception {
    String kind = getKind(resource);
    LOG.info("Creating a " + kind + " from " + sourceName + " namespace " + namespace + " name " + getName(resource));
    try {
        String targetNamespace = Strings.isNotBlank(namespace) ? namespace : getNamespace();
        Object answer = resources.inNamespace(targetNamespace).create(resource);
        logGeneratedEntity("Created " + kind + ": ", namespace, resource, answer);
    } catch (Exception e) {
        onApplyError("Failed to create " + kind + " from " + sourceName + ". " + e + ". " + resource, e);
    }
}
/**
 * Creates the given Service in the supplied namespace, falling back to the
 * configured default namespace when none is given.
 *
 * @param service    the service to create
 * @param namespace  the target namespace; may be blank, in which case {@link #getNamespace()} is used
 * @param sourceName the source the service definition was loaded from (for logging)
 */
protected void doCreateService(Service service, String namespace, String sourceName) {
        LOG.info("Creating a Service from " + sourceName + " namespace " + namespace + " name " + getName(service));
        try {
            // Resolve the fallback once so the create call and the generated-entity log
            // use the same namespace (previously the log could receive a blank one).
            String applyNamespace = Strings.isNotBlank(namespace) ? namespace : getNamespace();
            Object answer = kubernetesClient.services().inNamespace(applyNamespace).create(service);
            logGeneratedEntity("Created Service: ", applyNamespace, service, answer);
        } catch (Exception e) {
            onApplyError("Failed to create Service from " + sourceName + ". " + e + ". " + service, e);
        }
}
/**
 * Ensures a namespace with the given name exists, creating it when absent.
 *
 * @param namespaceName the name of the namespace to apply
 */
public void applyNamespace(String namespaceName) {
        Namespace namespace = new Namespace();
        getOrCreateMetadata(namespace).setName(namespaceName);
        applyNamespace(namespace);
}
/**
 * Ensures the given namespace entity exists in the cluster, creating it when absent.
 *
 * @param entity the namespace to apply; must carry a name in its metadata
 * @return true if the namespace is created
 */
public boolean applyNamespace(Namespace entity) {
        String namespace = getOrCreateMetadata(entity).getName();
        LOG.info("Using namespace: " + namespace);
        String name = getName(entity);
        Objects.notNull(name, "No name for " + entity);
        Namespace old = kubernetesClient.namespaces().withName(name).get();
        if (isRunning(old)) {
            // Already present — nothing to create.
            return false;
        }
        try {
            Object answer = kubernetesClient.namespaces().create(entity);
            logGeneratedEntity("Created namespace: ", namespace, entity, answer);
            return true;
        } catch (Exception e) {
            onApplyError("Failed to create namespace: " + name + " due " + e.getMessage(), e);
            return false;
        }
}
/**
 * Applies the given ReplicationController: creates it when absent; otherwise performs a
 * rolling upgrade, a delete-and-recreate (recreate mode) or an in-place replace depending
 * on the controller's configuration.
 *
 * @param replicationController the controller to apply; must carry a name
 * @param sourceName            the source the definition was loaded from (for logging)
 * @throws Exception if the underlying client call fails and exceptions are enabled
 */
public void applyReplicationController(ReplicationController replicationController, String sourceName) throws Exception {
        String namespace = getNamespace();
        String id = getName(replicationController);
        Objects.notNull(id, "No name for " + replicationController + " " + sourceName);
        if (isServicesOnlyMode()) {
            LOG.debug("Only processing Services right now so ignoring ReplicationController: " + namespace + ":" + id);
            return;
        }
        ReplicationController old = kubernetesClient.replicationControllers().inNamespace(namespace).withName(id).get();
        if (isRunning(old)) {
            if (UserConfigurationCompare.configEqual(replicationController, old)) {
                LOG.info("ReplicationController has not changed so not doing anything");
            } else {
                ReplicationControllerSpec newSpec = replicationController.getSpec();
                ReplicationControllerSpec oldSpec = old.getSpec();
                if (rollingUpgrade) {
                    LOG.info("Rolling upgrade of the ReplicationController: " + namespace + "/" + id);
                    // lets preserve the number of replicas currently running in the environment we are about to upgrade
                    if (rollingUpgradePreserveScale && newSpec != null && oldSpec != null) {
                        Integer replicas = oldSpec.getReplicas();
                        if (replicas != null) {
                            newSpec.setReplicas(replicas);
                        }
                    }
                    LOG.info("rollingUpgradePreserveScale " + rollingUpgradePreserveScale + " new replicas is " + (newSpec != null ? newSpec.getReplicas() : "<null>"));
                    kubernetesClient.replicationControllers().inNamespace(namespace).withName(id).rolling().replace(replicationController);
                } else if (isRecreateMode()) {
                    LOG.info("Deleting ReplicationController: " + id);
                    kubernetesClient.replicationControllers().inNamespace(namespace).withName(id).delete();
                    doCreateReplicationController(replicationController, namespace, sourceName);
                } else {
                    LOG.info("Updating ReplicationController from " + sourceName + " namespace " + namespace + " name " + getName(replicationController));
                    try {
                        Object answer = kubernetesClient.replicationControllers().inNamespace(namespace).withName(id).replace(replicationController);
                        logGeneratedEntity("Updated replicationController: ", namespace, replicationController, answer);
                        if (deletePodsOnReplicationControllerUpdate) {
                            // Guard against an NPE: the rest of this method already treats newSpec as
                            // nullable, but the previous code dereferenced it unconditionally here.
                            if (newSpec != null && newSpec.getSelector() != null) {
                                // Log before acting so the log reflects intent even if the delete fails.
                                LOG.info("Deleting any pods for the replication controller to ensure they use the new configuration");
                                kubernetesClient.pods().inNamespace(namespace).withLabels(newSpec.getSelector()).delete();
                            } else {
                                LOG.warn("Cannot delete pods for the replication controller as no selector is available");
                            }
                        } else {
                            LOG.info("Warning not deleted any pods so they could well be running with the old configuration!");
                        }
                    } catch (Exception e) {
                        onApplyError("Failed to update ReplicationController from " + sourceName + ". " + e + ". " + replicationController, e);
                    }
                }
            }
        } else {
            if (!isAllowCreate()) {
                LOG.warn("Creation disabled so not creating a ReplicationController from " + sourceName + " namespace " + namespace + " name " + getName(replicationController));
            } else {
                doCreateReplicationController(replicationController, namespace, sourceName);
            }
        }
}
/**
 * Creates the given ReplicationController after validating that any secrets its
 * pod template references already exist (when that validation is enabled).
 *
 * @param replicationController the controller to create
 * @param namespace             the target namespace; may be blank, in which case {@link #getNamespace()} is used
 * @param sourceName            the source the definition was loaded from (for logging)
 */
protected void doCreateReplicationController(ReplicationController replicationController, String namespace, String sourceName) {
        LOG.info("Creating a ReplicationController from " + sourceName + " namespace " + namespace + " name " + getName(replicationController));
        try {
            // lets check that if secrets are required they exist
            ReplicationControllerSpec spec = replicationController.getSpec();
            if (spec != null) {
                PodTemplateSpec template = spec.getTemplate();
                if (template != null) {
                    // template.getSpec() may be null; validatePodSpec tolerates that.
                    validatePodSpec(template.getSpec(), namespace);
                }
            }
            // Resolve the fallback once so the create call and the generated-entity log
            // use the same namespace (previously the log could receive a blank one).
            String applyNamespace = Strings.isNotBlank(namespace) ? namespace : getNamespace();
            Object answer = kubernetesClient.replicationControllers().inNamespace(applyNamespace).create(replicationController);
            logGeneratedEntity("Created ReplicationController: ", applyNamespace, replicationController, answer);
        } catch (Exception e) {
            onApplyError("Failed to create ReplicationController from " + sourceName + ". " + e + ". " + replicationController, e);
        }
}
/**
 * Lets verify that any dependencies are available; such as volumes or secrets.
 * Only enforced when {@code requireSecretsCreatedBeforeReplicationControllers} is set.
 *
 * @param podSpec   the pod spec to validate; may be null, in which case nothing is checked
 * @param namespace the namespace the referenced secrets are expected to exist in
 */
protected void validatePodSpec(PodSpec podSpec, String namespace) {
        // Null-guard added: doCreateReplicationController passes template.getSpec()
        // unchecked, which previously NPE'd here when the template had no spec.
        if (requireSecretsCreatedBeforeReplicationControllers && podSpec != null) {
            List<Volume> volumes = podSpec.getVolumes();
            if (volumes != null) {
                for (Volume volume : volumes) {
                    SecretVolumeSource secret = volume.getSecret();
                    if (secret != null) {
                        String secretName = secret.getSecretName();
                        if (Strings.isNotBlank(secretName)) {
                            KubernetesHelper.validateSecretExists(kubernetesClient, namespace, secretName);
                        }
                    }
                }
            }
        }
}
/**
 * Applies the given Pod: creates it when absent, otherwise updates it in place
 * (or deletes and recreates it in recreate mode). Skipped in services-only mode.
 *
 * @param pod        the pod to apply; must carry a name
 * @param sourceName the source the pod definition was loaded from (for logging)
 * @throws Exception if the underlying client call fails and exceptions are enabled
 */
public void applyPod(Pod pod, String sourceName) throws Exception {
        String ns = getNamespace();
        String name = getName(pod);
        Objects.notNull(name, "No name for " + pod + " " + sourceName);
        if (isServicesOnlyMode()) {
            LOG.debug("Only processing Services right now so ignoring Pod: " + ns + ":" + name);
            return;
        }
        Pod existing = kubernetesClient.pods().inNamespace(ns).withName(name).get();
        if (!isRunning(existing)) {
            if (isAllowCreate()) {
                doCreatePod(pod, ns, sourceName);
            } else {
                LOG.warn("Creation disabled so not creating a pod from " + sourceName + " namespace " + ns + " name " + getName(pod));
            }
            return;
        }
        if (UserConfigurationCompare.configEqual(pod, existing)) {
            LOG.info("Pod has not changed so not doing anything");
            return;
        }
        if (isRecreateMode()) {
            LOG.info("Deleting Pod: " + name);
            kubernetesClient.pods().inNamespace(ns).withName(name).delete();
            doCreatePod(pod, ns, sourceName);
            return;
        }
        LOG.info("Updating a Pod from " + sourceName + " namespace " + ns + " name " + getName(pod));
        try {
            Object answer = kubernetesClient.pods().inNamespace(ns).withName(name).replace(pod);
            LOG.info("Updated Pod result: " + answer);
        } catch (Exception e) {
            onApplyError("Failed to update Pod from " + sourceName + ". " + e + ". " + pod, e);
        }
}
/**
 * Creates the given Pod after validating its spec dependencies, falling back to
 * the configured default namespace when none is given.
 *
 * @param pod        the pod to create
 * @param namespace  the target namespace; may be blank, in which case {@link #getNamespace()} is used
 * @param sourceName the source the pod definition was loaded from (for logging)
 */
protected void doCreatePod(Pod pod, String namespace, String sourceName) {
        LOG.info("Creating a Pod from " + sourceName + " namespace " + namespace + " name " + getName(pod));
        try {
            PodSpec podSpec = pod.getSpec();
            if (podSpec != null) {
                validatePodSpec(podSpec, namespace);
            }
            String targetNamespace = Strings.isNotBlank(namespace) ? namespace : getNamespace();
            Object answer = kubernetesClient.pods().inNamespace(targetNamespace).create(pod);
            LOG.info("Created Pod result: " + answer);
        } catch (Exception e) {
            onApplyError("Failed to create Pod from " + sourceName + ". " + e + ". " + pod, e);
        }
}
/**
 * Returns the configured default namespace used when an entity does not specify one.
 */
public String getNamespace() {
        // NOTE(review): the backing field is misspelled "namesapce"; it is declared
        // elsewhere in this class, so the name is left untouched here.
        return namesapce;
}
/**
 * Returns the namespace defined in the entity or the configured namespace,
 * and ensures that namespace exists in the cluster as a side effect.
 *
 * @param entity the entity whose metadata may carry an explicit namespace
 * @return the entity's namespace when set, otherwise {@link #getNamespace()}
 */
protected String getNamespace(HasMetadata entity) {
        String answer = KubernetesHelper.getNamespace(entity);
        if (Strings.isNullOrBlank(answer)) {
            answer = getNamespace();
        }
        // lets make sure the namespace exists
        applyNamespace(answer);
        return answer;
}
/**
 * Sets the default namespace used by the apply operations.
 * NOTE(review): the backing field is misspelled "namesapce" (declared elsewhere
 * in this class); the assignment is left untouched here.
 */
public void setNamespace(String namespace) {
        this.namesapce = namespace;
}
/** Returns true if apply failures are rethrown as RuntimeException instead of only logged. */
public boolean isThrowExceptionOnError() {
        return throwExceptionOnError;
}
public void setThrowExceptionOnError(boolean throwExceptionOnError) {
        this.throwExceptionOnError = throwExceptionOnError;
}
public boolean isProcessTemplatesLocally() {
        return processTemplatesLocally;
}
public void setProcessTemplatesLocally(boolean processTemplatesLocally) {
        this.processTemplatesLocally = processTemplatesLocally;
}
/** Returns true if pods are deleted after a ReplicationController update so they restart with the new configuration. */
public boolean isDeletePodsOnReplicationControllerUpdate() {
        return deletePodsOnReplicationControllerUpdate;
}
public void setDeletePodsOnReplicationControllerUpdate(boolean deletePodsOnReplicationControllerUpdate) {
        this.deletePodsOnReplicationControllerUpdate = deletePodsOnReplicationControllerUpdate;
}
/** Returns the directory where JSON logging files go, or null if not configured. */
public File getLogJsonDir() {
        return logJsonDir;
}
/**
 * Lets you configure the directory where JSON logging files should go
 */
public void setLogJsonDir(File logJsonDir) {
        this.logJsonDir = logJsonDir;
}
public File getBasedir() {
        return basedir;
}
public void setBasedir(File basedir) {
        this.basedir = basedir;
}
/**
 * Returns true if the entity is considered to be already running in the cluster;
 * currently any non-null entity (i.e. a successful GET) counts as running.
 */
protected boolean isRunning(HasMetadata entity) {
        return entity != null;
}
/**
 * Logs an error applying some JSON to Kubernetes and optionally throws an exception
 */
protected void onApplyError(String message, Exception e) {
        LOG.error(message, e);
        if (throwExceptionOnError) {
            throw new RuntimeException(message, e);
        }
}
/**
 * Returns true if this controller allows new resources to be created in the given namespace
 */
public boolean isAllowCreate() {
        return allowCreate;
}
public void setAllowCreate(boolean allowCreate) {
        this.allowCreate = allowCreate;
}
/**
 * If enabled then updates are performed by deleting the resource first then creating it
 */
public boolean isRecreateMode() {
        return recreateMode;
}
public void setRecreateMode(boolean recreateMode) {
        this.recreateMode = recreateMode;
}
public void setServicesOnlyMode(boolean servicesOnlyMode) {
        this.servicesOnlyMode = servicesOnlyMode;
}
/**
 * If enabled then only services are created/updated to allow services to be created/updated across
 * a number of apps before any pods/replication controllers are updated
 */
public boolean isServicesOnlyMode() {
        return servicesOnlyMode;
}
/**
 * If enabled then all services are ignored to avoid them being recreated. This is useful if you want to
 * recreate ReplicationControllers and Pods but leave Services as they are to avoid the portalIP addresses
 * changing
 */
public boolean isIgnoreServiceMode() {
        return ignoreServiceMode;
}
public void setIgnoreServiceMode(boolean ignoreServiceMode) {
        this.ignoreServiceMode = ignoreServiceMode;
}
public boolean isIgnoreRunningOAuthClients() {
        return ignoreRunningOAuthClients;
}
public void setIgnoreRunningOAuthClients(boolean ignoreRunningOAuthClients) {
        this.ignoreRunningOAuthClients = ignoreRunningOAuthClients;
}
public boolean isFailOnMissingParameterValue() {
        return failOnMissingParameterValue;
}
public void setFailOnMissingParameterValue(boolean failOnMissingParameterValue) {
        this.failOnMissingParameterValue = failOnMissingParameterValue;
}
public boolean isSupportOAuthClients() {
        return supportOAuthClients;
}
public void setSupportOAuthClients(boolean supportOAuthClients) {
        this.supportOAuthClients = supportOAuthClients;
}
/** Returns true if referenced secrets must already exist before a ReplicationController may be created. */
public boolean isRequireSecretsCreatedBeforeReplicationControllers() {
        return requireSecretsCreatedBeforeReplicationControllers;
}
public void setRequireSecretsCreatedBeforeReplicationControllers(boolean requireSecretsCreatedBeforeReplicationControllers) {
        this.requireSecretsCreatedBeforeReplicationControllers = requireSecretsCreatedBeforeReplicationControllers;
}
public boolean isRollingUpgrade() {
        return rollingUpgrade;
}
public void setRollingUpgrade(boolean rollingUpgrade) {
        this.rollingUpgrade = rollingUpgrade;
}
public boolean isRollingUpgradePreserveScale() {
        return rollingUpgradePreserveScale;
}
public void setRollingUpgradePreserveScale(boolean rollingUpgradePreserveScale) {
        this.rollingUpgradePreserveScale = rollingUpgradePreserveScale;
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v10/common/ad_asset.proto
package com.google.ads.googleads.v10.common;
/**
* <pre>
* A media bundle asset used inside an ad.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.common.AdMediaBundleAsset}
*/
// NOTE(review): machine-generated by protoc from google/ads/googleads/v10/common/ad_asset.proto.
// Do not hand-edit — regenerate from the .proto instead; manual changes will be lost.
public final class AdMediaBundleAsset extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v10.common.AdMediaBundleAsset)
    AdMediaBundleAssetOrBuilder {
private static final long serialVersionUID = 0L;
  // Use AdMediaBundleAsset.newBuilder() to construct.
  private AdMediaBundleAsset(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private AdMediaBundleAsset() {
    asset_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new AdMediaBundleAsset();
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: reads tags until EOF (tag 0); tag 18 is
  // field 2 ("asset", length-delimited UTF-8 string); everything else is kept
  // in the unknown-field set.
  private AdMediaBundleAsset(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 18: {
            java.lang.String s = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            asset_ = s;
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v10.common.AdAssetProto.internal_static_google_ads_googleads_v10_common_AdMediaBundleAsset_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v10.common.AdAssetProto.internal_static_google_ads_googleads_v10_common_AdMediaBundleAsset_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v10.common.AdMediaBundleAsset.class, com.google.ads.googleads.v10.common.AdMediaBundleAsset.Builder.class);
  }
  // Presence bits for optional fields; bit 0x00000001 tracks "asset".
  private int bitField0_;
  public static final int ASSET_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; lazily converted and cached as String.
  private volatile java.lang.Object asset_;
  /**
   * <pre>
   * The Asset resource name of this media bundle.
   * </pre>
   *
   * <code>optional string asset = 2;</code>
   * @return Whether the asset field is set.
   */
  @java.lang.Override
  public boolean hasAsset() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * The Asset resource name of this media bundle.
   * </pre>
   *
   * <code>optional string asset = 2;</code>
   * @return The asset.
   */
  @java.lang.Override
  public java.lang.String getAsset() {
    java.lang.Object ref = asset_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      asset_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * The Asset resource name of this media bundle.
   * </pre>
   *
   * <code>optional string asset = 2;</code>
   * @return The bytes for asset.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getAssetBytes() {
    java.lang.Object ref = asset_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      asset_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not computed yet; this message has no required fields so it is always 1.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, asset_);
    }
    unknownFields.writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, asset_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v10.common.AdMediaBundleAsset)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v10.common.AdMediaBundleAsset other = (com.google.ads.googleads.v10.common.AdMediaBundleAsset) obj;
    if (hasAsset() != other.hasAsset()) return false;
    if (hasAsset()) {
      if (!getAsset()
          .equals(other.getAsset())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasAsset()) {
      hash = (37 * hash) + ASSET_FIELD_NUMBER;
      hash = (53 * hash) + getAsset().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.ads.googleads.v10.common.AdMediaBundleAsset parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.common.AdMediaBundleAsset parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.common.AdMediaBundleAsset parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.common.AdMediaBundleAsset parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.common.AdMediaBundleAsset parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.common.AdMediaBundleAsset parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.common.AdMediaBundleAsset parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.common.AdMediaBundleAsset parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.common.AdMediaBundleAsset parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.common.AdMediaBundleAsset parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.common.AdMediaBundleAsset parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.common.AdMediaBundleAsset parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v10.common.AdMediaBundleAsset prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * A media bundle asset used inside an ad.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v10.common.AdMediaBundleAsset}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.common.AdMediaBundleAsset)
      com.google.ads.googleads.v10.common.AdMediaBundleAssetOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v10.common.AdAssetProto.internal_static_google_ads_googleads_v10_common_AdMediaBundleAsset_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v10.common.AdAssetProto.internal_static_google_ads_googleads_v10_common_AdMediaBundleAsset_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v10.common.AdMediaBundleAsset.class, com.google.ads.googleads.v10.common.AdMediaBundleAsset.Builder.class);
    }
    // Construct using com.google.ads.googleads.v10.common.AdMediaBundleAsset.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      asset_ = "";
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v10.common.AdAssetProto.internal_static_google_ads_googleads_v10_common_AdMediaBundleAsset_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v10.common.AdMediaBundleAsset getDefaultInstanceForType() {
      return com.google.ads.googleads.v10.common.AdMediaBundleAsset.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v10.common.AdMediaBundleAsset build() {
      com.google.ads.googleads.v10.common.AdMediaBundleAsset result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v10.common.AdMediaBundleAsset buildPartial() {
      com.google.ads.googleads.v10.common.AdMediaBundleAsset result = new com.google.ads.googleads.v10.common.AdMediaBundleAsset(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        to_bitField0_ |= 0x00000001;
      }
      result.asset_ = asset_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v10.common.AdMediaBundleAsset) {
        return mergeFrom((com.google.ads.googleads.v10.common.AdMediaBundleAsset)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.ads.googleads.v10.common.AdMediaBundleAsset other) {
      if (other == com.google.ads.googleads.v10.common.AdMediaBundleAsset.getDefaultInstance()) return this;
      if (other.hasAsset()) {
        bitField0_ |= 0x00000001;
        asset_ = other.asset_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v10.common.AdMediaBundleAsset parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v10.common.AdMediaBundleAsset) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;
    private java.lang.Object asset_ = "";
    /**
     * <pre>
     * The Asset resource name of this media bundle.
     * </pre>
     *
     * <code>optional string asset = 2;</code>
     * @return Whether the asset field is set.
     */
    public boolean hasAsset() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <pre>
     * The Asset resource name of this media bundle.
     * </pre>
     *
     * <code>optional string asset = 2;</code>
     * @return The asset.
     */
    public java.lang.String getAsset() {
      java.lang.Object ref = asset_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        asset_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * The Asset resource name of this media bundle.
     * </pre>
     *
     * <code>optional string asset = 2;</code>
     * @return The bytes for asset.
     */
    public com.google.protobuf.ByteString
        getAssetBytes() {
      java.lang.Object ref = asset_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        asset_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * The Asset resource name of this media bundle.
     * </pre>
     *
     * <code>optional string asset = 2;</code>
     * @param value The asset to set.
     * @return This builder for chaining.
     */
    public Builder setAsset(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      asset_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The Asset resource name of this media bundle.
     * </pre>
     *
     * <code>optional string asset = 2;</code>
     * @return This builder for chaining.
     */
    public Builder clearAsset() {
      bitField0_ = (bitField0_ & ~0x00000001);
      asset_ = getDefaultInstance().getAsset();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The Asset resource name of this media bundle.
     * </pre>
     *
     * <code>optional string asset = 2;</code>
     * @param value The bytes for asset to set.
     * @return This builder for chaining.
     */
    public Builder setAssetBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      bitField0_ |= 0x00000001;
      asset_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.common.AdMediaBundleAsset)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v10.common.AdMediaBundleAsset)
  private static final com.google.ads.googleads.v10.common.AdMediaBundleAsset DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v10.common.AdMediaBundleAsset();
  }
  public static com.google.ads.googleads.v10.common.AdMediaBundleAsset getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<AdMediaBundleAsset>
      PARSER = new com.google.protobuf.AbstractParser<AdMediaBundleAsset>() {
    @java.lang.Override
    public AdMediaBundleAsset parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new AdMediaBundleAsset(input, extensionRegistry);
    }
  };
  public static com.google.protobuf.Parser<AdMediaBundleAsset> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<AdMediaBundleAsset> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v10.common.AdMediaBundleAsset getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Copyright 2016 Bjoern Bilger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jrestless.test;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import com.jrestless.test.InvokableArguments.Argument;
/**
* Capture all valid and invalid arguments for an invokable (constructor or
* method) and test the preconditions and combinations against it.
*
* @author Bjoern Bilger
*
*/
public abstract class PreconditionsTester {
	// The constructor or method whose preconditions are being tested.
	private final Invokable invokable;
	// Arguments known to satisfy the invokable's preconditions, per parameter.
	private final InvokableArguments<Argument> validArguments;
	// Arguments expected to trigger a precondition failure, per parameter.
	private final InvokableArguments<InvalidArgument> invalidArguments;
protected PreconditionsTester(Invokable invokable, Class<?>[] parameterTypes) {
this.validArguments = new InvokableArguments<>(parameterTypes);
this.invalidArguments = new InvokableArguments<>(parameterTypes);
this.invokable = invokable;
}
/**
* Add a list of <b>valid</b> arguments for the parameter.
*
* @param paramIndex
* the index of the parameter
* @param arguments
* valid arguments for the parameter
* @return
*/
public PreconditionsTester addValidArgs(int paramIndex, Object... arguments) {
validArguments.addArg(paramIndex, Argument::new, arguments);
return this;
}
/**
* Add a list of <b>invalid</b> arguments for the parameter and the expected
* exception.
*
* @param paramIndex
* @param expectedException
* @param arguments
* @return
*/
public PreconditionsTester addInvalidArgs(int paramIndex, Class<? extends Exception> expectedException,
Object... arguments) {
invalidArguments.addArg(paramIndex, a -> new InvalidArgument(a, expectedException), arguments);
return this;
}
/**
* Shortcut for {@link #addInvalidArgs(int, Class, Object...)
* addInvalidArgs(paramIndex, NullpointerException.class, new Object[]
* ${null}}.
*
* @param paramIndex
* @return
*/
public PreconditionsTester addInvalidNpeArg(int paramIndex) {
addInvalidArgs(paramIndex, NullPointerException.class, new Object[] {null});
return this;
}
/**
* Shortcut for {@link #addInvalidArgs(int, Class, Object...)
* addInvalidArgs(paramIndex, IllegalArgumentException.class, arguments}.
*
* @param paramIndex
* @return
*/
public PreconditionsTester addInvalidIaeArgs(int paramIndex, Object... arguments) {
addInvalidArgs(paramIndex, IllegalArgumentException.class, arguments);
return this;
}
public void testPreconditionsAndValidCombinations() {
testValidCombinations();
testPreconditions();
}
/**
* Tests all possible combinations valid of arguments on the invokable (constructor or method).
* <p>
* This requires at least one valid argument for <b>each</b> parameter.
*
* @throws AssertionError
* if invoking the constructor with a valid set of arguments
* throws an exception
*/
public void testValidCombinations() {
validArguments.checkAllArgumentsSet();
testArguments(validArguments.getCapturedArgumentsSets(), null);
}
/**
* Tests all preconditions on the invokable (constructor or method).
* <p>
* This requires all at least one invalid argument for parameter and at
* least one valid argument for <b>each</b> parameter.
*
* @throws AssertionError
* if invoking the constructor with a valid set of arguments
* throws an exception, or a set of arguments with one invalid
* argument doesn't throw an exception or rather the expected
* exception
*
*/
public void testPreconditions() {
validArguments.checkAllArgumentsSet();
boolean oneTested = false;
for (int i = 0; i < validArguments.getCapturedArgumentsSets().size(); i++) {
List<Set<Argument>> argumentsLists = new ArrayList<>(validArguments.getCapturedArgumentsSets());
Set<InvalidArgument> currInvalidArgs = invalidArguments.getCapturedArgumentsSets().get(i);
if (!currInvalidArgs.isEmpty()) {
for (InvalidArgument invalidParam : currInvalidArgs) {
argumentsLists.set(i, Collections.singleton(invalidParam));
testArguments(argumentsLists, invalidParam.getExpectedException());
oneTested = true;
}
}
}
if (!oneTested) {
throw new IllegalStateException("no invlid arguments set - cannot test any preconditions");
}
}
private void testArguments(List<Set<Argument>> argumentsLists, Class<? extends Exception> expectedException) {
for (List<Argument> arguments : InvokableArguments.getCartesianProduct(argumentsLists)) {
List<Object> argVals = arguments.stream().map(Argument::getValue).collect(Collectors.toList());
try {
invokable.invoke(argVals.toArray(new Object[argVals.size()]));
if (expectedException != null) {
throw new AssertionError(
"expected " + expectedException.getName() + " to be thrown for arguments: " + argVals);
}
} catch (Exception e) {
Throwable t = e;
if (t instanceof InvocationTargetException) {
t = ((InvocationTargetException) t).getTargetException();
}
if (expectedException == null) {
throw new AssertionError("exception has been thrown for arguments: " + argVals, t);
}
if (!expectedException.isAssignableFrom(t.getClass())) {
throw new AssertionError("expected " + expectedException.getClass() + " to be thrown but got "
+ t.getClass() + " for arguments: " + argVals, t);
}
}
}
}
private static class InvalidArgument extends Argument {
private final Class<? extends Exception> expectedException;
InvalidArgument(Object value, Class<? extends Exception> expectedException) {
super(value);
this.expectedException = expectedException;
}
Class<? extends Exception> getExpectedException() {
return expectedException;
}
@Override
public int hashCode() {
/*
* do not take expectedException into account
* since only the argument's value is essential
*/
return super.hashCode();
}
@Override
public boolean equals(Object obj) {
/*
* do not take expectedException into account
* since only the argument's value is essential
*/
return super.equals(obj);
}
}
@FunctionalInterface
protected interface Invokable {
Object invoke(Object... args) throws InstantiationException, IllegalAccessException, InvocationTargetException;
}
}
| |
/*
* Copyright 2018-2021 Denis Kokorin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.github.kokorin.jaffree.ffprobe;
import com.github.kokorin.jaffree.Rational;
import com.github.kokorin.jaffree.StreamType;
import com.github.kokorin.jaffree.ffprobe.data.ProbeData;
import java.util.List;
/**
 * Stream description.
 * <p>
 * Wraps a single stream entry of ffprobe output and exposes typed accessors
 * for its properties. Video-, audio- and subtitle-specific accessors return
 * {@code null} when the property is absent for this stream's codec type.
 */
public class Stream implements TagAware {

    private final ProbeData probeData;

    /**
     * Creates {@link Stream} description based on provided ffprobe data.
     *
     * @param probeData ffprobe data
     */
    public Stream(final ProbeData probeData) {
        this.probeData = probeData;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ProbeData getProbeData() {
        return probeData;
    }

    /**
     * Returns format-specific stream ID.
     *
     * @return stream ID
     */
    //TODO integer
    public String getId() {
        return probeData.getString("id");
    }

    /**
     * @return stream index
     */
    public Integer getIndex() {
        return probeData.getInteger("index");
    }

    /**
     * @return stream disposition
     */
    public StreamDisposition getDisposition() {
        return new StreamDisposition(probeData.getSubData("disposition"));
    }

    /**
     * Returns additional stream data that can be provided by the container.
     *
     * @return side data
     */
    public List<SideData> getSideDataList() {
        return probeData.getSubDataList("side_data_list", SideData::new);
    }

    /**
     * @return codec name
     */
    public String getCodecName() {
        return probeData.getString("codec_name");
    }

    /**
     * @return codec long name
     */
    public String getCodecLongName() {
        return probeData.getString("codec_long_name");
    }

    /**
     * @return codec profile
     */
    public String getProfile() {
        return probeData.getString("profile");
    }

    /**
     * Returns codec level. Video only.
     *
     * @return codec level
     */
    public Integer getLevel() {
        return probeData.getInteger("level");
    }

    /**
     * @return codec type
     */
    public StreamType getCodecType() {
        return probeData.getStreamType("codec_type");
    }

    /**
     * @return codec time base
     * @deprecated removed in ffmpeg source code
     */
    @Deprecated
    public Rational getCodecTimeBase() {
        return probeData.getRational("codec_time_base");
    }

    /**
     * Returns hexadecimal representation of fourcc codec tag, e.g. {@code 0x31637661}.
     *
     * @return codec tag
     * @see <a href="https://www.fourcc.org/">fourcc.org</a>
     */
    //TODO integer: hex number - FourCC
    public String getCodecTag() {
        return probeData.getString("codec_tag");
    }

    /**
     * Returns string representation of fourcc codec tag, e.g. {@code avc1}.
     *
     * @return codec tag
     * @see <a href="https://www.fourcc.org/">fourcc.org</a>
     */
    public String getCodecTagString() {
        return probeData.getString("codec_tag_string");
    }

    /**
     * Returns extra binary data needed for initializing the decoder, codec-dependent.
     *
     * @return extradata
     */
    public String getExtradata() {
        return probeData.getString("extradata");
    }

    /**
     * Returns extra binary data hash. Extradata is needed for initializing the decoder,
     * codec-dependent.
     *
     * @return extradata hash
     */
    public String getExtradataHash() {
        return probeData.getString("extradata_hash");
    }

    /**
     * Returns width of video or subtitle stream, or null.
     *
     * @return width
     * @see #getCodecType()
     */
    public Integer getWidth() {
        return probeData.getInteger("width");
    }

    /**
     * Returns height of video or subtitle stream, or null.
     *
     * @return height
     * @see #getCodecType()
     */
    public Integer getHeight() {
        return probeData.getInteger("height");
    }

    /**
     * Returns coded width of video stream, or null.
     *
     * @return coded width
     */
    public Integer getCodedWidth() {
        return probeData.getInteger("coded_width");
    }

    /**
     * Returns coded height of video stream, or null.
     *
     * @return coded height
     */
    public Integer getCodedHeight() {
        return probeData.getInteger("coded_height");
    }

    /**
     * Returns number of delayed frames of video stream, or null.
     *
     * @return number of delayed frames
     */
    public Integer hasBFrames() {
        return probeData.getInteger("has_b_frames");
    }

    /**
     * Returns sample aspect ratio (SAR) of video stream, or null.
     *
     * @return sar
     */
    public Rational getSampleAspectRatio() {
        return probeData.getRatio("sample_aspect_ratio");
    }

    /**
     * Returns display aspect ratio (DAR) of video stream, or null.
     *
     * @return dar
     */
    public Rational getDisplayAspectRatio() {
        return probeData.getRatio("display_aspect_ratio");
    }

    /**
     * Returns pixel format of video stream, or null.
     *
     * @return pixel format
     */
    public String getPixFmt() {
        return probeData.getString("pix_fmt");
    }

    /**
     * Returns visual content value range of video stream, or null.
     *
     * @return color range
     * @see <a href="https://github.com/FFmpeg/FFmpeg/blob/master/libavutil/pixfmt.h#L541">
     * enum AVColorRange</a>
     */
    public String getColorRange() {
        return probeData.getString("color_range");
    }

    /**
     * Returns YUV color space type of video stream, or null.
     *
     * @return color space
     * @see <a href="https://github.com/FFmpeg/FFmpeg/blob/master/libavutil/pixfmt.h#L502">
     * enum AVColorSpace</a>
     */
    public String getColorSpace() {
        return probeData.getString("color_space");
    }

    /**
     * Returns color transfer characteristic of video stream, or null.
     *
     * @return color transfer
     * @see <a href="https://github.com/FFmpeg/FFmpeg/blob/master/libavutil/pixfmt.h#L473">
     * enum AVColorTransferCharacteristic</a>
     */
    public String getColorTransfer() {
        return probeData.getString("color_transfer");
    }

    /**
     * Returns chromaticity coordinates of the source primaries of video stream, or null.
     *
     * @return color primaries
     * @see <a href="https://github.com/FFmpeg/FFmpeg/blob/master/libavutil/pixfmt.h#L448">
     * enum AVColorPrimaries</a>
     */
    public String getColorPrimaries() {
        return probeData.getString("color_primaries");
    }

    /**
     * Returns chroma location of video stream, or null.
     *
     * @return chroma location
     * @see <a href="https://github.com/FFmpeg/FFmpeg/blob/master/libavutil/pixfmt.h#L595">
     * enum AVChromaLocation</a>
     */
    public String getChromaLocation() {
        return probeData.getString("chroma_location");
    }

    /**
     * Returns field order of video stream, or null.
     *
     * @return field order
     * @see <a href="https://github.com/FFmpeg/FFmpeg/blob/master/libavcodec/codec_par.h#L36">
     * enum AVFieldOrder</a>
     */
    public String getFieldOrder() {
        return probeData.getString("field_order");
    }

    /**
     * Returns mpeg timecode string of video stream, or null.
     *
     * @return timecode
     * @deprecated removed in ffmpeg source code
     */
    @Deprecated
    public String getTimecode() {
        return probeData.getString("timecode");
    }

    /**
     * @return number of reference frames
     */
    public Integer getRefs() {
        return probeData.getInteger("refs");
    }

    /**
     * Returns sample format of audio stream, or null.
     *
     * @return sample format
     */
    public String getSampleFmt() {
        return probeData.getString("sample_fmt");
    }

    /**
     * Returns sample rate of audio stream, or null.
     *
     * @return sample rate
     */
    public Integer getSampleRate() {
        return probeData.getInteger("sample_rate");
    }

    /**
     * Returns number of channels in audio stream, or null.
     *
     * @return number of channels
     */
    public Integer getChannels() {
        return probeData.getInteger("channels");
    }

    /**
     * Returns channels layout of audio stream, or null.
     *
     * @return channels layout
     */
    public String getChannelLayout() {
        return probeData.getString("channel_layout");
    }

    /**
     * Returns number of bits per sample of audio stream, or null.
     *
     * @return bits per sample
     */
    public Integer getBitsPerSample() {
        return probeData.getInteger("bits_per_sample");
    }

    /**
     * Returns real base framerate of the stream.
     * <p>
     * This is the lowest framerate with which all timestamps can be represented accurately
     * (it is the least common multiple of all framerates in the stream).
     * <p>
     * Note, this value is just a guess! For example, if the time base is 1/90000 and all frames
     * have either approximately 3600 or 1800 timer ticks, then r_frame_rate will be 50/1.
     *
     * @return base framerate
     */
    public Rational getRFrameRate() {
        return probeData.getRational("r_frame_rate");
    }

    /**
     * @return average framerate
     */
    public Rational getAvgFrameRate() {
        return probeData.getRational("avg_frame_rate");
    }

    /**
     * Timebase is the fundamental unit of time (in seconds) in terms of which frame timestamps
     * are represented.
     *
     * @return stream timebase
     */
    // TODO Rational
    public String getTimeBase() {
        return probeData.getString("time_base");
    }

    /**
     * Returns presentation timestamp of the first frame of the stream in presentation order.
     *
     * @return start PTS
     */
    public Long getStartPts() {
        return probeData.getLong("start_pts");
    }

    /**
     * Returns time (in seconds) of the first frame of the stream in presentation order.
     *
     * @return start time
     */
    public Float getStartTime() {
        return probeData.getFloat("start_time");
    }

    /**
     * Stream duration in stream timebase units.
     *
     * @return duration in timebase units
     */
    public Long getDurationTs() {
        return probeData.getLong("duration_ts");
    }

    /**
     * @return duration in seconds
     */
    public Float getDuration() {
        return probeData.getFloat("duration");
    }

    /**
     * @return bit rate
     */
    public Integer getBitRate() {
        return probeData.getInteger("bit_rate");
    }

    /**
     * @return max bit rate
     */
    public Integer getMaxBitRate() {
        return probeData.getInteger("max_bit_rate");
    }

    /**
     * @return bits per raw sample
     */
    public Integer getBitsPerRawSample() {
        return probeData.getInteger("bits_per_raw_sample");
    }

    /**
     * @return number of frames
     */
    public Integer getNbFrames() {
        return probeData.getInteger("nb_frames");
    }

    /**
     * @return number of read frames
     */
    public Integer getNbReadFrames() {
        return probeData.getInteger("nb_read_frames");
    }

    /**
     * @return number of read packets
     */
    public Integer getNbReadPackets() {
        return probeData.getInteger("nb_read_packets");
    }
}
| |
package com.tanmoybanik.httpcamera;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.hardware.Camera;
import android.media.ExifInterface;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.os.Handler;
import android.os.ResultReceiver;
import android.os.StrictMode;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Surface;
import android.view.View;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.TextView;
import android.widget.Toast;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.ProtocolException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
 * Activity that periodically captures a picture with the back-facing camera
 * and uploads it to a PHP endpoint as a multipart/form-data POST request.
 */
public class MainActivity extends AppCompatActivity {

    private static final String TAG = "MainActivity";

    /** URL of the PHP script that receives the uploaded image. */
    String upLoadServerUri = "http://192.168.0.100/HttpCameraServer/upload_image.php";
    /** Last HTTP status code returned by the upload server. */
    int serverResponseCode = 0;

    // Camera Variables/////////
    private Camera mCamera;
    private CameraPreview mPreview;
    FrameLayout preview;
    /** Delay between consecutive capture-and-upload cycles, in milliseconds. */
    private int mInterval = 5000;
    /** Created only when the network is available; may be null otherwise. */
    private Handler mHandler;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mCamera = getCameraInstance();
        mPreview = new CameraPreview(this, mCamera);
        preview = (FrameLayout) findViewById(R.id.camera_preview);
        preview.addView(mPreview);
        mPreview.setKeepScreenOn(true);
        if (android.os.Build.VERSION.SDK_INT > 9) {
            // Uploads run on the UI thread; permit that explicitly.
            // NOTE(review): network I/O should ideally move to a background thread.
            StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
            StrictMode.setThreadPolicy(policy);
        }
        if (isNetworkAvailable()) {
            showAlertDialog("Title", "Internet Connected", true);
            mHandler = new Handler();
            startRepeatTask();
        } else {
            showAlertDialog("error", "Internet is not Connected", false);
        }
    }

    /** Takes a picture, then reschedules itself after {@link #mInterval} ms. */
    Runnable startUploadImg = new Runnable() {
        @Override
        public void run() {
            mCamera.takePicture(null, null, mPicture);
            mHandler.postDelayed(startUploadImg, mInterval);
        }
    };

    /** Starts the periodic capture-and-upload loop. */
    void startRepeatTask() {
        startUploadImg.run();
    }

    /** Stops the periodic capture-and-upload loop, if it was ever started. */
    void stopRepeatTask() {
        // BUGFIX: mHandler is only created when the network was available in
        // onCreate; guard against an NPE in onPause/onDestroy otherwise.
        if (mHandler != null) {
            mHandler.removeCallbacks(startUploadImg);
        }
    }

    /////// Creating a camera instance which will open the camera//////

    /**
     * Opens the back-facing camera, or returns null (after showing a toast)
     * when the device has no camera or the camera cannot be opened.
     *
     * @return an opened {@link Camera}, or null
     */
    public Camera getCameraInstance() {
        Camera c = null;
        if (!hasCamera(getApplicationContext())) {
            Toast.makeText(getApplicationContext(), "Your Mobile doesn't have camera!!!", Toast.LENGTH_LONG).show();
        } else {
            try {
                c = openBackFacingCamera();
            } catch (Exception e) {
                Log.e(TAG, "Camera failed to open: " + e.getLocalizedMessage());
            }
        }
        return c;
    }

    /**
     * Opens the first back-facing camera found on the device.
     *
     * @return the opened camera, or null when none could be opened
     */
    private static Camera openBackFacingCamera() {
        Camera cam = null;
        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
        int cameraCount = Camera.getNumberOfCameras();
        for (int camIdx = 0; camIdx < cameraCount; camIdx++) {
            Camera.getCameraInfo(camIdx, cameraInfo);
            if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                try {
                    cam = Camera.open(camIdx);
                } catch (RuntimeException e) {
                    Log.e(TAG, "Camera failed to open: " + e.getLocalizedMessage());
                }
            }
        }
        return cam;
    }

    /**
     * Checks whether the device reports camera hardware.
     *
     * @param context context used to query the package manager
     * @return true when a camera feature is present
     */
    public boolean hasCamera(Context context) {
        return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA);
    }

    /** Releases the camera so other applications can use it. */
    private void releaseCamera() {
        if (mCamera != null) {
            mCamera.release();
            mCamera = null;
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        stopRepeatTask();
        // BUGFIX: mCamera may already be null (no camera / released) — guard
        // before touching the preview.
        if (mCamera != null) {
            mCamera.stopPreview();
        }
        releaseCamera();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        stopRepeatTask();
    }

    @Override
    protected void onStart() {
        super.onStart();
        if (mCamera == null) {
            mCamera = getCameraInstance();
            mPreview = new CameraPreview(this, mCamera);
            mPreview.setCamera(mCamera);
            preview.addView(mPreview);
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        if (mCamera != null) {
            if (Build.VERSION.SDK_INT >= 14)
                setCameraDisplayOrientation(this,
                        Camera.CameraInfo.CAMERA_FACING_BACK, mCamera);
            mPreview.setCamera(mCamera);
        }
    }

    /**
     * Rotates the camera preview so it matches the current screen orientation,
     * following the android.hardware.Camera reference implementation.
     *
     * @param activity activity whose display rotation is used
     * @param cameraId id of the camera being configured
     * @param camera   camera to apply the display orientation to
     */
    public static void setCameraDisplayOrientation(Activity activity,
            int cameraId, android.hardware.Camera camera) {
        android.hardware.Camera.CameraInfo info =
                new android.hardware.Camera.CameraInfo();
        android.hardware.Camera.getCameraInfo(cameraId, info);
        int rotation = activity.getWindowManager().getDefaultDisplay()
                .getRotation();
        int degrees = 0;
        switch (rotation) {
            case Surface.ROTATION_0: degrees = 0; break;
            case Surface.ROTATION_90: degrees = 90; break;
            case Surface.ROTATION_180: degrees = 180; break;
            case Surface.ROTATION_270: degrees = 270; break;
        }
        int result;
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            result = (info.orientation + degrees) % 360;
            result = (360 - result) % 360; // compensate the mirror
        } else { // back-facing
            result = (info.orientation - degrees + 360) % 360;
        }
        camera.setDisplayOrientation(result);
    }

    /**
     * @return true when an active, connected network is available
     */
    private boolean isNetworkAvailable() {
        ConnectivityManager connectivityManager
                = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo activeNetworkInfo = connectivityManager.getActiveNetworkInfo();
        return activeNetworkInfo != null && activeNetworkInfo.isConnected();
    }

    /**
     * Shows a non-cancelable OK dialog with a success or error icon.
     *
     * @param title   dialog title
     * @param message dialog message
     * @param status  true for the success icon, false for the error icon
     */
    public void showAlertDialog(String title, String message, boolean status) {
        AlertDialog alertDialog = new AlertDialog.Builder(MainActivity.this).create();
        alertDialog.setTitle(title);
        alertDialog.setMessage(message);
        alertDialog.setCancelable(false);
        alertDialog.setButton("OK", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int which) {
                return;
            }
        });
        if (status) {
            alertDialog.setIcon(R.drawable.ok);
        } else {
            alertDialog.setIcon(R.drawable.error);
        }
        alertDialog.show();
    }

    /**
     * Returns the file the next capture is written to, creating the output
     * directory on external storage if needed.
     *
     * @return target file, or null when the directory cannot be created
     */
    private static File getoutputMediaFile() {
        File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES), "MyCameraApp");
        if (!mediaStorageDir.exists()) {
            if (!mediaStorageDir.mkdirs()) {
                Log.d("MycameraApp", "Failed to create directory");
                return null;
            }
        }
        // Every capture overwrites the same local file; uploadImage() assigns
        // the timestamped server-side name.
        return new File(mediaStorageDir.getPath() + File.separator + "newImage.jpg");
    }

    File pictureFile = null;
    boolean status;

    /** Saves the JPEG data to disk, uploads it, and deletes it on success. */
    private Camera.PictureCallback mPicture = new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            pictureFile = getoutputMediaFile();
            if (pictureFile == null) {
                return;
            }
            // BUGFIX: reset before each attempt so a stale 'true' from a
            // previous cycle cannot delete a picture whose save failed.
            status = false;
            try {
                FileOutputStream fos = new FileOutputStream(pictureFile);
                try {
                    fos.write(data);
                    fos.flush();
                } finally {
                    fos.close();
                }
                status = uploadImage(pictureFile);
            } catch (IOException e) {
                // FileNotFoundException is an IOException; one handler suffices.
                e.printStackTrace();
                Log.e(TAG, "Camera failed to take picture: " + e.getLocalizedMessage());
            }
            if (status) {
                pictureFile.delete();
            }
        }
    };

    /**
     * Uploads the given image file to {@link #upLoadServerUri} as a
     * multipart/form-data POST request.
     *
     * @param sourceFile image file to upload
     * @return true when the server answered HTTP 200, false otherwise
     */
    boolean uploadImage(File sourceFile) {
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", java.util.Locale.US).format(new Date());
        String fileName = "IMG_" + timeStamp + ".jpg";
        String lineEnd = "\r\n";
        String twoHyphens = "--";
        String boundary = "*****";
        int maxBufferSize = 1 * 1024 * 1024;
        if (!sourceFile.isFile()) {
            Log.e("uploadFile", "Source File not exist :" + sourceFile);
            return false;
        }
        FileInputStream fileInputStream = null;
        DataOutputStream outputStream = null;
        try {
            fileInputStream = new FileInputStream(sourceFile);
            URL url = new URL(upLoadServerUri);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setDoInput(true); // Allow Inputs
            conn.setDoOutput(true); // Allow Outputs
            conn.setUseCaches(false); // Don't use a Cached Copy
            conn.setRequestMethod("POST");
            conn.setRequestProperty("Connection", "Keep-Alive");
            conn.setRequestProperty("ENCTYPE", "multipart/form-data");
            conn.setRequestProperty("Content-Type", "multipart/form-data;boundary=" + boundary);
            conn.setRequestProperty("uploaded_file", fileName);
            outputStream = new DataOutputStream(conn.getOutputStream());
            outputStream.writeBytes(twoHyphens + boundary + lineEnd);
            outputStream.writeBytes("Content-Disposition: form-data; name=\"uploaded_file\";filename=\"" + fileName + "\"" + lineEnd);
            outputStream.writeBytes("Content-Type: image/jpeg" + lineEnd);
            outputStream.writeBytes("Content-Length: " + sourceFile.length() + lineEnd);
            outputStream.writeBytes(lineEnd);
            // BUGFIX: allocate the buffer once and write only the bytes
            // actually read. The old code wrote 'bufferSize' bytes regardless
            // of the read result (corrupting uploads on short reads) and could
            // resize 'bufferSize' past the allocated buffer length.
            byte[] buffer = new byte[maxBufferSize];
            int bytesRead = fileInputStream.read(buffer, 0, buffer.length);
            while (bytesRead > 0) {
                outputStream.write(buffer, 0, bytesRead);
                bytesRead = fileInputStream.read(buffer, 0, buffer.length);
            }
            outputStream.writeBytes(lineEnd);
            outputStream.writeBytes(twoHyphens + boundary + twoHyphens + lineEnd);
            outputStream.flush();
            serverResponseCode = conn.getResponseCode();
            String serverResponseMessage = conn.getResponseMessage();
            Log.i("uploadFile", "HTTP Response is : " + serverResponseMessage + ": " + serverResponseCode);
            if (serverResponseCode == 200) {
                Toast.makeText(MainActivity.this, "File Upload Complete.",
                        Toast.LENGTH_SHORT).show();
                return true;
            }
        } catch (FileNotFoundException e) {
            // BUGFIX: previously swallowed silently.
            Log.e("Upload file to server", "source file vanished: " + e.getMessage(), e);
        } catch (MalformedURLException ex) {
            ex.printStackTrace();
            Toast.makeText(MainActivity.this, "MalformedURLException",
                    Toast.LENGTH_SHORT).show();
            Log.e("Upload file to server", "error: " + ex.getMessage(), ex);
        } catch (IOException e) {
            // Covers ProtocolException as well.
            e.printStackTrace();
        } finally {
            // BUGFIX: streams were leaked on the success path (early return)
            // and on every exception path.
            closeQuietly(fileInputStream);
            closeQuietly(outputStream);
        }
        return false;
    }

    /** Closes a stream, ignoring errors; null-safe. */
    private static void closeQuietly(java.io.Closeable c) {
        if (c != null) {
            try {
                c.close();
            } catch (IOException ignored) {
                // best-effort close
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.dbcp.dbcp2.cpdsadapter;
import java.io.PrintWriter;
import java.io.Serializable;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.time.Duration;
import java.util.Hashtable;
import java.util.Properties;
import java.util.logging.Logger;
import javax.naming.Context;
import javax.naming.Name;
import javax.naming.NamingException;
import javax.naming.RefAddr;
import javax.naming.Reference;
import javax.naming.Referenceable;
import javax.naming.StringRefAddr;
import javax.naming.spi.ObjectFactory;
import javax.sql.ConnectionPoolDataSource;
import javax.sql.PooledConnection;
import org.apache.tomcat.dbcp.dbcp2.BasicDataSource;
import org.apache.tomcat.dbcp.dbcp2.Constants;
import org.apache.tomcat.dbcp.dbcp2.DelegatingPreparedStatement;
import org.apache.tomcat.dbcp.dbcp2.PStmtKey;
import org.apache.tomcat.dbcp.dbcp2.Utils;
import org.apache.tomcat.dbcp.pool2.KeyedObjectPool;
import org.apache.tomcat.dbcp.pool2.impl.BaseObjectPoolConfig;
import org.apache.tomcat.dbcp.pool2.impl.GenericKeyedObjectPool;
import org.apache.tomcat.dbcp.pool2.impl.GenericKeyedObjectPoolConfig;
/**
* <p>
* An adapter for JDBC drivers that do not include an implementation of {@link javax.sql.ConnectionPoolDataSource}, but
* still include a {@link java.sql.DriverManager} implementation. <code>ConnectionPoolDataSource</code>s are not used
* within general applications. They are used by <code>DataSource</code> implementations that pool
* <code>Connection</code>s, such as {@link org.apache.tomcat.dbcp.dbcp2.datasources.SharedPoolDataSource}. A J2EE container
* will normally provide some method of initializing the <code>ConnectionPoolDataSource</code> whose attributes are
* presented as bean getters/setters and then deploying it via JNDI. It is then available as a source of physical
* connections to the database, when the pooling <code>DataSource</code> needs to create a new physical connection.
* </p>
* <p>
* Although normally used within a JNDI environment, the DriverAdapterCPDS can be instantiated and initialized as any
* bean and then attached directly to a pooling <code>DataSource</code>. <code>Jdbc2PoolDataSource</code> can use the
* <code>ConnectionPoolDataSource</code> with or without the use of JNDI.
* </p>
* <p>
* The DriverAdapterCPDS also provides <code>PreparedStatement</code> pooling which is not generally available in jdbc2
* <code>ConnectionPoolDataSource</code> implementation, but is addressed within the jdbc3 specification. The
* <code>PreparedStatement</code> pool in DriverAdapterCPDS has been in the dbcp package for some time, but it has not
* undergone extensive testing in the configuration used here. It should be considered experimental and can be toggled
* with the poolPreparedStatements attribute.
* </p>
* <p>
* The <a href="package-summary.html">package documentation</a> contains an example using catalina and JNDI. The
* <a href="../datasources/package-summary.html">datasources package documentation</a> shows how to use
* <code>DriverAdapterCPDS</code> as a source for <code>Jdbc2PoolDataSource</code> without the use of JNDI.
* </p>
*
* @since 2.0
*/
public class DriverAdapterCPDS implements ConnectionPoolDataSource, Referenceable, Serializable, ObjectFactory {
    private static final long serialVersionUID = -4820523787212147844L;

    /** Message used when further bean initialization is attempted after first use. */
    private static final String GET_CONNECTION_CALLED = "A PooledConnection was already requested from this source, "
            + "further initialization is not allowed.";

    static {
        // Attempt to prevent deadlocks - see DBCP - 272
        DriverManager.getDrivers();
    }

    /** Description */
    private String description;

    /** Url name */
    private String url;

    /** User name */
    private String userName;

    /** User password */
    private char[] userPassword;

    /** Driver class name */
    private String driver;

    /** Login TimeOut in seconds */
    private int loginTimeout;

    /** Log stream. NOT USED */
    private transient PrintWriter logWriter;

    // PreparedStatement pool properties
    private boolean poolPreparedStatements;
    // Maximum number of idle prepared statements kept per key.
    private int maxIdle = 10;
    // How long the statement-pool evictor sleeps between runs.
    private Duration durationBetweenEvictionRuns = BaseObjectPoolConfig.DEFAULT_TIME_BETWEEN_EVICTION_RUNS;
    // Statements examined per evictor run; negative means a fraction of the pool.
    private int numTestsPerEvictionRun = -1;
    // Minimum idle time before a pooled statement becomes evictable.
    private Duration minEvictableIdleDuration = BaseObjectPoolConfig.DEFAULT_MIN_EVICTABLE_IDLE_DURATION;
    // Upper bound on pooled prepared statements; negative means no limit.
    private int maxPreparedStatements = -1;

    /** Whether or not getConnection has been called */
    private volatile boolean getConnectionCalled;

    /** Connection properties passed to JDBC Driver */
    private Properties connectionProperties;

    /**
     * Controls access to the underlying connection
     */
    private boolean accessToUnderlyingConnectionAllowed;
/**
* Default no-argument constructor for Serialization
*/
public DriverAdapterCPDS() {
}
/**
* Throws an IllegalStateException, if a PooledConnection has already been requested.
*/
private void assertInitializationAllowed() throws IllegalStateException {
if (getConnectionCalled) {
throw new IllegalStateException(GET_CONNECTION_CALLED);
}
}
private boolean getBooleanContentString(final RefAddr ra) {
return Boolean.parseBoolean(getStringContent(ra));
}
/**
* Gets the connection properties passed to the JDBC driver.
*
* @return the JDBC connection properties used when creating connections.
*/
public Properties getConnectionProperties() {
return connectionProperties;
}
/**
* Gets the value of description. This property is here for use by the code which will deploy this data source. It
* is not used internally.
*
* @return value of description, may be null.
* @see #setDescription(String)
*/
public String getDescription() {
return description;
}
/**
* Gets the driver class name.
*
* @return value of driver.
*/
public String getDriver() {
return driver;
}
/**
* Gets the duration to sleep between runs of the idle object evictor thread. When non-positive, no
* idle object evictor thread will be run.
*
* @return the value of the evictor thread timer
* @see #setDurationBetweenEvictionRuns(Duration)
* @since 2.9.0
*/
public Duration getDurationBetweenEvictionRuns() {
return durationBetweenEvictionRuns;
}
private int getIntegerStringContent(final RefAddr ra) {
return Integer.parseInt(getStringContent(ra));
}
/**
* Gets the maximum time in seconds that this data source can wait while attempting to connect to a database. NOT
* USED.
*/
@Override
public int getLoginTimeout() {
return loginTimeout;
}
/**
* Gets the log writer for this data source. NOT USED.
*/
@Override
public PrintWriter getLogWriter() {
return logWriter;
}
/**
* Gets the maximum number of statements that can remain idle in the pool, without extra ones being released, or
* negative for no limit.
*
* @return the value of maxIdle
*/
public int getMaxIdle() {
return maxIdle;
}
/**
* Gets the maximum number of prepared statements.
*
* @return maxPrepartedStatements value
*/
public int getMaxPreparedStatements() {
return maxPreparedStatements;
}
/**
* Gets the minimum amount of time a statement may sit idle in the pool before it is eligible for eviction by the
* idle object evictor (if any).
*
* @see #setMinEvictableIdleDuration
* @see #setDurationBetweenEvictionRuns
* @return the minimum amount of time a statement may sit idle in the pool.
* @since 2.9.0
*/
public Duration getMinEvictableIdleDuration() {
return minEvictableIdleDuration;
}
/**
* Gets the minimum amount of time a statement may sit idle in the pool before it is eligible for eviction by the
* idle object evictor (if any).
*
* @see #setMinEvictableIdleTimeMillis
* @see #setTimeBetweenEvictionRunsMillis
* @return the minimum amount of time a statement may sit idle in the pool.
* @deprecated USe {@link #getMinEvictableIdleDuration()}.
*/
@Deprecated
public int getMinEvictableIdleTimeMillis() {
return (int) minEvictableIdleDuration.toMillis();
}
/**
* Gets the number of statements to examine during each run of the idle object evictor thread (if any.)
*
* @see #setNumTestsPerEvictionRun
* @see #setTimeBetweenEvictionRunsMillis
* @return the number of statements to examine during each run of the idle object evictor thread (if any.)
*/
public int getNumTestsPerEvictionRun() {
return numTestsPerEvictionRun;
}
/**
* Implements {@link ObjectFactory} to create an instance of this class
*/
@Override
public Object getObjectInstance(final Object refObj, final Name name, final Context context,
final Hashtable<?, ?> env) throws Exception {
// The spec says to return null if we can't create an instance
// of the reference
DriverAdapterCPDS cpds = null;
if (refObj instanceof Reference) {
final Reference ref = (Reference) refObj;
if (ref.getClassName().equals(getClass().getName())) {
RefAddr ra = ref.get("description");
if (isNotEmpty(ra)) {
setDescription(getStringContent(ra));
}
ra = ref.get("driver");
if (isNotEmpty(ra)) {
setDriver(getStringContent(ra));
}
ra = ref.get("url");
if (isNotEmpty(ra)) {
setUrl(getStringContent(ra));
}
ra = ref.get(Constants.KEY_USER);
if (isNotEmpty(ra)) {
setUser(getStringContent(ra));
}
ra = ref.get(Constants.KEY_PASSWORD);
if (isNotEmpty(ra)) {
setPassword(getStringContent(ra));
}
ra = ref.get("poolPreparedStatements");
if (isNotEmpty(ra)) {
setPoolPreparedStatements(getBooleanContentString(ra));
}
ra = ref.get("maxIdle");
if (isNotEmpty(ra)) {
setMaxIdle(getIntegerStringContent(ra));
}
ra = ref.get("timeBetweenEvictionRunsMillis");
if (isNotEmpty(ra)) {
setTimeBetweenEvictionRunsMillis(getIntegerStringContent(ra));
}
ra = ref.get("numTestsPerEvictionRun");
if (isNotEmpty(ra)) {
setNumTestsPerEvictionRun(getIntegerStringContent(ra));
}
ra = ref.get("minEvictableIdleTimeMillis");
if (isNotEmpty(ra)) {
setMinEvictableIdleTimeMillis(getIntegerStringContent(ra));
}
ra = ref.get("maxPreparedStatements");
if (isNotEmpty(ra)) {
setMaxPreparedStatements(getIntegerStringContent(ra));
}
ra = ref.get("accessToUnderlyingConnectionAllowed");
if (isNotEmpty(ra)) {
setAccessToUnderlyingConnectionAllowed(getBooleanContentString(ra));
}
cpds = this;
}
}
return cpds;
}
@Override
public Logger getParentLogger() throws SQLFeatureNotSupportedException {
throw new SQLFeatureNotSupportedException();
}
/**
* Gets the value of password for the default user.
*
* @return value of password.
*/
public String getPassword() {
return Utils.toString(userPassword);
}
/**
* Gets the value of password for the default user.
*
* @return value of password.
* @since 2.4.0
*/
public char[] getPasswordCharArray() {
return userPassword == null ? null : userPassword.clone();
}
/**
* Attempts to establish a database connection using the default user and password.
*/
@Override
public PooledConnection getPooledConnection() throws SQLException {
return getPooledConnection(getUser(), getPassword());
}
/**
* Attempts to establish a database connection.
*
* @param pooledUserName name to be used for the connection
* @param pooledUserPassword password to be used fur the connection
*/
@Override
public PooledConnection getPooledConnection(final String pooledUserName, final String pooledUserPassword)
throws SQLException {
getConnectionCalled = true;
PooledConnectionImpl pooledConnection = null;
// Workaround for buggy WebLogic 5.1 class loader - ignore the exception upon first invocation.
try {
if (connectionProperties != null) {
update(connectionProperties, Constants.KEY_USER, pooledUserName);
update(connectionProperties, Constants.KEY_PASSWORD, pooledUserPassword);
pooledConnection = new PooledConnectionImpl(
DriverManager.getConnection(getUrl(), connectionProperties));
} else {
pooledConnection = new PooledConnectionImpl(
DriverManager.getConnection(getUrl(), pooledUserName, pooledUserPassword));
}
pooledConnection.setAccessToUnderlyingConnectionAllowed(isAccessToUnderlyingConnectionAllowed());
} catch (final ClassCircularityError e) {
if (connectionProperties != null) {
pooledConnection = new PooledConnectionImpl(
DriverManager.getConnection(getUrl(), connectionProperties));
} else {
pooledConnection = new PooledConnectionImpl(
DriverManager.getConnection(getUrl(), pooledUserName, pooledUserPassword));
}
pooledConnection.setAccessToUnderlyingConnectionAllowed(isAccessToUnderlyingConnectionAllowed());
}
KeyedObjectPool<PStmtKey, DelegatingPreparedStatement> stmtPool = null;
if (isPoolPreparedStatements()) {
final GenericKeyedObjectPoolConfig<DelegatingPreparedStatement> config = new GenericKeyedObjectPoolConfig<>();
config.setMaxTotalPerKey(Integer.MAX_VALUE);
config.setBlockWhenExhausted(false);
config.setMaxWait(Duration.ZERO);
config.setMaxIdlePerKey(getMaxIdle());
if (getMaxPreparedStatements() <= 0) {
// Since there is no limit, create a prepared statement pool with an eviction thread;
// evictor settings are the same as the connection pool settings.
config.setTimeBetweenEvictionRuns(getDurationBetweenEvictionRuns());
config.setNumTestsPerEvictionRun(getNumTestsPerEvictionRun());
config.setMinEvictableIdleTime(getMinEvictableIdleDuration());
} else {
// Since there is a limit, create a prepared statement pool without an eviction thread;
// pool has LRU functionality so when the limit is reached, 15% of the pool is cleared.
// see org.apache.commons.pool2.impl.GenericKeyedObjectPool.clearOldest method
config.setMaxTotal(getMaxPreparedStatements());
config.setTimeBetweenEvictionRuns(Duration.ofMillis(-1));
config.setNumTestsPerEvictionRun(0);
config.setMinEvictableIdleTime(Duration.ZERO);
}
stmtPool = new GenericKeyedObjectPool<>(pooledConnection, config);
pooledConnection.setStatementPool(stmtPool);
}
return pooledConnection;
}
/**
* Implements {@link Referenceable}.
*/
@Override
public Reference getReference() throws NamingException {
// this class implements its own factory
final String factory = getClass().getName();
final Reference ref = new Reference(getClass().getName(), factory, null);
ref.add(new StringRefAddr("description", getDescription()));
ref.add(new StringRefAddr("driver", getDriver()));
ref.add(new StringRefAddr("loginTimeout", String.valueOf(getLoginTimeout())));
ref.add(new StringRefAddr(Constants.KEY_PASSWORD, getPassword()));
ref.add(new StringRefAddr(Constants.KEY_USER, getUser()));
ref.add(new StringRefAddr("url", getUrl()));
ref.add(new StringRefAddr("poolPreparedStatements", String.valueOf(isPoolPreparedStatements())));
ref.add(new StringRefAddr("maxIdle", String.valueOf(getMaxIdle())));
ref.add(new StringRefAddr("numTestsPerEvictionRun", String.valueOf(getNumTestsPerEvictionRun())));
ref.add(new StringRefAddr("maxPreparedStatements", String.valueOf(getMaxPreparedStatements())));
//
// Pair of current and deprecated.
ref.add(new StringRefAddr("durationBetweenEvictionRuns", String.valueOf(getDurationBetweenEvictionRuns())));
ref.add(new StringRefAddr("timeBetweenEvictionRunsMillis", String.valueOf(getTimeBetweenEvictionRunsMillis())));
//
// Pair of current and deprecated.
ref.add(new StringRefAddr("minEvictableIdleDuration", String.valueOf(getMinEvictableIdleDuration())));
ref.add(new StringRefAddr("minEvictableIdleTimeMillis", String.valueOf(getMinEvictableIdleTimeMillis())));
return ref;
}
private String getStringContent(final RefAddr ra) {
return ra.getContent().toString();
}
/**
* Gets the number of milliseconds to sleep between runs of the idle object evictor thread. When non-positive, no
* idle object evictor thread will be run.
*
* @return the value of the evictor thread timer
* @see #setDurationBetweenEvictionRuns(Duration)
* @deprecated Use {@link #getDurationBetweenEvictionRuns()}.
*/
@Deprecated
public long getTimeBetweenEvictionRunsMillis() {
return durationBetweenEvictionRuns.toMillis();
}
/**
* Gets the value of url used to locate the database for this datasource.
*
* @return value of url.
*/
public String getUrl() {
return url;
}
/**
* Gets the value of default user (login or user name).
*
* @return value of user.
*/
public String getUser() {
return userName;
}
/**
* Returns the value of the accessToUnderlyingConnectionAllowed property.
*
* @return true if access to the underlying is allowed, false otherwise.
*/
public synchronized boolean isAccessToUnderlyingConnectionAllowed() {
return this.accessToUnderlyingConnectionAllowed;
}
private boolean isNotEmpty(final RefAddr ra) {
return ra != null && ra.getContent() != null;
}
/**
* Whether to toggle the pooling of <code>PreparedStatement</code>s
*
* @return value of poolPreparedStatements.
*/
public boolean isPoolPreparedStatements() {
return poolPreparedStatements;
}
/**
* Sets the value of the accessToUnderlyingConnectionAllowed property. It controls if the PoolGuard allows access to
* the underlying connection. (Default: false)
*
* @param allow Access to the underlying connection is granted when true.
*/
public synchronized void setAccessToUnderlyingConnectionAllowed(final boolean allow) {
this.accessToUnderlyingConnectionAllowed = allow;
}
/**
* Sets the connection properties passed to the JDBC driver.
* <p>
* If <code>props</code> contains "user" and/or "password" properties, the corresponding instance properties are
* set. If these properties are not present, they are filled in using {@link #getUser()}, {@link #getPassword()}
* when {@link #getPooledConnection()} is called, or using the actual parameters to the method call when
* {@link #getPooledConnection(String, String)} is called. Calls to {@link #setUser(String)} or
* {@link #setPassword(String)} overwrite the values of these properties if <code>connectionProperties</code> is not
* null.
* </p>
*
* @param props Connection properties to use when creating new connections.
* @throws IllegalStateException if {@link #getPooledConnection()} has been called
*/
public void setConnectionProperties(final Properties props) {
assertInitializationAllowed();
connectionProperties = props;
if (connectionProperties != null) {
if (connectionProperties.containsKey(Constants.KEY_USER)) {
setUser(connectionProperties.getProperty(Constants.KEY_USER));
}
if (connectionProperties.containsKey(Constants.KEY_PASSWORD)) {
setPassword(connectionProperties.getProperty(Constants.KEY_PASSWORD));
}
}
}
/**
* Sets the value of description. This property is here for use by the code which will deploy this datasource. It is
* not used internally.
*
* @param description Value to assign to description.
*/
public void setDescription(final String description) {
this.description = description;
}
/**
* Sets the driver class name. Setting the driver class name cause the driver to be registered with the
* DriverManager.
*
* @param driver Value to assign to driver.
* @throws IllegalStateException if {@link #getPooledConnection()} has been called
* @throws ClassNotFoundException if the class cannot be located
*/
public void setDriver(final String driver) throws ClassNotFoundException {
assertInitializationAllowed();
this.driver = driver;
// make sure driver is registered
Class.forName(driver);
}
/**
* Sets the duration to sleep between runs of the idle object evictor thread. When non-positive, no
* idle object evictor thread will be run.
*
* @param durationBetweenEvictionRuns The duration to sleep between runs of the idle object evictor
* thread. When non-positive, no idle object evictor thread will be run.
* @see #getDurationBetweenEvictionRuns()
* @throws IllegalStateException if {@link #getPooledConnection()} has been called
* @since 2.9.0
*/
public void setDurationBetweenEvictionRuns(final Duration durationBetweenEvictionRuns) {
assertInitializationAllowed();
this.durationBetweenEvictionRuns = durationBetweenEvictionRuns;
}
/**
* Sets the maximum time in seconds that this data source will wait while attempting to connect to a database. NOT
* USED.
*/
@Override
public void setLoginTimeout(final int seconds) {
this.loginTimeout = seconds;
}
/**
* Sets the log writer for this data source. NOT USED.
*/
@Override
public void setLogWriter(final PrintWriter logWriter) {
this.logWriter = logWriter;
}
/**
* Gets the maximum number of statements that can remain idle in the pool, without extra ones being released, or
* negative for no limit.
*
* @param maxIdle The maximum number of statements that can remain idle
* @throws IllegalStateException if {@link #getPooledConnection()} has been called
*/
public void setMaxIdle(final int maxIdle) {
assertInitializationAllowed();
this.maxIdle = maxIdle;
}
/**
* Sets the maximum number of prepared statements.
*
* @param maxPreparedStatements the new maximum number of prepared statements
*/
public void setMaxPreparedStatements(final int maxPreparedStatements) {
this.maxPreparedStatements = maxPreparedStatements;
}
/**
* Sets the minimum amount of time a statement may sit idle in the pool before it is eligible for eviction by the
* idle object evictor (if any). When non-positive, no objects will be evicted from the pool due to idle time alone.
*
* @param minEvictableIdleDuration minimum time to set in milliseconds.
* @see #getMinEvictableIdleDuration()
* @see #setDurationBetweenEvictionRuns(Duration)
* @throws IllegalStateException if {@link #getPooledConnection()} has been called.
* @since 2.9.0
*/
public void setMinEvictableIdleDuration(final Duration minEvictableIdleDuration) {
assertInitializationAllowed();
this.minEvictableIdleDuration = minEvictableIdleDuration;
}
/**
* Sets the minimum amount of time a statement may sit idle in the pool before it is eligible for eviction by the
* idle object evictor (if any). When non-positive, no objects will be evicted from the pool due to idle time alone.
*
* @param minEvictableIdleTimeMillis minimum time to set in milliseconds.
* @see #getMinEvictableIdleDuration()
* @see #setDurationBetweenEvictionRuns(Duration)
* @throws IllegalStateException if {@link #getPooledConnection()} has been called
* @deprecated Use {@link #setMinEvictableIdleDuration(Duration)}.
*/
@Deprecated
public void setMinEvictableIdleTimeMillis(final int minEvictableIdleTimeMillis) {
assertInitializationAllowed();
this.minEvictableIdleDuration = Duration.ofMillis(minEvictableIdleTimeMillis);
}
/**
* Sets the number of statements to examine during each run of the idle object evictor thread (if any).
* <p>
* When a negative value is supplied,
* <code>ceil({@link BasicDataSource#getNumIdle})/abs({@link #getNumTestsPerEvictionRun})</code> tests will be run.
* I.e., when the value is <i>-n</i>, roughly one <i>n</i>th of the idle objects will be tested per run.
* </p>
*
* @param numTestsPerEvictionRun number of statements to examine per run
* @see #getNumTestsPerEvictionRun()
* @see #setDurationBetweenEvictionRuns(Duration)
* @throws IllegalStateException if {@link #getPooledConnection()} has been called
*/
public void setNumTestsPerEvictionRun(final int numTestsPerEvictionRun) {
assertInitializationAllowed();
this.numTestsPerEvictionRun = numTestsPerEvictionRun;
}
/**
* Sets the value of password for the default user.
*
* @param userPassword Value to assign to password.
* @throws IllegalStateException if {@link #getPooledConnection()} has been called
*/
public void setPassword(final char[] userPassword) {
assertInitializationAllowed();
this.userPassword = Utils.clone(userPassword);
update(connectionProperties, Constants.KEY_PASSWORD, Utils.toString(this.userPassword));
}
/**
* Sets the value of password for the default user.
*
* @param userPassword Value to assign to password.
* @throws IllegalStateException if {@link #getPooledConnection()} has been called
*/
public void setPassword(final String userPassword) {
assertInitializationAllowed();
this.userPassword = Utils.toCharArray(userPassword);
update(connectionProperties, Constants.KEY_PASSWORD, userPassword);
}
/**
* Whether to toggle the pooling of <code>PreparedStatement</code>s
*
* @param poolPreparedStatements true to pool statements.
* @throws IllegalStateException if {@link #getPooledConnection()} has been called
*/
public void setPoolPreparedStatements(final boolean poolPreparedStatements) {
assertInitializationAllowed();
this.poolPreparedStatements = poolPreparedStatements;
}
/**
* Sets the number of milliseconds to sleep between runs of the idle object evictor thread. When non-positive, no
* idle object evictor thread will be run.
*
* @param timeBetweenEvictionRunsMillis The number of milliseconds to sleep between runs of the idle object evictor
* thread. When non-positive, no idle object evictor thread will be run.
* @see #getDurationBetweenEvictionRuns()
* @throws IllegalStateException if {@link #getPooledConnection()} has been called
* @deprecated Use {@link #setDurationBetweenEvictionRuns(Duration)}.
*/
@Deprecated
public void setTimeBetweenEvictionRunsMillis(final long timeBetweenEvictionRunsMillis) {
assertInitializationAllowed();
this.durationBetweenEvictionRuns = Duration.ofMillis(timeBetweenEvictionRunsMillis);
}
/**
* Sets the value of URL string used to locate the database for this datasource.
*
* @param url Value to assign to url.
* @throws IllegalStateException if {@link #getPooledConnection()} has been called
*/
public void setUrl(final String url) {
assertInitializationAllowed();
this.url = url;
}
/**
* Sets the value of default user (login or user name).
*
* @param userName Value to assign to user.
* @throws IllegalStateException if {@link #getPooledConnection()} has been called
*/
public void setUser(final String userName) {
assertInitializationAllowed();
this.userName = userName;
update(connectionProperties, Constants.KEY_USER, userName);
}
/**
* Does not print the userName and userPassword field nor the 'user' or 'password' in the connectionProperties.
*
* @since 2.6.0
*/
@Override
public synchronized String toString() {
final StringBuilder builder = new StringBuilder(super.toString());
builder.append("[description=");
builder.append(description);
builder.append(", url=");
// TODO What if the connection string contains a 'user' or 'password' query parameter but that connection string
// is not in a legal URL format?
builder.append(url);
builder.append(", driver=");
builder.append(driver);
builder.append(", loginTimeout=");
builder.append(loginTimeout);
builder.append(", poolPreparedStatements=");
builder.append(poolPreparedStatements);
builder.append(", maxIdle=");
builder.append(maxIdle);
builder.append(", timeBetweenEvictionRunsMillis=");
builder.append(durationBetweenEvictionRuns);
builder.append(", numTestsPerEvictionRun=");
builder.append(numTestsPerEvictionRun);
builder.append(", minEvictableIdleTimeMillis=");
builder.append(minEvictableIdleDuration);
builder.append(", maxPreparedStatements=");
builder.append(maxPreparedStatements);
builder.append(", getConnectionCalled=");
builder.append(getConnectionCalled);
builder.append(", connectionProperties=");
builder.append(Utils.cloneWithoutCredentials(connectionProperties));
builder.append(", accessToUnderlyingConnectionAllowed=");
builder.append(accessToUnderlyingConnectionAllowed);
builder.append("]");
return builder.toString();
}
private void update(final Properties properties, final String key, final String value) {
if (properties != null && key != null) {
if (value == null) {
properties.remove(key);
} else {
properties.setProperty(key, value);
}
}
}
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import static com.google.cloud.compute.v1.ResourcePoliciesClient.AggregatedListPagedResponse;
import static com.google.cloud.compute.v1.ResourcePoliciesClient.ListPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.httpjson.ProtoOperationTransformers;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.AggregatedListResourcePoliciesRequest;
import com.google.cloud.compute.v1.DeleteResourcePolicyRequest;
import com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest;
import com.google.cloud.compute.v1.GetResourcePolicyRequest;
import com.google.cloud.compute.v1.InsertResourcePolicyRequest;
import com.google.cloud.compute.v1.ListResourcePoliciesRequest;
import com.google.cloud.compute.v1.Operation;
import com.google.cloud.compute.v1.Policy;
import com.google.cloud.compute.v1.ResourcePoliciesScopedList;
import com.google.cloud.compute.v1.ResourcePolicy;
import com.google.cloud.compute.v1.ResourcePolicyAggregatedList;
import com.google.cloud.compute.v1.ResourcePolicyList;
import com.google.cloud.compute.v1.SetIamPolicyResourcePolicyRequest;
import com.google.cloud.compute.v1.TestIamPermissionsResourcePolicyRequest;
import com.google.cloud.compute.v1.TestPermissionsResponse;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import javax.annotation.Generated;
import org.threeten.bp.Duration;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link ResourcePoliciesStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (compute.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the total timeout of get to 30 seconds:
*
* <pre>{@code
* ResourcePoliciesStubSettings.Builder resourcePoliciesSettingsBuilder =
* ResourcePoliciesStubSettings.newBuilder();
* resourcePoliciesSettingsBuilder
* .getSettings()
* .setRetrySettings(
* resourcePoliciesSettingsBuilder
* .getSettings()
* .getRetrySettings()
* .toBuilder()
* .setTotalTimeout(Duration.ofSeconds(30))
* .build());
* ResourcePoliciesStubSettings resourcePoliciesSettings = resourcePoliciesSettingsBuilder.build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class ResourcePoliciesStubSettings extends StubSettings<ResourcePoliciesStubSettings> {
/** The default scopes of the service. */
private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
ImmutableList.<String>builder()
.add("https://www.googleapis.com/auth/compute")
.add("https://www.googleapis.com/auth/cloud-platform")
.build();
private final PagedCallSettings<
AggregatedListResourcePoliciesRequest,
ResourcePolicyAggregatedList,
AggregatedListPagedResponse>
aggregatedListSettings;
private final UnaryCallSettings<DeleteResourcePolicyRequest, Operation> deleteSettings;
private final OperationCallSettings<DeleteResourcePolicyRequest, Operation, Operation>
deleteOperationSettings;
private final UnaryCallSettings<GetResourcePolicyRequest, ResourcePolicy> getSettings;
private final UnaryCallSettings<GetIamPolicyResourcePolicyRequest, Policy> getIamPolicySettings;
private final UnaryCallSettings<InsertResourcePolicyRequest, Operation> insertSettings;
private final OperationCallSettings<InsertResourcePolicyRequest, Operation, Operation>
insertOperationSettings;
private final PagedCallSettings<
ListResourcePoliciesRequest, ResourcePolicyList, ListPagedResponse>
listSettings;
private final UnaryCallSettings<SetIamPolicyResourcePolicyRequest, Policy> setIamPolicySettings;
private final UnaryCallSettings<TestIamPermissionsResourcePolicyRequest, TestPermissionsResponse>
testIamPermissionsSettings;
private static final PagedListDescriptor<
AggregatedListResourcePoliciesRequest,
ResourcePolicyAggregatedList,
Map.Entry<String, ResourcePoliciesScopedList>>
AGGREGATED_LIST_PAGE_STR_DESC =
new PagedListDescriptor<
AggregatedListResourcePoliciesRequest,
ResourcePolicyAggregatedList,
Map.Entry<String, ResourcePoliciesScopedList>>() {
@Override
public String emptyToken() {
return "";
}
@Override
public AggregatedListResourcePoliciesRequest injectToken(
AggregatedListResourcePoliciesRequest payload, String token) {
return AggregatedListResourcePoliciesRequest.newBuilder(payload)
.setPageToken(token)
.build();
}
@Override
public AggregatedListResourcePoliciesRequest injectPageSize(
AggregatedListResourcePoliciesRequest payload, int pageSize) {
return AggregatedListResourcePoliciesRequest.newBuilder(payload)
.setMaxResults(pageSize)
.build();
}
@Override
public Integer extractPageSize(AggregatedListResourcePoliciesRequest payload) {
return payload.getMaxResults();
}
@Override
public String extractNextToken(ResourcePolicyAggregatedList payload) {
return payload.getNextPageToken();
}
@Override
public Iterable<Map.Entry<String, ResourcePoliciesScopedList>> extractResources(
ResourcePolicyAggregatedList payload) {
return payload.getItemsMap() == null
? Collections.<Map.Entry<String, ResourcePoliciesScopedList>>emptySet()
: payload.getItemsMap().entrySet();
}
};
private static final PagedListDescriptor<
ListResourcePoliciesRequest, ResourcePolicyList, ResourcePolicy>
LIST_PAGE_STR_DESC =
new PagedListDescriptor<
ListResourcePoliciesRequest, ResourcePolicyList, ResourcePolicy>() {
@Override
public String emptyToken() {
return "";
}
@Override
public ListResourcePoliciesRequest injectToken(
ListResourcePoliciesRequest payload, String token) {
return ListResourcePoliciesRequest.newBuilder(payload).setPageToken(token).build();
}
@Override
public ListResourcePoliciesRequest injectPageSize(
ListResourcePoliciesRequest payload, int pageSize) {
return ListResourcePoliciesRequest.newBuilder(payload)
.setMaxResults(pageSize)
.build();
}
@Override
public Integer extractPageSize(ListResourcePoliciesRequest payload) {
return payload.getMaxResults();
}
@Override
public String extractNextToken(ResourcePolicyList payload) {
return payload.getNextPageToken();
}
@Override
public Iterable<ResourcePolicy> extractResources(ResourcePolicyList payload) {
return payload.getItemsList() == null
? ImmutableList.<ResourcePolicy>of()
: payload.getItemsList();
}
};
  // Factory that combines the aggregatedList page descriptor, the request, and the call context
  // into an AggregatedListPagedResponse future. Used by the paged call settings below.
  private static final PagedListResponseFactory<
          AggregatedListResourcePoliciesRequest,
          ResourcePolicyAggregatedList,
          AggregatedListPagedResponse>
      AGGREGATED_LIST_PAGE_STR_FACT =
          new PagedListResponseFactory<
              AggregatedListResourcePoliciesRequest,
              ResourcePolicyAggregatedList,
              AggregatedListPagedResponse>() {
            @Override
            public ApiFuture<AggregatedListPagedResponse> getFuturePagedResponse(
                UnaryCallable<AggregatedListResourcePoliciesRequest, ResourcePolicyAggregatedList>
                    callable,
                AggregatedListResourcePoliciesRequest request,
                ApiCallContext context,
                ApiFuture<ResourcePolicyAggregatedList> futureResponse) {
              // Aggregated responses are keyed by scope, hence the Map.Entry resource type.
              PageContext<
                      AggregatedListResourcePoliciesRequest,
                      ResourcePolicyAggregatedList,
                      Map.Entry<String, ResourcePoliciesScopedList>>
                  pageContext =
                      PageContext.create(callable, AGGREGATED_LIST_PAGE_STR_DESC, request, context);
              return AggregatedListPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // Factory that combines the list page descriptor, the request, and the call context into a
  // ListPagedResponse future. Used by the paged call settings below.
  private static final PagedListResponseFactory<
          ListResourcePoliciesRequest, ResourcePolicyList, ListPagedResponse>
      LIST_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListResourcePoliciesRequest, ResourcePolicyList, ListPagedResponse>() {
            @Override
            public ApiFuture<ListPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListResourcePoliciesRequest, ResourcePolicyList> callable,
                ListResourcePoliciesRequest request,
                ApiCallContext context,
                ApiFuture<ResourcePolicyList> futureResponse) {
              PageContext<ListResourcePoliciesRequest, ResourcePolicyList, ResourcePolicy>
                  pageContext = PageContext.create(callable, LIST_PAGE_STR_DESC, request, context);
              return ListPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  /** Returns the object with the settings used for paged calls to aggregatedList. */
  public PagedCallSettings<
          AggregatedListResourcePoliciesRequest,
          ResourcePolicyAggregatedList,
          AggregatedListPagedResponse>
      aggregatedListSettings() {
    return aggregatedListSettings;
  }
  /** Returns the object with the settings used for unary calls to delete. */
  public UnaryCallSettings<DeleteResourcePolicyRequest, Operation> deleteSettings() {
    return deleteSettings;
  }
  /** Returns the object with the settings used for long-running-operation calls to delete. */
  public OperationCallSettings<DeleteResourcePolicyRequest, Operation, Operation>
      deleteOperationSettings() {
    return deleteOperationSettings;
  }
  /** Returns the object with the settings used for unary calls to get. */
  public UnaryCallSettings<GetResourcePolicyRequest, ResourcePolicy> getSettings() {
    return getSettings;
  }
  /** Returns the object with the settings used for unary calls to getIamPolicy. */
  public UnaryCallSettings<GetIamPolicyResourcePolicyRequest, Policy> getIamPolicySettings() {
    return getIamPolicySettings;
  }
  /** Returns the object with the settings used for unary calls to insert. */
  public UnaryCallSettings<InsertResourcePolicyRequest, Operation> insertSettings() {
    return insertSettings;
  }
  /** Returns the object with the settings used for long-running-operation calls to insert. */
  public OperationCallSettings<InsertResourcePolicyRequest, Operation, Operation>
      insertOperationSettings() {
    return insertOperationSettings;
  }
  /** Returns the object with the settings used for paged calls to list. */
  public PagedCallSettings<ListResourcePoliciesRequest, ResourcePolicyList, ListPagedResponse>
      listSettings() {
    return listSettings;
  }
  /** Returns the object with the settings used for unary calls to setIamPolicy. */
  public UnaryCallSettings<SetIamPolicyResourcePolicyRequest, Policy> setIamPolicySettings() {
    return setIamPolicySettings;
  }
  /** Returns the object with the settings used for unary calls to testIamPermissions. */
  public UnaryCallSettings<TestIamPermissionsResourcePolicyRequest, TestPermissionsResponse>
      testIamPermissionsSettings() {
    return testIamPermissionsSettings;
  }
@BetaApi("A restructuring of stub classes is planned, so this may break in the future")
public ResourcePoliciesStub createStub() throws IOException {
if (getTransportChannelProvider()
.getTransportName()
.equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
return HttpJsonResourcePoliciesStub.create(this);
}
throw new UnsupportedOperationException(
String.format(
"Transport not supported: %s", getTransportChannelProvider().getTransportName()));
}
  /** Returns a builder for the default ExecutorProvider used by this service's clients. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }
  /** Returns the default service endpoint ({@code host:port}). */
  public static String getDefaultEndpoint() {
    return "compute.googleapis.com:443";
  }
  /** Returns the default mTLS (mutual TLS) service endpoint ({@code host:port}). */
  public static String getDefaultMtlsEndpoint() {
    return "compute.mtls.googleapis.com:443";
  }
  /** Returns the default OAuth scopes requested when authenticating to this service. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }
  /** Returns a builder for the default credentials, preconfigured with this service's scopes. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }
  /** Returns a builder for the default HTTP/JSON ChannelProvider for this service. */
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }
  /** Returns the default transport channel provider (HTTP/JSON) for this service. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultHttpJsonTransportProviderBuilder().build();
  }
  /** Returns a header-provider builder stamped with the library and transport version tokens. */
  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(ResourcePoliciesStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }
  /** Returns a new builder for this class, initialized with the service defaults. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }
  /** Returns a new builder for this class, taking defaults from the given client context. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }
  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  /**
   * Constructs an instance from the given builder, freezing each per-method settings builder
   * into its immutable form.
   */
  protected ResourcePoliciesStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    aggregatedListSettings = settingsBuilder.aggregatedListSettings().build();
    deleteSettings = settingsBuilder.deleteSettings().build();
    deleteOperationSettings = settingsBuilder.deleteOperationSettings().build();
    getSettings = settingsBuilder.getSettings().build();
    getIamPolicySettings = settingsBuilder.getIamPolicySettings().build();
    insertSettings = settingsBuilder.insertSettings().build();
    insertOperationSettings = settingsBuilder.insertOperationSettings().build();
    listSettings = settingsBuilder.listSettings().build();
    setIamPolicySettings = settingsBuilder.setIamPolicySettings().build();
    testIamPermissionsSettings = settingsBuilder.testIamPermissionsSettings().build();
  }
  /** Builder for ResourcePoliciesStubSettings. */
  public static class Builder extends StubSettings.Builder<ResourcePoliciesStubSettings, Builder> {
    // Every unary call settings builder, collected so applyToAllUnaryMethods can update them in
    // a single pass. Operation (LRO) settings builders are intentionally not included.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final PagedCallSettings.Builder<
            AggregatedListResourcePoliciesRequest,
            ResourcePolicyAggregatedList,
            AggregatedListPagedResponse>
        aggregatedListSettings;
    private final UnaryCallSettings.Builder<DeleteResourcePolicyRequest, Operation> deleteSettings;
    private final OperationCallSettings.Builder<DeleteResourcePolicyRequest, Operation, Operation>
        deleteOperationSettings;
    private final UnaryCallSettings.Builder<GetResourcePolicyRequest, ResourcePolicy> getSettings;
    private final UnaryCallSettings.Builder<GetIamPolicyResourcePolicyRequest, Policy>
        getIamPolicySettings;
    private final UnaryCallSettings.Builder<InsertResourcePolicyRequest, Operation> insertSettings;
    private final OperationCallSettings.Builder<InsertResourcePolicyRequest, Operation, Operation>
        insertOperationSettings;
    private final PagedCallSettings.Builder<
            ListResourcePoliciesRequest, ResourcePolicyList, ListPagedResponse>
        listSettings;
    private final UnaryCallSettings.Builder<SetIamPolicyResourcePolicyRequest, Policy>
        setIamPolicySettings;
    private final UnaryCallSettings.Builder<
            TestIamPermissionsResourcePolicyRequest, TestPermissionsResponse>
        testIamPermissionsSettings;
    // Named sets of retryable status codes, keyed by the policy names referenced in initDefaults.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;
    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      // retry_policy_0: retry on DEADLINE_EXCEEDED and UNAVAILABLE (idempotent reads).
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE)));
      // no_retry_1: no retryable codes (mutating calls).
      definitions.put(
          "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }
    // Named retry/timeout parameter bundles, keyed by the policy names referenced in initDefaults.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;
    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      // retry_policy_0: exponential backoff starting at 100ms, capped at 60s; 10 minute total.
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(600000L))
              .setTotalTimeout(Duration.ofMillis(600000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      // no_retry_1: single attempt with a 10 minute timeout.
      settings =
          RetrySettings.newBuilder()
              .setInitialRpcTimeout(Duration.ofMillis(600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(600000L))
              .setTotalTimeout(Duration.ofMillis(600000L))
              .build();
      definitions.put("no_retry_1_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }
    /** Creates a builder with no client context. */
    protected Builder() {
      this(((ClientContext) null));
    }
    /** Creates a builder taking defaults from the given client context (which may be null). */
    protected Builder(ClientContext clientContext) {
      super(clientContext);
      aggregatedListSettings = PagedCallSettings.newBuilder(AGGREGATED_LIST_PAGE_STR_FACT);
      deleteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteOperationSettings = OperationCallSettings.newBuilder();
      getSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      insertSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      insertOperationSettings = OperationCallSettings.newBuilder();
      listSettings = PagedCallSettings.newBuilder(LIST_PAGE_STR_FACT);
      setIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      testIamPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              aggregatedListSettings,
              deleteSettings,
              getSettings,
              getIamPolicySettings,
              insertSettings,
              listSettings,
              setIamPolicySettings,
              testIamPermissionsSettings);
      initDefaults(this);
    }
    /** Creates a builder pre-populated from an existing settings instance (see toBuilder()). */
    protected Builder(ResourcePoliciesStubSettings settings) {
      super(settings);
      aggregatedListSettings = settings.aggregatedListSettings.toBuilder();
      deleteSettings = settings.deleteSettings.toBuilder();
      deleteOperationSettings = settings.deleteOperationSettings.toBuilder();
      getSettings = settings.getSettings.toBuilder();
      getIamPolicySettings = settings.getIamPolicySettings.toBuilder();
      insertSettings = settings.insertSettings.toBuilder();
      insertOperationSettings = settings.insertOperationSettings.toBuilder();
      listSettings = settings.listSettings.toBuilder();
      setIamPolicySettings = settings.setIamPolicySettings.toBuilder();
      testIamPermissionsSettings = settings.testIamPermissionsSettings.toBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              aggregatedListSettings,
              deleteSettings,
              getSettings,
              getIamPolicySettings,
              insertSettings,
              listSettings,
              setIamPolicySettings,
              testIamPermissionsSettings);
    }
    /** Creates a builder with the default endpoint, credentials, transport and retry settings. */
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);
      return initDefaults(builder);
    }
    /**
     * Applies the per-method retry policies defined above, plus the polling algorithms for the
     * delete and insert long-running operations. Returns the same builder for chaining.
     */
    private static Builder initDefaults(Builder builder) {
      builder
          .aggregatedListSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .deleteSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      builder
          .getSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .getIamPolicySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .insertSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      builder
          .listSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .setIamPolicySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      builder
          .testIamPermissionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      // LRO polling for delete: poll from 500ms up to 20s between attempts, 10 minutes total.
      builder
          .deleteOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<DeleteResourcePolicyRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelay(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelay(Duration.ofMillis(20000L))
                      .setInitialRpcTimeout(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeout(Duration.ZERO)
                      .setTotalTimeout(Duration.ofMillis(600000L))
                      .build()));
      // LRO polling for insert: identical schedule to delete.
      builder
          .insertOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<InsertResourcePolicyRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelay(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelay(Duration.ofMillis(20000L))
                      .setInitialRpcTimeout(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeout(Duration.ZERO)
                      .setTotalTimeout(Duration.ofMillis(600000L))
                      .build()));
      return builder;
    }
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }
    /** Returns all unary call settings builders (see unaryMethodSettingsBuilders field). */
    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }
    /** Returns the builder for the settings used for calls to aggregatedList. */
    public PagedCallSettings.Builder<
            AggregatedListResourcePoliciesRequest,
            ResourcePolicyAggregatedList,
            AggregatedListPagedResponse>
        aggregatedListSettings() {
      return aggregatedListSettings;
    }
    /** Returns the builder for the settings used for calls to delete. */
    public UnaryCallSettings.Builder<DeleteResourcePolicyRequest, Operation> deleteSettings() {
      return deleteSettings;
    }
    /** Returns the builder for the settings used for long-running-operation calls to delete. */
    @BetaApi(
        "The surface for use by generated code is not stable yet and may change in the future.")
    public OperationCallSettings.Builder<DeleteResourcePolicyRequest, Operation, Operation>
        deleteOperationSettings() {
      return deleteOperationSettings;
    }
    /** Returns the builder for the settings used for calls to get. */
    public UnaryCallSettings.Builder<GetResourcePolicyRequest, ResourcePolicy> getSettings() {
      return getSettings;
    }
    /** Returns the builder for the settings used for calls to getIamPolicy. */
    public UnaryCallSettings.Builder<GetIamPolicyResourcePolicyRequest, Policy>
        getIamPolicySettings() {
      return getIamPolicySettings;
    }
    /** Returns the builder for the settings used for calls to insert. */
    public UnaryCallSettings.Builder<InsertResourcePolicyRequest, Operation> insertSettings() {
      return insertSettings;
    }
    /** Returns the builder for the settings used for long-running-operation calls to insert. */
    @BetaApi(
        "The surface for use by generated code is not stable yet and may change in the future.")
    public OperationCallSettings.Builder<InsertResourcePolicyRequest, Operation, Operation>
        insertOperationSettings() {
      return insertOperationSettings;
    }
    /** Returns the builder for the settings used for calls to list. */
    public PagedCallSettings.Builder<
            ListResourcePoliciesRequest, ResourcePolicyList, ListPagedResponse>
        listSettings() {
      return listSettings;
    }
    /** Returns the builder for the settings used for calls to setIamPolicy. */
    public UnaryCallSettings.Builder<SetIamPolicyResourcePolicyRequest, Policy>
        setIamPolicySettings() {
      return setIamPolicySettings;
    }
    /** Returns the builder for the settings used for calls to testIamPermissions. */
    public UnaryCallSettings.Builder<
            TestIamPermissionsResourcePolicyRequest, TestPermissionsResponse>
        testIamPermissionsSettings() {
      return testIamPermissionsSettings;
    }
    @Override
    public ResourcePoliciesStubSettings build() throws IOException {
      return new ResourcePoliciesStubSettings(this);
    }
  }
}
| |
/* RMIClassLoaderImpl.java -- FIXME: briefly describe file purpose
Copyright (C) 2005 Free Software Foundation, Inc.
This file is part of GNU Classpath.
GNU Classpath is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
GNU Classpath is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Classpath; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA.
Linking this library statically or dynamically with other modules is
making a combined work based on this library. Thus, the terms and
conditions of the GNU General Public License cover the whole
combination.
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. */
package gnu.java.rmi.server;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.rmi.server.RMIClassLoaderSpi;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.Map;
import java.util.StringTokenizer;
/**
 * The default implementation of {@link java.rmi.server.RMIClassLoaderSpi}.
 *
 * <p>Class loaders created for a codebase are cached per (codebase, context
 * class loader) pair, so repeated lookups for the same codebase from the same
 * context reuse the same loader instance.
 *
 * @author Roman Kennke (kennke@aicas.com)
 */
public class RMIClassLoaderImpl extends RMIClassLoaderSpi
{
  /**
   * A URLClassLoader that remembers the codebase annotation (the space
   * separated list of URLs) it was created from, so that
   * {@link RMIClassLoaderImpl#getClassAnnotation(Class)} can return it
   * without recomputing it.
   */
  private static class MyClassLoader extends URLClassLoader
  {
    // Package-private to avoid a trampoline constructor.
    MyClassLoader (URL[] urls, ClassLoader parent, String annotation)
    {
      super (urls, parent);
      this.annotation = annotation;
    }
    private MyClassLoader (URL[] urls, ClassLoader parent)
    {
      super (urls, parent);
      this.annotation = urlToAnnotation (urls);
    }
    /**
     * Converts an array of URLs into a codebase annotation string: the
     * external forms of the URLs, each followed by a single space.
     *
     * @param urls the URLs to convert
     * @return the annotation string, or null if <code>urls</code> is empty
     */
    public static String urlToAnnotation (URL[] urls)
    {
      if (urls.length == 0)
        return null;
      StringBuffer annotation = new StringBuffer (64 * urls.length);
      for (int i = 0; i < urls.length; i++)
        {
          annotation.append (urls [i].toExternalForm());
          annotation.append (' ');
        }
      return annotation.toString();
    }
    /** Returns the codebase annotation this loader was created with. */
    public final String getClassAnnotation()
    {
      return annotation;
    }
    private final String annotation;
  }
  /**
   * This class is used to identify a cached classloader by its codebase and
   * the context classloader that is its parent.
   */
  private static class CacheKey
  {
    private String mCodeBase;
    private ClassLoader mContextClassLoader;
    public CacheKey (String theCodebase, ClassLoader theContextClassLoader)
    {
      mCodeBase = theCodebase;
      mContextClassLoader = theContextClassLoader;
    }
    /**
     * @return true if the codebase and the context classloader are equal
     */
    public boolean equals (Object theOther)
    {
      if (theOther instanceof CacheKey)
        {
          CacheKey key = (CacheKey) theOther;
          return (equals (this.mCodeBase,key.mCodeBase)
                  && equals (this.mContextClassLoader, key.mContextClassLoader));
        }
      return false;
    }
    /**
     * Test if the two objects are equal or both null.
     *
     * @param theOne the first object, may be null
     * @param theOther the second object, may be null
     * @return true if both are null or both are equal
     */
    private boolean equals (Object theOne, Object theOther)
    {
      return theOne != null ? theOne.equals (theOther) : theOther == null;
    }
    /**
     * @return hashCode consistent with equals: combines the hash codes of
     * the codebase and the context classloader
     */
    public int hashCode()
    {
      return ((mCodeBase != null ? mCodeBase.hashCode() : 0)
              ^(mContextClassLoader != null ? mContextClassLoader.hashCode() : -1));
    }
    public String toString()
    {
      return "[" + mCodeBase + "," + mContextClassLoader + "]";
    }
  }
  private static RMIClassLoaderImpl instance = null;
  private static Map cacheLoaders; //map annotations to loaders
  private static Map cacheAnnotations; //map loaders to annotations
  //class loader for defaultAnnotation
  private static MyClassLoader defaultClassLoader;
  //defaultAnnotation is got from system property
  // "java.rmi.server.defaultAnnotation"
  private static String defaultAnnotation;
  //URL object for defaultAnnotation
  private static URL defaultCodebase;
  static
  {
    // 89 is a nice prime number for Hashtable initial capacity.
    // Hashtable is used (rather than HashMap) because the caches are
    // reachable from multiple threads and need synchronized access.
    cacheLoaders = new Hashtable (89);
    cacheAnnotations = new Hashtable (89);
    defaultAnnotation = System.getProperty ("java.rmi.server.defaultAnnotation");
    try
      {
        if (defaultAnnotation != null)
          defaultCodebase = new URL (defaultAnnotation);
      }
    catch (Exception ignored)
      {
        // A malformed default annotation simply means there is no default
        // codebase; "_" is no longer a legal identifier, hence the rename.
        defaultCodebase = null;
      }
    if (defaultCodebase != null)
      {
        defaultClassLoader = new MyClassLoader (new URL[] { defaultCodebase }, null,
                                                defaultAnnotation);
        cacheLoaders.put (new CacheKey (defaultAnnotation,
                                        Thread.currentThread().getContextClassLoader()),
                          defaultClassLoader);
      }
  }
  /**
   * This is a singleton class and may only be instantiated once from within
   * the {@link #getInstance} method.
   */
  private RMIClassLoaderImpl()
  {
  }
  /**
   * Returns an instance of RMIClassLoaderImpl.
   *
   * @return an instance of RMIClassLoaderImpl
   */
  public static RMIClassLoaderSpi getInstance()
  {
    if (instance == null)
      instance = new RMIClassLoaderImpl();
    return instance;
  }
  /**
   * Loads a class from the local classpath first and, failing that, from the
   * given codebase (or the default codebase when <code>codeBase</code> is
   * null or empty, as permitted by the RMIClassLoaderSpi contract).
   *
   * @param codeBase a space separated list of URLs, or null
   * @param name the name of the class to load
   * @param defaultLoader the loader tried first, or null to use the context
   *        class loader
   * @return the loaded class
   * @throws MalformedURLException if the codebase contains a malformed URL
   * @throws ClassNotFoundException if the class cannot be found
   */
  public Class loadClass(String codeBase, String name,
                         ClassLoader defaultLoader)
    throws MalformedURLException, ClassNotFoundException
  {
    ClassLoader loader;
    if (defaultLoader == null)
      loader = Thread.currentThread().getContextClassLoader();
    else
      loader = defaultLoader;
    //try context class loader first
    try
      {
        return Class.forName(name, false, loader);
      }
    catch (ClassNotFoundException e)
      {
        // class not found in the local classpath
      }
    // The codebase may be null as well as empty; previously a null codebase
    // triggered a NullPointerException here.
    if (codeBase == null || codeBase.length() == 0) //==""
      {
        loader = defaultClassLoader;
      }
    else
      {
        loader = getClassLoader(codeBase);
      }
    if (loader == null)
      {
        //do not throw NullPointerException
        throw new ClassNotFoundException ("Could not find class (" + name +
                                          ") at codebase (" + codeBase + ")");
      }
    return Class.forName(name, false, loader);
  }
  /**
   * Loads a proxy class implementing the given interfaces.
   *
   * @return currently always null — not yet implemented
   */
  public Class loadProxyClass(String codeBase, String[] interfaces,
                              ClassLoader defaultLoader)
    throws MalformedURLException, ClassNotFoundException
  {
    // FIXME: Implement this.
    return null;
  }
  /**
   * Gets a classloader for the given codebase and with the current
   * context classloader as parent.
   *
   * @param codebase a space separated list of URLs
   *
   * @return a classloader for the given codebase
   *
   * @throws MalformedURLException if the codebase contains a malformed URL
   */
  public ClassLoader getClassLoader(String codebase)
    throws MalformedURLException
  {
    ClassLoader loader;
    CacheKey loaderKey = new CacheKey
      (codebase, Thread.currentThread().getContextClassLoader());
    loader = (ClassLoader) cacheLoaders.get (loaderKey);
    if (loader == null)
      {
        //create an entry in cacheLoaders mapping a loader to codebases.
        // codebases are separated by " "
        StringTokenizer tok = new StringTokenizer (codebase, " ");
        ArrayList urls = new ArrayList();
        while (tok.hasMoreTokens())
          urls.add (new URL(tok.nextToken()));
        loader = new MyClassLoader((URL[]) urls.toArray(new URL [urls.size()]),
                                   Thread.currentThread().getContextClassLoader(),
                                   codebase);
        cacheLoaders.put (loaderKey, loader);
      }
    return loader;
  }
  /**
   * Returns a string representation of the network location where a remote
   * endpoint can get the class-definition of the given class.
   *
   * @param cl the class whose annotation is requested
   *
   * @return a space seperated list of URLs where the class-definition
   * of cl may be found
   */
  public String getClassAnnotation(Class cl)
  {
    ClassLoader loader = cl.getClassLoader();
    // Bootstrap and system classes are annotated with the local codebase
    // property, if any.
    if (loader == null
        || loader == ClassLoader.getSystemClassLoader())
      {
        return System.getProperty ("java.rmi.server.codebase");
      }
    // Loaders we created remember their annotation directly.
    if (loader instanceof MyClassLoader)
      {
        return ((MyClassLoader) loader).getClassAnnotation();
      }
    // Other URLClassLoaders get their annotation computed once and cached.
    String s = (String) cacheAnnotations.get (loader);
    if (s != null)
      return s;
    if (loader instanceof URLClassLoader)
      {
        URL[] urls = ((URLClassLoader) loader).getURLs();
        if (urls.length == 0)
          return null;
        StringBuffer annotation = new StringBuffer (64 * urls.length);
        for (int i = 0; i < urls.length; i++)
          {
            annotation.append (urls [i].toExternalForm());
            annotation.append (' ');
          }
        s = annotation.toString();
        cacheAnnotations.put (loader, s);
        return s;
      }
    return System.getProperty ("java.rmi.server.codebase");
  }
}
| |
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.enterprise.feedback;
import static android.os.Looper.getMainLooper;
import static androidx.enterprise.feedback.KeyedAppStatesReporter.APP_STATES;
import static androidx.enterprise.feedback.KeyedAppStatesReporter.APP_STATE_KEY;
import static androidx.enterprise.feedback.KeyedAppStatesReporter.APP_STATE_SEVERITY;
import static androidx.enterprise.feedback.KeyedAppStatesReporter.WHAT_IMMEDIATE_STATE;
import static androidx.enterprise.feedback.KeyedAppStatesReporter.WHAT_STATE;
import static com.google.common.truth.Truth.assertThat;
import static org.robolectric.Shadows.shadowOf;
import static java.util.Arrays.asList;
import static java.util.Collections.singleton;
import android.content.ContextWrapper;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.os.IBinder;
import android.os.Message;
import android.os.Messenger;
import android.os.RemoteException;
import android.os.SystemClock;
import androidx.annotation.NonNull;
import androidx.test.core.app.ApplicationProvider;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.Robolectric;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.android.util.concurrent.PausedExecutorService;
import org.robolectric.annotation.Config;
import org.robolectric.annotation.internal.DoNotInstrument;
import org.robolectric.shadows.ShadowBinder;
import org.robolectric.shadows.ShadowPausedAsyncTask;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
/** Tests {@link KeyedAppStatesService}. */
@SuppressWarnings("UnstableApiUsage") // PausedExecutorService and ShadowPausedAsyncTask are @Beta
@RunWith(RobolectricTestRunner.class)
@DoNotInstrument
@Config(minSdk = 21, instrumentedPackages = { "androidx.enterprise.feedback" })
public class KeyedAppStatesServiceTest {
    /**
     * Test double that captures the arguments of the last {@link #onReceive} call so assertions
     * can inspect what the service was handed.
     */
    private static class TestKeyedAppStatesService extends KeyedAppStatesService {
        // Last received states, wrapped unmodifiable; null until onReceive is first called.
        Collection<ReceivedKeyedAppState> mStates;
        // requestSync flag from the last onReceive call.
        boolean mRequestSync;
        @Override
        public void onReceive(
                @NonNull Collection<ReceivedKeyedAppState> states, boolean requestSync) {
            this.mStates = Collections.unmodifiableCollection(states);
            this.mRequestSync = requestSync;
        }
    }
    // Two distinct fixture states, used by tests that send several states in one message.
    private static final KeyedAppState STATE =
            KeyedAppState.builder()
                    .setKey("key1")
                    .setMessage("message1")
                    .setSeverity(KeyedAppState.SEVERITY_INFO)
                    .setData("data1")
                    .build();
    private static final KeyedAppState STATE2 =
            KeyedAppState.builder()
                    .setKey("key2")
                    .setMessage("message2")
                    .setSeverity(KeyedAppState.SEVERITY_INFO)
                    .setData("data2")
                    .build();
    // Service under test, bound once; mMessenger sends Messages into its binder.
    private final TestKeyedAppStatesService mKeyedAppStatesService =
            Robolectric.setupService(TestKeyedAppStatesService.class);
    private final IBinder mBinder = mKeyedAppStatesService.onBind(new Intent());
    private final Messenger mMessenger = new Messenger(mBinder);
    private final ContextWrapper mContext = ApplicationProvider.getApplicationContext();
    private final PackageManager mPackageManager = mContext.getPackageManager();
    // Paused executor shared across tests; AsyncTask work is routed through it in setUp().
    private static PausedExecutorService sAsyncTaskExecutor;
    // UID that ShadowBinder reports as the caller for incoming messages.
    private static final int DEFAULT_SENDING_UID = -1;
    private static final long CURRENT_TIME_MILLIS = 1234567;
    /** Creates the paused executor that test AsyncTask work is routed through. */
    @BeforeClass
    public static void setUpClass() {
        sAsyncTaskExecutor = new PausedExecutorService();
    }
    /** Shuts down the shared paused executor after all tests in the class have run. */
    @AfterClass
    public static void tearDownClass() {
        sAsyncTaskExecutor.shutdown();
    }
    /**
     * Per-test setup: map the default sending UID to a package name, make Binder report that UID
     * as the caller, and route AsyncTask execution through the paused executor.
     */
    @Before
    public void setUp() {
        shadowOf(mPackageManager).setNameForUid(DEFAULT_SENDING_UID, "test_package");
        ShadowBinder.setCallingUid(DEFAULT_SENDING_UID);
        ShadowPausedAsyncTask.overrideExecutor(sAsyncTaskExecutor);
    }
    /** The service receives every state packed into a single message. */
    @Test
    public void receivesStates() throws RemoteException {
        Collection<KeyedAppState> keyedAppStates = asList(STATE, STATE2);
        Bundle appStatesBundle = buildStatesBundle(keyedAppStates);
        Message message = createStateMessage(appStatesBundle);
        mMessenger.send(message);
        idleKeyedAppStatesService();
        assertReceivedStatesMatch(mKeyedAppStatesService.mStates, asList(STATE, STATE2));
    }
    /**
     * Asserts that the received states correspond exactly to the expected states (order
     * insensitive), comparing on key/severity/message/data only — timestamps and package names
     * are ignored.
     */
    private void assertReceivedStatesMatch(
            Collection<ReceivedKeyedAppState> receivedStates, Collection<KeyedAppState> states) {
        Collection<KeyedAppState> convertedReceivedStates =
                convertReceivedStatesToKeyedAppState(receivedStates);
        assertThat(convertedReceivedStates).containsExactlyElementsIn(states);
    }
/** Strips receive-time metadata, keeping only the client-supplied fields. */
private Collection<KeyedAppState> convertReceivedStatesToKeyedAppState(
        Collection<ReceivedKeyedAppState> receivedStates) {
    Collection<KeyedAppState> converted = new ArrayList<>();
    for (ReceivedKeyedAppState received : receivedStates) {
        KeyedAppState state =
                KeyedAppState.builder()
                        .setKey(received.getKey())
                        .setSeverity(received.getSeverity())
                        .setMessage(received.getMessage())
                        .setData(received.getData())
                        .build();
        converted.add(state);
    }
    return converted;
}
@Test
public void receivesTimestamp() throws RemoteException {
    // Pin the (shadowed) wall clock, then check the service stamps the
    // received state with that time.
    SystemClock.setCurrentTimeMillis(CURRENT_TIME_MILLIS);
    mMessenger.send(createTestStateMessage());
    idleKeyedAppStatesService();
    ReceivedKeyedAppState received = mKeyedAppStatesService.mStates.iterator().next();
    assertThat(received.getTimestamp()).isEqualTo(CURRENT_TIME_MILLIS);
}
@Test
public void receivesPackageName() throws RemoteException {
    // The service resolves the sender's package name from the calling uid.
    final String senderPackage = "test.package.name";
    shadowOf(mPackageManager).setNameForUid(DEFAULT_SENDING_UID, senderPackage);
    mMessenger.send(createTestStateMessage());
    idleKeyedAppStatesService();
    ReceivedKeyedAppState received = mKeyedAppStatesService.mStates.iterator().next();
    assertThat(received.getPackageName()).isEqualTo(senderPackage);
}
@Test
public void receivesDoesNotRequestSync() throws RemoteException {
    // A regular (non-immediate) state message must not set the sync flag.
    mMessenger.send(createTestStateMessage());
    idleKeyedAppStatesService();
    assertThat(mKeyedAppStatesService.mRequestSync).isFalse();
}
@Test
public void receivesRequestSync() throws RemoteException {
    // "Immediate" state messages must set the request-sync flag.
    mMessenger.send(createStateMessageImmediate(buildStatesBundle(singleton(STATE))));
    idleKeyedAppStatesService();
    assertThat(mKeyedAppStatesService.mRequestSync).isTrue();
}
@Test
public void deduplicatesStates() throws RemoteException {
    // Three states sharing the key "key": only one entry may reach the callback.
    KeyedAppState first =
            KeyedAppState.builder().setKey("key").setSeverity(KeyedAppState.SEVERITY_INFO).build();
    KeyedAppState second =
            KeyedAppState.builder().setKey("key").setSeverity(KeyedAppState.SEVERITY_INFO).build();
    KeyedAppState third =
            KeyedAppState.builder()
                    .setKey("key")
                    .setSeverity(KeyedAppState.SEVERITY_INFO)
                    .setMessage("message")
                    .build();
    mMessenger.send(createStateMessage(buildStatesBundle(asList(first, second, third))));
    idleKeyedAppStatesService();
    assertThat(mKeyedAppStatesService.mStates).hasSize(1);
}
@Test
public void send_emptyStates_doesNotCallback() throws RemoteException {
    // An empty state list must be ignored entirely (no callback at all).
    mMessenger.send(createStateMessage(buildStatesBundle(Collections.emptyList())));
    idleKeyedAppStatesService();
    assertThat(mKeyedAppStatesService.mStates).isNull();
}
@Test
public void send_messageWithoutWhat_doesNotCallback() throws RemoteException {
    // A message with an unset `what` field is not a state message and must
    // be dropped silently.
    Message message = Message.obtain();
    mMessenger.send(message);
    idleKeyedAppStatesService();
    assertThat(mKeyedAppStatesService.mStates).isNull();
}
@Test
public void send_messageWithoutBundle_doesNotCallback() throws RemoteException {
    // A state message whose payload (obj) is null must be dropped silently.
    mMessenger.send(createStateMessage(null));
    idleKeyedAppStatesService();
    assertThat(mKeyedAppStatesService.mStates).isNull();
}
@Test
public void send_messageWithIncorrectObj_doesNotCallback() throws RemoteException {
    // obj must be a Bundle; a String payload must be rejected silently.
    Message message = createStateMessage(null);
    message.obj = "";
    mMessenger.send(message);
    idleKeyedAppStatesService();
    assertThat(mKeyedAppStatesService.mStates).isNull();
}
@Test
public void send_messageWithEmptyBundle_doesNotCallback() throws RemoteException {
    // A Bundle without the APP_STATES entry carries no states to report.
    mMessenger.send(createStateMessage(new Bundle()));
    idleKeyedAppStatesService();
    assertThat(mKeyedAppStatesService.mStates).isNull();
}
// NOTE(review): the "messsage" typo is kept in the method name — renaming a
// public test method would churn test reports; consider a dedicated rename.
@Test
public void send_messsageWithInvalidState_doesNotCallback() throws RemoteException {
    // A state bundle missing the mandatory key must be dropped.
    Bundle stateMissingKey = createDefaultStateBundle();
    stateMissingKey.remove(APP_STATE_KEY);
    mMessenger.send(createStateMessage(buildStatesBundleFromBundles(singleton(stateMissingKey))));
    idleKeyedAppStatesService();
    assertThat(mKeyedAppStatesService.mStates).isNull();
}
@Test
public void send_messageWithBothInvalidAndValidStates_callsBackWithOnlyValidStates()
        throws RemoteException {
    // One valid and one invalid state: only the valid one may reach the callback.
    Bundle stateMissingKey = createDefaultStateBundle();
    stateMissingKey.remove(APP_STATE_KEY);
    Bundle statesBundle =
            buildStatesBundleFromBundles(asList(createDefaultStateBundle(), stateMissingKey));
    mMessenger.send(createStateMessage(statesBundle));
    idleKeyedAppStatesService();
    assertThat(mKeyedAppStatesService.mStates).hasSize(1);
}
/** Builds a minimal valid state bundle (severity + key only). */
private static Bundle createDefaultStateBundle() {
    Bundle stateBundle = new Bundle();
    stateBundle.putInt(APP_STATE_SEVERITY, KeyedAppState.SEVERITY_INFO);
    stateBundle.putString(APP_STATE_KEY, "key1");
    return stateBundle;
}
/** Builds a regular state message carrying only STATE. */
private static Message createTestStateMessage() {
    return createStateMessage(buildStatesBundle(singleton(STATE)));
}
/** Wraps raw per-state bundles into the top-level APP_STATES bundle. */
private static Bundle buildStatesBundleFromBundles(Collection<Bundle> bundles) {
    Bundle wrapper = new Bundle();
    wrapper.putParcelableArrayList(APP_STATES, new ArrayList<>(bundles));
    return wrapper;
}
/** Serializes the given states and wraps them into the APP_STATES bundle. */
private static Bundle buildStatesBundle(Collection<KeyedAppState> keyedAppStates) {
    Bundle wrapper = new Bundle();
    wrapper.putParcelableArrayList(APP_STATES, buildStateBundles(keyedAppStates));
    return wrapper;
}
/** Converts each state to its bundle representation, preserving order. */
private static ArrayList<Bundle> buildStateBundles(Collection<KeyedAppState> keyedAppStates) {
    ArrayList<Bundle> stateBundles = new ArrayList<>();
    for (KeyedAppState state : keyedAppStates) {
        stateBundles.add(state.toStateBundle());
    }
    return stateBundles;
}
/** Builds a regular (non-immediate) state message with the given payload. */
private static Message createStateMessage(Bundle appStatesBundle) {
    return createStateMessage(appStatesBundle, false);
}
/**
 * Builds a state message.
 *
 * @param appStatesBundle payload stored in {@code message.obj}; may be null
 * @param immediate whether the receiver should be asked to sync immediately
 */
private static Message createStateMessage(Bundle appStatesBundle, boolean immediate) {
    Message message = Message.obtain();
    if (immediate) {
        message.what = WHAT_IMMEDIATE_STATE;
    } else {
        message.what = WHAT_STATE;
    }
    message.obj = appStatesBundle;
    return message;
}
/** Builds an "immediate" state message, which also requests an upload sync. */
private static Message createStateMessageImmediate(Bundle appStatesBundle) {
    return createStateMessage(appStatesBundle, true);
}
/**
 * Drains the main looper and the paused AsyncTask executor so all service
 * work triggered by a test completes before assertions run.
 */
private static void idleKeyedAppStatesService() {
    // Ensure messages are sent and handled by service
    shadowOf(getMainLooper()).idle();
    // Run any AsyncTasks executed by service
    int numRun = sAsyncTaskExecutor.runAll();
    if (numRun > 0) {
        // Receive results of AsyncTasks (posted back to the main looper)
        shadowOf(getMainLooper()).idle();
    }
}
}
| |
package sk.lovasko.lucenec.world;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import sk.lovasko.lucenec.integrator.Integrator;
import sk.lovasko.lucenec.Primitive;
import sk.lovasko.lucenec.integrator.RayTracingIntegrator;
import sk.lovasko.lucenec.integrator.CosineIntegrator;
import sk.lovasko.lucenec.camera.*;
import sk.lovasko.lucenec.color.RGB;
import sk.lovasko.lucenec.geom.Point;
import sk.lovasko.lucenec.geom.Vector;
import sk.lovasko.lucenec.group.KDTree;
import sk.lovasko.lucenec.group.SimpleGroup;
import sk.lovasko.lucenec.model.ObjModel;
import sk.lovasko.lucenec.solid.AxisAlignedBox;
import sk.lovasko.lucenec.solid.Disc;
import sk.lovasko.lucenec.solid.InfinitePlane;
import sk.lovasko.lucenec.solid.Quad;
import sk.lovasko.lucenec.solid.Triangle;
import sk.lovasko.lucenec.solid.Quadric;
import sk.lovasko.lucenec.solid.Sphere;
import sk.lovasko.lucenec.light.Light;
import sk.lovasko.lucenec.light.SpotLight;
import sk.lovasko.lucenec.light.PointLight;
import sk.lovasko.lucenec.light.DirectionalLight;
import sk.lovasko.lucenec.texture.*;
import sk.lovasko.lucenec.xml.XmlHelpers;
/**
 * A renderable scene: root geometry primitive, light sources, camera,
 * background color and an optional environment texture. Also hosts the XML
 * scene loader, which builds a World and wires it into an Integrator.
 */
public class World
{
    private final Primitive scene;
    private final List<Light> lights;
    private final RGB background_color;
    private final Camera camera;
    // XML tag names accepted for each element category.
    private static final HashSet<String> camera_set;
    private static final HashSet<String> primitive_set;
    private static final HashSet<String> light_set;
    private Texture environment_texture;

    static
    {
        camera_set = new HashSet<String>();
        camera_set.add("perspectiveCamera");
        camera_set.add("orthographicCamera");
        camera_set.add("fishEyeCamera");

        primitive_set = new HashSet<String>();
        primitive_set.add("simpleGroup");
        primitive_set.add("kdTree");
        primitive_set.add("sphere");
        primitive_set.add("quad");
        primitive_set.add("triangle");
        primitive_set.add("infinitePlane");
        primitive_set.add("disc");
        primitive_set.add("axisAlignedBox");
        primitive_set.add("quadric");

        light_set = new HashSet<String>();
        light_set.add("spotLight");
        light_set.add("pointLight");
        light_set.add("directionalLight");
    }

    /**
     * Creates a world with an empty light list and no environment texture.
     *
     * @param _scene root primitive of the scene
     * @param _background_color color used for rays that miss all geometry
     * @param _camera camera generating primary rays
     */
    public World (final Primitive _scene, final RGB _background_color, final Camera _camera)
    {
        scene = _scene;
        lights = new ArrayList<Light>();
        background_color = _background_color;
        camera = _camera;
        environment_texture = null;
    }

    /** Sets the texture sampled for rays leaving the scene. */
    public final void set_environment_texture (final Texture texture)
    {
        environment_texture = texture;
    }

    /** @return the environment texture, or null if none was set */
    public final Texture get_environment_texture ()
    {
        return environment_texture;
    }

    /** Adds a light source to the scene. */
    public final void add_light (final Light light)
    {
        lights.add(light);
    }

    /** @return the root primitive of the scene */
    public final Primitive get_scene ()
    {
        return scene;
    }

    /** @return the background color */
    public final RGB get_background_color ()
    {
        return background_color;
    }

    /** @return the camera */
    public final Camera get_camera ()
    {
        return camera;
    }

    /** @return the list of lights (live list, not a copy) */
    public final List<Light> get_lights ()
    {
        return lights;
    }

    // move this to each respective camera class
    /**
     * Parses a camera from its XML element (tag already validated against
     * camera_set by the caller).
     *
     * @return the camera, or null when a required attribute is missing
     */
    private static Camera load_camera_from_element (final Element element)
    {
        String camera_type = element.getTagName();
        if (camera_type.equals("perspectiveCamera"))
        {
            Point center = XmlHelpers.get_point(element, "center");
            Vector up = XmlHelpers.get_vector(element, "up");
            Vector forward = XmlHelpers.get_vector(element, "forward");
            Double angle = XmlHelpers.get_double(element, "angle");
            Double ratio = XmlHelpers.get_double(element, "ratio");
            if (center == null || up == null || forward == null ||
                angle == null || ratio == null)
            {
                return null;
            }
            else
            {
                return new PerspectiveCamera(center, forward, up, angle, ratio);
            }
        }
        else if (camera_type.equals("orthographicCamera"))
        {
            Point center = XmlHelpers.get_point(element, "center");
            Vector forward = XmlHelpers.get_vector(element, "forward");
            Vector up = XmlHelpers.get_vector(element, "up");
            Double scale_x = XmlHelpers.get_double(element, "scaleX");
            Double scale_y = XmlHelpers.get_double(element, "scaleY");
            if (center == null || up == null || forward == null ||
                scale_x == null || scale_y == null)
            {
                return null;
            }
            else
            {
                return new OrthographicCamera(center, forward, up, scale_x, scale_y);
            }
        }
        else if (camera_type.equals("fishEyeCamera"))
        {
            Point center = XmlHelpers.get_point(element, "center");
            Vector forward = XmlHelpers.get_vector(element, "forward");
            Vector up = XmlHelpers.get_vector(element, "up");
            Double psi_max = XmlHelpers.get_double(element, "psiMax");
            if (center == null || forward == null ||
                up == null || psi_max == null)
            {
                return null;
            }
            else
            {
                return new FishEyeCamera(center, forward, up, psi_max);
            }
        }
        else
        {
            // should never get here, just for consistency
            return null;
        }
    }

    /**
     * Parses a primitive (possibly a nested group) from its XML element.
     * Group elements ("simpleGroup", "kdTree") recurse into their children;
     * "objModel" children are expanded into the enclosing group.
     *
     * @return the primitive, or null when any child fails to parse
     */
    private static Primitive load_primitive_from_element (final Element element)
    {
        String primitive_type = element.getTagName();
        if (primitive_type.equals("simpleGroup"))
        {
            SimpleGroup sg = new SimpleGroup();
            NodeList node_list = element.getChildNodes();
            for (int i = 0; i < node_list.getLength(); i++)
            {
                Node node = node_list.item(i);
                if (node.getNodeType() == Node.ELEMENT_NODE)
                {
                    Element e = (Element) node;
                    if (e.getTagName().equals("objModel"))
                    {
                        ObjModel obj_model = ObjModel.read_from_xml_element(e);
                        if (obj_model != null)
                        {
                            // FIX: removed leftover debug print of the group.
                            obj_model.add_to_group(sg);
                        }
                        else
                        {
                            return null;
                        }
                    }
                    else
                    {
                        Primitive p = load_primitive_from_element(e);
                        if (p == null)
                        {
                            return null;
                        }
                        else
                        {
                            sg.add(p);
                        }
                    }
                }
            }
            return sg;
        }
        else if (primitive_type.equals("kdTree"))
        {
            KDTree kdtree = new KDTree();
            NodeList node_list = element.getChildNodes();
            for (int i = 0; i < node_list.getLength(); i++)
            {
                Node node = node_list.item(i);
                if (node.getNodeType() == Node.ELEMENT_NODE)
                {
                    Element e = (Element) node;
                    if (e.getTagName().equals("objModel"))
                    {
                        ObjModel obj_model = ObjModel.read_from_xml_element(e);
                        if (obj_model != null)
                        {
                            obj_model.add_to_group(kdtree);
                        }
                        else
                        {
                            return null;
                        }
                    }
                    else
                    {
                        Primitive p = load_primitive_from_element(e);
                        if (p == null)
                        {
                            return null;
                        }
                        else
                        {
                            kdtree.add(p);
                        }
                    }
                }
            }
            // Build the acceleration structure once all children are added.
            kdtree.rebuild_index();
            return kdtree;
        }
        else if (primitive_type.equals("sphere"))
        {
            return Sphere.read_from_xml_element(element);
        }
        else if (primitive_type.equals("quadric"))
        {
            return Quadric.read_from_xml_element(element);
        }
        else if (primitive_type.equals("triangle"))
        {
            return Triangle.read_from_xml_element(element);
        }
        else if (primitive_type.equals("quad"))
        {
            return Quad.read_from_xml_element(element);
        }
        else if (primitive_type.equals("disc"))
        {
            return Disc.read_from_xml_element(element);
        }
        else if (primitive_type.equals("infinitePlane"))
        {
            return InfinitePlane.read_from_xml_element(element);
        }
        else if (primitive_type.equals("axisAlignedBox"))
        {
            return AxisAlignedBox.read_from_xml_element(element);
        }
        else
        {
            // should never get here, just for consistency
            return null;
        }
    }

    /**
     * Parses a light source from its XML element.
     *
     * @return the light, or null when a required attribute is missing
     */
    private static Light load_light_from_element (final Element element)
    {
        String light_type = element.getTagName();
        if (light_type.equals("pointLight"))
        {
            final Point position = XmlHelpers.get_point(element, "position");
            final RGB color = XmlHelpers.get_color(element, "color");
            if (position == null || color == null)
            {
                return null;
            }
            else
            {
                return new PointLight(position, color);
            }
        }
        else if (light_type.equals("spotLight"))
        {
            final Point position = XmlHelpers.get_point(element, "position");
            final Vector direction = XmlHelpers.get_vector(element, "direction");
            final Double angle = XmlHelpers.get_double(element, "angle");
            final Double exp = XmlHelpers.get_double(element, "exp");
            final RGB color = XmlHelpers.get_color(element, "color");
            if (position == null || direction == null || angle == null ||
                exp == null || color == null)
            {
                return null;
            }
            else
            {
                return new SpotLight(position, direction, angle, exp, color);
            }
        }
        else if (light_type.equals("directionalLight"))
        {
            final Vector direction = XmlHelpers.get_vector(element, "direction");
            final RGB color = XmlHelpers.get_color(element, "color");
            if (direction == null || color == null)
            {
                return null;
            }
            else
            {
                return new DirectionalLight(direction, color);
            }
        }
        else
        {
            // should never get here, just for consistency
            return null;
        }
    }

    /**
     * Loads a scene description from an XML file and returns the configured
     * integrator with its world attached.
     *
     * @param filename path of the XML scene description
     * @return the integrator, or null when the file cannot be parsed or the
     *         description is invalid (errors are reported on stderr)
     */
    public static Integrator load_from_file (final String filename)
    {
        File file = new File(filename);
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        List<Light> lights = new ArrayList<Light>();
        DocumentBuilder builder = null;
        try
        {
            builder = factory.newDocumentBuilder();
        }
        catch (ParserConfigurationException e)
        {
            System.err.println(e);
            return null;
        }
        Document document = null;
        try
        {
            document = builder.parse(file);
        }
        catch (SAXException|IOException e)
        {
            System.err.println(e);
            return null;
        }
        document.getDocumentElement().normalize();
        Element root = document.getDocumentElement();
        if (!root.getTagName().equals("world"))
        {
            System.err.println("The root element of the document is not \"world\".");
            return null;
        }
        RGB bgcolor = RGB.from_string(root.getAttribute("bgcolor"));
        if (bgcolor == null)
        {
            System.out.println("The background color of the world not set, defaulting to black");
            bgcolor = new RGB(0.0);
        }
        String integrator_name = root.getAttribute("integrator");
        Integrator integrator;
        if (integrator_name.isEmpty())
        {
            System.err.println("The world has no integrator set");
            return null;
        }
        else if (integrator_name.equals("cosine"))
        {
            integrator = new CosineIntegrator();
        }
        else if (integrator_name.equals("raytracing"))
        {
            integrator = new RayTracingIntegrator();
        }
        else
        {
            System.err.println("Unknown integrator \"" + integrator_name + "\"");
            return null;
        }
        boolean have_camera = false;
        Camera camera = null;
        boolean have_primitive = false;
        Primitive primitive = null;
        NodeList node_list = root.getChildNodes();
        for (int i = 0; i < node_list.getLength(); i++)
        {
            Node node = node_list.item(i);
            if (node.getNodeType() != Node.ELEMENT_NODE)
            {
                continue;
            }
            Element element = (Element) node;
            String element_name = element.getTagName();
            if (camera_set.contains(element_name))
            {
                if (have_camera)
                {
                    System.err.println("Only one camera can be defined");
                    return null;
                }
                camera = load_camera_from_element(element);
                if (camera == null)
                {
                    System.err.println("Unable to parse the camera");
                    return null;
                }
                have_camera = true;
            }
            // BUG FIX: this branch previously tested
            // light_set.contains(light_set) (the set against itself), which is
            // always false — every light element fell through to the
            // "unknown element" branch and aborted the whole load.
            else if (light_set.contains(element_name))
            {
                final Light light = load_light_from_element(element);
                if (light != null)
                {
                    lights.add(light);
                }
            }
            else if (primitive_set.contains(element_name))
            {
                if (have_primitive)
                {
                    System.err.println("The scene can be defined by only one primitive");
                    return null;
                }
                primitive = load_primitive_from_element(element);
                if (primitive == null)
                {
                    System.err.println("Unable to parse the scene primitive");
                    return null;
                }
                // BUG FIX: have_primitive was never set, so the
                // "only one primitive" guard above could never trigger.
                have_primitive = true;
            }
            else
            {
                // FIX: report to stderr like the other error paths.
                System.err.println("Unknown element: \"" + element_name + "\"");
                return null;
            }
        }
        World world = new World(primitive, bgcolor, camera);
        for (final Light light : lights)
        {
            world.add_light(light);
        }
        integrator.set_world(world);
        return integrator;
    }
}
| |
/*
* Copyright 1997-2011 teatrove.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teatrove.tea.compiler;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Reader;
import java.lang.reflect.Method;
import java.util.ArrayList;
import org.teatrove.tea.parsetree.Name;
import org.teatrove.tea.parsetree.Template;
import org.teatrove.tea.parsetree.Variable;
import org.teatrove.tea.runtime.Substitution;
import org.teatrove.trove.io.SourceReader;
/**
 * Pre-compiled templates implementation. Allows for the invocation of
 * pre-compiled templates from other dynamically compiled or pre-compiled
 * templates. Nested calls to other pre-compiled templates must be from within
 * the same merged context interface.
 *
 * @author Guy A. Molinari
 */
public class CompiledTemplate extends CompilationUnit {
    // Reflected template signature, lazily built in getParseTree().
    private Template mTree;
    // The loaded template class, resolved in getTemplateClass().
    private Class<?> mTemplateClass;
    // The template's generated execute(...) entry point.
    private Method mExecuteMethod;
    // Source position of the call site in the calling template, if known.
    private SourceInfo mCallerInfo = null;
    // The compilation unit that referenced this template, if any.
    private CompilationUnit mCaller;
    // Parameters of the execute method, minus the leading context parameter
    // (and minus any Substitution parameter).
    private Variable[] mParameters;
    // Type of the execute method's first parameter: the runtime context.
    private Class<?> mRuntimeContext;
    // True when the execute method accepts a Substitution (block) parameter.
    private boolean mSubParam = false;
    private static final String TEMPLATE_PACKAGE = "org.teatrove.teaservlet.template";

    /**
     * This constructor is only invoked when the compiler cannot locate a source
     * file for a template and a template class of the desired name and package
     * exists. Further verification of the template signature occurs during the
     * call to <b>getParseTree</b>.
     */
    public CompiledTemplate(String name, Compiler compiler,
        CompilationUnit caller) {
        super(name, null, compiler);
        mCaller = caller;
        try {
            // Record where in the caller's source this template is referenced,
            // so reflected parse-tree nodes point at the call site.
            if (caller != null && caller.getReader() instanceof SourceReader) {
                SourceReader r = (SourceReader) caller.getReader();
                mCallerInfo = new SourceInfo(r.getLineNumber(),
                    r.getStartPosition(), r.getEndPosition());
            }
        }
        // Position info is optional (best effort), so a failing reader is
        // deliberately ignored.
        catch (IOException ignore) { }
    }

    /**
     * Qualifies an unqualified template name with the caller's package, so a
     * relative reference resolves next to the template that made it.
     */
    private static String resolveName(String name, CompilationUnit from) {
        if (from != null && name.indexOf('.') == -1) {
            String fromName = from.getName();
            if (fromName.indexOf('.') != -1)
                name = fromName.substring(0, fromName.lastIndexOf('.')) + "." + name;
        }
        return name;
    }

    /**
     * Load the template class: first under the target package, then under the
     * bare name as a fallback. Returns null (and leaves mTemplateClass unset)
     * when neither lookup succeeds.
     */
    private Class<?> getTemplateClass() {
        String fqName = getTargetPackage() + "." + getName();
        try {
            mTemplateClass = getCompiler().loadClass(fqName);
        }
        catch (ClassNotFoundException nx) {
            try {
                mTemplateClass = getCompiler().loadClass(getName()); // Try standard path as a last resort
            }
            catch (ClassNotFoundException nx2) {
                return null;
            }
        }
        return mTemplateClass;
    }

    /** Returns the template name, qualified relative to the caller. */
    public String getName() {
        return resolveName(super.getName(), mCaller);
    }

    /**
     * Checks if the compiled template class is a precompiled
     * template (i.e. its runtime context is an interface, not a generated MergedContext)
     *
     * @return true if the template class is found, has the proper template execute method,
     * and the first parameter to the execute method (the runtime context for the template) is an interface
     */
    public boolean isValid() {
        getTemplateClass();
        if (findExecuteMethod() == null)
            throw new IllegalArgumentException("Cannot locate compiled template entry point.");
        Class<?>[] p = mExecuteMethod.getParameterTypes();
        mSubParam = false;
        mRuntimeContext = p[0];
        return mRuntimeContext.isInterface();
    }

    /**
     * Test to see if the template can be loaded before CompiledTemplate can be
     * constructed.
     */
    public static boolean exists(Compiler c, String name, CompilationUnit from) {
        String fqName = getFullyQualifiedName(resolveName(name, from));
        try {
            c.loadClass(fqName);
            return true;
        }
        catch (ClassNotFoundException nx) {
            try {
                c.loadClass(resolveName(name, from)); // Try standard path as a last resort
                return true;
            }
            catch (ClassNotFoundException nx2) {
                return false;
            }
        }
    }

    /** Prefixes the template package unless the name already carries it. */
    public static String getFullyQualifiedName(String name) {
        return ! name.startsWith(TEMPLATE_PACKAGE) ? TEMPLATE_PACKAGE + "." + name : name;
    }

    /**
     * Get the runtime context of the compiled template. This is an interface for
     * compiled templates.
     */
    public Class<?> getRuntimeContext() { return mRuntimeContext; }

    /**
     * This method is called by JavaClassGenerator during the compile phase. It overrides the
     * method in CompilationUnit and returns just the reflected template signature.
     */
    public Template getParseTree() {
        getTemplateClass();
        if (findExecuteMethod() == null)
            throw new IllegalArgumentException("Cannot locate compiled template entry point.");
        reflectParameters();
        // Signature-only tree: no statement body, positioned at the call site.
        mTree = new Template(mCallerInfo, new Name(mCallerInfo, getName()),
            mParameters, mSubParam, null, null);
        mTree.setReturnType(new Type(mExecuteMethod.getReturnType(),
            mExecuteMethod.getGenericReturnType()));
        return mTree;
    }

    /**
     * Finds the generated execute entry point by name. NOTE(review): when
     * multiple overloads share the name, the last one returned by reflection
     * wins — confirm this selection is intended.
     */
    private Method findExecuteMethod() {
        mExecuteMethod = null;
        Method[] methods = mTemplateClass.getMethods();
        for (int i = 0; i < methods.length; i++)
            if (JavaClassGenerator.EXECUTE_METHOD_NAME.equals(methods[i].getName()))
                mExecuteMethod = methods[i];
        return mExecuteMethod;
    }

    /**
     * Rebuilds the template's declared parameter list from the execute
     * method's signature, skipping the context and noting a Substitution.
     */
    private void reflectParameters() {
        Class<?>[] p = mExecuteMethod.getParameterTypes();
        java.lang.reflect.Type[] t = mExecuteMethod.getGenericParameterTypes();
        ArrayList<Variable> list = new ArrayList<Variable>();
        mSubParam = false;
        mRuntimeContext = p[0];
        if (!mRuntimeContext.isInterface())
            throw new IllegalArgumentException("No context found in compiled template execute method.");
        for (int i = 1; i < p.length; i++) { // Ignore context parameter
            if (Substitution.class == p[i]) {
                // A Substitution parameter marks a block-accepting template,
                // not a user-visible parameter.
                mSubParam = true;
                continue;
            }
            list.add(new Variable(mCallerInfo, null, new Type(p[i], t[i])));
        }
        mParameters = list.toArray(new Variable[list.size()]);
    }

    /** Intentionally a no-op: the reflected parse tree cannot be replaced. */
    public void setParseTree(Template tree) {
    }

    /**
     * Return the package name that this CompilationUnit should be compiled
     * into.
     */
    public String getTargetPackage() {
        return mCaller != null && mCaller.getTargetPackage() != null ?
            mCaller.getTargetPackage() : TEMPLATE_PACKAGE;
    }

    /**
     * @return true if the CompilationUnit should be compiled. Always false
     * here: the template is already compiled.
     */
    public boolean shouldCompile() {
        return false;
    }

    /**
     * Delegate to calling template.
     */
    public OutputStream getOutputStream() throws IOException {
        return mCaller != null ? mCaller.getOutputStream() : null;
    }

    /**
     * Delegate to calling template.
     */
    public void resetOutputStream() {
        if (mCaller != null) {
            mCaller.resetOutputStream();
        }
    }

    /**
     * Delegate to calling template.
     */
    public Reader getReader() throws IOException {
        return mCaller != null ? mCaller.getReader() : null;
    }

    /**
     * Source is not accessible for pre-compiled templates
     */
    public String getSourceFileName() {
        return "Compiled code.";
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processor.util.put;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.annotation.lifecycle.OnStopped;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractSessionFactoryProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processor.util.put.sender.ChannelSender;
import org.apache.nifi.processor.util.put.sender.DatagramChannelSender;
import org.apache.nifi.processor.util.put.sender.SSLSocketChannelSender;
import org.apache.nifi.processor.util.put.sender.SocketChannelSender;
import org.apache.nifi.ssl.SSLContextService;
import javax.net.ssl.SSLContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
/**
* A base class for processors that send data to an external system using TCP or UDP.
*/
public abstract class AbstractPutEventProcessor extends AbstractSessionFactoryProcessor {
// Core connection properties shared by all subclasses (added in init()).
public static final PropertyDescriptor HOSTNAME = new PropertyDescriptor.Builder()
        .name("Hostname")
        .description("The ip address or hostname of the destination.")
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
        .defaultValue("localhost")
        .required(true)
        .expressionLanguageSupported(true)
        .build();
public static final PropertyDescriptor PORT = new PropertyDescriptor
        .Builder().name("Port")
        .description("The port on the destination.")
        .required(true)
        .addValidator(StandardValidators.PORT_VALIDATOR)
        .expressionLanguageSupported(true)
        .build();
public static final PropertyDescriptor MAX_SOCKET_SEND_BUFFER_SIZE = new PropertyDescriptor.Builder()
        .name("Max Size of Socket Send Buffer")
        .description("The maximum size of the socket send buffer that should be used. This is a suggestion to the Operating System " +
                "to indicate how big the socket buffer should be. If this value is set too low, the buffer may fill up before " +
                "the data can be read, and incoming data will be dropped.")
        .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
        .defaultValue("1 MB")
        .required(true)
        .build();
public static final PropertyDescriptor IDLE_EXPIRATION = new PropertyDescriptor
        .Builder().name("Idle Connection Expiration")
        .description("The amount of time a connection should be held open without being used before closing the connection.")
        .required(true)
        .defaultValue("5 seconds")
        .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
        .build();
// Putting these properties here so sub-classes don't have to redefine them, but they are
// not added to the properties by default since not all processors may need them
public static final AllowableValue TCP_VALUE = new AllowableValue("TCP", "TCP");
public static final AllowableValue UDP_VALUE = new AllowableValue("UDP", "UDP");
public static final PropertyDescriptor PROTOCOL = new PropertyDescriptor
        .Builder().name("Protocol")
        .description("The protocol for communication.")
        .required(true)
        .allowableValues(TCP_VALUE, UDP_VALUE)
        .defaultValue(TCP_VALUE.getValue())
        .build();
public static final PropertyDescriptor MESSAGE_DELIMITER = new PropertyDescriptor.Builder()
        .name("Message Delimiter")
        .description("Specifies the delimiter to use for splitting apart multiple messages within a single FlowFile. "
                + "If not specified, the entire content of the FlowFile will be used as a single message. "
                + "If specified, the contents of the FlowFile will be split on this delimiter and each section "
                + "sent as a separate message. Note that if messages are delimited and some messages for a given FlowFile "
                + "are transferred successfully while others are not, the messages will be split into individual FlowFiles, such that those "
                + "messages that were successfully sent are routed to the 'success' relationship while other messages are sent to the 'failure' "
                + "relationship.")
        .required(false)
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
        .expressionLanguageSupported(true)
        .build();
public static final PropertyDescriptor CHARSET = new PropertyDescriptor.Builder()
        .name("Character Set")
        .description("Specifies the character set of the data being sent.")
        .required(true)
        .defaultValue("UTF-8")
        .addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
        .build();
public static final PropertyDescriptor TIMEOUT = new PropertyDescriptor.Builder()
        .name("Timeout")
        .description("The timeout for connecting to and communicating with the destination. Does not apply to UDP")
        .required(false)
        .defaultValue("10 seconds")
        .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
        .build();
public static final PropertyDescriptor OUTGOING_MESSAGE_DELIMITER = new PropertyDescriptor.Builder()
        .name("Outgoing Message Delimiter")
        .description("Specifies the delimiter to use when sending messages out over the same TCP stream. The delimiter is appended to each FlowFile message "
                + "that is transmitted over the stream so that the receiver can determine when one message ends and the next message begins. Users should "
                + "ensure that the FlowFile content does not contain the delimiter character to avoid errors. In order to use a new line character you can "
                + "enter '\\n'. For a tab character use '\\t'. Finally for a carriage return use '\\r'.")
        .required(true)
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
        .defaultValue("\\n")
        .expressionLanguageSupported(true)
        .build();
public static final PropertyDescriptor CONNECTION_PER_FLOWFILE = new PropertyDescriptor.Builder()
        .name("Connection Per FlowFile")
        .description("Specifies whether to send each FlowFile's content on an individual connection.")
        .required(true)
        .defaultValue("false")
        .allowableValues("true", "false")
        .build();
public static final PropertyDescriptor SSL_CONTEXT_SERVICE = new PropertyDescriptor.Builder()
        .name("SSL Context Service")
        .description("The Controller Service to use in order to obtain an SSL Context. If this property is set, " +
                "messages will be sent over a secure connection.")
        .required(false)
        .identifiesControllerService(SSLContextService.class)
        .build();
public static final Relationship REL_SUCCESS = new Relationship.Builder()
        .name("success")
        .description("FlowFiles that are sent successfully to the destination are sent out this relationship.")
        .build();
public static final Relationship REL_FAILURE = new Relationship.Builder()
        .name("failure")
        .description("FlowFiles that failed to send to the destination are sent out this relationship.")
        .build();
// Assembled once in init(); exposed unmodifiable via the getters below.
private Set<Relationship> relationships;
private List<PropertyDescriptor> descriptors;
// Transit URI for provenance events, built by the subclass in onScheduled.
protected volatile String transitUri;
// Pool of reusable senders, sized to the processor's concurrent task count.
protected volatile BlockingQueue<ChannelSender> senderPool;
protected final BlockingQueue<FlowFileMessageBatch> completeBatches = new LinkedBlockingQueue<>();
protected final Set<FlowFileMessageBatch> activeBatches = Collections.synchronizedSet(new HashSet<FlowFileMessageBatch>());
@Override
protected void init(final ProcessorInitializationContext context) {
    // Assemble the fixed, ordered property list: the common socket properties
    // first, then whatever the concrete processor contributes.
    final List<PropertyDescriptor> props = new ArrayList<>();
    props.add(HOSTNAME);
    props.add(PORT);
    props.add(MAX_SOCKET_SEND_BUFFER_SIZE);
    props.add(IDLE_EXPIRATION);
    props.addAll(getAdditionalProperties());
    this.descriptors = Collections.unmodifiableList(props);

    // Same pattern for relationships: the common success/failure pair plus
    // any sub-class additions.
    final Set<Relationship> rels = new HashSet<>();
    rels.add(REL_SUCCESS);
    rels.add(REL_FAILURE);
    rels.addAll(getAdditionalRelationships());
    this.relationships = Collections.unmodifiableSet(rels);
}
/**
 * Override to provide additional relationships for the processor.
 *
 * @return a list of relationships beyond the common success/failure pair;
 *         the default implementation contributes none
 */
protected List<Relationship> getAdditionalRelationships() {
    // Collections.emptyList() is type-safe; the raw EMPTY_LIST constant
    // caused an unchecked-conversion warning.
    return Collections.emptyList();
}
/**
 * Override to provide additional properties for the processor.
 *
 * @return a list of property descriptors beyond the common socket properties;
 *         the default implementation contributes none
 */
protected List<PropertyDescriptor> getAdditionalProperties() {
    // Collections.emptyList() is type-safe; the raw EMPTY_LIST constant
    // caused an unchecked-conversion warning.
    return Collections.emptyList();
}
@Override
public final Set<Relationship> getRelationships() {
    // Returns the unmodifiable set assembled once in init().
    return relationships;
}
@Override
public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
    // Returns the unmodifiable list assembled once in init().
    return this.descriptors;
}
@OnScheduled
public void onScheduled(final ProcessContext context) throws IOException {
    // One pooled slot per concurrent task; the actual senders are created
    // lazily in onTrigger via acquireSender().
    senderPool = new LinkedBlockingQueue<>(context.getMaxConcurrentTasks());
    transitUri = createTransitUri(context);
}
@OnStopped
public void closeSenders() {
    if (senderPool == null) {
        return;
    }
    // Drain the pool, closing each sender as it is removed.
    ChannelSender sender;
    while ((sender = senderPool.poll()) != null) {
        sender.close();
    }
}
/**
 * Sub-classes construct a transit uri for provenance events. Called from the @OnScheduled
 * method of this class and stored in {@code transitUri} for use when reporting sends.
 *
 * @param context the current context
 *
 * @return the transit uri
 */
protected abstract String createTransitUri(final ProcessContext context);
/**
 * Sub-classes create a ChannelSender given a context. Invoked by acquireSender()
 * when the pool is empty.
 *
 * @param context the current context
 * @return an implementation of ChannelSender
 * @throws IOException if an error occurs creating the ChannelSender
 */
protected abstract ChannelSender createSender(final ProcessContext context) throws IOException;
/**
 * Close any senders that haven't been active within the given threshold.
 * Non-idle senders are returned to the pool; if the pool is full they are closed.
 *
 * @param idleThreshold the threshold (ms) after which a sender counts as idle
 */
protected void pruneIdleSenders(final long idleThreshold) {
    final long now = System.currentTimeMillis();

    // Empty the pool, then sort each sender into "idle" (close) or "active" (keep).
    final List<ChannelSender> drained = new ArrayList<>();
    senderPool.drainTo(drained);

    final List<ChannelSender> stillActive = new ArrayList<>();
    for (final ChannelSender candidate : drained) {
        if (now > candidate.getLastUsed() + idleThreshold) {
            getLogger().debug("Closing idle connection...");
            candidate.close();
        } else {
            stillActive.add(candidate);
        }
    }

    // Re-queue the active senders; close any that no longer fit in the pool.
    for (final ChannelSender keeper : stillActive) {
        if (!senderPool.offer(keeper)) {
            keeper.close();
        }
    }
}
/**
 * Helper for sub-classes to create a sender.
 *
 * @param protocol the protocol for the sender (UDP or TCP)
 * @param host the host to send to
 * @param port the port to send to
 * @param timeout the timeout for connecting and communicating over the channel
 * @param maxSendBufferSize the maximum size of the socket send buffer
 * @param sslContext an SSLContext, or null if not using SSL
 *
 * @return an opened ChannelSender based on the given properties
 *
 * @throws IOException if an error occurs creating or opening the sender
 */
protected ChannelSender createSender(final String protocol,
                                     final String host,
                                     final int port,
                                     final int timeout,
                                     final int maxSendBufferSize,
                                     final SSLContext sslContext) throws IOException {
    final ChannelSender sender;
    if (protocol.equals(UDP_VALUE.getValue())) {
        sender = new DatagramChannelSender(host, port, maxSendBufferSize, getLogger());
    } else if (sslContext != null) {
        // an SSLContextService was provided, so make a secure sender
        sender = new SSLSocketChannelSender(host, port, maxSendBufferSize, sslContext, getLogger());
    } else {
        sender = new SocketChannelSender(host, port, maxSendBufferSize, getLogger());
    }

    sender.setTimeout(timeout);
    sender.open();
    return sender;
}
/**
 * Helper method to acquire an available ChannelSender from the pool. If the pool is
 * empty a new sender is created; if that fails the FlowFile is routed to failure,
 * the session is committed and the context yielded.
 *
 * @param context the current process context
 * @param session the current process session
 * @param flowFile the FlowFile being processed in this session
 *
 * @return the acquired sender, or null if none is available and a new one cannot be created
 */
protected ChannelSender acquireSender(final ProcessContext context, final ProcessSession session, final FlowFile flowFile) {
    final ChannelSender pooled = senderPool.poll();
    if (pooled != null) {
        return pooled;
    }

    try {
        getLogger().debug("No available connections, creating a new one...");
        return createSender(context);
    } catch (IOException e) {
        getLogger().error("No available connections, and unable to create a new one, transferring {} to failure",
                new Object[]{flowFile}, e);
        session.transfer(flowFile, REL_FAILURE);
        session.commit();
        context.yield();
        return null;
    }
}
/**
 * Helper method to relinquish the ChannelSender back to the pool. If the sender is
 * disconnected or the pool is full then the sender is closed and discarded.
 *
 * @param sender the sender to return or close; null is ignored
 */
protected void relinquishSender(final ChannelSender sender) {
    if (sender == null) {
        return;
    }

    if (!sender.isConnected()) {
        // probably already closed here, but quietly close anyway to be safe
        sender.close();
        return;
    }

    // connection still open: try to return it to the pool; if the pool is
    // already full, close the sender instead.
    if (!senderPool.offer(sender)) {
        sender.close();
    }
}
/**
 * Represents a range of messages from a FlowFile, identified by its start and
 * end offsets. Instances are immutable.
 */
protected static class Range {

    private final long start;
    private final long end;

    public Range(final long start, final long end) {
        this.start = start;
        this.end = end;
    }

    public long getStart() {
        return start;
    }

    public long getEnd() {
        return end;
    }

    @Override
    public String toString() {
        return String.format("Range[%d-%d]", start, end);
    }
}
/**
 * A wrapper to hold the ranges of a FlowFile that were successful and ranges that failed, and then
 * transfer those ranges appropriately.
 */
protected class FlowFileMessageBatch {

    private final ProcessSession session;
    private final FlowFile flowFile;
    private final long startTime = System.nanoTime();

    private final List<Range> successfulRanges = new ArrayList<>();
    private final List<Range> failedRanges = new ArrayList<>();

    private Exception lastFailureReason;
    // -1 means "unknown"; set via setNumMessages() once the message count is known
    private long numMessages = -1L;
    private long completeTime = 0L;
    private boolean canceled = false;

    public FlowFileMessageBatch(final ProcessSession session, final FlowFile flowFile) {
        this.session = session;
        this.flowFile = flowFile;
    }

    /**
     * Completes the session if the batch is already finished; otherwise marks the
     * batch canceled, rolls back the session and discards all recorded ranges.
     */
    public synchronized void cancelOrComplete() {
        if (isComplete()) {
            completeSession();
            return;
        }

        this.canceled = true;
        session.rollback();
        successfulRanges.clear();
        failedRanges.clear();
    }

    /**
     * Records a range of the FlowFile content that was sent successfully.
     *
     * @param start start offset of the range
     * @param end end offset of the range
     */
    public synchronized void addSuccessfulRange(final long start, final long end) {
        if (canceled) {
            return;
        }
        successfulRanges.add(new Range(start, end));
        markCompleteIfFinished();
    }

    /**
     * Records a range of the FlowFile content that failed to send.
     *
     * @param start start offset of the range
     * @param end end offset of the range
     * @param e the failure cause; remembered as the last failure reason for logging
     */
    public synchronized void addFailedRange(final long start, final long end, final Exception e) {
        if (canceled) {
            return;
        }
        failedRanges.add(new Range(start, end));
        lastFailureReason = e;
        markCompleteIfFinished();
    }

    // Complete when the message count is known and every message has landed in one
    // of the two range lists. Always false once the batch has been canceled.
    private boolean isComplete() {
        return !canceled && (numMessages > -1) && (successfulRanges.size() + failedRanges.size() >= numMessages);
    }

    /**
     * Sets the total number of messages in this batch; the batch cannot complete
     * until this has been called.
     *
     * @param msgCount total message count for the FlowFile
     */
    public synchronized void setNumMessages(final long msgCount) {
        this.numMessages = msgCount;
        markCompleteIfFinished();
    }

    // Completion bookkeeping previously duplicated verbatim in addSuccessfulRange,
    // addFailedRange and setNumMessages: when every message is accounted for, move
    // this batch from the active set to the completed queue and stamp the time.
    private void markCompleteIfFinished() {
        if (isComplete()) {
            activeBatches.remove(this);
            completeBatches.add(this);
            completeTime = System.nanoTime();
        }
    }

    // Transfers the given ranges to the given relationship, merging contiguous
    // ranges so each merged run becomes a single child FlowFile.
    private void transferRanges(final List<Range> ranges, final Relationship relationship) {
        Collections.sort(ranges, new Comparator<Range>() {
            @Override
            public int compare(final Range o1, final Range o2) {
                return Long.compare(o1.getStart(), o2.getStart());
            }
        });

        for (int i = 0; i < ranges.size(); i++) {
            Range range = ranges.get(i);
            int count = 1;

            while (i + 1 < ranges.size()) {
                // Check if the next range in the List continues where this one left off.
                final Range nextRange = ranges.get(i + 1);
                if (nextRange.getStart() == range.getEnd()) {
                    // Two contiguous ranges in a row; combine them into a single Range.
                    range = new Range(range.getStart(), nextRange.getEnd());
                    count++;
                    i++;
                } else {
                    break;
                }
            }

            // Create a child FlowFile for this (possibly merged) range.
            FlowFile child = session.clone(flowFile, range.getStart(), range.getEnd() - range.getStart());
            if (relationship == REL_SUCCESS) {
                session.getProvenanceReporter().send(child, transitUri, "Sent " + count + " messages");
                session.transfer(child, relationship);
            } else {
                child = session.penalize(child);
                session.transfer(child, relationship);
            }
        }
    }

    /**
     * Routes the FlowFile (or per-range children) based on the recorded outcomes
     * and commits the session. No-op if the batch was canceled.
     */
    public synchronized void completeSession() {
        if (canceled) {
            return;
        }

        if (successfulRanges.isEmpty() && failedRanges.isEmpty()) {
            // nothing was sent at all; treat the empty batch as a success
            getLogger().info("Completed processing {} but sent 0 FlowFiles", new Object[] {flowFile});
            session.transfer(flowFile, REL_SUCCESS);
            session.commit();
            return;
        }

        if (successfulRanges.isEmpty()) {
            // everything failed: penalize and route the whole FlowFile to failure
            getLogger().error("Failed to send {}; routing to 'failure'; last failure reason reported was {};", new Object[] {flowFile, lastFailureReason});
            final FlowFile penalizedFlowFile = session.penalize(flowFile);
            session.transfer(penalizedFlowFile, REL_FAILURE);
            session.commit();
            return;
        }

        if (failedRanges.isEmpty()) {
            // everything succeeded: report provenance with the total transfer time
            final long transferMillis = TimeUnit.NANOSECONDS.toMillis(completeTime - startTime);
            session.getProvenanceReporter().send(flowFile, transitUri, "Sent " + successfulRanges.size() + " messages;", transferMillis);
            session.transfer(flowFile, REL_SUCCESS);
            getLogger().info("Successfully sent {} messages for {} in {} millis", new Object[] {successfulRanges.size(), flowFile, transferMillis});
            session.commit();
            return;
        }

        // At this point, the successful ranges is not empty and the failed ranges is not empty. This indicates that some messages made their way
        // successfully and some failed. We will address this by splitting apart the source FlowFile into children and sending the successful messages to 'success'
        // and the failed messages to 'failure'.
        transferRanges(successfulRanges, REL_SUCCESS);
        transferRanges(failedRanges, REL_FAILURE);
        session.remove(flowFile);
        getLogger().error("Successfully sent {} messages, but failed to send {} messages; the last error received was {}",
                new Object[] {successfulRanges.size(), failedRanges.size(), lastFailureReason});
        session.commit();
    }
}
/**
 * Gets the current value of the "Outgoing Message Delimiter" property and parses
 * the escape sequences users type ({@code \n}, {@code \r}, {@code \t}) into the
 * corresponding control characters.
 *
 * @param context the current process context
 * @param flowFile the FlowFile being processed (used for expression evaluation)
 *
 * @return the resolved delimiter, or null if the property has no value
 */
protected String getOutgoingMessageDelimiter(final ProcessContext context, final FlowFile flowFile) {
    final String raw = context.getProperty(OUTGOING_MESSAGE_DELIMITER).evaluateAttributeExpressions(flowFile).getValue();
    if (raw == null) {
        return null;
    }
    return raw.replace("\\n", "\n").replace("\\r", "\r").replace("\\t", "\t");
}
}
| |
package com.github.TKnudsen.ComplexDataObject.model.transformations.dimensionalityReduction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.function.Function;
import com.github.TKnudsen.ComplexDataObject.data.features.numericalData.NumericalFeatureVector;
import com.github.TKnudsen.ComplexDataObject.model.transformations.descriptors.IDescriptor;
import com.github.TKnudsen.ComplexDataObject.model.transformations.normalization.LinearNormalizationFunction;
import com.github.TKnudsen.ComplexDataObject.model.transformations.normalization.NormalizationFunction;
/**
* <p>
* The dimensionality reduction pipeline helps to streamline dim-red approaches.
* Embedded is the following:
*
* - an upstream descriptor which transforms the objects of type X into
* NumericalFeatureVector
*
* - a IDimensionalityReduction routine
*
* - some useful downstream functionality to look-up X in the low-dimensional
* space, as well as in the relative low-dimensional space.
*
* <p>
* Copyright: Copyright (c) 2016-2019 Juergen Bernard,
* https://github.com/TKnudsen/ComplexDataObject
* </p>
*
* @author Juergen Bernard
* @version 1.01
*/
public class DimensionalityReductionPipeline<X> {

    // upstream descriptor: transforms objects of type X into NumericalFeatureVectors
    private final IDescriptor<X, NumericalFeatureVector> descriptor;
    private final Collection<X> data;

    private IDimensionalityReduction<NumericalFeatureVector, NumericalFeatureVector> dimensionalityReduction;

    // lazily built in getFeatureVectorsMap(): X -> high-dimensional feature vector
    private Map<X, NumericalFeatureVector> dataToFeatureVectorsMapping;
    // high-dim -> low-dim vector; populated by setDimensionalityReduction()
    private Map<NumericalFeatureVector, NumericalFeatureVector> featureVectorsToLowDMapping;

    // relative coordinates per dimension
    private SortedMap<Integer, Function<X, Double>> lowDimRelativeWorldCoordinates;

    public DimensionalityReductionPipeline(Collection<X> data, IDescriptor<X, NumericalFeatureVector> descriptor) {
        this.data = data;
        this.descriptor = descriptor;
    }

    /**
     * pre-processing: transform x into NumericalFeatureVectors (lazily, on first call)
     *
     * @return mapping from every data element to its high-dimensional feature vector
     */
    public Map<X, NumericalFeatureVector> getFeatureVectorsMap() {
        if (dataToFeatureVectorsMapping == null) {
            dataToFeatureVectorsMapping = new HashMap<X, NumericalFeatureVector>();

            List<X> dataList = new ArrayList<>(data);
            // NOTE(review): assumes descriptor.transform preserves input order and
            // size — confirm against the IDescriptor contract
            List<NumericalFeatureVector> transform = descriptor.transform(dataList);
            for (int i = 0; i < transform.size(); i++)
                dataToFeatureVectorsMapping.put(dataList.get(i), transform.get(i));
        }

        return dataToFeatureVectorsMapping;
    }

    /**
     * Implementations of IDimensionalityReduction either receive X through the
     * constructor or via transform(X). Also, the particular
     * IDimensionalityReduction may not be known in this context. Thus, it is
     * necessary to receive the IDimensionalityReduction routine from external.
     *
     * Note: the data X is used (instantly) to make the transformation!
     *
     * @param dimensionalityReduction the reduction routine to apply
     */
    public void setDimensionalityReduction(
            IDimensionalityReduction<NumericalFeatureVector, NumericalFeatureVector> dimensionalityReduction) {

        // invalidate caches derived from any previous reduction
        featureVectorsToLowDMapping = null;
        lowDimRelativeWorldCoordinates = null;

        // high-dim feature vectors
        Map<X, NumericalFeatureVector> featureVectorsMap = getFeatureVectorsMap();
        List<NumericalFeatureVector> input = new ArrayList<>(featureVectorsMap.values());

        // transform (refers to the mapping, not necessarily the calculation)
        dimensionalityReduction.transform(input);

        // store result
        featureVectorsToLowDMapping = dimensionalityReduction.getMapping();

        this.dimensionalityReduction = dimensionalityReduction;
    }

    /**
     * receive the low-dim representation of X
     *
     * @param x the data element
     * @return the low-dimensional feature vector, or null if it cannot be derived
     */
    public NumericalFeatureVector getLowDimFeatureVector(X x) {
        // the maps are calculated in a lazy way. if they still produce no output for X,
        // the data element may be unknown (not part of the dim-red process). In such a
        // case the dim red model may be able to transform the new data element.
        if (getFeatureVectorsMap() == null || getFeatureVectorsMap().get(x) == null) {
            if (dimensionalityReduction != null) {
                List<NumericalFeatureVector> fv = descriptor.transform(x);
                List<NumericalFeatureVector> transform = dimensionalityReduction.transform(fv);
                if (transform.size() == 1)
                    return transform.get(0);
            }
            return null;
        }

        // fix: the original dereferenced featureVectorsToLowDMapping unconditionally,
        // throwing a NullPointerException when setDimensionalityReduction() had not
        // been called yet for a known element
        if (featureVectorsToLowDMapping == null)
            return null;

        return featureVectorsToLowDMapping.get(getFeatureVectorsMap().get(x));
    }

    /**
     * provides relative coordinates for the low-dimensional output space. Can be
     * used for visualization purposes (position/color mappings), etc.
     *
     * @param x the data element
     * @param dim the output-space dimension to query
     * @return the normalized coordinate, or 0.0 if unavailable
     */
    public Double getLowDimRelativeWorldCoordinates(X x, int dim) {
        if (lowDimRelativeWorldCoordinates == null)
            refreshLowDimRelativeWorldCoordinates();

        // refresh may leave the map null when no reduction has been applied yet
        if (lowDimRelativeWorldCoordinates == null || !lowDimRelativeWorldCoordinates.containsKey(dim))
            return 0.0;

        return lowDimRelativeWorldCoordinates.get(dim).apply(x);
    }

    /**
     * provides relative coordinates for the low-dimensional output space. Can be
     * used for visualization purposes (position/color mappings), etc.
     *
     * @param x the data element
     * @return one normalized coordinate per output dimension; empty if x has no
     *         low-dimensional representation
     */
    public Double[] getLowDimRelativeWorldCoordinates(X x) {
        if (lowDimRelativeWorldCoordinates == null)
            refreshLowDimRelativeWorldCoordinates();

        NumericalFeatureVector lowDimFeatureVector = getLowDimFeatureVector(x);
        // guard: unknown element or no reduction applied -> no coordinates
        // (the original threw a NullPointerException here)
        if (lowDimFeatureVector == null)
            return new Double[0];

        int dim = lowDimFeatureVector.getDimensions();
        Double[] relativeValues = new Double[dim];
        for (int i = 0; i < dim; i++)
            relativeValues[i] = getLowDimRelativeWorldCoordinates(x, i);
        return relativeValues;
    }

    // Rebuilds the per-dimension normalization functions from the current
    // low-dimensional mapping; leaves the map null if no mapping exists yet.
    private void refreshLowDimRelativeWorldCoordinates() {
        lowDimRelativeWorldCoordinates = null;

        if (featureVectorsToLowDMapping == null)
            return;

        lowDimRelativeWorldCoordinates = new TreeMap<>();

        // collect all output values per dimension
        SortedMap<Integer, Collection<Double>> dimensionsOutput = new TreeMap<Integer, Collection<Double>>();
        for (X x : data) {
            NumericalFeatureVector fv = getLowDimFeatureVector(x);
            if (fv != null) {
                for (int i = 0; i < fv.getDimensions(); i++) {
                    if (dimensionsOutput.get(i) == null)
                        dimensionsOutput.put(i, new ArrayList<>());
                    dimensionsOutput.get(i).add(fv.get(i));
                }
            }
        }

        // one linear-normalization function per non-empty dimension
        for (Integer dim : dimensionsOutput.keySet()) {
            Collection<Double> dimOutput = dimensionsOutput.get(dim);
            if (dimOutput.size() > 0) {
                Function<X, Double> function = new Function<X, Double>() {
                    NormalizationFunction normalization = new LinearNormalizationFunction(dimOutput, true);

                    @Override
                    public Double apply(X t) {
                        NumericalFeatureVector fv = getLowDimFeatureVector(t);
                        if (fv == null)
                            return 0.0;
                        // fix: index dim is only valid while dimensions > dim; the
                        // original guard used '< dim' and read one past the end
                        // when dimensions == dim
                        if (fv.getDimensions() <= dim)
                            return 0.0;
                        return normalization.apply(fv.get(dim)).doubleValue();
                    }
                };
                lowDimRelativeWorldCoordinates.put(dim, function);
            }
        }
    }
}
| |
package com.example.medroad;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.Method;
import java.text.DecimalFormat;
import java.text.FieldPosition;
import java.text.Format;
import java.text.ParsePosition;
import java.util.Set;
import android.app.AlertDialog;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothManager;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.preference.PreferenceManager;
import android.support.v4.app.Fragment;
import android.support.v7.app.ActionBarActivity;
import android.text.Html;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import ca.ammi.medlib.EmotionEcg;
import com.androidplot.xy.BoundaryMode;
import com.androidplot.xy.LineAndPointFormatter;
import com.androidplot.xy.PointLabelFormatter;
import com.androidplot.xy.SimpleXYSeries;
import com.androidplot.xy.XYPlot;
import com.androidplot.xy.XYStepMode;
public class MainActivity extends ActionBarActivity {
private static final String TAG = "EcgTester";
private SharedPreferences sharedPreferences;
// keys for the values stored in SharedPreferences and read back in onResume()
public static final String PREF_ECG_NAME = "PREF_ECG_NAME";
public static final String PREF_SIGNAL_RESOLUTION = "PREF_SIGNAL_RESOLUTION";
public static final String PREF_HIGH_PASS_FILTER = "PREF_HIGH_PASS_FILTER";
public static final String PREF_SAMPLING_FREQUENCY = "PREF_SAMPLING_FREQUENCY";
public static final String PREF_Y_RANGE = "PREF_Y_RANGE";
public static final String PREF_FILE_NAME = "PREF_FILE_NAME";
public static final String PREF_SAVE_FILE = "PREF_SAVE_FILE";
// UI widgets, bound in setupViewStuff()
private Button connectButton;
private TextView ecgNameTextView, pulseDataTextView;
private View mainView = null;
// Bluetooth pairing / device state
public String [] btPairedNames = null;
public BluetoothDevice [] btPairedDevices = null;
public static BluetoothManager bluetoothManager;
public static BluetoothAdapter mBluetoothAdapter;
private static final int REQUEST_ENABLE_BT = 1;
private BluetoothDevice ecgDevice = null;
private EmotionEcg emotionEcg = null;
// settings values, refreshed from SharedPreferences in onResume()
public String ECG_NAME = ""; // name of ecg device
public int SIGNAL_RESOLUTION = 0; // note that signal resolution is actually /1000
public int HIGH_PASS_FILTER = 0;
public int SAMPLING_FREQUENCY = 1;
public int Y_RANGE = 7000;
public int useYrange = 7000; // Y_RANGE scaled in onResume() for certain resolutions/filters
public boolean SAVE_FILE = false;
public String FILE_NAME = "";
public int graphMax = 1000; //1000 for portrait 2000 for landscape
public int GRAPH_MAX_PORTRAIT = 1000; //1000 for portrait
public int GRAPH_MAX_LANDSCAPE = 2000; //2000 for landscape
// preference defaults (stored as strings because the prefs UI stores strings)
public final String DEFAULT_SIGNAL_RESOLUTION = "100";
public final String DEFAULT_HIGH_PASS_FILTER = "1";
public final String DEFAULT_SAMPLING_FREQUENCY = "100";
public final String DEFAULT_Y_RANGE = "7000";
public final String DEFAULT_FILE_NAME = "ecg-data";
// plotting counters, recalculated in ecgPlotReinit()
private int maxBeforeAdd = 75;
private int sampleCount = 0;
private int howOftenRedraw = 1;
private int[] zeroEcgSample = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
private int lastPacketNum = -1;
// connection/reading state flags driving the connect button appearance
private boolean connected = false;
private boolean connecting = false;
private boolean reading = false;
// reflection handle passed to EmotionEcg; resolved in onCreate()
private Method handleEcgDataMethod = null;
// file-output state (external storage)
public boolean okToWrite = false;
private String filename = "";
private boolean writeFile = false;
private FileOutputStream dataOut = null;
private Configuration currentConfig = null;
// androidplot widgets/state for the ECG trace
private XYPlot ecgPlot;
private SimpleXYSeries ecgSeries = null;
LineAndPointFormatter ecgFormat;
@Override
protected void onCreate(Bundle savedInstanceState) {
Log.i(TAG,"inside create");
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
/*
if (savedInstanceState == null) {
PlaceholderFragment newf = new PlaceholderFragment();
Log.i(TAG,"setting main view");
mainView = newf.getView();
getSupportFragmentManager().beginTransaction()
.add(R.id.container, newf).commit();
//getSupportFragmentManager().beginTransaction()
//.add(R.id.container, new PlaceholderFragment()).commit();
} else {
Log.i(TAG,"saved instance state - how to set mainView");
}
*/
// Initializes Bluetooth adapter.
bluetoothManager =
(BluetoothManager) getSystemService(Context.BLUETOOTH_SERVICE);
mBluetoothAdapter = bluetoothManager.getAdapter();
checkBluetooth();
// external storage is needed for optional ECG data logging (SAVE_FILE)
if (isExternalStorageWritable()) {
okToWrite = true;
} else {
okToWrite = false;
Toast.makeText(this, "external storage not writable ", Toast.LENGTH_LONG).show();
Log.e( TAG, "Could not write external storage");
}
// pick the graph width appropriate for the current orientation
currentConfig = getResources().getConfiguration();
if (currentConfig.orientation == Configuration.ORIENTATION_PORTRAIT) {
graphMax = GRAPH_MAX_PORTRAIT;
} else if (currentConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) {
graphMax = GRAPH_MAX_LANDSCAPE;
}
setupViewStuff();
ecgPlotInit();
// Resolve handleEcgSampleData(EmotionEcg.EcgData) reflectively so the ECG
// library can call back into this activity with each sample packet.
try { // get method for ecg to pass to ecg class
Class[] parameterTypes = new Class[1];
parameterTypes[0] = EmotionEcg.EcgData.class;
handleEcgDataMethod = MainActivity.class.getDeclaredMethod("handleEcgSampleData", parameterTypes);
Class[] ptypes = handleEcgDataMethod.getParameterTypes();
String sp = " ";
for (int j=0; j<ptypes.length; j++) {
sp = sp + ptypes[j] + " ";
}
Log.i(TAG,"setting ecg method " + handleEcgDataMethod.getName() + sp);
} catch (NoSuchMethodException e) {
// without the callback method the ECG stream cannot be delivered
Log.i(TAG,"no ecg method " + e);
handleEcgDataMethod = null;
}
}
// Binds the text views and connect button, then paints the button to reflect
// the current connection state (red=stop, gray=connecting, green=start).
private void setupViewStuff() {
    Log.i(TAG,"setup view stuff " + connected + " " + reading);

    ecgNameTextView = (TextView) findViewById(R.id.ecgNameTextView);
    pulseDataTextView = (TextView) findViewById(R.id.pulseDataTextView);
    ecgNameTextView.setText("");
    pulseDataTextView.setText("");

    connectButton = (Button) findViewById(R.id.connectButton);
    if (connected) {
        // active session: red "stop" button
        connectButton.setText(getString(R.string.stop_idle));
        connectButton.setBackgroundColor(Color.RED);
        connectButton.setEnabled(true);
    } else if (connecting) {
        // connection in progress: disabled until it resolves
        connectButton.setText(getString(R.string.start));
        connectButton.setBackgroundColor(Color.GRAY);
        connectButton.setEnabled(false);
    } else {
        // idle: green "start" button
        connectButton.setText(getString(R.string.start));
        connectButton.setBackgroundColor(Color.GREEN);
        connectButton.setEnabled(true);
    }
}
// One-time plot initialization: build and configure the series formatter,
// create the series (implicit X values = sample index), then delegate the
// per-layout configuration to ecgPlotSetup().
private void ecgPlotInit() {
    Log.i(TAG,"ecgPlotInit");

    // configure the formatter from XML, then switch point labels back off
    ecgFormat = new LineAndPointFormatter();
    ecgFormat.setPointLabelFormatter(new PointLabelFormatter());
    ecgFormat.configure(getApplicationContext(), R.xml.line_point_formatter_with_plf1);
    ecgFormat.setPointLabelFormatter(null);

    ecgSeries = new SimpleXYSeries("");
    ecgSeries.useImplicitXVals();

    ecgPlotSetup();
}
// Binds the XYPlot widget from the current layout and applies the static plot
// styling (blank domain labels, white grid so it is invisible). Called after
// every setContentView (initial create and orientation changes).
private void ecgPlotSetup() {
ecgPlot = (XYPlot) findViewById(R.id.ecgPlot);
// add a new series' to the xyplot:
ecgPlot.addSeries(ecgSeries, ecgFormat);
// ecgPlot.addSeries(ecgSeries, new LineAndPointFormatter(Color.rgb(100, 100, 200), Color.BLACK, null));
//ecgPlot.setTicksPerRangeLabel(10);
ecgPlot.setRangeValueFormat(new DecimalFormat("#"));
ecgPlot.getGraphWidget().setDomainLabelOrientation(-45);
//ecgPlot.setTicksPerDomainLabel(10);
//ecgPlot.setDomainValueFormat(new DecimalFormat("#"));
// make domain legend blank (better than squished or meaningless numbers)
ecgPlot.setDomainValueFormat(new Format() {
@Override
public StringBuffer format(Object obj, StringBuffer toAppendTo, FieldPosition pos) {
return new StringBuffer("");
}
@Override
public Object parseObject(String source, ParsePosition pos) {
// parsing is never needed for display-only labels
return null;
}
});
// ecgPlot.getLayoutManager().remove(ecgPlot.getDomainLabelWidget());
// ecgPlot.getLayoutManager().remove(ecgPlot.getRangeLabelWidget());
//ecgPlot.disableAllMarkup();
//ecgPlot.removeMarkers();
//make the background lines disappear
ecgPlot.getGraphWidget().getGridBackgroundPaint().setColor(Color.WHITE);
ecgPlot.getGraphWidget().getDomainGridLinePaint().setColor(Color.WHITE);
ecgPlot.getGraphWidget().getRangeGridLinePaint().setColor(Color.WHITE);
// this stuff might
ecgPlotReinit();
}
// Recomputes the settings-dependent plot parameters (redraw rate, samples per
// screen, domain step, Y range). Called whenever graphMax or the sampling
// settings change.
private void ecgPlotReinit() {
Log.i(TAG,"ecgPlotReinit " + graphMax);
// NOTE(review): integer division — SAMPLING_FREQUENCY below 100 yields
// howOftenRedraw == 0; confirm downstream code tolerates that.
howOftenRedraw = SAMPLING_FREQUENCY / 100; //keep redraw rate same even with different sampling rate
maxBeforeAdd = (graphMax / 1000) * SAMPLING_FREQUENCY - 25;
// Domain
// ensure that data still displays the same time wise even with higher sampling
ecgPlot.setDomainStep(XYStepMode.SUBDIVIDE, graphMax/SAMPLING_FREQUENCY);
//ecgPlot.setDomainStep(XYStepMode.SUBDIVIDE, 1000/SAMPLING_FREQUENCY);
//Range
//ecgPlot.setRangeBoundaries(-7000, 7000, BoundaryMode.FIXED);
// symmetric range around zero, using the scaled useYrange from onResume()
ecgPlot.setRangeBoundaries(- useYrange, useYrange, BoundaryMode.FIXED);
//ecgPlot.setRangeBoundaries(- Y_RANGE, Y_RANGE, BoundaryMode.FIXED);
ecgPlot.setRangeStepValue(11);
}
@Override
protected void onResume() {
super.onResume();
//recheck bluetooth status in case turned off ? is this really needed?
checkBluetooth();
Log.i(TAG,"resuming " + connected + " " + reading);
// Reload all settings; the preferences UI stores numeric values as strings.
// NOTE(review): Integer.parseInt will throw on a malformed stored value —
// confirm the preferences screen constrains input.
sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
ECG_NAME = sharedPreferences.getString(PREF_ECG_NAME,"");
SIGNAL_RESOLUTION = Integer.parseInt(sharedPreferences.getString(PREF_SIGNAL_RESOLUTION,DEFAULT_SIGNAL_RESOLUTION));
HIGH_PASS_FILTER = Integer.parseInt(sharedPreferences.getString(PREF_HIGH_PASS_FILTER,DEFAULT_HIGH_PASS_FILTER));
SAMPLING_FREQUENCY = Integer.parseInt(sharedPreferences.getString(PREF_SAMPLING_FREQUENCY,DEFAULT_SAMPLING_FREQUENCY));
Y_RANGE = Integer.parseInt(sharedPreferences.getString(PREF_Y_RANGE,DEFAULT_Y_RANGE));
FILE_NAME = sharedPreferences.getString(PREF_FILE_NAME, DEFAULT_FILE_NAME);
SAVE_FILE = sharedPreferences.getBoolean(PREF_SAVE_FILE,false);
// scale the display range for particular resolution/filter settings
// (empirical factors — presumably tuned to the device's signal amplitude)
useYrange = Y_RANGE;
if (SIGNAL_RESOLUTION == 20) { //(SIGNAL_RESOLUTION == 100) {
useYrange = (int) (useYrange * 6.0f); //(useYrange * .2f);
}
if (HIGH_PASS_FILTER == 10) {
useYrange = (int) (useYrange * .6f);
}
Log.i(TAG,"ecg " + ECG_NAME + "\n"
+ " signal resolution " + SIGNAL_RESOLUTION + "\n"
+ " high pass filter " + HIGH_PASS_FILTER + "\n"
+ " sampling frequency " + SAMPLING_FREQUENCY + "\n"
+ " yrange " + Y_RANGE + " " + useYrange + "\n"
+ " " + SAVE_FILE + " " + FILE_NAME);
// only rescan paired devices / refresh the header while not mid-session
if (!connected) {
setPaired();
}
if (!reading) {
setEcgInfo();
// ecgNameTextView.setText(ECG_NAME + " " + (SIGNAL_RESOLUTION/100f) + " muV/count "
// + HIGH_PASS_FILTER + " Hz " + SAMPLING_FREQUENCY + " Hz" );
}
}
// Shows the configured device name and signal settings in the header text view.
private void setEcgInfo() {
/*
if (currentConfig.orientation == Configuration.ORIENTATION_PORTRAIT) {
ecgNameTextView.setText(ECG_NAME + " SR: " + (SIGNAL_RESOLUTION/100f) + " muV/count HPF: "
+ HIGH_PASS_FILTER + " Hz SF: " + SAMPLING_FREQUENCY + " Hz" );
} else if (currentConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) {
ecgNameTextView.setText(ECG_NAME + " SR: " + (SIGNAL_RESOLUTION/100f) + " muV/count "
+ "\n" + "HPF: " + HIGH_PASS_FILTER + " Hz SF: " + SAMPLING_FREQUENCY + " Hz" );
}*/
// same two-line format is used in both orientations
ecgNameTextView.setText(ECG_NAME + " SR: " + (SIGNAL_RESOLUTION/100f) + " muV/count "
+ "\n" + "HPF: " + HIGH_PASS_FILTER + " Hz SF: " + SAMPLING_FREQUENCY + " Hz" );
//ecgNameTextView.setText(ECG_NAME + " " + (SIGNAL_RESOLUTION/100f) + " muV/count "
// + "\n" + HIGH_PASS_FILTER + " Hz " + SAMPLING_FREQUENCY + " Hz" );
}
// Bluetooth stuff
/**
 * Ensures Bluetooth is enabled on the device. If Bluetooth is not currently enabled,
 * fires an intent to display a dialog asking the user to grant permission to enable it.
 * The result arrives via onActivityResult with REQUEST_ENABLE_BT.
 */
private void checkBluetooth() {
    // Fix: the original tested isEnabled() twice in nested ifs (redundant).
    // Also guard against mBluetoothAdapter being null — getAdapter() returns
    // null on devices without Bluetooth support.
    if (mBluetoothAdapter != null && !mBluetoothAdapter.isEnabled()) {
        Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
        startActivityForResult(enableBtIntent, REQUEST_ENABLE_BT);
    }
}
/**
 * Scans the bonded Bluetooth devices for the one whose name matches the
 * configured ECG_NAME and (re)creates the EmotionEcg wrapper for it.
 * Leaves ecgDevice/emotionEcg null when no match is found.
 */
private void setPaired() {
    // reset devices in case something changed
    ecgDevice = null;
    emotionEcg = null;

    Set<BluetoothDevice> pairedDevices = mBluetoothAdapter.getBondedDevices();
    // Loop through paired devices looking for the configured ECG monitor.
    for (BluetoothDevice device : pairedDevices) {
        // Fix: device.getName() may return null (e.g. name not yet fetched),
        // so compare with the non-null configured name on the left to avoid
        // a NullPointerException.
        if (ECG_NAME.equals(device.getName())) {
            ecgDevice = device;
            emotionEcg = new EmotionEcg(ecgDevice, ecgHandler, handleEcgDataMethod, this);
            //emotionEcg.setSeries(ecgSeries);
            //emotionEcg.setPlot(ecgPlot);
        }
    }
}
/** Inflates the action-bar menu (settings and help items). */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
/**
 * Routes action-bar clicks: "settings" opens the preferences activity, "help"
 * shows the help file. Home/Up is handled automatically by the framework when a
 * parent activity is declared in AndroidManifest.xml.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    final int itemId = item.getItemId();

    if (itemId == R.id.action_settings) {
        startActivity(new Intent(this, SensorReadPreferenceActivity.class));
        return true;
    }
    if (itemId == R.id.action_help) {
        displayHelpFile();
        return true;
    }

    return super.onOptionsItemSelected(item);
}
/**
 * A placeholder fragment containing a simple view.
 * (Currently only referenced from the commented-out code in onCreate.)
 */
public static class PlaceholderFragment extends Fragment {
public PlaceholderFragment() {
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// inflate without attaching; the transaction (if used) does the attach
View rootView = inflater.inflate(R.layout.fragment_main, container,
false);
Log.i(TAG,"onCreateView");
return rootView;
}
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
    super.onConfigurationChanged(newConfig);
    currentConfig = newConfig;
    // Pick the graph width for the new orientation.
    switch (newConfig.orientation) {
        case Configuration.ORIENTATION_PORTRAIT:
            Log.i(TAG, "changed to portrait");
            graphMax = GRAPH_MAX_PORTRAIT;
            break;
        case Configuration.ORIENTATION_LANDSCAPE:
            Log.i(TAG, "changed to landscape");
            graphMax = GRAPH_MAX_LANDSCAPE;
            break;
        default:
            break;
    }
    // Re-inflate the layout for the new orientation and rewire views and plot.
    setContentView(R.layout.activity_main);
    setupViewStuff();
    ecgPlotSetup();
    setEcgInfo();
}
/** Called when the user clicks the Connect button. */
public void ecgConnect(View view) {
    Log.i(TAG,"in connect");
    // The button label doubles as connection state: when it reads "stop_idle"
    // we are connected and the click means "disconnect".
    final boolean showingStopIdle =
            connectButton.getText().equals(getString(R.string.stop_idle));
    if (showingStopIdle) {
        ecgDisconnect(view);
    } else {
        doEcgConnect(view);
    }
}
/**
 * Starts a connection attempt to the selected ECG device; shows a hint when
 * no device has been selected yet.
 */
public void doEcgConnect(View view) {
    if (emotionEcg == null) {
        Toast.makeText(MainActivity.this, "Select an ECG device in Settings", Toast.LENGTH_SHORT).show();
        return;
    }
    lastPacketNum = -1;
    connecting = true;
    // Grey out the button while the (asynchronous) connect is in flight.
    connectButton.setEnabled(false);
    connectButton.setBackgroundColor(Color.GRAY);
    // Open the output file before connecting so samples can be logged at once.
    if (SAVE_FILE && okToWrite) {
        setWriteFile(FILE_NAME);
    }
    emotionEcg.connect();
}
/** Called when the user clicks the Disconnect button. */
public void ecgDisconnect(View view) {
    Log.i(TAG,"in disconnect");
    connected = false;
    // Stop data flow first, then tear down the Bluetooth link.
    stopIdle();
    if (emotionEcg != null) {
        emotionEcg.disconnect();
    }
    // Restore the button to its "ready to start" state.
    connectButton.setText(getString(R.string.start));
    connectButton.setBackgroundColor(Color.GREEN);
    connectButton.setEnabled(true);
}
/** Called when the user clicks the start button (and after a connect). */
public void doStartRead() {
    if (emotionEcg == null) {
        return; // no device selected yet
    }
    setupRead();
    reading = true;
    Log.i(TAG,"before setupand startread");
    emotionEcg.setupAndStartRead();
    Log.i(TAG,"after setupand startread");
}
/**
 * Prepares a read session: refreshes the info line, pushes the acquisition
 * parameters to the device wrapper, resets the plot and (optionally) opens
 * the output file.
 */
public void setupRead() {
    setEcgInfo();
    if (emotionEcg != null) {
        // Push current acquisition parameters down to the device wrapper.
        emotionEcg.setSamplingFrequency(SAMPLING_FREQUENCY);
        emotionEcg.setHighPassFilter(HIGH_PASS_FILTER);
        emotionEcg.setSignalResolution(SIGNAL_RESOLUTION);
    }
    // Reset the graph series/plot for a fresh run.
    ecgPlotReinit();
    // Open the output file for sample logging when enabled.
    if (SAVE_FILE && okToWrite) {
        setWriteFile(FILE_NAME);
    }
}
/** Called when the user clicks the stopIdle button; stops data and idles the device. */
public void stopIdle() {
    if (emotionEcg == null) {
        return;
    }
    reading = false;
    emotionEcg.stopData();
    emotionEcg.stopAndIdle();
}
Handler ecgHandler = new Handler(new IncomingEmotionHandlerCallback());

/**
 * Receives status/data messages posted by the EmotionEcg worker and applies
 * them to the UI (pulse text, connect button state, toasts).
 */
class IncomingEmotionHandlerCallback implements Handler.Callback {
    BluetoothDevice device;

    @Override
    public boolean handleMessage(Message msg) {
        switch (msg.what) {
            case EmotionEcg.GET_DATA:
                int pulse = msg.arg1;
                pulseDataTextView.setText(" " + pulse);
                break;
            case EmotionEcg.ECG_SAMPLES:
                // Sample packets are now handled by the ecg thread.
                break;
            case EmotionEcg.STOP_DATA:
                // Device stopped sending and the user didn't ask for it.
                Log.i(TAG,"stop reading " );
                reading = false;
                break;
            case EmotionEcg.BATTERY_LOW:
                Log.i(TAG,"ecg battery low " );
                Toast.makeText(MainActivity.this, "Battery low ", Toast.LENGTH_SHORT).show();
                break;
            case EmotionEcg.CONNECTED_BT:
                // Connected: flip the button to "stop/idle" and start reading.
                device = (BluetoothDevice) msg.obj;
                // Bug fix: getName() already returns a String and may be null;
                // the redundant .toString() could throw a NullPointerException.
                Log.i(TAG,"connected to bluetooth device " + device.getName());
                connectButton.setText(getString(R.string.stop_idle));
                connectButton.setBackgroundColor(Color.RED);
                connectButton.setEnabled(true);
                connected = true;
                connecting = false;
                doStartRead();
                break;
            case EmotionEcg.CANNOT_CONNECT_BT:
                // Bug fix: removed the always-true "givemessage" flag that
                // uselessly wrapped this whole branch.
                device = (BluetoothDevice) msg.obj;
                Log.i(TAG,"cannot connect to bluetooth device " + device.getName());
                Toast.makeText(MainActivity.this, "Cannot connect to bluetooth device " + device.getName(), Toast.LENGTH_LONG).show();
                connectButton.setText(getString(R.string.start));
                connectButton.setBackgroundColor(Color.GREEN);
                connectButton.setEnabled(true);
                connecting = false;
                break;
            default:
                Log.i(TAG,"message handler " + msg.toString() + msg.what);
        }
        return true;
    }
}
/* Checks if external storage is available for read and write. */
public boolean isExternalStorageWritable() {
    // Idiom: return the comparison directly instead of if/return true/return false.
    return Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState());
}
// Process one packet: update the on-screen series, then append it to the log file.
private void doHandle(int[] sampl, int pnew) {
    updateEcgSeries(sampl, pnew);
    writeFileData(sampl);
}
// Invoked from the ECG reader thread (not the UI thread).
// Detects dropped/out-of-order packets by sequence number, pads gaps with
// zero-filled packets, then hands the packet to doHandle() for plotting/logging.
public void handleEcgSampleData(EmotionEcg.EcgData newdata) {
    int[] sampl = null;
    int pnew = 0;
    if (newdata != null) {
        sampl = newdata.getSamples();
        pnew = newdata.getPacketNumber();
        // Gap detection: one or more packet numbers were skipped; pad the
        // series/file so the graph timeline stays aligned.
        // NOTE(review): lastPacketNum starts at -1, and this condition requires
        // lastPacketNum > 0, so a gap immediately after packet 0 is not padded
        // — confirm that is intended.
        if ((lastPacketNum > 0) && ((pnew - lastPacketNum) > 1)) {
            Log.w(TAG, " Missing packet numbers " + (lastPacketNum + 1) + " to " + (pnew - 1));
            //fill in with 0's
            for (int i = lastPacketNum + 1; i <= pnew - 1; i++) {
                Log.w(TAG, "filling in packet " + i + "with zeros");
                doHandle(zeroEcgSample, i);
            }
        } else if (pnew < lastPacketNum) {
            // Out-of-order packet: logged, but still processed below.
            Log.w(TAG, " Packet out of order " + pnew);
        }
        // Track the highest packet number seen so far.
        if (pnew > lastPacketNum) {
            lastPacketNum = pnew;
        }
        doHandle(sampl, pnew);
    }
}
// file stuff
/** Switches sample logging to file {@code f}, closing any previously open log. */
public void setWriteFile(String f) {
    Log.i(TAG,"set file " + f);
    if (dataOut != null) { // close last file if there is one
        closeDataFile(dataOut);
    }
    filename = f;
    writeFile = true;
    dataOut = getDataFile(f);
}
/** Drops the oldest samples until the series is back under the size cap. */
public void shrinkSeries() {
    while (ecgSeries.size() > maxBeforeAdd) {
        ecgSeries.removeFirst();
    }
}
// Update the series used for the graph, then redraw the graph if necessary.
// packetNum is currently unused here; gap handling happens upstream in
// handleEcgSampleData.
private void updateEcgSeries(int[] samples, int packetNum) {
    // Trim the oldest samples so the series stays within the window size.
    while (ecgSeries.size() > maxBeforeAdd) {
        ecgSeries.removeFirst();
    }
    // Append this packet's samples. NOTE(review): the null x-value presumably
    // lets the plot series auto-index the samples — confirm against the
    // series implementation.
    for (int index = 0; index < samples.length; index++) {
        ecgSeries.addLast(null, samples[index]);
    }
    // Throttle redraws: only redraw every howOftenRedraw packets so the redraw
    // rate stays roughly constant even with a higher sampling rate / more data.
    sampleCount += 1;
    if (ecgPlot != null) {
        if ((sampleCount % howOftenRedraw) == 0) { // redraw about same rate
            ecgPlot.redraw();
            if (sampleCount > 30000) { // keep the counter from growing unbounded
                sampleCount = 1;
            }
        }
    }
}
/**
 * Opens (creating/truncating) the sample log file in the public Downloads
 * directory and returns a stream to it, or null if it could not be opened.
 * DIRECTORY_DOCUMENTS requires API 19, so Downloads is used instead.
 */
private FileOutputStream getDataFile(String filename) {
    File file = new File(Environment.getExternalStoragePublicDirectory(
            Environment.DIRECTORY_DOWNLOADS), filename);
    Log.i(TAG, "Filename is " + file.toString());
    // Bug fix: removed the dead "file != null" check (new never returns null)
    // and the pre-creation canWrite() test, which always logged a spurious
    // error because the file does not exist until the stream is opened.
    FileOutputStream ostream = null;
    try {
        ostream = new FileOutputStream(file);
    } catch (IOException ex) {
        Log.e(TAG, "Could not get output file stream", ex );
    }
    return ostream;
}
/**
 * Formats one packet of samples as comma-separated text and appends it to
 * the open log file.
 */
private void writeFileData (int[] samples) {
    // Bug fix: the original hard-coded 25 samples per packet, which throws
    // ArrayIndexOutOfBoundsException for shorter packets and silently drops
    // data for longer ones; iterate the actual array length instead. Also use
    // StringBuilder rather than O(n^2) string concatenation in the loop.
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < samples.length; i++) {
        sb.append(samples[i]).append(", ");
    }
    byte[] bytes = sb.toString().getBytes();
    writeData(bytes, bytes.length);
}
// Append raw bytes to the open log file; silently a no-op when logging is off.
private void writeData(byte[] buffer, int bytes) {
    if (dataOut == null) {
        return;
    }
    try {
        dataOut.write(buffer, 0, bytes);
    } catch (IOException ex) {
        Log.e( TAG, "Could not write to file", ex );
    }
}
/** Closes the given log stream, logging (not rethrowing) any failure. */
private void closeDataFile(FileOutputStream fileOut) {
    try {
        Log.i(TAG,"closing output file");
        fileOut.close();
    } catch (IOException ex) {
        Log.e( TAG, "Could not close file", ex );
    }
}
/** Shows the bundled help text in a dialog, or an apology if it can't be read. */
private void displayHelpFile() {
    try {
        byte[] gothelp = Utils.getFileContents(R.raw.help, this);
        infoDialog(getString(R.string.action_help), new String(gothelp));
    } catch (Exception e) {
        Log.i(TAG,"could not get help file");
        infoDialog(getString(R.string.action_help), "Sorry, help is not available");
    }
}
/**
 * Builds and shows a simple modal info dialog. The message may contain basic
 * HTML markup; a single OK button dismisses the dialog.
 */
private void infoDialog (String title, String message) {
    AlertDialog infoDialog = new AlertDialog.Builder(this)
            .setTitle(title)
            .setMessage(Html.fromHtml(message))
            .setPositiveButton(R.string.OK, null)
            .create();
    infoDialog.show();
}
@Override
protected void onDestroy() {
    super.onDestroy(); // Always call the superclass method first
    if (emotionEcg != null) {
        emotionEcg.cleanup();
    }
    // Consistency fix: reuse closeDataFile() instead of duplicating its
    // try/catch inline; also removed the dead commented-out cleanup block.
    if (dataOut != null) {
        closeDataFile(dataOut);
    }
}
}
| |
package matrix;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import application.MMMDataModel;
import application.setup.LinkListDialog;
import application.setup.Triple;
import at.jku.sea.cloud.Artifact;
import init.setup.Link;
import javafx.scene.layout.StackPane;
import javafx.scene.web.WebEngine;
import javafx.scene.web.WebView;
import netscape.javascript.JSObject;
/**
 * This view represents a link matrix between selected source and target
 * artifacts, rendered by an HTML/JavaScript page (matrix.html) hosted in an
 * embedded WebView. Row 0 and column 0 of the matrix hold the axis headers.
 */
public class MatrixView extends StackPane {

    /** Model used to query defined links and link instances. */
    private MMMDataModel model;

    // Visualization of matrix
    private WebView webView;
    private WebEngine webEngine;

    // Matrix consists of MatrixElements
    private MatrixElement[][] matrix;

    // Maximum row/col size of matrix (including the header row/column)
    private int row_size;
    private int col_size;

    // Selected link type currently displayed
    private Link matrixLink;

    /**
     * Constructor to create a matrix view for a certain model.
     *
     * @param model
     *            Model to be represented.
     */
    public MatrixView(MMMDataModel model) {
        this.model = model;
        webView = new WebView();
        // Load the matrix page shipped with the sources.
        // Bug fix: the original concatenated "file:///" + viewerDir.substring(3),
        // unconditionally dropping the first three characters of the absolute
        // path (a Windows drive-letter hack) and producing broken URLs on other
        // platforms. Path.toUri() yields a correct file: URL everywhere.
        String viewerUrl = java.nio.file.Paths
                .get(System.getProperty("user.dir"), "src", "matrix", "matrix.html")
                .toUri().toString();
        webEngine = webView.getEngine();
        webEngine.load(viewerUrl);
        getChildren().add(webView);
    }

    /**
     * Updates matrix view by given parameters.
     *
     * @param selectedSources
     *            Selected sources.
     * @param selectedTargets
     *            Selected targets
     * @param selectedLink
     *            Selected link.
     */
    public void update(Map<Artifact, Set<Artifact>> selectedSources, Map<Artifact, Set<Artifact>> selectedTargets,
            Link selectedLink) {
        // refreshing of matrix by given new data
        updateMatrix(selectedSources, selectedTargets, selectedLink);
        // visualize/load new matrix into screen: clear the old table, expose
        // this object to the page's JavaScript as "matrix", then rebuild.
        webEngine.executeScript("removeMatrix()");
        JSObject window = (JSObject) webEngine.executeScript("window");
        window.setMember("matrix", this);
        webEngine.executeScript("refreshMatrix()");
    }

    /**
     * Creation of link for selected row/col: replaces the matrix element by a
     * new element with the "link exists" flag checked.
     *
     * @param row
     *            Row of selected element.
     * @param col
     *            Col of selected element.
     */
    public void addLink(int row, int col) {
        matrix[row][col] = new MatrixElement(row, col, true, null);
    }

    /**
     * Deletion of link for selected row/col: replaces the matrix element by a
     * new element with the flag unchecked, keeping any existing link instance
     * so it can be removed on save.
     */
    public void deleteLink(int row, int col) {
        matrix[row][col] = new MatrixElement(row, col, false, matrix[row][col].getLinkInstance());
    }

    /**
     * Saving of links created/deleted by matrix. Delegates to the multiple- or
     * single-link strategy depending on the selected link type.
     */
    public void saveLinks() {
        if (matrixLink != null && matrixLink.isMultipleLink()) {
            saveMultipleLinks();
        } else {
            saveSingleLinks();
        }
    }

    /**
     * Only for non-specific links. Shows via dialog the available defined links
     * between source and target.<br>
     * Checks defined link, if an instance already exists.
     *
     * @param row
     *            Row of the selected element.
     * @param col
     *            Column of the selected element.
     */
    public void showMultipleLinks(int row, int col) {
        // Lazily resolve the existing link instances only the first time the
        // cell is opened; afterwards the cell carries its own state.
        if (!matrix[row][col].isToBeProcessed()) {
            List<Link> linkInstances = model.getLinkInstances(matrix[row][0].getHeaderInstance(),
                    matrix[0][col].getHeaderInstance(), matrix[row][col].getDefinedLinks());
            matrix[row][col].setListOfLinkInstances(linkInstances);
        }
        matrix[row][col].setToBeProcessed(true);
        new LinkListDialog(matrix[row][col]).showAndWait();
    }

    /** Returns the display text of the element at (row, col) for the JS page. */
    public String getStringElement(int row, int col) {
        return (matrix[row][col]).toString();
    }

    /** Returns whether the currently displayed link type is non-specific. */
    public boolean isMultipleMatrix() {
        return matrixLink.isMultipleLink();
    }

    /** Returns the number of rows, including the header row. */
    public int getRowSize() {
        return matrix.length;
    }

    /** Returns the number of columns, including the header column. */
    public int getColSize() {
        return matrix[0].length;
    }

    /**
     * Creates new matrix by given parameters.
     *
     * @param source
     *            Selected sources.
     * @param target
     *            Selected targets.
     * @param currentLink
     *            Selected link.
     */
    private void updateMatrix(Map<Artifact, Set<Artifact>> source, Map<Artifact, Set<Artifact>> target,
            Link currentLink) {
        matrixLink = currentLink;
        // prepare header: one element per (complex type, instance) pair
        List<MatrixElement> sourceList = new ArrayList<>();
        source.entrySet().stream().forEach(map -> {
            map.getValue().stream().forEach(instance -> {
                sourceList.add(new MatrixElement(map.getKey(), instance));
            });
        });
        List<MatrixElement> targetList = new ArrayList<>();
        target.entrySet().stream().forEach(map -> {
            map.getValue().stream().forEach(instance -> {
                targetList.add(new MatrixElement(map.getKey(), instance));
            });
        });
        // +1 for the header row/column
        row_size = sourceList.size() + 1;
        col_size = targetList.size() + 1;
        matrix = new MatrixElement[row_size][col_size];
        MatrixElement temp = new MatrixElement();
        for (int row = 0; row < row_size; row++) {
            for (int col = 0; col < col_size; col++) {
                if (row == 0 && col == 0) { // top-left corner
                    matrix[0][0] = new MatrixElement();
                } else if (row >= 1 && col == 0) { // y-axis header
                    temp = sourceList.get(row - 1);
                    matrix[row][col] = new MatrixElement(temp.getHeaderComplexType(), temp.getHeaderInstance());
                } else if (row == 0 && col >= 1) { // x-axis header
                    temp = targetList.get(col - 1);
                    matrix[row][col] = new MatrixElement(temp.getHeaderComplexType(), temp.getHeaderInstance());
                } else { // data cell
                    if (currentLink.isMultipleLink()) {
                        // load only defined links
                        matrix[row][col] = new MatrixElement(
                                (model.getDefinedLinks(matrix[row][0].getHeaderComplexType(),
                                        matrix[0][col].getHeaderComplexType())),
                                MatrixElementType.MULTIPLE_LINK);
                    } else {
                        // load instance
                        matrix[row][col] = new MatrixElement(model.getLinkInstance((matrix[row][0]).getHeaderInstance(),
                                (matrix[0][col]).getHeaderInstance(), currentLink.getComplexType()));
                    }
                }
            }
        }
    }

    /**
     * Saving process of a <b>specific</b> link.<br>
     * Only those elements with <b>processed flag = true</b> will be processed.
     * The "link exists" flag is set by addLink (create) / cleared by
     * deleteLink (remove), see those methods.
     */
    private void saveSingleLinks() {
        MatrixElement mElement;
        // Start at 1: row/col 0 hold the headers.
        for (int row = 1; row < row_size; row++) {
            for (int col = 1; col < col_size; col++) {
                mElement = matrix[row][col];
                if (mElement.isToBeProcessed()) {
                    if (mElement.isLinkExists()) {
                        // user checked the cell: create the link instance
                        matrix[row][col] = new MatrixElement(
                                model.addLinkInstance(null, matrixLink.isMultipleLink() ? null : matrixLink,
                                        (matrix[row][0]).getHeaderInstance(), (matrix[0][col]).getHeaderInstance()));
                    } else {
                        // user unchecked the cell: remove the stored instance
                        model.removeLinkInstance(null, matrix[row][0].getHeaderInstance(),
                                matrix[0][col].getHeaderInstance(), matrix[row][col].getLinkInstance());
                        matrix[row][col] = new MatrixElement(null);
                    }
                }
            }
        }
    }

    /**
     * Saving process of a <b>non specific</b> link. <br>Only those elements with
     * <b>processed flag = true</b> will be processed.
     */
    private void saveMultipleLinks() {
        MatrixElement mElement;
        // Start at 1: row/col 0 hold the headers.
        for (int row = 1; row < row_size; row++) {
            for (int col = 1; col < col_size; col++) {
                mElement = matrix[row][col];
                if (mElement.isToBeProcessed()) {
                    HashMap<Link, Triple> linksToBeProcessed = mElement.getLinksToBeProcessed();
                    for (Link defLink : linksToBeProcessed.keySet()) {
                        Triple triple = linksToBeProcessed.get(defLink);
                        if (triple.isAvailable() && !triple.isToCreate()) {
                            // delete link and set null instance
                            model.removeLinkInstance(triple.getLink());
                            triple.setLink(null);
                            triple.setAvailable(false);
                        } else if (!triple.isAvailable() && triple.isToCreate()) {
                            // create link and store it
                            triple.setLink(model.addLinkInstance(null, defLink, (matrix[row][0]).getHeaderInstance(),
                                    (matrix[0][col]).getHeaderInstance()));
                            triple.setAvailable(true);
                        }
                    }
                }
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.flink;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.scala.FlinkILoop;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.instance.ActorGateway;
import org.apache.flink.runtime.messages.JobManagerMessages;
import org.apache.flink.runtime.minicluster.LocalFlinkMiniCluster;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import scala.Console;
import scala.Some;
import scala.collection.JavaConversions;
import scala.concurrent.duration.FiniteDuration;
import scala.runtime.AbstractFunction0;
import scala.tools.nsc.Settings;
import scala.tools.nsc.interpreter.IMain;
import scala.tools.nsc.interpreter.Results;
import scala.tools.nsc.settings.MutableSettings;
import scala.tools.nsc.settings.MutableSettings.BooleanSetting;
import scala.tools.nsc.settings.MutableSettings.PathSetting;
import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResult.Code;
import org.apache.zeppelin.interpreter.InterpreterUtils;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
/**
* Interpreter for Apache Flink (http://flink.apache.org).
*/
public class FlinkInterpreter extends Interpreter {
Logger logger = LoggerFactory.getLogger(FlinkInterpreter.class);
private ByteArrayOutputStream out;
private Configuration flinkConf;
private LocalFlinkMiniCluster localFlinkCluster;
private FlinkILoop flinkIloop;
private Map<String, Object> binder;
private IMain imain;
public FlinkInterpreter(Properties property) {
super(property);
}
@Override
public void open() {
out = new ByteArrayOutputStream();
flinkConf = new org.apache.flink.configuration.Configuration();
Properties intpProperty = getProperties();
for (Object k : intpProperty.keySet()) {
String key = (String) k;
String val = toString(intpProperty.get(key));
flinkConf.setString(key, val);
}
if (localMode()) {
startFlinkMiniCluster();
}
String[] externalJars = new String[0];
String localRepo = getProperty("zeppelin.interpreter.localRepo");
if (localRepo != null) {
File localRepoDir = new File(localRepo);
if (localRepoDir.exists()) {
File[] files = localRepoDir.listFiles();
if (files != null) {
externalJars = new String[files.length];
for (int i = 0; i < files.length; i++) {
if (externalJars.length > 0) {
externalJars[i] = files[i].getAbsolutePath();
}
}
}
}
}
flinkIloop = new FlinkILoop(getHost(),
getPort(),
flinkConf,
new Some<>(externalJars),
(BufferedReader) null,
new PrintWriter(out));
flinkIloop.settings_$eq(createSettings());
flinkIloop.createInterpreter();
imain = flinkIloop.intp();
org.apache.flink.api.scala.ExecutionEnvironment benv =
flinkIloop.scalaBenv();
org.apache.flink.streaming.api.scala.StreamExecutionEnvironment senv =
flinkIloop.scalaSenv();
senv.getConfig().disableSysoutLogging();
benv.getConfig().disableSysoutLogging();
// prepare bindings
imain.interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
Map<String, Object> binder = (Map<String, Object>) getLastObject();
// import libraries
imain.interpret("import scala.tools.nsc.io._");
imain.interpret("import Properties.userHome");
imain.interpret("import scala.compat.Platform.EOL");
imain.interpret("import org.apache.flink.api.scala._");
imain.interpret("import org.apache.flink.api.common.functions._");
binder.put("benv", benv);
imain.interpret("val benv = _binder.get(\"benv\").asInstanceOf["
+ benv.getClass().getName() + "]");
binder.put("senv", senv);
imain.interpret("val senv = _binder.get(\"senv\").asInstanceOf["
+ senv.getClass().getName() + "]");
}
private boolean localMode() {
String host = getProperty("host");
return host == null || host.trim().length() == 0 || host.trim().equals("local");
}
private String getHost() {
if (localMode()) {
return "localhost";
} else {
return getProperty("host");
}
}
private int getPort() {
if (localMode()) {
return localFlinkCluster.getLeaderRPCPort();
} else {
return Integer.parseInt(getProperty("port"));
}
}
private Settings createSettings() {
URL[] urls = getClassloaderUrls();
Settings settings = new Settings();
// set classpath
PathSetting pathSettings = settings.classpath();
String classpath = "";
List<File> paths = currentClassPath();
for (File f : paths) {
if (classpath.length() > 0) {
classpath += File.pathSeparator;
}
classpath += f.getAbsolutePath();
}
if (urls != null) {
for (URL u : urls) {
if (classpath.length() > 0) {
classpath += File.pathSeparator;
}
classpath += u.getFile();
}
}
pathSettings.v_$eq(classpath);
settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);
settings.explicitParentLoader_$eq(new Some<>(Thread.currentThread()
.getContextClassLoader()));
BooleanSetting b = (BooleanSetting) settings.usejavacp();
b.v_$eq(true);
settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);
// To prevent 'File name too long' error on some file system.
MutableSettings.IntSetting numClassFileSetting = settings.maxClassfileName();
numClassFileSetting.v_$eq(128);
settings.scala$tools$nsc$settings$ScalaSettings$_setter_$maxClassfileName_$eq(
numClassFileSetting);
return settings;
}
private List<File> currentClassPath() {
List<File> paths = classPath(Thread.currentThread().getContextClassLoader());
String[] cps = System.getProperty("java.class.path").split(File.pathSeparator);
if (cps != null) {
for (String cp : cps) {
paths.add(new File(cp));
}
}
return paths;
}
private List<File> classPath(ClassLoader cl) {
List<File> paths = new LinkedList<>();
if (cl == null) {
return paths;
}
if (cl instanceof URLClassLoader) {
URLClassLoader ucl = (URLClassLoader) cl;
URL[] urls = ucl.getURLs();
if (urls != null) {
for (URL url : urls) {
paths.add(new File(url.getFile()));
}
}
}
return paths;
}
public Object getLastObject() {
Object obj = imain.lastRequest().lineRep().call(
"$result",
JavaConversions.asScalaBuffer(new LinkedList<>()));
return obj;
}
@Override
public void close() {
flinkIloop.closeInterpreter();
if (localMode()) {
stopFlinkMiniCluster();
}
}
@Override
public InterpreterResult interpret(String line, InterpreterContext context) {
if (line == null || line.trim().length() == 0) {
return new InterpreterResult(Code.SUCCESS);
}
InterpreterResult result = interpret(line.split("\n"), context);
return result;
}
public InterpreterResult interpret(String[] lines, InterpreterContext context) {
final IMain imain = flinkIloop.intp();
String[] linesToRun = new String[lines.length + 1];
for (int i = 0; i < lines.length; i++) {
linesToRun[i] = lines[i];
}
linesToRun[lines.length] = "print(\"\")";
System.setOut(new PrintStream(out));
out.reset();
Code r = null;
String incomplete = "";
boolean inComment = false;
for (int l = 0; l < linesToRun.length; l++) {
final String s = linesToRun[l];
// check if next line starts with "." (but not ".." or "./") it is treated as an invocation
if (l + 1 < linesToRun.length) {
String nextLine = linesToRun[l + 1].trim();
boolean continuation = false;
if (nextLine.isEmpty()
|| nextLine.startsWith("//") // skip empty line or comment
|| nextLine.startsWith("}")
|| nextLine.startsWith("object")) { // include "} object" for Scala companion object
continuation = true;
} else if (!inComment && nextLine.startsWith("/*")) {
inComment = true;
continuation = true;
} else if (inComment && nextLine.lastIndexOf("*/") >= 0) {
inComment = false;
continuation = true;
} else if (nextLine.length() > 1
&& nextLine.charAt(0) == '.'
&& nextLine.charAt(1) != '.' // ".."
&& nextLine.charAt(1) != '/') { // "./"
continuation = true;
} else if (inComment) {
continuation = true;
}
if (continuation) {
incomplete += s + "\n";
continue;
}
}
final String currentCommand = incomplete;
scala.tools.nsc.interpreter.Results.Result res = null;
try {
res = Console.withOut(
System.out,
new AbstractFunction0<Results.Result>() {
@Override
public Results.Result apply() {
return imain.interpret(currentCommand + s);
}
});
} catch (Exception e) {
logger.info("Interpreter exception", e);
return new InterpreterResult(Code.ERROR, InterpreterUtils.getMostRelevantMessage(e));
}
r = getResultCode(res);
if (r == Code.ERROR) {
return new InterpreterResult(r, out.toString());
} else if (r == Code.INCOMPLETE) {
incomplete += s + "\n";
} else {
incomplete = "";
}
}
if (r == Code.INCOMPLETE) {
return new InterpreterResult(r, "Incomplete expression");
} else {
return new InterpreterResult(r, out.toString());
}
}
private Code getResultCode(scala.tools.nsc.interpreter.Results.Result r) {
if (r instanceof scala.tools.nsc.interpreter.Results.Success$) {
return Code.SUCCESS;
} else if (r instanceof scala.tools.nsc.interpreter.Results.Incomplete$) {
return Code.INCOMPLETE;
} else {
return Code.ERROR;
}
}
@Override
public void cancel(InterpreterContext context) {
if (localMode()) {
// In localMode we can cancel all running jobs,
// because the local cluster can only run one job at the time.
for (JobID job : this.localFlinkCluster.getCurrentlyRunningJobsJava()) {
logger.info("Stop job: " + job);
cancelJobLocalMode(job);
}
}
}
private void cancelJobLocalMode(JobID jobID){
FiniteDuration timeout = AkkaUtils.getTimeout(this.localFlinkCluster.configuration());
ActorGateway leader = this.localFlinkCluster.getLeaderGateway(timeout);
leader.ask(new JobManagerMessages.CancelJob(jobID), timeout);
}
@Override
public FormType getFormType() {
return FormType.NATIVE;
}
@Override
public int getProgress(InterpreterContext context) {
return 0;
}
@Override
public List<InterpreterCompletion> completion(String buf, int cursor,
InterpreterContext interpreterContext) {
return new LinkedList<>();
}
private void startFlinkMiniCluster() {
localFlinkCluster = new LocalFlinkMiniCluster(flinkConf, false);
try {
localFlinkCluster.start(true);
} catch (Exception e){
throw new RuntimeException("Could not start Flink mini cluster.", e);
}
}
private void stopFlinkMiniCluster() {
if (localFlinkCluster != null) {
localFlinkCluster.stop();
localFlinkCluster = null;
}
}
static final String toString(Object o) {
return (o instanceof String) ? (String) o : "";
}
}
| |
/**
* Copyright (C) 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ninja.bodyparser;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.dataformat.xml.XmlMapper;
import ninja.Context;
import ninja.exceptions.BadRequestException;
import org.hamcrest.CoreMatchers;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.TimeZone;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
* Unit tests for the Xml body parser.
*
* @author Thibault Meyer
*/
@RunWith(MockitoJUnitRunner.class)
public class BodyParserEngineXmlTest {
private static final String DATA_FIRSTNAME = "John";
private static final String DATA_LASTNAME = "Do";
private static final Integer DATA_BIRTHYEAR = 1664;
private static final String DATA_LASTSEEN = "2015-03-15 15:45:00";
private static final String PARSER_DATEFORMAT = "yyyy-MM-dd hh:mm:ss";
private static final String PARSER_DATETZ = "GMT";
@Mock
private Context context;
@Test
public void testValidXmlBody() {
    // Build a complete <form> document containing all four fields.
    final String xmlDocument = String.format("<form><firstName>%s</firstName><lastName>%s</lastName><birthYear>%d</birthYear><lastSeen>%s</lastSeen></form>",
            BodyParserEngineXmlTest.DATA_FIRSTNAME,
            BodyParserEngineXmlTest.DATA_LASTNAME,
            BodyParserEngineXmlTest.DATA_BIRTHYEAR,
            BodyParserEngineXmlTest.DATA_LASTSEEN);
    final InputStream is = new ByteArrayInputStream(xmlDocument.getBytes());
    final XmlMapper xmlObjMapper = new XmlMapper();
    final BodyParserEngineXml bodyParserEngineXml = new BodyParserEngineXml(xmlObjMapper);
    SimpleTestForm testForm = null;

    // Serve the document as the mocked request body.
    try {
        Mockito.when(context.getInputStream()).thenReturn(is);
    } catch (IOException ignore) {
    }
    // Parse; a BadRequestException would leave testForm null and fail below.
    try {
        testForm = bodyParserEngineXml.invoke(context, SimpleTestForm.class);
    } catch (BadRequestException ignore) {
    } finally {
        try {
            is.close();
        } catch (IOException ignore) {
        }
    }

    // Build the expected Calendar from the same date string / time zone.
    // NOTE(review): PARSER_DATEFORMAT uses 'hh' (12-hour clock) with the value
    // "15:45:00"; lenient parsing makes this work, but 'HH' is presumably what
    // was intended — confirm against the parser's own format.
    final Calendar cal = Calendar.getInstance();
    final SimpleDateFormat dateFormat = new SimpleDateFormat(BodyParserEngineXmlTest.PARSER_DATEFORMAT);
    dateFormat.setTimeZone(TimeZone.getTimeZone(BodyParserEngineXmlTest.PARSER_DATETZ));
    try {
        cal.setTime(dateFormat.parse(BodyParserEngineXmlTest.DATA_LASTSEEN));
    } catch (ParseException ignore) {
    }
    cal.setTimeZone(TimeZone.getTimeZone(BodyParserEngineXmlTest.PARSER_DATETZ));

    // Every field must have been bound from the XML document.
    assertTrue(testForm != null);
    assertThat(testForm.firstName, equalTo(BodyParserEngineXmlTest.DATA_FIRSTNAME));
    assertThat(testForm.lastName, equalTo(BodyParserEngineXmlTest.DATA_LASTNAME));
    assertThat(testForm.birthYear, CoreMatchers.equalTo(BodyParserEngineXmlTest.DATA_BIRTHYEAR));
    assertTrue(testForm.lastSeen != null);
    assertTrue(testForm.lastSeen.compareTo(cal) == 0);
}
@Test
public void testXmlBodyWithMissingVariables() {
    // Only firstName/lastName are present; the remaining bean fields must stay null.
    final String body = String.format(
        "<form><firstName>%s</firstName><lastName>%s</lastName></form>",
        BodyParserEngineXmlTest.DATA_FIRSTNAME,
        BodyParserEngineXmlTest.DATA_LASTNAME);
    final InputStream bodyStream = new ByteArrayInputStream(body.getBytes());
    final BodyParserEngineXml parser = new BodyParserEngineXml(new XmlMapper());
    SimpleTestForm parsedForm = null;
    try {
        Mockito.when(context.getInputStream()).thenReturn(bodyStream);
    } catch (IOException ignore) {
    }
    try {
        parsedForm = parser.invoke(context, SimpleTestForm.class);
    } catch (BadRequestException ignore) {
    } finally {
        try {
            bodyStream.close();
        } catch (IOException ignore) {
        }
    }
    assertTrue(parsedForm != null);
    assertThat(parsedForm.firstName, equalTo(BodyParserEngineXmlTest.DATA_FIRSTNAME));
    assertThat(parsedForm.lastName, equalTo(BodyParserEngineXmlTest.DATA_LASTNAME));
    assertTrue(parsedForm.birthYear == null);
    assertTrue(parsedForm.lastSeen == null);
}
@Test
public void testEmptyXmlBody() {
    // An empty payload is invalid XML and must surface as a BadRequestException.
    final String xmlDocument = "";
    final InputStream bodyStream = new ByteArrayInputStream(xmlDocument.getBytes());
    final BodyParserEngineXml parser = new BodyParserEngineXml(new XmlMapper());
    boolean badRequestThrown = false;
    try {
        Mockito.when(context.getInputStream()).thenReturn(bodyStream);
    } catch (IOException ignore) {
    }
    try {
        parser.invoke(context, SimpleTestForm.class);
    } catch (BadRequestException ignore) {
        badRequestThrown = true;
    } finally {
        try {
            bodyStream.close();
        } catch (IOException ignore) {
        }
    }
    assertTrue(badRequestThrown);
}
@Test
public void testInvalidXmlBadCloseBody() {
    // Malformed closing tag ("</>") must be rejected with a BadRequestException.
    final String body = String.format(
        "<form><firstName>%s</firstName><lastName>%s</lastName><birthYear>%d</birthYear><lastSeen>%s</lastSeen></>",
        BodyParserEngineXmlTest.DATA_FIRSTNAME,
        BodyParserEngineXmlTest.DATA_LASTNAME,
        BodyParserEngineXmlTest.DATA_BIRTHYEAR,
        BodyParserEngineXmlTest.DATA_LASTSEEN);
    final InputStream bodyStream = new ByteArrayInputStream(body.getBytes());
    final BodyParserEngineXml parser = new BodyParserEngineXml(new XmlMapper());
    boolean badRequestThrown = false;
    try {
        Mockito.when(context.getInputStream()).thenReturn(bodyStream);
    } catch (IOException ignore) {
    }
    try {
        parser.invoke(context, SimpleTestForm.class);
    } catch (BadRequestException ignore) {
        badRequestThrown = true;
    } finally {
        try {
            bodyStream.close();
        } catch (IOException ignore) {
        }
    }
    assertTrue(badRequestThrown);
}
@Test
public void testInvalidXmlMissingRootBody() {
    // Multiple top-level elements (no single root) must be rejected.
    final String body = String.format(
        "<firstName>%s</firstName><lastName>%s</lastName><birthYear>%d</birthYear><lastSeen>%s</lastSeen>",
        BodyParserEngineXmlTest.DATA_FIRSTNAME,
        BodyParserEngineXmlTest.DATA_LASTNAME,
        BodyParserEngineXmlTest.DATA_BIRTHYEAR,
        BodyParserEngineXmlTest.DATA_LASTSEEN);
    final InputStream bodyStream = new ByteArrayInputStream(body.getBytes());
    final BodyParserEngineXml parser = new BodyParserEngineXml(new XmlMapper());
    boolean badRequestThrown = false;
    try {
        Mockito.when(context.getInputStream()).thenReturn(bodyStream);
    } catch (IOException ignore) {
    }
    try {
        parser.invoke(context, SimpleTestForm.class);
    } catch (BadRequestException ignore) {
        badRequestThrown = true;
    } finally {
        try {
            bodyStream.close();
        } catch (IOException ignore) {
        }
    }
    assertTrue(badRequestThrown);
}
/**
 * Simple form used during unit tests.
 *
 * @author Thibault Meyer
 */
private static final class SimpleTestForm {
// Bound from the <firstName> element.
public String firstName;
// Bound from the <lastName> element.
public String lastName;
// Bound from <birthYear>; stays null when the element is absent.
public Integer birthYear;
// Bound from <lastSeen>, parsed with the shared test pattern/timezone.
@JsonFormat(shape = JsonFormat.Shape.STRING, pattern = BodyParserEngineXmlTest.PARSER_DATEFORMAT, timezone = BodyParserEngineXmlTest.PARSER_DATETZ)
public Calendar lastSeen;
}
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.stage.processor.http;
import com.streamsets.pipeline.lib.el.TimeEL;
import com.streamsets.pipeline.lib.el.TimeNowEL;
import com.streamsets.pipeline.api.ListBeanModel;
import com.streamsets.pipeline.lib.el.VaultEL;
import com.streamsets.pipeline.api.ConfigDef;
import com.streamsets.pipeline.api.ConfigDefBean;
import com.streamsets.pipeline.api.FieldSelectorModel;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.api.ValueChooserModel;
import com.streamsets.pipeline.config.DataFormat;
import com.streamsets.pipeline.lib.el.RecordEL;
import com.streamsets.pipeline.lib.http.DataFormatChooserValues;
import com.streamsets.pipeline.lib.http.HttpMethod;
import com.streamsets.pipeline.lib.http.JerseyClientConfigBean;
import com.streamsets.pipeline.stage.common.MissingValuesBehavior;
import com.streamsets.pipeline.stage.common.MissingValuesBehaviorChooserValues;
import com.streamsets.pipeline.stage.common.MultipleValuesBehavior;
import com.streamsets.pipeline.stage.common.MultipleValuesBehaviorChooserValues;
import com.streamsets.pipeline.stage.origin.http.PaginationConfigBean;
import com.streamsets.pipeline.stage.origin.lib.BasicConfig;
import com.streamsets.pipeline.stage.origin.http.HttpStatusResponseActionConfigBean;
import com.streamsets.pipeline.stage.origin.http.HttpTimeoutResponseActionConfigBean;
import com.streamsets.pipeline.stage.origin.http.ResponseAction;
import com.streamsets.pipeline.stage.origin.lib.DataParserFormatConfig;
import com.streamsets.pipeline.stage.util.http.HttpStageUtil;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Bean specifying the configuration for an HttpProcessor instance.
*/
public class HttpProcessorConfig {
// --- Response parsing ---------------------------------------------------
// Parser settings applied to the HTTP response body before it is written
// into the output field.
@ConfigDefBean(groups = "HTTP")
public DataParserFormatConfig dataFormatConfig = new DataParserFormatConfig();
@ConfigDefBean(groups = "HTTP")
public BasicConfig basic = new BasicConfig();
// Record field that receives the parsed response.
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
label = "Output Field",
description = "Field in which to place the result of the HTTP request",
displayPosition = 10,
displayMode = ConfigDef.DisplayMode.BASIC,
group = "HTTP"
)
@FieldSelectorModel(singleValued = true)
public String outputField;
// Whether response headers go into record header attributes or a record field.
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
label = "Header Output Location",
description = "Field in which to place the result of the HTTP request",
defaultValue = "HEADER",
displayPosition = 20,
displayMode = ConfigDef.DisplayMode.ADVANCED,
group = "HTTP"
)
@ValueChooserModel(HeaderOutputLocationChooserValues.class)
public HeaderOutputLocation headerOutputLocation;
// Only used when headers are written as record header attributes.
@ConfigDef(
required = false,
type = ConfigDef.Type.STRING,
label = "Header Attribute Prefix",
description = "A prefix to add to record header attributes in the response",
displayPosition = 30,
displayMode = ConfigDef.DisplayMode.ADVANCED,
group = "HTTP",
dependsOn = "headerOutputLocation",
triggeredByValue = "HEADER"
)
public String headerAttributePrefix = "";
// Only used when headers are written into a record field.
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
label = "Header Output Field",
description = "Field in which to place the HTTP response headers.",
displayPosition = 40,
displayMode = ConfigDef.DisplayMode.ADVANCED,
group = "HTTP",
dependsOn = "headerOutputLocation",
triggeredByValue = "FIELD"
)
@FieldSelectorModel(singleValued = true)
public String headerOutputField;
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
defaultValue = "JSON",
label = "Data Format",
description = "Data Format of the response. Response will be parsed before being placed in the record.",
displayPosition = 1,
group = "DATA_FORMAT"
)
@ValueChooserModel(DataFormatChooserValues.class)
public DataFormat dataFormat = DataFormat.JSON;
// --- Request definition -------------------------------------------------
// Target URL; evaluated per record (record/time ELs allowed).
@ConfigDef(
required = true,
type = ConfigDef.Type.STRING,
label = "Resource URL",
description = "The HTTP resource URL",
elDefs = {RecordEL.class, TimeEL.class, TimeNowEL.class},
evaluation = ConfigDef.Evaluation.EXPLICIT,
displayPosition = 1,
displayMode = ConfigDef.DisplayMode.BASIC,
group = "HTTP"
)
public String resourceUrl = "";
// Extra request headers; values may reference record fields or vault secrets.
@ConfigDef(
required = false,
type = ConfigDef.Type.MAP,
label = "Headers",
description = "Headers to include in the request",
evaluation = ConfigDef.Evaluation.EXPLICIT,
displayPosition = 70,
displayMode = ConfigDef.DisplayMode.BASIC,
elDefs = {RecordEL.class, VaultEL.class},
group = "HTTP"
)
public Map<String, String> headers = new HashMap<>();
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
label = "HTTP Method",
defaultValue = "GET",
description = "HTTP method to send",
elDefs = RecordEL.class,
evaluation = ConfigDef.Evaluation.EXPLICIT,
displayPosition = 80,
displayMode = ConfigDef.DisplayMode.BASIC,
group = "HTTP"
)
@ValueChooserModel(HttpMethodChooserValues.class)
public HttpMethod httpMethod = HttpMethod.GET;
// Only shown when httpMethod == EXPRESSION: resolves the method per record.
@ConfigDef(
required = false,
type = ConfigDef.Type.STRING,
label = "HTTP Method Expression",
description = "Expression used to determine the HTTP method to use",
displayPosition = 90,
displayMode = ConfigDef.DisplayMode.BASIC,
dependsOn = "httpMethod",
elDefs = RecordEL.class,
evaluation = ConfigDef.Evaluation.EXPLICIT,
triggeredByValue = { "EXPRESSION" },
group = "HTTP"
)
public String methodExpression = "";
// Request body; only relevant for methods that carry a payload.
@ConfigDef(
required = false,
type = ConfigDef.Type.TEXT,
label = "Request Data",
description = "Data that should be included as a part of the request",
displayPosition = 100,
displayMode = ConfigDef.DisplayMode.BASIC,
lines = 2,
dependsOn = "httpMethod",
elDefs = {RecordEL.class, VaultEL.class},
evaluation = ConfigDef.Evaluation.EXPLICIT,
triggeredByValue = { "POST", "PUT", "DELETE", "PATCH", "EXPRESSION" },
group = "HTTP"
)
public String requestBody = "";
// Content-Type fallback when the user did not set one in "Headers".
@ConfigDef(
required = true,
type = ConfigDef.Type.STRING,
label = "Default Request Content Type",
defaultValue = HttpStageUtil.DEFAULT_CONTENT_TYPE,
description = "Content-Type header to be sent with the request; used if that header is not already present",
displayPosition = 110,
displayMode = ConfigDef.DisplayMode.BASIC,
dependsOn = "httpMethod",
elDefs = {RecordEL.class, VaultEL.class},
evaluation = ConfigDef.Evaluation.EXPLICIT,
triggeredByValue = { "POST", "PUT", "DELETE", "PATCH", "EXPRESSION" },
group = "HTTP"
)
public String defaultRequestContentType = HttpStageUtil.DEFAULT_CONTENT_TYPE;
// --- Client & throttling ------------------------------------------------
// Jersey client settings (auth, proxy, TLS, timeouts); initialized in init().
@ConfigDefBean
public JerseyClientConfigBean client = new JerseyClientConfigBean();
@ConfigDef(
required = false,
type = ConfigDef.Type.NUMBER,
label = "Rate Limit (ms)",
defaultValue = "0",
description = "Time between requests (in ms, 0 for unlimited). Useful for rate-limited APIs.",
displayPosition = 160,
displayMode = ConfigDef.DisplayMode.ADVANCED,
group = "HTTP"
)
public int rateLimit;
@ConfigDef(
required = true,
type = ConfigDef.Type.NUMBER,
label = "Maximum Request Time (sec)",
defaultValue = "60",
description = "Maximum time to wait for each request completion.",
displayPosition = 999,
displayMode = ConfigDef.DisplayMode.ADVANCED,
group = "HTTP"
)
public long maxRequestCompletionSecs = 60L;
// --- Result handling ----------------------------------------------------
// NOTE(review): field default is MissingValuesBehavior.DEFAULT while the UI
// default is "PASS_RECORD_ON" — presumably DEFAULT aliases PASS_RECORD_ON;
// confirm against the enum definition.
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
label = "Missing Values Behavior",
description = "How to handle missing values when no default value is defined.",
defaultValue = "PASS_RECORD_ON",
displayPosition = 11,
group = "HTTP"
)
@ValueChooserModel(MissingValuesBehaviorChooserValues.class)
public MissingValuesBehavior missingValuesBehavior = MissingValuesBehavior.DEFAULT;
@ConfigDef(
required = true,
type = ConfigDef.Type.MODEL,
label = "Multiple Values Behavior",
description = "How to handle multiple values produced by the parser",
defaultValue = "FIRST_ONLY",
displayPosition = 12,
displayMode = ConfigDef.DisplayMode.BASIC,
group = "HTTP"
)
@ValueChooserModel(MultipleValuesBehaviorChooserValues.class)
public MultipleValuesBehavior multipleValuesBehavior = MultipleValuesBehavior.DEFAULT;
@ConfigDefBean(groups = "PAGINATION")
public PaginationConfigBean pagination = new PaginationConfigBean();
// --- Per-status / timeout actions ---------------------------------------
@ConfigDef(
required = false,
type = ConfigDef.Type.MODEL,
label = "Per-Status Actions",
description = "List of actions to take for specific response statuses.",
displayPosition = 1200,
group = "HTTP"
)
@ListBeanModel
public List<HttpStatusResponseActionConfigBean> responseStatusActionConfigs;
@ConfigDef(
required = true,
type = ConfigDef.Type.BOOLEAN,
label = "Records for Remaining Statuses",
description = "Produces records for all HTTP status codes not listed in Per-Status Actions.",
defaultValue = "false",
displayPosition = 1201,
group = "HTTP"
)
public boolean propagateAllHttpResponses = false;
// Only shown when propagateAllHttpResponses is enabled.
@ConfigDef(
required = true,
type = ConfigDef.Type.STRING,
label = "Error Response Body Field",
description = "Field to store the error response body after performing per-status actions",
defaultValue = "outErrorBody",
displayPosition = 1202,
dependsOn = "propagateAllHttpResponses",
triggeredByValue = "true",
group = "HTTP"
)
public String errorResponseField = "outErrorBody";
@ConfigDefBean(groups = "TIMEOUT")
public HttpTimeoutResponseActionConfigBean responseTimeoutActionConfig =
new HttpTimeoutResponseActionConfigBean(0, ResponseAction.RETRY_IMMEDIATELY);
/**
 * Initializes the nested Jersey client configuration and collects any
 * configuration issues.
 *
 * @param context stage context used for validation
 * @param group UI group name to attribute issues to
 * @param prefix configuration-name prefix for issue reporting
 * @param issues accumulator for discovered configuration problems
 */
public void init(Stage.Context context, String group, String prefix, List<Stage.ConfigIssue> issues) {
client.init(context, group, prefix + "client", issues);
}
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.lib.jdbc;
import com.google.common.base.Strings;
import com.streamsets.pipeline.api.Field;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.api.base.OnRecordErrorException;
import com.streamsets.pipeline.lib.operation.UnsupportedOperationAction;
import java.math.BigDecimal;
import java.sql.Types;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.zip.DataFormatException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.sql.DataSource;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Array;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
public abstract class JdbcBaseRecordWriter implements JdbcRecordWriter {
private static final Logger LOG = LoggerFactory.getLogger(JdbcBaseRecordWriter.class);
// User-configured column-to-field/parameter overrides.
private final List<JdbcFieldColumnParamMapping> customMappings;
private final String connectionString;
private final DataSource dataSource;
private final String schema;
private final String tableName;
private final boolean rollbackOnError;
// When true, identifiers are double-quoted in generated SQL.
private final boolean caseSensitive;
// column name -> record field path (defaults to "/<column>").
private Map<String, String> columnsToFields = new HashMap<>();
// column name -> SQL parameter placeholder (defaults to "?").
private Map<String, String> columnsToParameters = new HashMap<>();
private final List<JdbcFieldColumnMapping> generatedColumnMappings;
// column name -> java.sql.Types code, populated from table metadata.
private Map<String, Integer> columnType = new HashMap<>();
private List<String> primaryKeyColumns;
private Map<String, String> columnsWithoutPrimaryKeys;
JdbcRecordReader recordReader;
/* Static lists for column type checking in setParamsToStatement */
private Set<Integer> binaryTypes = new HashSet<>(Arrays.asList(
Types.BINARY,
Types.LONGVARBINARY,
Types.VARBINARY
));
private Set<Integer> textTypes = new HashSet<>(Arrays.asList(
Types.CHAR,
Types.VARCHAR,
Types.BLOB,
Types.LONGNVARCHAR,
Types.NCHAR,
Types.NVARCHAR,
Types.LONGVARCHAR,
Types.SQLXML,
Types.CLOB,
Types.NCLOB
));
private Set<Integer> numericTypes = new HashSet<>(Arrays.asList(
Types.BIT,
Types.TINYINT,
Types.SMALLINT,
Types.INTEGER,
Types.BIGINT,
Types.DECIMAL,
Types.NUMERIC,
Types.FLOAT,
Types.REAL,
Types.DOUBLE
));
/*
The following should be handled by setObject() and hence
won't be checked.
Types.TIME_WITH_TIMEZONE
Types.TIMESTAMP_WITH_TIMEZONE
*/
private Set<Integer> dateTypes = new HashSet<>(Arrays.asList(
Types.DATE,
Types.TIME,
Types.TIMESTAMP
));
// Index of columns returned by DatabaseMetaData.getColumns. Defined in DatabaseMetaData class.
private static final int COLUMN_NAME = 4;
private static final int DATA_TYPE = 5;
// Substring matched against the driver name to detect Microsoft SQL Server.
private static final String MSSQL = "Microsoft";
private final int defaultOpCode;
private final UnsupportedOperationAction unsupportedAction;
// Parameter placeholders corresponding to the primary key columns, in order.
private final List<String> primaryKeyParams;
protected final JdbcUtil jdbcUtil;
/**
 * Builds column/field mappings from table metadata and caches primary key
 * information for later statement generation.
 *
 * @param connectionString JDBC connection string (kept for error reporting)
 * @param dataSource pooled source of JDBC connections
 * @param schema schema name; may be null/empty
 * @param tableName target table; a legacy "schema.table" value is split when schema is empty
 * @param rollbackOnError whether to attempt a rollback on write errors
 * @param customMappings user-configured column-to-field/parameter overrides
 * @param defaultOpCode operation code used when a record carries none
 * @param unsupportedAction behavior for unsupported operation codes
 * @param recordReader resolves field paths and operation codes from records
 * @param generatedColumnMappings mappings for database-generated columns
 * @param caseSensitive whether identifiers must be quoted in generated SQL
 * @throws StageException if table metadata or primary keys cannot be read
 */
public JdbcBaseRecordWriter(
    String connectionString,
    DataSource dataSource,
    String schema,
    String tableName,
    boolean rollbackOnError,
    List<JdbcFieldColumnParamMapping> customMappings,
    int defaultOpCode,
    UnsupportedOperationAction unsupportedAction,
    JdbcRecordReader recordReader,
    List<JdbcFieldColumnMapping> generatedColumnMappings,
    boolean caseSensitive
) throws StageException {
  this.jdbcUtil = UtilsProvider.getJdbcUtil();
  this.connectionString = connectionString;
  this.dataSource = dataSource;
  if (Strings.isNullOrEmpty(schema) && tableName.contains(".") && !caseSensitive) {
    // Legacy support: split "schema.table" so column metadata can be retrieved.
    LOG.warn("Schema in the tableName is no longer supported. Schema should be defined in Schema configuration: {}", tableName);
    String[] parts = tableName.split("\\.");
    if (parts.length != 2) {
      throw new StageException(JdbcErrors.JDBC_16, tableName);
    }
    this.schema = parts[0];
    // BUG FIX: keep only the table part. Previously the full "schema.table"
    // string was retained, which broke metadata lookups and produced
    // "schema.schema.table" qualified names in getTableName().
    this.tableName = parts[1];
  } else {
    this.schema = schema;
    this.tableName = tableName;
  }
  this.rollbackOnError = rollbackOnError;
  this.customMappings = customMappings;
  this.defaultOpCode = defaultOpCode;
  this.unsupportedAction = unsupportedAction;
  this.recordReader = recordReader;
  this.generatedColumnMappings = generatedColumnMappings;
  this.caseSensitive = caseSensitive;
  createDefaultFieldMappings();
  createCustomFieldMappings();
  lookupPrimaryKeys();
  primaryKeyParams = new LinkedList<>();
  columnsWithoutPrimaryKeys = new HashMap<>(columnsToFields);
  for (String key: primaryKeyColumns) {
    primaryKeyParams.add(getColumnsToParameters().get(key));
    columnsWithoutPrimaryKeys.remove(key);
  }
}
@Override
public void deinit() {
// Intentionally empty: this writer holds no resources of its own; the
// DataSource lifecycle is managed by the owning stage.
}
/**
 * Queries the database for the table's primary key columns and caches them in
 * {@code primaryKeyColumns}. The list stays empty for tables without a primary key.
 *
 * @throws StageException if the key lookup fails
 */
void lookupPrimaryKeys() throws StageException {
  Connection conn = null;
  try {
    conn = dataSource.getConnection();
    primaryKeyColumns = jdbcUtil.getPrimaryKeys(conn, schema, tableName);
  } catch (SQLException e) {
    String details = jdbcUtil.formatSqlException(e);
    LOG.error(details, e);
    throw new StageException(JdbcErrors.JDBC_17, tableName, details);
  } finally {
    if (conn != null) {
      try {
        conn.close();
      } catch (SQLException e) {
        // Close failures are logged but never mask the lookup result.
        String details = jdbcUtil.formatSqlException(e);
        LOG.error(details, e);
      }
    }
  }
}
// JDBC type (java.sql.Types) recorded for the column. Unboxing throws
// NullPointerException for unknown columns — callers only pass metadata-known names.
int getColumnType(String columnName) { return columnType.get(columnName); }
/**
 * Reads the table's column metadata and seeds the default mappings:
 * each column maps to field path "/columnName" and parameter "?", plus its
 * JDBC type. Custom overrides are applied later by createCustomFieldMappings().
 *
 * @throws StageException if the table is missing or metadata cannot be read
 */
private void createDefaultFieldMappings() throws StageException {
  try (Connection connection = dataSource.getConnection()) {
    // Verify the table exists before asking for its columns.
    try (ResultSet tableMeta = jdbcUtil.getTableMetadata(connection, schema, tableName)) {
      if (!tableMeta.next()) {
        throw new StageException(JdbcErrors.JDBC_16, getTableName());
      }
    }
    try (ResultSet columnMeta = jdbcUtil.getColumnMetadata(connection, schema, tableName)) {
      while (columnMeta.next()) {
        String columnName = columnMeta.getString(COLUMN_NAME);
        // Default implicit field mappings
        columnsToFields.put(columnName, "/" + columnName);
        columnsToParameters.put(columnName, "?");
        columnType.put(columnName, columnMeta.getInt(DATA_TYPE));
      }
    }
  } catch (SQLException e) {
    String formatted = jdbcUtil.formatSqlException(e);
    LOG.error(formatted);
    LOG.debug(formatted, e);
    throw new StageException(JdbcErrors.JDBC_09, tableName);
  }
}
/**
 * Applies the "Field to Column Mapping" configuration on top of the default
 * mappings. Mappings that name a column absent from the table are ignored.
 */
private void createCustomFieldMappings() {
  for (JdbcFieldColumnParamMapping mapping : customMappings) {
    LOG.debug("Custom mapping field {} to column {}", mapping.field, mapping.columnName);
    if (!columnsToFields.containsKey(mapping.columnName)) {
      continue;
    }
    LOG.debug("Mapping field {} to column {}", mapping.field, mapping.columnName);
    columnsToFields.put(mapping.columnName, mapping.field);
    columnsToParameters.put(mapping.columnName, mapping.paramValue);
  }
}
// Maps an SDC field type to the SQL type name required by
// Connection.createArrayOf(); the JDBC spec wants a string name, not an enum.
static String getSQLTypeName(Field.Type type) throws OnRecordErrorException {
  // MAP/LIST_MAP cannot be represented as a SQL array element type.
  if (type == Field.Type.LIST_MAP || type == Field.Type.MAP) {
    throw new OnRecordErrorException(JdbcErrors.JDBC_05, "Unsupported list or map type: MAP");
  }
  if (type == Field.Type.BOOLEAN) return "BOOLEAN";
  if (type == Field.Type.CHAR) return "CHAR";
  if (type == Field.Type.BYTE) return "BINARY";
  if (type == Field.Type.SHORT) return "SMALLINT";
  if (type == Field.Type.INTEGER) return "INTEGER";
  if (type == Field.Type.LONG) return "BIGINT";
  if (type == Field.Type.FLOAT) return "FLOAT";
  if (type == Field.Type.DOUBLE) return "DOUBLE";
  if (type == Field.Type.DATE) return "DATE";
  if (type == Field.Type.TIME) return "TIME";
  if (type == Field.Type.DATETIME) return "TIMESTAMP";
  if (type == Field.Type.DECIMAL) return "DECIMAL";
  if (type == Field.Type.STRING) return "VARCHAR";
  if (type == Field.Type.BYTE_ARRAY) return "VARBINARY";
  if (type == Field.Type.LIST) return "ARRAY";
  throw new OnRecordErrorException(JdbcErrors.JDBC_05, "Unsupported type: " + type.name());
}
/**
 * Database connection string (used for diagnostics/error reporting).
 * @return connection string
 */
protected String getConnectionString() {
return connectionString;
}
/**
 * Qualified name of the table this writer targets. Identifiers are
 * double-quoted when the writer is case-sensitive; the schema prefix is
 * included only when a schema is configured.
 *
 * @return table name, optionally schema-qualified and quoted
 */
protected String getTableName() {
  String table = caseSensitive ? "\"" + tableName + "\"" : tableName;
  if (Strings.isNullOrEmpty(schema)) {
    return table;
  }
  String schemaPart = caseSensitive ? "\"" + schema + "\"" : schema;
  return schemaPart + "." + table;
}
/**
 * JDBC DataSource used for writing.
 * @return JDBC DataSource
 */
DataSource getDataSource() {
return dataSource;
}
/**
 * SQL column to SDC field-path mappings.
 * @return map of the mappings
 */
Map<String, String> getColumnsToFields() {
return columnsToFields;
}
/**
 * SQL Columns to custom parameter mappings. These will be used to parameterize
 * an INSERT statement.
 * @return map of the mappings
 */
Map<String, String> getColumnsToParameters() {
return columnsToParameters;
}
/**
 * A list of primary key column names.
 * @return List of primary key column names
 */
List<String> getPrimaryKeyColumns() {
return primaryKeyColumns;
}
/**
 * Parameter placeholders corresponding to the primary key columns, in order.
 * @return List of primary key parameter placeholders
 */
List<String> getPrimaryKeyParams() {
return primaryKeyParams;
}
/**
 * Column-to-field mappings excluding the primary key columns.
 * @return map of table columns without primary keys
 */
Map<String, String> getColumnsToFieldNoPK() {
return columnsWithoutPrimaryKeys;
}
/**
 * Whether or not to try to perform a transaction rollback on error.
 * @return whether to rollback the transaction
 */
boolean getRollbackOnError() {
return rollbackOnError;
}
// Extracts the raw values from a list of SDC Fields (for Connection.createArrayOf).
static List<Object> unpackList(List<Field> value) {
  List<Object> rawValues = new ArrayList<>(value.size());
  for (Field wrapped : value) {
    rawValues.add(wrapped.getValue());
  }
  return rawValues;
}
// Mappings for database-generated columns written back into records; may be null.
List<JdbcFieldColumnMapping> getGeneratedColumnMappings() {
return generatedColumnMappings;
}
/**
 * Copies database-generated key values back into the written records.
 * Each row of getGeneratedKeys() is paired, in order, with the next record
 * from {@code iter} — the iterator MUST yield records in the same order the
 * statement wrote them. Conversion failures are collected into
 * {@code errorRecords} rather than aborting the loop.
 *
 * @param statement the executed statement holding generated keys
 * @param iter records in execution order
 * @param errorRecords accumulator for per-record conversion errors
 * @throws SQLException if reading the generated-keys result set fails
 */
void writeGeneratedColumns(
PreparedStatement statement,
Iterator<Record> iter,
List<OnRecordErrorException> errorRecords
) throws SQLException {
ResultSet resultSet = statement.getGeneratedKeys();
ResultSetMetaData md = resultSet.getMetaData();
int numColumns = md.getColumnCount();
while (resultSet.next()) {
Record record = iter.next();
// Process row
for (int i = 1; i <= numColumns; i++) {
try {
// Assuming generated columns can't be CLOBs/BLOBs, so just pass
// zero for maxClobSize
Field field = jdbcUtil.resultToField(
md,
resultSet,
i,
0,
0,
UnknownTypeAction.STOP_PIPELINE
);
if (field == null) {
LOG.error(JdbcErrors.JDBC_03.getMessage(), md.getColumnName(i), resultSet.getObject(i));
errorRecords.add(new OnRecordErrorException(record, JdbcErrors.JDBC_03,
md.getColumnName(i), resultSet.getObject(i)));
}
// NOTE(review): a null field is still set on the record after the error is
// recorded — confirm this is the intended behavior.
record.set(generatedColumnMappings.get(i - 1).field, field);
} catch (IOException|StageException e) {
LOG.error(JdbcErrors.JDBC_03.getMessage(), md.getColumnName(i), resultSet.getObject(i), e);
errorRecords.add(new OnRecordErrorException(record, JdbcErrors.JDBC_03,
md.getColumnName(i), resultSet.getObject(i)));
}
}
}
}
// True if the JDBC type is one of the numeric types bound via the typed setters.
boolean isColumnTypeNumeric(int columnType) {
return numericTypes.contains(columnType);
}
// True if the JDBC type is a character/text type bound via setString().
boolean isColumnTypeText(int columnType) {
return textTypes.contains(columnType);
}
// True if the JDBC type is a date/time type bound via setTimestamp().
boolean isColumnTypeDate(int columnType) {
return dateTypes.contains(columnType);
}
// True if the JDBC type is a binary type bound via setBytes().
boolean isColumnTypeBinary(int columnType) {
return binaryTypes.contains(columnType);
}
/**
 * Binds one parameter per entry of {@code columnsToParameters} onto the
 * statement, starting at {@code paramIdx}, converting each record field to
 * the column's JDBC type. Typed setters are preferred over setObject() (see
 * SDC-7959); incompatible field/column pairs fall back to setObject().
 *
 * @param paramIdx first parameter position to bind
 * @param statement statement being populated
 * @param columnsToParameters columns to bind, iterated in sorted order
 * @param record source of field values
 * @param connection used to create SQL arrays for LIST fields
 * @param opCode numeric CDC operation code for field-path resolution
 * @return the next unused parameter position
 * @throws OnRecordErrorException on unsupported types or binding failures
 */
int setParamsToStatement(int paramIdx,
PreparedStatement statement,
SortedMap<String, String> columnsToParameters,
Record record,
Connection connection,
int opCode) throws OnRecordErrorException {
// fill in parameters to existing statement
for (String column : columnsToParameters.keySet()) {
Field field = record.get(recordReader.getFieldPath(column, getColumnsToFields(), opCode));
Field.Type fieldType = field.getType();
Object value = field.getValue();
int columnType = getColumnType(column);
/* See SDC-7959: MapD does not support PreparedStatement.setObject()
* To minimise exceptions, explicitly set values using setType method.
* Note:
* - MAP, LIST_MAP not implemented as handled prior to calling. */
try {
/* If a value is null, regardless of its passed in Field.Type, the column should be set to null
*/
if (value == null) {
statement.setObject(paramIdx, value, getColumnType(column));
paramIdx++;
continue;
}
switch (fieldType) {
case LIST:
// LIST becomes a SQL ARRAY; element type is taken from the first element.
List<Field> fieldList = field.getValueAsList();
if (fieldList.size() > 0) {
Field.Type elementFieldType = fieldList.get(0).getType();
Array array = connection.createArrayOf(getSQLTypeName(elementFieldType), unpackList(fieldList).toArray());
statement.setArray(paramIdx, array);
} else {
statement.setArray(paramIdx, null);
}
break;
case DATE:
case TIME:
case DATETIME:
if (!isColumnTypeDate(columnType)) {
LOG.debug("fieldType: {} and column: {} not directly compatible. Attempting to use setObject()",
fieldType,
column
);
statement.setObject(paramIdx, value, getColumnType(column));
break;
}
// Java Date types are not accepted by JDBC drivers, so we need to convert to java.sql.Timestamp
statement.setTimestamp(paramIdx,
field.getValueAsDate() == null ? null : new java.sql.Timestamp(field.getValueAsDatetime().getTime())
);
break;
case BOOLEAN:
if (columnType != Types.BOOLEAN) {
LOG.debug("fieldType: {} and column: {} not directly compatible. Attempting to use setObject()",
fieldType,
column
);
statement.setObject(paramIdx, value, getColumnType(column));
break;
}
statement.setBoolean(paramIdx, (Boolean)value);
break;
case CHAR:
case STRING:
if (!isColumnTypeText(columnType)) {
LOG.debug("fieldType: {} and column: {} not directly compatible. Attempting to use setObject()",
fieldType,
column
);
statement.setObject(paramIdx, value, getColumnType(column));
break;
}
statement.setString(paramIdx, String.valueOf(value));
break;
case BYTE:
if (!isColumnTypeNumeric(columnType)) {
LOG.debug("fieldType: {} and column: {} not directly compatible. Attempting to use setObject()",
fieldType,
column
);
statement.setObject(paramIdx, value, getColumnType(column));
break;
}
statement.setByte(paramIdx, (Byte)value);
break;
case SHORT:
if (!isColumnTypeNumeric(columnType)) {
LOG.debug("fieldType: {} and column: {} not directly compatible. Attempting to use setObject()",
fieldType,
column
);
statement.setObject(paramIdx, value, getColumnType(column));
break;
}
statement.setShort(paramIdx, (Short)value);
break;
case INTEGER:
if (!isColumnTypeNumeric(columnType)) {
LOG.debug("fieldType: {} and column: {} not directly compatible. Attempting to use setObject()",
fieldType,
column
);
statement.setObject(paramIdx, value, getColumnType(column));
break;
}
statement.setInt(paramIdx, (Integer)value);
break;
case LONG:
if (!isColumnTypeNumeric(columnType)) {
LOG.debug("fieldType: {} and column: {} not directly compatible. Attempting to use setObject()",
fieldType,
column
);
statement.setObject(paramIdx, value, getColumnType(column));
break;
}
statement.setLong(paramIdx, (Long)value);
break;
case FLOAT:
if (!isColumnTypeNumeric(columnType)) {
LOG.debug("fieldType: {} and column: {} not directly compatible. Attempting to use setObject()",
fieldType,
column
);
statement.setObject(paramIdx, value, getColumnType(column));
break;
}
statement.setFloat(paramIdx, (Float)value);
break;
case DOUBLE:
if (!isColumnTypeNumeric(columnType)) {
LOG.debug("fieldType: {} and column: {} not directly compatible. Attempting to use setObject()",
fieldType,
column
);
statement.setObject(paramIdx, value, getColumnType(column));
break;
}
statement.setDouble(paramIdx, (Double)value);
break;
case DECIMAL:
if (!isColumnTypeNumeric(columnType)) {
LOG.debug("fieldType: {} and column: {} not directly compatible. Attempting to use setObject()",
fieldType,
column
);
statement.setObject(paramIdx, value, getColumnType(column));
break;
}
if (connection.getMetaData().getDriverName().contains(MSSQL)) {
LOG.debug(
"Since {} is being used we will send the record as object",
connection.getMetaData().getDriverName()
);
// Microsoft SQL Server JDBC Driver doesn't implement setBigDecimal() properly, it's better to always
// use setObject which have reasonable behavior.
statement.setObject(paramIdx, value, getColumnType(column));
break;
}
statement.setBigDecimal(paramIdx, (BigDecimal) value);
break;
case BYTE_ARRAY:
if (!isColumnTypeBinary(columnType)) {
LOG.debug("fieldType: {} and column: {} not directly compatible. Attempting to use setObject()",
fieldType,
column
);
statement.setObject(paramIdx, value, getColumnType(column));
break;
}
statement.setBytes(paramIdx, (byte[])value);
break;
case FILE_REF:
case MAP: // should not be seen as un-mapping handled prior to call
case LIST_MAP: // should not be seen as un-mapping handled prior to call
throw new DataFormatException(fieldType.name());
case ZONED_DATETIME: //guidance is to use setObject() for this type
default:
LOG.debug("fieldType: {} handled by default case. Attempting to use setObject()", fieldType);
statement.setObject(paramIdx, value, getColumnType(column));
break;
}
} catch (DataFormatException e) {
LOG.error("Query failed unsupported type {}", e.getMessage());
throw new OnRecordErrorException(record, JdbcErrors.JDBC_05, field.getValue(), fieldType.toString(), column);
} catch (SQLException e) {
LOG.error("Query failed due to {}", e.getMessage(), e);
throw new OnRecordErrorException(record, JdbcErrors.JDBC_23, field.getValue(), fieldType.toString(), column);
}
++paramIdx;
}
return paramIdx;
}
/**
 * Binds the record's primary key values to the statement, starting at
 * {@code index}. Called only for UPDATE and DELETE operations.
 * A key missing from the record raises OnRecordErrorException.
 *
 * @param index first parameter position to bind
 * @param record record supplying the key values
 * @param statement statement being populated
 * @param opCode numeric CDC operation code for field-path resolution
 * @return the next unused parameter position
 * @throws OnRecordErrorException if a key is missing or binding fails
 */
int setPrimaryKeys(int index, final Record record, PreparedStatement statement, int opCode)
    throws OnRecordErrorException {
  for (String keyColumn : getPrimaryKeyColumns()) {
    Field keyField = record.get(recordReader.getFieldPath(keyColumn, getColumnsToFields(), opCode));
    if (keyField == null) {
      LOG.error("Primary key {} is missing in record", keyColumn);
      throw new OnRecordErrorException(record, JdbcErrors.JDBC_19, keyColumn);
    }
    Object keyValue = keyField.getValue();
    try {
      statement.setObject(index, keyValue, getColumnType(keyColumn));
    } catch (SQLException ex) {
      LOG.error("SQLException thrown: {}", ex.getMessage());
      throw new OnRecordErrorException(record, JdbcErrors.JDBC_19, keyColumn, ex);
    }
    ++index;
  }
  return index;
}
/**
* This is an error that is not due to bad input record and should throw a StageException
* once we format the error.
*
* @param e SQLException
* @throws StageException
*/
void handleSqlException(SQLException e) throws StageException {
String formattedError = jdbcUtil.formatSqlException(e);
LOG.error(formattedError, e);
throw new StageException(JdbcErrors.JDBC_14, formattedError);
}
/**
* Get the numeric operation code from record header. The default code is
* used if the operation code is not found in the header.
*
* @param record the record to find the operation code
* @param errorRecords the list to take error records
* @return the numeric operation code or -1 for unsupported operation
*/
protected int getOperationCode(Record record, List<OnRecordErrorException> errorRecords) {
return recordReader.getOperationFromRecord(
record,
defaultOpCode,
unsupportedAction,
errorRecords);
}
}
| |
package com.se329.gui;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.EventQueue;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.FileWriter;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.security.CodeSource;
import java.util.ArrayList;
import javax.swing.DefaultListModel;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.UIManager;
import javax.swing.table.DefaultTableModel;
import com.firebase.client.ChildEventListener;
import com.firebase.client.DataSnapshot;
import com.firebase.client.Firebase;
import com.firebase.client.FirebaseError;
public class FaceTrainFrame {

	//DEBUG paths (kept for reference while developing against the facemap sources)
	//public static final String outputFileName = "../facemap/facesDB.csv";
	//public static final String exeFilePath = "../facemap/faceMap.exe";
	//public static final String picsDir = "../pics";

	//RELEASE paths; all are resolved against the directory containing the jar.
	public static final String outputFileName = "./facesDB.csv";
	public static final String exeFileName = "./FaceMap.exe";
	public static final String picsDir = "pics";

	// Directory containing the running jar; set once in main() before the UI starts.
	public static String currentDir = "";

	private static JFrame frmFaceTrainer;
	private static JPanel panel_1;
	private static JLabel photoLabel_1;
	private static DefaultListModel<String> listModel = new DefaultListModel<String>();

	// Subjects shown in the attendance table; repopulated by the Firebase listener.
	private static ArrayList<Subject> subjects = new ArrayList<Subject>();

	// The subject being assembled from the form; moved into "subjects" on Add.
	private static Subject currSub = new Subject();

	// Delimiters used when writing the CSV file.
	private static final String COMMA_DELIMITER = ",";
	private static final String NEW_LINE_SEPARATOR = "\n";

	private static Firebase firebaseRef = new Firebase(
			"https://torrid-heat-4382.firebaseio.com/");

	private static JTable table;
	private static JTextField nameTxt;
	private static final Object[] columnNames = { "Name", "Attendance" };
	private static Object[][] tableData;

	/**
	 * Launch the application.
	 */
	public static void main(String[] args) {
		// Resolve the directory containing the running .jar so that all
		// relative resources (pics, csv, exe) are found regardless of CWD.
		CodeSource codeSource = FaceTrainFrame.class.getProtectionDomain().getCodeSource();
		try {
			File jarFile = new File(codeSource.getLocation().toURI().getPath());
			currentDir = jarFile.getParentFile().getPath();
			System.out.println(currentDir);
		} catch (URISyntaxException e1) {
			e1.printStackTrace();
			System.exit(1);
		}

		EventQueue.invokeLater(new Runnable() {
			public void run() {
				try {
					// Constructing the window builds the UI and registers the
					// Firebase listener; no reference needs to be retained.
					new FaceTrainFrame();
				} catch (Exception e) {
					e.printStackTrace();
				}
			}
		});
	}

	/**
	 * Create the application.
	 * @wbp.parser.entryPoint
	 */
	public FaceTrainFrame() {
		initialize();
		querySubjects();
	}

	/**
	 * Registers a Firebase child listener that repopulates the subject list and
	 * the attendance table whenever the remote data changes.
	 */
	private void querySubjects() {
		System.out.println("adding listener");
		firebaseRef.addChildEventListener(new ChildEventListener() {

			@Override
			public void onCancelled(FirebaseError firebaseError) {
				System.out.println("The read failed: "
						+ firebaseError.getMessage());
			}

			@Override
			public void onChildAdded(DataSnapshot snapshot,
					String previousChildKey) {
				System.out.println("child added");
				// Rebuild the local subject cache from the snapshot.
				subjects.clear();
				System.out.println("There are " + snapshot.getChildrenCount()
						+ " Subjects");
				for (DataSnapshot postSnapshot : snapshot.getChildren()) {
					Subject sub = postSnapshot.getValue(Subject.class);
					sub.setId(postSnapshot.getKey());
					System.out.println(sub.getName() + " "
							+ sub.getTimesAttended());
					subjects.add(sub);
				}
				System.out.println("Size of arraylist " + subjects.size());
				// Push the refreshed data into the existing table model.
				setTableData();
				DefaultTableModel model = (DefaultTableModel) table.getModel();
				model.setDataVector(tableData, columnNames);
				frmFaceTrainer.getContentPane().validate();
				frmFaceTrainer.getContentPane().repaint();
			}

			@Override
			public void onChildChanged(DataSnapshot snapshot, String arg1) {
			}

			@Override
			public void onChildMoved(DataSnapshot arg0, String arg1) {
			}

			@Override
			public void onChildRemoved(DataSnapshot arg0) {
			}
		});
	}

	/**
	 * Persists the subject currently being assembled to Firebase and appends it
	 * to the local list and the attendance table.
	 */
	private void addSubject() {
		System.out.println("adding subject...");
		Firebase subjectRef = firebaseRef.child("subjects");
		System.out.println(currSub.getName());
		Firebase newSubjectRef = subjectRef.push();
		newSubjectRef.setValue(currSub);
		// Remember the Firebase-generated key so the CSV can reference it.
		currSub.setId(newSubjectRef.getKey());
		subjects.add(currSub);
		DefaultTableModel model = (DefaultTableModel) table.getModel();
		model.addRow(new Object[] { currSub.getName(), "0" });
		frmFaceTrainer.getContentPane().validate();
		frmFaceTrainer.getContentPane().repaint();
	}

	/**
	 * Initialize the contents of the frame: the subject-entry form on the left
	 * and the attendance table in the center.
	 */
	private void initialize() {
		frmFaceTrainer = new JFrame();
		frmFaceTrainer.getContentPane().setBackground(
				UIManager.getColor("FormattedTextField.selectionBackground"));
		frmFaceTrainer.setTitle("Face Recognition Attendance Taker");
		frmFaceTrainer.setBounds(100, 100, 719, 485);
		frmFaceTrainer.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
		frmFaceTrainer.setVisible(true);
		frmFaceTrainer.getContentPane().setLayout(new BorderLayout(0, 0));

		// Left-hand form panel: name field, photo picker, photo list, buttons.
		JPanel panel = new JPanel();
		frmFaceTrainer.getContentPane().add(panel, BorderLayout.WEST);
		GridBagLayout gbl_panel = new GridBagLayout();
		gbl_panel.columnWidths = new int[] { 202, 0, 4, 0 };
		gbl_panel.rowHeights = new int[] { 22, 17, 25, 261, 14, 35, 23, 0 };
		gbl_panel.columnWeights = new double[] { 1.0, 1.0, 0.0,
				Double.MIN_VALUE };
		gbl_panel.rowWeights = new double[] { 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
				0.0, Double.MIN_VALUE };
		panel.setLayout(gbl_panel);

		JLabel descLabel = new JLabel("Add a new subject to be recognized.");
		GridBagConstraints gbc_descLabel = new GridBagConstraints();
		gbc_descLabel.anchor = GridBagConstraints.WEST;
		gbc_descLabel.insets = new Insets(0, 0, 5, 5);
		gbc_descLabel.gridx = 0;
		gbc_descLabel.gridy = 0;
		panel.add(descLabel, gbc_descLabel);
		descLabel.setFont(new Font("Arial Rounded MT Bold", Font.PLAIN, 11));

		nameTxt = new JTextField();
		GridBagConstraints gbc_nameTxt = new GridBagConstraints();
		gbc_nameTxt.insets = new Insets(0, 0, 5, 5);
		gbc_nameTxt.fill = GridBagConstraints.HORIZONTAL;
		gbc_nameTxt.gridx = 0;
		gbc_nameTxt.gridy = 1;
		panel.add(nameTxt, gbc_nameTxt);
		nameTxt.setColumns(10);

		JLabel nameLabel = new JLabel("Name");
		GridBagConstraints gbc_nameLabel = new GridBagConstraints();
		gbc_nameLabel.anchor = GridBagConstraints.WEST;
		gbc_nameLabel.insets = new Insets(0, 0, 5, 5);
		gbc_nameLabel.gridx = 1;
		gbc_nameLabel.gridy = 1;
		panel.add(nameLabel, gbc_nameLabel);
		nameLabel.setFont(new Font("Arial Black", Font.PLAIN, 11));

		photoLabel_1 = new JLabel("Upload 10 photos...");
		GridBagConstraints gbc_photoLabel_1 = new GridBagConstraints();
		gbc_photoLabel_1.anchor = GridBagConstraints.WEST;
		gbc_photoLabel_1.insets = new Insets(0, 0, 5, 5);
		gbc_photoLabel_1.gridx = 0;
		gbc_photoLabel_1.gridy = 2;
		panel.add(photoLabel_1, gbc_photoLabel_1);
		photoLabel_1
				.setFont(new Font("Arial Rounded MT Bold", Font.ITALIC, 11));

		JButton photoButton_1 = new JButton("Select Photo");
		GridBagConstraints gbc_photoButton_1 = new GridBagConstraints();
		gbc_photoButton_1.anchor = GridBagConstraints.NORTHWEST;
		gbc_photoButton_1.insets = new Insets(0, 0, 5, 5);
		gbc_photoButton_1.gridx = 1;
		gbc_photoButton_1.gridy = 2;
		panel.add(photoButton_1, gbc_photoButton_1);
		photoButton_1.setFont(new Font("Arial Black", Font.PLAIN, 11));
		photoButton_1.setBackground(Color.CYAN);
		photoButton_1.addActionListener(new ActionListener() {
			public void actionPerformed(ActionEvent e) {
				// Record the chosen photo against the subject being assembled.
				String photoName = selectPhoto();
				if (photoName != null)
					currSub.appendPhotoPath("./pics/" + photoName);
			}
		});

		JList<String> list = new JList<String>(listModel);
		GridBagConstraints gbc_list = new GridBagConstraints();
		gbc_list.gridwidth = 2;
		gbc_list.insets = new Insets(0, 0, 5, 5);
		gbc_list.fill = GridBagConstraints.BOTH;
		gbc_list.gridx = 0;
		gbc_list.gridy = 3;
		panel.add(list, gbc_list);

		JButton addButton = new JButton("Add Subject");
		GridBagConstraints gbc_addButton = new GridBagConstraints();
		gbc_addButton.anchor = GridBagConstraints.WEST;
		gbc_addButton.insets = new Insets(0, 0, 5, 5);
		gbc_addButton.gridx = 0;
		gbc_addButton.gridy = 4;
		panel.add(addButton, gbc_addButton);
		addButton.setBackground(Color.GREEN);
		addButton.addActionListener(new ActionListener() {
			public void actionPerformed(ActionEvent e) {
				if (nameTxt.getText().isEmpty()) {
					JOptionPane.showMessageDialog(frmFaceTrainer,
							"Missing name!");
				// TODO(review): the minimum-photo validation (require at least
				// 10 uploaded photos) was disabled here; re-enable when ready.
				} else {
					currSub.setName(nameTxt.getText());
					addSubject();
					resetValues();
				}
			}
		});

		JButton cancelButton = new JButton("Cancel");
		GridBagConstraints gbc_cancelButton = new GridBagConstraints();
		gbc_cancelButton.anchor = GridBagConstraints.WEST;
		gbc_cancelButton.insets = new Insets(0, 0, 0, 5);
		gbc_cancelButton.gridx = 0;
		gbc_cancelButton.gridy = 6;
		panel.add(cancelButton, gbc_cancelButton);
		cancelButton.setBackground(Color.RED);
		cancelButton.addActionListener(new ActionListener() {
			public void actionPerformed(ActionEvent e) {
				resetValues();
			}
		});

		JButton runButton = new JButton("Run");
		runButton.setBackground(Color.YELLOW);
		GridBagConstraints gbc_runButton = new GridBagConstraints();
		gbc_runButton.insets = new Insets(0, 0, 0, 5);
		gbc_runButton.gridx = 1;
		gbc_runButton.gridy = 6;
		panel.add(runButton, gbc_runButton);
		runButton.addActionListener(new ActionListener() {
			public void actionPerformed(ActionEvent e) {
				// Export the CSV, hand off to the recognizer, then exit.
				System.out.println("Closing frame and starting executable.");
				createFile();
				startFaceRecognition();
				frmFaceTrainer.setVisible(false);
				frmFaceTrainer.dispose();
				System.exit(0);
			}
		});

		// Center panel: attendance table backed by tableData.
		panel_1 = new JPanel();
		frmFaceTrainer.getContentPane().add(panel_1, BorderLayout.CENTER);
		GridBagLayout gbl_panel_1 = new GridBagLayout();
		gbl_panel_1.columnWidths = new int[] { 0, 0 };
		gbl_panel_1.rowHeights = new int[] { 0, 0 };
		gbl_panel_1.columnWeights = new double[] { 1.0, Double.MIN_VALUE };
		gbl_panel_1.rowWeights = new double[] { 1.0, Double.MIN_VALUE };
		panel_1.setLayout(gbl_panel_1);

		table = new JTable(new DefaultTableModel(tableData, columnNames));
		GridBagConstraints gbc_table = new GridBagConstraints();
		gbc_table.fill = GridBagConstraints.BOTH;
		gbc_table.gridx = 0;
		gbc_table.gridy = 0;
		panel_1.add(table, gbc_table);

		frmFaceTrainer.getContentPane().validate();
		frmFaceTrainer.getContentPane().repaint();
	}

	/**
	 * Opens a file chooser rooted at the pics directory and returns the chosen
	 * photo's file name, or null when the dialog is cancelled.
	 */
	private static String selectPhoto() {
		String picsFilePath = new File(currentDir, picsDir).toString();
		JFileChooser chooser = new JFileChooser(picsFilePath);
		chooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
		int option = chooser.showOpenDialog(frmFaceTrainer);
		if (option == JFileChooser.APPROVE_OPTION) {
			File photo = chooser.getSelectedFile();
			listModel.addElement(photo.getName());
			return photo.getName();
		}
		return null;
	}

	/**
	 * Writes all known subjects to the CSV file consumed by the recognizer.
	 * Row format: id,name,timesAttended[,photoPath...]
	 *
	 * @return true when the file was written successfully, false on any error
	 */
	private boolean createFile() {
		String outputFilePath = new File(currentDir, outputFileName).toString();
		// try-with-resources guarantees the writer is closed even when a write
		// fails part-way through (the old code leaked it on exception).
		try (FileWriter writer = new FileWriter(outputFilePath, false)) {
			System.out.println("creating file");
			for (Subject sub : subjects) {
				writer.append(sub.getId());
				writer.append(COMMA_DELIMITER);
				writer.append(sub.getName());
				writer.append(COMMA_DELIMITER);
				writer.append(sub.getTimesAttended() + "");
				for (String str : sub.getPhotoPaths()) {
					writer.append(COMMA_DELIMITER);
					writer.append(str);
				}
				writer.append(NEW_LINE_SEPARATOR);
			}
			writer.flush();
		} catch (Exception e) {
			System.out.println("Error creating csv!");
			return false;
		}
		return true;
	}

	/**
	 * Clears the photo list, the name field, and the in-progress subject.
	 */
	private static void resetValues() {
		listModel.clear();
		nameTxt.setText("");
		currSub = new Subject();
	}

	/**
	 * Launches the external face-recognition executable that sits next to the
	 * jar, passing it the CSV produced by {@link #createFile()}. Blocks until
	 * the process exits.
	 */
	private void startFaceRecognition() {
		String exeFilePath = new File(currentDir, exeFileName).toString();
		if (new File(exeFilePath).exists()) {
			try {
				// Launch the same absolute path that was just existence-checked
				// (the old code launched a CWD-relative "./FaceMap.exe"), and
				// run from the jar directory so the relative csv argument
				// resolves to the file createFile() wrote.
				ProcessBuilder pb = new ProcessBuilder(exeFilePath, outputFileName);
				pb.directory(new File(currentDir));
				Process p = pb.start();
				int exitCode = p.waitFor();
				System.out.println("exited with " + exitCode);
			} catch (Exception e) {
				e.printStackTrace();
			}
		} else {
			System.err.println(exeFilePath + " does not exist");
		}
	}

	/**
	 * Rebuilds the backing array for the attendance table. Row 0 holds the
	 * column names — presumably because the table is added without a scroll
	 * pane so no header row is rendered; confirm before removing.
	 */
	private void setTableData() {
		tableData = new Object[subjects.size() + 1][2];
		tableData[0] = columnNames;
		int i = 1;
		for (Subject sub : subjects) {
			tableData[i] = new Object[] { sub.getName(), sub.getTimesAttended() };
			i++;
		}
	}
}
| |
package org.activiti.engine.test.bpmn.event.timer;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.activiti.engine.delegate.event.ActivitiEvent;
import org.activiti.engine.delegate.event.ActivitiEventType;
import org.activiti.engine.impl.test.PluggableActivitiTestCase;
import org.activiti.engine.impl.util.DefaultClockImpl;
import org.activiti.engine.runtime.Clock;
import org.activiti.engine.runtime.Job;
import org.activiti.engine.runtime.ProcessInstance;
import org.activiti.engine.task.Task;
import org.activiti.engine.test.api.event.TestActivitiEntityEventListener;
/**
* @author Vasile Dirla
*/
public class StartTimerEventRepeatWithoutEndDateTest extends PluggableActivitiTestCase {

  // Tolerance (ms) when comparing an expected due date against a job's actual due date.
  private static final long DUE_DATE_TOLERANCE_MS = 2000;

  private TestActivitiEntityEventListener listener;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // Capture all Job entity events fired during the test.
    listener = new TestActivitiEntityEventListener(Job.class);
    processEngineConfiguration.getEventDispatcher().addEventListener(listener);
  }

  @Override
  protected void tearDown() throws Exception {
    super.tearDown();
    if (listener != null) {
      processEngineConfiguration.getEventDispatcher().removeEventListener(listener);
    }
  }

  /**
   * Timer repetition: a start timer with a repeat count but no end date must
   * fire exactly the configured number of times (10) and then stop creating jobs.
   */
  public void testCycleDateStartTimerEvent() throws Exception {
    Clock previousClock = processEngineConfiguration.getClock();

    // Pin the engine clock to a fixed date so due-date assertions are deterministic.
    Clock testClock = new DefaultClockImpl();
    processEngineConfiguration.setClock(testClock);

    Calendar calendar = Calendar.getInstance();
    calendar.set(2025, Calendar.DECEMBER, 10, 0, 0, 0);
    testClock.setCurrentTime(calendar.getTime());

    // deploy the process
    repositoryService.createDeployment().addClasspathResource("org/activiti/engine/test/bpmn/event/timer/StartTimerEventRepeatWithoutEndDateTest.testCycleDateStartTimerEvent.bpmn20.xml").deploy();
    assertEquals(1, repositoryService.createProcessDefinitionQuery().count());

    // AFTER DEPLOYMENT
    // when the process is deployed there will be created a timerStartEvent
    // job which will wait to be executed.
    List<Job> jobs = managementService.createTimerJobQuery().list();
    assertEquals(1, jobs.size());

    // dueDate should be after 24 hours from the process deployment
    Calendar dueDateCalendar = Calendar.getInstance();
    dueDateCalendar.set(2025, Calendar.DECEMBER, 11, 0, 0, 0);

    // check the due date is inside the tolerance range
    assertTrue(Math.abs(dueDateCalendar.getTime().getTime() - jobs.get(0).getDuedate().getTime()) < DUE_DATE_TOLERANCE_MS);

    // No process instances
    List<ProcessInstance> processInstances = runtimeService.createProcessInstanceQuery().list();
    assertEquals(0, processInstances.size());

    // No tasks
    List<Task> tasks = taskService.createTaskQuery().list();
    assertEquals(0, tasks.size());

    // ADVANCE THE CLOCK
    // advance the clock after 9 days from starting the process ->
    // the system will execute the pending job and will create a new one (day by day)
    moveByMinutes(9 * 60 * 24);
    executeJobExecutorForTime(10000, 200);

    // there must be a pending job because the endDate is not reached yet
    assertEquals(1, managementService.createTimerJobQuery().count());

    // After time advanced 9 days there should be 9 process instances started
    processInstances = runtimeService.createProcessInstanceQuery().list();
    assertEquals(9, processInstances.size());

    // 9 tasks to be executed (the userTask "Task A")
    tasks = taskService.createTaskQuery().list();
    assertEquals(9, tasks.size());

    // one new job will be created (and the old one will be deleted after execution)
    jobs = managementService.createTimerJobQuery().list();
    assertEquals(1, jobs.size());

    // check if the last job to be executed has the dueDate set correctly
    // (10'th repeat after 10 dec. => dueDate must have DueDate = 20 dec.)
    dueDateCalendar = Calendar.getInstance();
    dueDateCalendar.set(2025, Calendar.DECEMBER, 20, 0, 0, 0);
    assertTrue(Math.abs(dueDateCalendar.getTime().getTime() - jobs.get(0).getDuedate().getTime()) < DUE_DATE_TOLERANCE_MS);

    // ADVANCE THE CLOCK so that all 10 repeats are executed (last execution)
    moveByMinutes(60 * 24);
    try {
      waitForJobExecutorToProcessAllJobsAndExecutableTimerJobs(2000, 200);
    } catch (Exception e) {
      fail("Because the maximum number of repeats is reached no other jobs are created");
    }

    // After the 10th startEvent execution there should be 10 process instances
    // started (since the first one was not completed)
    processInstances = runtimeService.createProcessInstanceQuery().list();
    assertEquals(10, processInstances.size());

    // the current job will be deleted after execution and a new one will
    // not be created. (all 10 have already executed)
    jobs = managementService.createTimerJobQuery().list();
    assertEquals(0, jobs.size());
    jobs = managementService.createJobQuery().list();
    assertEquals(0, jobs.size());

    // 10 tasks to be executed (the userTask "Task A"), one per process instance
    tasks = taskService.createTaskQuery().list();
    assertEquals(10, tasks.size());

    // FINAL CHECK: 10 timers fired; 20 job entities created and 20 deleted
    // (2 per timer job).
    List<ActivitiEvent> eventsReceived = listener.getEventsReceived();
    assertEquals(10, countEvents(eventsReceived, ActivitiEventType.TIMER_FIRED));
    assertEquals(20, countEvents(eventsReceived, ActivitiEventType.ENTITY_CREATED));
    assertEquals(20, countEvents(eventsReceived, ActivitiEventType.ENTITY_DELETED));

    // complete the userTasks where each process is hanging so the processes finish
    for (ProcessInstance processInstance : processInstances) {
      tasks = taskService.createTaskQuery().processInstanceId(processInstance.getProcessInstanceId()).list();
      // check the size before dereferencing the first element
      assertEquals(1, tasks.size());
      Task task = tasks.get(0);
      assertEquals("Task A", task.getName());
      taskService.complete(task.getId());
    }

    // now all the process instances should be completed
    processInstances = runtimeService.createProcessInstanceQuery().list();
    assertEquals(0, processInstances.size());

    // no jobs
    jobs = managementService.createTimerJobQuery().list();
    assertEquals(0, jobs.size());
    jobs = managementService.createJobQuery().list();
    assertEquals(0, jobs.size());

    // no tasks
    tasks = taskService.createTaskQuery().list();
    assertEquals(0, tasks.size());

    listener.clearEventsReceived();

    processEngineConfiguration.setClock(previousClock);

    repositoryService.deleteDeployment(repositoryService.createDeploymentQuery().singleResult().getId(), true);
  }

  /** Counts the received events of the given type. */
  private int countEvents(List<ActivitiEvent> events, ActivitiEventType type) {
    int count = 0;
    for (ActivitiEvent event : events) {
      if (type.equals(event.getType())) {
        count++;
      }
    }
    return count;
  }

  /** Advances the engine clock by the given number of minutes. */
  private void moveByMinutes(int minutes) throws Exception {
    // Use long arithmetic so large advances cannot overflow the millisecond count.
    long deltaMillis = minutes * 60L * 1000L;
    processEngineConfiguration.getClock().setCurrentTime(new Date(processEngineConfiguration.getClock().getCurrentTime().getTime() + deltaMillis));
  }
}
| |
package org.maera.plugin.osgi.container.felix;
import org.apache.commons.lang.Validate;
import org.apache.felix.framework.Felix;
import org.apache.felix.framework.Logger;
import org.apache.felix.framework.cache.BundleCache;
import org.apache.felix.framework.util.FelixConstants;
import org.apache.felix.framework.util.StringMap;
import org.maera.plugin.event.PluginEventListener;
import org.maera.plugin.event.PluginEventManager;
import org.maera.plugin.event.events.PluginFrameworkShutdownEvent;
import org.maera.plugin.event.events.PluginFrameworkStartingEvent;
import org.maera.plugin.event.events.PluginFrameworkWarmRestartingEvent;
import org.maera.plugin.event.events.PluginUninstalledEvent;
import org.maera.plugin.event.events.PluginUpgradedEvent;
import org.maera.plugin.osgi.container.OsgiContainerException;
import org.maera.plugin.osgi.container.OsgiContainerManager;
import org.maera.plugin.osgi.container.OsgiContainerStartedEvent;
import org.maera.plugin.osgi.container.OsgiContainerStoppedEvent;
import org.maera.plugin.osgi.container.OsgiPersistentCache;
import org.maera.plugin.osgi.container.PackageScannerConfiguration;
import org.maera.plugin.osgi.container.impl.DefaultOsgiPersistentCache;
import org.maera.plugin.osgi.hostcomponents.HostComponentProvider;
import org.maera.plugin.osgi.hostcomponents.HostComponentRegistration;
import org.maera.plugin.osgi.hostcomponents.impl.DefaultComponentRegistrar;
import org.maera.plugin.osgi.util.OsgiHeaderUtil;
import org.maera.plugin.util.ClassLoaderUtils;
import org.maera.plugin.util.ContextClassLoaderSwitchingUtil;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleEvent;
import org.osgi.framework.BundleException;
import org.osgi.framework.BundleListener;
import org.osgi.framework.Constants;
import org.osgi.framework.FrameworkEvent;
import org.osgi.framework.FrameworkListener;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.packageadmin.PackageAdmin;
import org.osgi.util.tracker.ServiceTracker;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.jar.JarFile;
/**
* Felix implementation of the OSGi container manager
*
* @since 0.1
*/
public class FelixOsgiContainerManager implements OsgiContainerManager {
public static final String OSGI_FRAMEWORK_BUNDLES_ZIP = "osgi-framework-bundles.zip";
public static final int REFRESH_TIMEOUT = 10;
private static final org.slf4j.Logger log = LoggerFactory.getLogger(FelixOsgiContainerManager.class);
private static final String OSGI_BOOTDELEGATION = "org.osgi.framework.bootdelegation";
private static final String MAERA_PREFIX = "maera.";
private final OsgiPersistentCache persistentCache;
private final URL frameworkBundlesUrl;
private final PackageScannerConfiguration packageScannerConfig;
private final HostComponentProvider hostComponentProvider;
private final List<ServiceTracker> trackers;
private final ExportsBuilder exportsBuilder;
private final ThreadFactory threadFactory = new ThreadFactory() {
public Thread newThread(final Runnable r) {
final Thread thread = new Thread(r, "Felix:Startup");
thread.setDaemon(true);
return thread;
}
};
private BundleRegistration registration = null;
private Felix felix = null;
private boolean felixRunning = false;
private boolean disableMultipleBundleVersions = true;
private Logger felixLogger;
private final PluginEventManager pluginEventManager;
    /**
     * Constructs the container manager using the framework bundles zip file located in this library.
     * Delegates to the URL-based constructor after resolving the bundled zip resource.
     *
     * @param frameworkBundlesDir  The directory to unzip the framework bundles into.
     * @param packageScannerConfig The configuration for package scanning
     * @param provider             The host component provider. May be null.
     * @param eventManager         The plugin event manager to register for init and shutdown events
     * @deprecated Since 2.2.0, use
     *             {@link #FelixOsgiContainerManager(OsgiPersistentCache,PackageScannerConfiguration,HostComponentProvider,PluginEventManager)} instead
     */
    @Deprecated
    public FelixOsgiContainerManager(final File frameworkBundlesDir, final PackageScannerConfiguration packageScannerConfig, final HostComponentProvider provider, final PluginEventManager eventManager) {
        this(ClassLoaderUtils.getResource(OSGI_FRAMEWORK_BUNDLES_ZIP, FelixOsgiContainerManager.class), frameworkBundlesDir, packageScannerConfig,
                provider, eventManager);
    }
    /**
     * Constructs the container manager.
     * Delegates to the cache-based constructor, placing the persistent cache in an
     * "osgi-cache" directory alongside the framework bundles directory.
     *
     * @param frameworkBundlesZip  The location of the zip file containing framework bundles
     * @param frameworkBundlesDir  The directory to unzip the framework bundles into.
     * @param packageScannerConfig The configuration for package scanning
     * @param provider             The host component provider. May be null.
     * @param eventManager         The plugin event manager to register for init and shutdown events
     * @deprecated Since 2.2.0, use
     *             {@link #FelixOsgiContainerManager(URL, OsgiPersistentCache,PackageScannerConfiguration,HostComponentProvider,PluginEventManager)} instead
     */
    @Deprecated
    public FelixOsgiContainerManager(final URL frameworkBundlesZip, final File frameworkBundlesDir, final PackageScannerConfiguration packageScannerConfig, final HostComponentProvider provider, final PluginEventManager eventManager) {
        this(frameworkBundlesZip, new DefaultOsgiPersistentCache(new File(frameworkBundlesDir.getParentFile(),
                "osgi-cache")), packageScannerConfig, provider, eventManager);
    }
    /**
     * Constructs the container manager using the framework bundles zip file located in this library.
     * Delegates to the URL-based constructor after resolving the bundled zip resource.
     *
     * @param persistentCache      The persistent cache configuration.
     * @param packageScannerConfig The configuration for package scanning
     * @param provider             The host component provider. May be null.
     * @param eventManager         The plugin event manager to register for init and shutdown events
     * @since 2.2.0
     */
    public FelixOsgiContainerManager(final OsgiPersistentCache persistentCache, final PackageScannerConfiguration packageScannerConfig, final HostComponentProvider provider, final PluginEventManager eventManager) {
        this(ClassLoaderUtils.getResource(OSGI_FRAMEWORK_BUNDLES_ZIP, FelixOsgiContainerManager.class), persistentCache, packageScannerConfig,
                provider, eventManager);
    }
/**
* Constructs the container manager
*
* @param frameworkBundlesZip The location of the zip file containing framework bundles
* @param persistentCache The persistent cache to use for the framework and framework bundles
* @param packageScannerConfig The configuration for package scanning
* @param provider The host component provider. May be null.
* @param eventManager The plugin event manager to register for init and shutdown events
* @throws org.maera.plugin.osgi.container.OsgiContainerException
* If the host version isn't supplied and the
* cache directory cannot be cleaned.
* @since 2.2.0
*/
public FelixOsgiContainerManager(final URL frameworkBundlesZip, OsgiPersistentCache persistentCache,
final PackageScannerConfiguration packageScannerConfig,
final HostComponentProvider provider, final PluginEventManager eventManager)
throws OsgiContainerException {
Validate.notNull(frameworkBundlesZip, "The framework bundles zip is required");
Validate.notNull(persistentCache, "The framework bundles directory must not be null");
Validate.notNull(packageScannerConfig, "The package scanner configuration must not be null");
Validate.notNull(eventManager, "The plugin event manager is required");
frameworkBundlesUrl = frameworkBundlesZip;
this.packageScannerConfig = packageScannerConfig;
this.persistentCache = persistentCache;
hostComponentProvider = provider;
trackers = Collections.synchronizedList(new ArrayList<ServiceTracker>());
this.pluginEventManager = eventManager;
eventManager.register(this);
felixLogger = new FelixLoggerBridge(log);
exportsBuilder = new ExportsBuilder();
}
    /**
     * Replaces the default Felix logger bridge with the given Felix logger.
     *
     * @param logger the Felix logger to use for framework log output
     */
    public void setFelixLogger(final Logger logger) {
        felixLogger = logger;
    }
    /**
     * Sets whether installing multiple versions of the same bundle is disallowed
     * (defaults to true).
     *
     * @param val true to disable multiple bundle versions
     */
    public void setDisableMultipleBundleVersions(final boolean val) {
        disableMultipleBundleVersions = val;
    }
    /** Starts the OSGi container when the plugin framework begins starting. */
    @SuppressWarnings({"UnusedDeclaration"})
    @PluginEventListener
    public void onStart(final PluginFrameworkStartingEvent event) {
        start();
    }
    /** Stops the OSGi container when the plugin framework shuts down. */
    @SuppressWarnings({"UnusedDeclaration"})
    @PluginEventListener
    public void onShutdown(final PluginFrameworkShutdownEvent event) {
        stop();
    }
    /**
     * Refreshes OSGi packages after a plugin upgrade.
     * NOTE(review): assumes the container has been started ({@code registration}
     * is only assigned in {@code start()}) — confirm this event cannot arrive earlier.
     */
    @SuppressWarnings({"UnusedDeclaration"})
    @PluginEventListener
    public void onPluginUpgrade(PluginUpgradedEvent event) {
        registration.refreshPackages();
    }
    /**
     * Refreshes OSGi packages after a plugin is uninstalled.
     * NOTE(review): assumes the container has been started ({@code registration}
     * is only assigned in {@code start()}) — confirm this event cannot arrive earlier.
     */
    @SuppressWarnings({"UnusedDeclaration"})
    @PluginEventListener
    public void onPluginUninstallation(PluginUninstalledEvent event) {
        registration.refreshPackages();
    }
    /**
     * Reloads host components into the running framework on a warm restart,
     * re-collecting them from the configured host component provider.
     */
    @SuppressWarnings({"UnusedDeclaration"})
    @PluginEventListener
    public void onPluginFrameworkWarmRestarting(PluginFrameworkWarmRestartingEvent event) {
        registration.loadHostComponents(collectHostComponents(hostComponentProvider));
    }
/**
 * Creates, configures and starts the embedded Felix OSGi container.  Does
 * nothing if the container is already running.  On success an
 * {@link OsgiContainerStartedEvent} is broadcast.
 *
 * @throws OsgiContainerException if the container cannot be configured or started
 */
public void start() throws OsgiContainerException {
    if (isRunning()) {
        return;
    }
    final DefaultComponentRegistrar registrar = collectHostComponents(hostComponentProvider);
    // Create a case-insensitive configuration property map.
    final StringMap configMap = new StringMap(false);
    // Add the bundle provided service interface package and the core OSGi
    // packages to be exported from the class path via the system bundle.
    configMap.put(Constants.FRAMEWORK_SYSTEMPACKAGES_EXTRA, exportsBuilder.getExports(registrar.getRegistry(), packageScannerConfig));
    // Explicitly specify the directory to use for caching bundles.
    configMap.put(BundleCache.CACHE_ROOTDIR_PROP, persistentCache.getOsgiBundleCache().getAbsolutePath());
    configMap.put(FelixConstants.LOG_LEVEL_PROP, String.valueOf(felixLogger.getLogLevel()));
    configMap.put(FelixConstants.LOG_LOGGER_PROP, felixLogger);
    // Boot delegation can be overridden via a Maera-prefixed system property;
    // otherwise fall back to the built-in list below.
    String bootDelegation = getAtlassianSpecificOsgiSystemProperty(OSGI_BOOTDELEGATION);
    if ((bootDelegation == null) || (bootDelegation.trim().length() == 0)) {
        // These exist to work around JAXP problems. Specifically, bundles that use static factories to create JAXP
        // instances will execute FactoryFinder with the CCL set to the bundle. These delegations ensure the appropriate
        // implementation is found and loaded.
        bootDelegation = "weblogic,weblogic.*," +
                "META-INF.services," +
                "com.yourkit,com.yourkit.*," +
                "com.jprofiler,com.jprofiler.*," +
                "org.apache.xerces,org.apache.xerces.*," +
                "org.apache.xalan,org.apache.xalan.*," +
                "sun.*," +
                "com.icl.saxon";
    }
    configMap.put(FelixConstants.FRAMEWORK_BOOTDELEGATION, bootDelegation);
    configMap.put(FelixConstants.IMPLICIT_BOOT_DELEGATION_PROP, "false");
    configMap.put(FelixConstants.FRAMEWORK_BUNDLE_PARENT, FelixConstants.FRAMEWORK_BUNDLE_PARENT_FRAMEWORK);
    if (log.isDebugEnabled()) {
        log.debug("Felix configuration: " + configMap);
    }
    validateConfiguration(configMap);
    try {
        // Create host activator; it is registered as a system-bundle activator
        // so it runs inside the framework's own lifecycle.
        registration = new BundleRegistration(frameworkBundlesUrl, persistentCache.getFrameworkBundleCache(), registrar);
        final List<BundleActivator> list = new ArrayList<BundleActivator>();
        list.add(registration);
        configMap.put(FelixConstants.SYSTEMBUNDLE_ACTIVATORS_PROP, list);
        // Now create an instance of the framework with
        // our configuration properties and activator.
        felix = new Felix(configMap);
        // Start Felix on a thread from the injected factory.
        // NOTE(review): the original comment said this is "to explicitly set
        // daemon status", but no setDaemon call is visible here — presumably
        // the thread factory handles that; confirm.
        final Runnable start = new Runnable() {
            public void run() {
                try {
                    // Clear the CCL so bundle code does not see the host's loader.
                    Thread.currentThread().setContextClassLoader(null);
                    felix.start();
                    felixRunning = true;
                }
                catch (final BundleException e) {
                    // NOTE(review): thrown on the spawned thread, so this never
                    // reaches the caller of start(); a startup failure only
                    // manifests as felixRunning staying false.
                    throw new OsgiContainerException("Unable to start felix", e);
                }
            }
        };
        final Thread t = threadFactory.newThread(start);
        t.start();
        // Wait for startup to finish — up to 10 minutes (10 * 60 * 1000 ms),
        // not the "10 seconds" the original comment claimed.
        t.join(10 * 60 * 1000);
    }
    catch (final Exception ex) {
        throw new OsgiContainerException("Unable to start OSGi container", ex);
    }
    pluginEventManager.broadcast(new OsgiContainerStartedEvent(this));
}
/**
 * Runs sanity checks against the assembled Felix configuration before the
 * container is started.
 *
 * @param configMap The Felix configuration
 * @throws OsgiContainerException If any validation fails
 */
private void validateConfiguration(StringMap configMap) throws OsgiContainerException {
    final String exportedPackages = (String) configMap.get(Constants.FRAMEWORK_SYSTEMPACKAGES_EXTRA);
    validateCaches(exportedPackages);
    detectIncorrectOsgiVersion();
    detectXercesOverride(exportedPackages);
}
/**
 * Detect when xerces has no version, most likely due to an installation of Tomcat where an old version of xerces
 * is installed into common/lib/endorsed in order to support Java 1.4.
 *
 * @param systemExports The system exports
 * @throws OsgiContainerException If xerces has no version
 */
void detectXercesOverride(String systemExports) throws OsgiContainerException {
    final String xercesPackage = "org.apache.xerces.util";
    final int start = systemExports.indexOf(xercesPackage);
    if (start < 0) {
        // Xerces is not exported at all; nothing to validate.
        return;
    }
    // Only treat the match as an export entry when it begins the list or
    // directly follows a comma separator.
    if (start > 0 && systemExports.charAt(start - 1) != ',') {
        return;
    }
    final int end = start + xercesPackage.length();
    // A versioned export continues with ";version=..."; anything else means
    // the package was exported without a version.
    if (end >= systemExports.length() || ';' != systemExports.charAt(end)) {
        throw new OsgiContainerException(
                "Detected an incompatible version of Apache Xerces on the classpath. If using Tomcat, you may have " +
                        "an old version of Xerces in $TOMCAT_HOME/common/lib/endorsed that will need to be removed.");
    }
}
/**
 * Validate caches based on the list of packages exported from the application. If the list has changed, the cache
 * directories should be cleared.  The export list's hash code serves as the
 * cache validity key.
 *
 * @param systemExports The value of system exports in the header
 */
private void validateCaches(String systemExports) {
    final String cacheKey = Integer.toString(systemExports.hashCode());
    persistentCache.validate(cacheKey);
    log.debug("Using Felix bundle cache directory :" + persistentCache.getOsgiBundleCache().getAbsolutePath());
}
/**
 * Detects incorrect configuration of WebSphere 6.1 that leaks OSGi 4.0 jars
 * into the application, by probing for {@code Bundle.getBundleContext()}
 * which only exists in OSGi 4.1+.
 *
 * @throws OsgiContainerException if an older OSGi API is visible on the classpath
 */
private void detectIncorrectOsgiVersion() {
    try {
        Bundle.class.getMethod("getBundleContext");
    }
    catch (final NoSuchMethodException e) {
        // Preserve the reflective failure as the cause (previously dropped).
        throw new OsgiContainerException(
                "Detected older version (4.0 or earlier) of OSGi. If using WebSphere " + "6.1, please enable application-first (parent-last) classloading and the 'Single classloader for " + "application' WAR classloader policy.", e);
    }
}
/**
 * Stops the Felix container, closing any outstanding service trackers first
 * and broadcasting an {@link OsgiContainerStoppedEvent}.  Safe to call when
 * the container is not running.
 *
 * @throws OsgiContainerException declared for API symmetry; shutdown errors are logged
 */
public void stop() throws OsgiContainerException {
    if (felixRunning) {
        // Iterate over a snapshot: closing a tracker mutates the live list.
        for (final ServiceTracker tracker : new HashSet<ServiceTracker>(trackers)) {
            tracker.close();
        }
        try {
            felix.stop();
            // Bounded wait so shutdown cannot hang indefinitely.
            felix.waitForStop(5000);
        }
        catch (InterruptedException e) {
            log.warn("Interrupting Felix shutdown", e);
            // Restore the interrupt flag so callers can observe the interruption
            // (previously swallowed).
            Thread.currentThread().interrupt();
        }
        catch (BundleException ex) {
            log.error("An error occurred while stopping the Felix OSGi Container. ", ex);
        }
    }
    felixRunning = false;
    felix = null;
    pluginEventManager.broadcast(new OsgiContainerStoppedEvent(this));
}
/**
 * Returns all bundles installed in the running container.
 *
 * @return the installed bundles
 * @throws IllegalStateException if the container is not running
 */
public Bundle[] getBundles() {
    if (!isRunning()) {
        throw new IllegalStateException(
                "Cannot retrieve the bundles if the Felix container isn't running. Check earlier in the logs for the possible cause as to why Felix didn't start correctly.");
    }
    return registration.getBundles();
}
/**
 * Returns the services registered with the framework.
 *
 * @return the registered service references
 * @throws IllegalStateException if the container is not running
 */
public ServiceReference[] getRegisteredServices() {
    if (!isRunning()) {
        // Consistent with getBundles()/getServiceTracker(); previously this
        // failed with an uninformative NullPointerException on the null felix field.
        throw new IllegalStateException("Unable to retrieve registered services when osgi is not running");
    }
    return felix.getRegisteredServices();
}
/**
 * Creates, opens and remembers a service tracker for the given service class
 * so it can be closed when the container shuts down.
 *
 * @param cls the service class name to track
 * @return the opened tracker
 * @throws IllegalStateException if the container is not running
 */
public ServiceTracker getServiceTracker(final String cls) {
    if (!isRunning()) {
        throw new IllegalStateException("Unable to create a tracker when osgi is not running");
    }
    final ServiceTracker serviceTracker = registration.getServiceTracker(cls, trackers);
    serviceTracker.open();
    trackers.add(serviceTracker);
    return serviceTracker;
}
/**
 * Installs a plugin bundle from a file, optionally uninstalling other
 * versions of the same plugin first (per the configured flag).
 *
 * @param file the bundle jar to install
 * @return the installed bundle
 * @throws OsgiContainerException if the bundle cannot be installed
 */
public Bundle installBundle(final File file) throws OsgiContainerException {
    final Bundle installed;
    try {
        installed = registration.install(file, disableMultipleBundleVersions);
    }
    catch (final BundleException e) {
        throw new OsgiContainerException("Unable to install bundle", e);
    }
    return installed;
}
/**
 * Builds a component registrar populated by the given provider.
 *
 * @param provider the host component provider; may be null
 * @return a registrar holding the provided host components (empty when provider is null)
 */
DefaultComponentRegistrar collectHostComponents(final HostComponentProvider provider) {
    final DefaultComponentRegistrar componentRegistrar = new DefaultComponentRegistrar();
    if (provider == null) {
        return componentRegistrar;
    }
    provider.provide(componentRegistrar);
    return componentRegistrar;
}
/**
 * @return true once the Felix start thread has completed startup and the
 *         container has not yet been stopped
 */
public boolean isRunning() {
    return felixRunning;
}
/**
 * @return the host component registrations made against the container.
 *         NOTE(review): throws NullPointerException if called before start()
 *         or after stop(), when the registration field is null — confirm
 *         callers only invoke this while the container is running.
 */
public List<HostComponentRegistration> getHostComponentRegistrations() {
    return registration.getHostComponentRegistrations();
}
/**
 * Looks up a system property under the Maera-specific prefix.
 *
 * @param originalSystemProperty the unprefixed property name
 * @return the property value, or null when unset
 */
private String getAtlassianSpecificOsgiSystemProperty(final String originalSystemProperty) {
    final String prefixedName = MAERA_PREFIX + originalSystemProperty;
    return System.getProperty(prefixedName);
}
/**
 * Manages framework-level framework bundles and host components registration, and individual plugin bundle
 * installation and removal.  Registered as a system-bundle activator, so its
 * {@code start}/{@code stop} methods run inside the framework lifecycle.
 */
static class BundleRegistration implements BundleActivator, BundleListener, FrameworkListener {
    private BundleContext bundleContext;
    private DefaultComponentRegistrar registrar;
    // Service registrations made for host components; unregistered on stop/reload.
    private List<ServiceRegistration> hostServicesReferences;
    private List<HostComponentRegistration> hostComponentRegistrations;
    private final URL frameworkBundlesUrl;
    private final File frameworkBundlesDir;
    // Context class loader captured at construction; host component
    // registration is performed under this loader.
    private ClassLoader initializedClassLoader;
    private PackageAdmin packageAdmin;

    public BundleRegistration(final URL frameworkBundlesUrl, final File frameworkBundlesDir, final DefaultComponentRegistrar registrar) {
        this.registrar = registrar;
        this.frameworkBundlesUrl = frameworkBundlesUrl;
        this.frameworkBundlesDir = frameworkBundlesDir;
        this.initializedClassLoader = Thread.currentThread().getContextClassLoader();
    }

    /**
     * Activator entry point: caches the PackageAdmin service, registers this
     * object as bundle/framework listener, publishes host components and
     * installs the framework bundles.
     */
    public void start(final BundleContext context) throws Exception {
        bundleContext = context;
        final ServiceReference ref = context.getServiceReference(org.osgi.service.packageadmin.PackageAdmin.class.getName());
        packageAdmin = (PackageAdmin) context.getService(ref);
        context.addBundleListener(this);
        context.addFrameworkListener(this);
        loadHostComponents(registrar);
        extractAndInstallFrameworkBundles();
    }

    /**
     * Activator shutdown: removes listeners, unregisters host component
     * services and drops all references so the instance can be GC'd.
     */
    public void stop(final BundleContext ctx) throws Exception {
        ctx.removeBundleListener(this);
        ctx.removeFrameworkListener(this);
        if (hostServicesReferences != null) {
            for (ServiceRegistration ref : hostServicesReferences) {
                ref.unregister();
            }
        }
        bundleContext = null;
        packageAdmin = null;
        hostServicesReferences = null;
        hostComponentRegistrations = null;
        registrar = null;
        initializedClassLoader = null;
    }

    /** Logs bundle lifecycle transitions at INFO level. */
    public void bundleChanged(final BundleEvent evt) {
        final String state;
        switch (evt.getType()) {
            case BundleEvent.INSTALLED:
                state = "Installed";
                break;
            case BundleEvent.RESOLVED:
                state = "Resolved";
                break;
            case BundleEvent.UNRESOLVED:
                state = "Unresolved";
                break;
            case BundleEvent.STARTED:
                state = "Started";
                break;
            case BundleEvent.STOPPED:
                state = "Stopped";
                break;
            case BundleEvent.UNINSTALLED:
                state = "Uninstalled";
                break;
            default:
                // Unknown event types were ignored before; keep that behavior.
                return;
        }
        log.info(state + " bundle " + evt.getBundle().getSymbolicName() + " (" + evt.getBundle().getBundleId() + ")");
    }

    /**
     * Installs a bundle from a file, optionally uninstalling any already
     * installed bundles that carry the same plugin key first.
     *
     * @param path the bundle jar
     * @param uninstallOtherVersions true to uninstall other versions of the same plugin
     * @return the newly installed bundle
     * @throws BundleException if the jar cannot be read or the install fails
     */
    public Bundle install(final File path, final boolean uninstallOtherVersions) throws BundleException {
        boolean bundleUninstalled = false;
        if (uninstallOtherVersions) {
            try {
                JarFile jar = new JarFile(path);
                String pluginKey = null;
                try {
                    pluginKey = OsgiHeaderUtil.getPluginKey(jar.getManifest());
                }
                finally {
                    jar.close();
                }
                for (final Bundle oldBundle : bundleContext.getBundles()) {
                    if (pluginKey.equals(OsgiHeaderUtil.getPluginKey(oldBundle))) {
                        log.info("Uninstalling existing version " + oldBundle.getHeaders().get(Constants.BUNDLE_VERSION));
                        oldBundle.uninstall();
                        bundleUninstalled = true;
                    }
                }
            }
            catch (final IOException e) {
                throw new BundleException("Invalid bundle format", e);
            }
        }
        final Bundle bundle = bundleContext.installBundle(path.toURI().toString());
        if (bundleUninstalled) {
            // Recalculate wirings now that an old version is gone.
            refreshPackages();
        }
        return bundle;
    }

    public Bundle[] getBundles() {
        return bundleContext.getBundles();
    }

    /**
     * Creates a tracker for the given service class that removes itself from
     * the supplied collection when closed.
     */
    public ServiceTracker getServiceTracker(final String clazz, final Collection<ServiceTracker> trackedTrackers) {
        return new ServiceTracker(bundleContext, clazz, null) {
            @Override
            public void close() {
                // Actually close the underlying tracker. Previously super.close()
                // was never called, so close() only removed the tracker from the
                // list and the OSGi tracker itself stayed open.
                super.close();
                trackedTrackers.remove(this);
            }
        };
    }

    public List<HostComponentRegistration> getHostComponentRegistrations() {
        return hostComponentRegistrations;
    }

    /**
     * (Re-)registers host components as OSGi services, first unregistering
     * any existing registrations.  Runs under the class loader captured at
     * construction time.
     */
    void loadHostComponents(final DefaultComponentRegistrar registrar) {
        // Unregister any existing host components
        if (hostServicesReferences != null) {
            for (final ServiceRegistration reg : hostServicesReferences) {
                reg.unregister();
            }
        }
        ContextClassLoaderSwitchingUtil.runInContext(initializedClassLoader, new Runnable() {
            public void run() {
                hostServicesReferences = registrar.writeRegistry(bundleContext);
                hostComponentRegistrations = registrar.getRegistry();
            }
        });
    }

    /**
     * Extracts the framework bundles zip into the cache directory, installs
     * every contained jar, resolves them, then starts all non-fragment bundles.
     */
    private void extractAndInstallFrameworkBundles() throws BundleException {
        final List<Bundle> bundles = new ArrayList<Bundle>();
        org.maera.plugin.util.FileUtils.conditionallyExtractZipFile(frameworkBundlesUrl, frameworkBundlesDir);
        final File[] bundleFiles = frameworkBundlesDir.listFiles(new FilenameFilter() {
            public boolean accept(final File file, final String s) {
                return s.endsWith(".jar");
            }
        });
        if (bundleFiles == null) {
            // File.listFiles() returns null when the directory is missing or
            // unreadable; fail with a clear message instead of an NPE.
            throw new BundleException("Unable to list framework bundles in " + frameworkBundlesDir);
        }
        for (final File bundleFile : bundleFiles) {
            bundles.add(install(bundleFile, false));
        }
        packageAdmin.resolveBundles(null);
        for (final Bundle bundle : bundles) {
            // Fragments cannot be started; only start regular bundles.
            if (bundle.getHeaders().get(Constants.FRAGMENT_HOST) == null) {
                bundle.start();
            }
        }
    }

    /**
     * Triggers a PackageAdmin package refresh and blocks until the framework
     * reports PACKAGES_REFRESHED or REFRESH_TIMEOUT seconds elapse.
     */
    public void refreshPackages() {
        final CountDownLatch latch = new CountDownLatch(1);
        FrameworkListener refreshListener = new FrameworkListener() {
            public void frameworkEvent(FrameworkEvent event) {
                if (event.getType() == FrameworkEvent.PACKAGES_REFRESHED) {
                    log.info("Packages refreshed");
                    latch.countDown();
                }
            }
        };
        bundleContext.addFrameworkListener(refreshListener);
        try {
            packageAdmin.refreshPackages(null);
            boolean refreshed = false;
            try {
                refreshed = latch.await(REFRESH_TIMEOUT, TimeUnit.SECONDS);
            }
            catch (InterruptedException e) {
                // Restore the interrupt flag instead of swallowing it, so the
                // caller can observe the interruption.
                Thread.currentThread().interrupt();
            }
            if (!refreshed) {
                log.warn("Timeout exceeded waiting for package refresh");
            }
        }
        finally {
            bundleContext.removeFrameworkListener(refreshListener);
        }
    }

    /** Routes framework-level events to the appropriate log level. */
    @SuppressWarnings({"ThrowableResultOfMethodCallIgnored"})
    public void frameworkEvent(FrameworkEvent event) {
        String bundleBits = "";
        if (event.getBundle() != null) {
            bundleBits = " in bundle " + event.getBundle().getSymbolicName();
        }
        switch (event.getType()) {
            case FrameworkEvent.ERROR:
                log.error("Framework error" + bundleBits, event.getThrowable());
                break;
            case FrameworkEvent.WARNING:
                log.warn("Framework warning" + bundleBits, event.getThrowable());
                break;
            case FrameworkEvent.INFO:
                log.info("Framework info" + bundleBits, event.getThrowable());
                break;
        }
    }
}
}
| |
/*
*
* Copyright 2002-2004 The Ant-Contrib project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.antcontrib.cpptasks;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import net.sf.antcontrib.cpptasks.compiler.CompilerConfiguration;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
/**
 * Table of include-file dependencies for C/C++ source files, persisted as
 * dependencies.xml in the task's output directory so that unchanged sources
 * can be skipped on later builds.
 *
 * @author Curt Arnold
 */
public final class DependencyTable {
    /**
     * This class populates the DependencyTable hashtable in response to
     * SAX parse events
     */
    private class DependencyTableHandler extends DefaultHandler {
        private File baseDir;
        private final DependencyTable dependencyTable;
        // Signature of the enclosing <includePath> element, or null when
        // outside any includePath scope.
        private String includePath;
        private Vector includes;
        private String source;
        private long sourceLastModified;
        private Vector sysIncludes;
        /**
         * Constructor
         *
         * @param dependencyTable
         *            table to populate from the parsed dependency file
         * @param baseDir
         *            base directory used to resolve relative source names
         */
        private DependencyTableHandler(DependencyTable dependencyTable,
                File baseDir) {
            this.dependencyTable = dependencyTable;
            this.baseDir = baseDir;
            includes = new Vector();
            sysIncludes = new Vector();
            source = null;
        }
        public void endElement(String namespaceURI, String localName,
                String qName) throws SAXException {
            //
            // if </source> then
            // create Dependency object and add to hashtable
            // if corresponding source file exists and
            // has the same timestamp
            //
            if (qName.equals("source")) {
                if (source != null && includePath != null) {
                    File existingFile = new File(baseDir, source);
                    //
                    // if the file exists and the time stamp is right
                    // preserve the dependency info
                    if (existingFile.exists()) {
                        //
                        // would have expected exact matches
                        // but was seeing some unexpected difference by
                        // a few tens of milliseconds, as long
                        // as the times are within a second
                        long existingLastModified = existingFile.lastModified();
                        if (!CUtil.isSignificantlyAfter(existingLastModified, sourceLastModified) &&
                                !CUtil.isSignificantlyBefore(existingLastModified, sourceLastModified)) {
                            // NOTE(review): the live includes/sysIncludes vectors
                            // are handed to DependencyInfo and then truncated
                            // below — presumably DependencyInfo copies them;
                            // confirm against its constructor.
                            DependencyInfo dependInfo = new DependencyInfo(
                                    includePath, source, sourceLastModified,
                                    includes, sysIncludes);
                            dependencyTable.putDependencyInfo(source,
                                    dependInfo);
                        }
                    }
                    source = null;
                    includes.setSize(0);
                }
            } else {
                //
                // this causes any <source> elements outside the
                // scope of an <includePath> to be discarded
                //
                if (qName.equals("includePath")) {
                    includePath = null;
                }
            }
        }
        /**
         * startElement handler
         */
        public void startElement(String namespaceURI, String localName,
                String qName, Attributes atts) throws SAXException {
            //
            // if includes, then add relative file name to vector
            //
            if (qName.equals("include")) {
                includes.addElement(atts.getValue("file"));
            } else {
                if (qName.equals("sysinclude")) {
                    sysIncludes.addElement(atts.getValue("file"));
                } else {
                    //
                    // if source then
                    // capture source file name,
                    // modification time and reset includes vector
                    //
                    if (qName.equals("source")) {
                        source = atts.getValue("file");
                        // lastModified is stored in hexadecimal (see
                        // writeDependencyInfo's Long.toHexString).
                        sourceLastModified = Long.parseLong(atts
                                .getValue("lastModified"), 16);
                        includes.setSize(0);
                        sysIncludes.setSize(0);
                    } else {
                        if (qName.equals("includePath")) {
                            includePath = atts.getValue("signature");
                        }
                    }
                }
            }
        }
    }
    public abstract class DependencyVisitor {
        /**
         * Previews all the children of this source file.
         *
         * May be called multiple times as DependencyInfo's for children are
         * filled in.
         *
         * @return true to continue towards recursion into included files
         */
        public abstract boolean preview(DependencyInfo parent,
                DependencyInfo[] children);
        /**
         * Called if the dependency depth exhausted the stack.
         */
        public abstract void stackExhausted();
        /**
         * Visits the dependency info.
         *
         * @return true to continue towards recursion into included files
         */
        public abstract boolean visit(DependencyInfo dependInfo);
    }
    /**
     * Visitor that decides whether a target must be rebuilt by comparing the
     * output's modification time against the (transitive) modification times
     * of its sources and includes.
     */
    public class TimestampChecker extends DependencyVisitor {
        private boolean noNeedToRebuild;
        private long outputLastModified;
        // When true, running out of stack depth forces a rebuild.
        private boolean rebuildOnStackExhaustion;
        public TimestampChecker(final long outputLastModified,
                boolean rebuildOnStackExhaustion) {
            this.outputLastModified = outputLastModified;
            noNeedToRebuild = true;
            this.rebuildOnStackExhaustion = rebuildOnStackExhaustion;
        }
        public boolean getMustRebuild() {
            return !noNeedToRebuild;
        }
        public boolean preview(DependencyInfo parent, DependencyInfo[] children) {
            int withCompositeTimes = 0;
            long parentCompositeLastModified = parent.getSourceLastModified();
            for (int i = 0; i < children.length; i++) {
                if (children[i] != null) {
                    //
                    // expedient way to determine if a child forces us to
                    // rebuild
                    //
                    visit(children[i]);
                    long childCompositeLastModified = children[i]
                            .getCompositeLastModified();
                    if (childCompositeLastModified != Long.MIN_VALUE) {
                        withCompositeTimes++;
                        if (childCompositeLastModified > parentCompositeLastModified) {
                            parentCompositeLastModified = childCompositeLastModified;
                        }
                    }
                }
            }
            // Only record the composite time when every child contributed one.
            if (withCompositeTimes == children.length) {
                parent.setCompositeLastModified(parentCompositeLastModified);
            }
            //
            // may have been changed by an earlier call to visit()
            //
            return noNeedToRebuild;
        }
        public void stackExhausted() {
            if (rebuildOnStackExhaustion) {
                noNeedToRebuild = false;
            }
        }
        public boolean visit(DependencyInfo dependInfo) {
            if (noNeedToRebuild) {
                if (CUtil.isSignificantlyAfter(dependInfo.getSourceLastModified(), outputLastModified)
                        || CUtil.isSignificantlyAfter(dependInfo.getCompositeLastModified(), outputLastModified)) {
                    noNeedToRebuild = false;
                }
            }
            //
            // only need to process the children if
            // it has not yet been determined whether
            // we need to rebuild and the composite modified time
            // has not been determined for this file
            return noNeedToRebuild
                    && dependInfo.getCompositeLastModified() == Long.MIN_VALUE;
        }
    }
    private/* final */File baseDir;
    private String baseDirPath;
    /**
     * a hashtable of DependencyInfo[] keyed by output file name
     */
    private final Hashtable dependencies = new Hashtable();
    /** The file the cache was loaded from. */
    private/* final */File dependenciesFile;
    /** Flag indicating whether the cache should be written back to file. */
    private boolean dirty;
    /**
     * Creates a target history table from dependencies.xml in the project
     * directory, if it exists. Otherwise, initializes the dependencies empty.
     *
     * @param baseDir
     *            output directory for task
     */
    public DependencyTable(File baseDir) {
        if (baseDir == null) {
            throw new NullPointerException("baseDir");
        }
        this.baseDir = baseDir;
        try {
            baseDirPath = baseDir.getCanonicalPath();
        } catch (IOException ex) {
            baseDirPath = baseDir.toString();
        }
        dirty = false;
        //
        // load any existing dependencies from file
        dependenciesFile = new File(baseDir, "dependencies.xml");
    }
    /**
     * Writes the dependency table back to dependencies.xml if it has been
     * modified since it was loaded or last committed.  I/O errors are logged
     * through the task and leave the table marked dirty.
     *
     * @param task task used for logging
     */
    public void commit(CCTask task) {
        //
        // if not dirty, no need to update file
        //
        if (dirty) {
            //
            // walk through dependencies to get vector of include paths
            // identifiers
            //
            Vector includePaths = getIncludePaths();
            //
            //
            // write dependency file
            //
            try {
                FileOutputStream outStream = new FileOutputStream(
                        dependenciesFile);
                OutputStreamWriter streamWriter;
                //
                // Early VM's may not have UTF-8 support
                // fallback to default code page which
                // "should" be okay unless there are
                // non ASCII file names
                String encodingName = "UTF-8";
                try {
                    streamWriter = new OutputStreamWriter(outStream, "UTF-8");
                } catch (UnsupportedEncodingException ex) {
                    streamWriter = new OutputStreamWriter(outStream);
                    encodingName = streamWriter.getEncoding();
                }
                BufferedWriter writer = new BufferedWriter(streamWriter);
                try {
                    writer.write("<?xml version='1.0' encoding='");
                    writer.write(encodingName);
                    writer.write("'?>\n");
                    writer.write("<dependencies>\n");
                    StringBuffer buf = new StringBuffer();
                    Enumeration includePathEnum = includePaths.elements();
                    while (includePathEnum.hasMoreElements()) {
                        writeIncludePathDependencies((String) includePathEnum
                                .nextElement(), writer, buf);
                    }
                    writer.write("</dependencies>\n");
                } finally {
                    // Always release the file handle, even when a write fails
                    // (previously the writer leaked on any IOException above).
                    writer.close();
                }
                dirty = false;
            } catch (IOException ex) {
                task.log("Error writing " + dependenciesFile.toString() + ":"
                        + ex.toString());
            }
        }
    }
    /**
     * Returns an enumerator of DependencyInfo's
     */
    public Enumeration elements() {
        return dependencies.elements();
    }
    /**
     * This method returns a DependencyInfo for the specific source file and
     * include path identifier
     *
     * @param sourceRelativeName source path relative to the base directory
     * @param includePathIdentifier signature of the include path
     * @return the matching DependencyInfo, or null if none is recorded
     */
    public DependencyInfo getDependencyInfo(String sourceRelativeName,
            String includePathIdentifier) {
        DependencyInfo dependInfo = null;
        DependencyInfo[] dependInfos = (DependencyInfo[]) dependencies
                .get(sourceRelativeName);
        if (dependInfos != null) {
            for (int i = 0; i < dependInfos.length; i++) {
                dependInfo = dependInfos[i];
                if (dependInfo.getIncludePathIdentifier().equals(
                        includePathIdentifier)) {
                    return dependInfo;
                }
            }
        }
        return null;
    }
    /**
     * Collects the distinct include path identifiers referenced by any
     * recorded dependency.
     */
    private Vector getIncludePaths() {
        Vector includePaths = new Vector();
        DependencyInfo[] dependInfos;
        Enumeration dependenciesEnum = dependencies.elements();
        while (dependenciesEnum.hasMoreElements()) {
            dependInfos = (DependencyInfo[]) dependenciesEnum.nextElement();
            for (int i = 0; i < dependInfos.length; i++) {
                DependencyInfo dependInfo = dependInfos[i];
                boolean matchesExisting = false;
                final String dependIncludePath = dependInfo
                        .getIncludePathIdentifier();
                Enumeration includePathEnum = includePaths.elements();
                while (includePathEnum.hasMoreElements()) {
                    if (dependIncludePath.equals(includePathEnum.nextElement())) {
                        matchesExisting = true;
                        break;
                    }
                }
                if (!matchesExisting) {
                    includePaths.addElement(dependIncludePath);
                }
            }
        }
        return includePaths;
    }
    /**
     * Replaces the in-memory table with the contents of dependencies.xml,
     * if it exists.  Entries whose source files are missing or have changed
     * timestamps are discarded by the SAX handler.
     */
    public void load() throws IOException, ParserConfigurationException,
            SAXException {
        dependencies.clear();
        if (dependenciesFile.exists()) {
            // NOTE(review): the parser is not configured to block DTDs or
            // external entities; dependencies.xml is produced by this class in
            // the build's own output directory, so it is treated as trusted.
            SAXParserFactory factory = SAXParserFactory.newInstance();
            factory.setValidating(false);
            SAXParser parser = factory.newSAXParser();
            parser.parse(dependenciesFile, new DependencyTableHandler(this,
                    baseDir));
            dirty = false;
        }
    }
    /**
     * Determines if the specified target needs to be rebuilt.
     *
     * This task may result in substantial IO as files are parsed to determine
     * their dependencies
     */
    public boolean needsRebuild(CCTask task, TargetInfo target,
            int dependencyDepth) {
        // look at any files where the compositeLastModified
        // is not known, but the includes are known
        //
        boolean mustRebuild = false;
        CompilerConfiguration compiler = (CompilerConfiguration) target
                .getConfiguration();
        String includePathIdentifier = compiler.getIncludePathIdentifier();
        File[] sources = target.getSources();
        DependencyInfo[] dependInfos = new DependencyInfo[sources.length];
        long outputLastModified = target.getOutput().lastModified();
        //
        // try to solve problem using existing dependency info
        // (not parsing any new files)
        //
        DependencyInfo[] stack = new DependencyInfo[50];
        boolean rebuildOnStackExhaustion = true;
        if (dependencyDepth >= 0) {
            if (dependencyDepth < 50) {
                stack = new DependencyInfo[dependencyDepth];
            }
            rebuildOnStackExhaustion = false;
        }
        TimestampChecker checker = new TimestampChecker(outputLastModified,
                rebuildOnStackExhaustion);
        for (int i = 0; i < sources.length && !mustRebuild; i++) {
            File source = sources[i];
            String relative = CUtil.getRelativePath(baseDirPath, source);
            DependencyInfo dependInfo = getDependencyInfo(relative,
                    includePathIdentifier);
            if (dependInfo == null) {
                task.log("Parsing " + relative, Project.MSG_VERBOSE);
                dependInfo = parseIncludes(task, compiler, source);
            }
            walkDependencies(task, dependInfo, compiler, stack, checker);
            mustRebuild = checker.getMustRebuild();
        }
        return mustRebuild;
    }
    /**
     * Parses a source file for its includes and records the result in the
     * table (marking it dirty).
     */
    public DependencyInfo parseIncludes(CCTask task,
            CompilerConfiguration compiler, File source) {
        DependencyInfo dependInfo = compiler.parseIncludes(task, baseDir,
                source);
        String relativeSource = CUtil.getRelativePath(baseDirPath, source);
        putDependencyInfo(relativeSource, dependInfo);
        return dependInfo;
    }
    /**
     * Stores a DependencyInfo under the given source key, replacing any entry
     * with the same include path identifier and otherwise prepending.
     */
    private void putDependencyInfo(String key, DependencyInfo dependInfo) {
        //
        // optimistic, add new value
        //
        DependencyInfo[] old = (DependencyInfo[]) dependencies.put(key,
                new DependencyInfo[]{dependInfo});
        dirty = true;
        //
        // something was already there
        //
        if (old != null) {
            //
            // see if the include path matches a previous entry
            // if so replace it
            String includePathIdentifier = dependInfo
                    .getIncludePathIdentifier();
            for (int i = 0; i < old.length; i++) {
                DependencyInfo oldDepend = old[i];
                if (oldDepend.getIncludePathIdentifier().equals(
                        includePathIdentifier)) {
                    old[i] = dependInfo;
                    dependencies.put(key, old);
                    return;
                }
            }
            //
            // no match prepend the new entry to the array
            // of dependencies for the file
            DependencyInfo[] combined = new DependencyInfo[old.length + 1];
            combined[0] = dependInfo;
            for (int i = 0; i < old.length; i++) {
                combined[i + 1] = old[i];
            }
            dependencies.put(key, combined);
        }
        return;
    }
    /**
     * Depth-first walk over a dependency graph, driving the visitor and
     * parsing any includes whose dependency info is not yet known.  The stack
     * array bounds the recursion depth and detects cycles.
     */
    public void walkDependencies(CCTask task, DependencyInfo dependInfo,
            CompilerConfiguration compiler, DependencyInfo[] stack,
            DependencyVisitor visitor) throws BuildException {
        //
        // visit this node
        // if visit returns true then
        // visit the referenced include and sysInclude dependencies
        //
        if (visitor.visit(dependInfo)) {
            //
            // find first null entry on stack
            //
            int stackPosition = -1;
            for (int i = 0; i < stack.length; i++) {
                if (stack[i] == null) {
                    stackPosition = i;
                    stack[i] = dependInfo;
                    break;
                } else {
                    //
                    // if we have appeared earlier in the calling history
                    // (a cycle), stop without exhausting the stack
                    if (stack[i] == dependInfo) {
                        return;
                    }
                }
            }
            if (stackPosition == -1) {
                visitor.stackExhausted();
                return;
            }
            //
            // locate dependency infos
            //
            String[] includes = dependInfo.getIncludes();
            String includePathIdentifier = compiler.getIncludePathIdentifier();
            DependencyInfo[] includeInfos = new DependencyInfo[includes.length];
            for (int i = 0; i < includes.length; i++) {
                DependencyInfo includeInfo = getDependencyInfo(includes[i],
                        includePathIdentifier);
                includeInfos[i] = includeInfo;
            }
            //
            // preview with only the already available dependency infos
            //
            if (visitor.preview(dependInfo, includeInfos)) {
                //
                // now need to fill in the missing DependencyInfos
                //
                int missingCount = 0;
                for (int i = 0; i < includes.length; i++) {
                    if (includeInfos[i] == null) {
                        missingCount++;
                        task.log("Parsing " + includes[i], Project.MSG_VERBOSE);
                        //
                        // If the include filepath is relative
                        // then anchor it the base directory
                        File src = new File(includes[i]);
                        if (!src.isAbsolute()) {
                            src = new File(baseDir, includes[i]);
                        }
                        DependencyInfo includeInfo = parseIncludes(task,
                                compiler, src);
                        includeInfos[i] = includeInfo;
                    }
                }
                //
                // if it passes a review the second time
                // then recurse into all the children
                if (missingCount == 0
                        || visitor.preview(dependInfo, includeInfos)) {
                    //
                    // recurse into
                    //
                    for (int i = 0; i < includeInfos.length; i++) {
                        DependencyInfo includeInfo = includeInfos[i];
                        walkDependencies(task, includeInfo, compiler, stack,
                                visitor);
                    }
                }
            }
            // Pop this node off the stack on the way out.
            stack[stackPosition] = null;
        }
    }
    /**
     * Writes one &lt;source&gt; element with its include/sysinclude children.
     * The lastModified attribute is written in hexadecimal.
     */
    private void writeDependencyInfo(BufferedWriter writer, StringBuffer buf,
            DependencyInfo dependInfo) throws IOException {
        String[] includes = dependInfo.getIncludes();
        String[] sysIncludes = dependInfo.getSysIncludes();
        //
        // if the includes have not been evaluated then
        // it is not worth our time saving it
        // and trying to distinguish between files with
        // no dependencies and those with undetermined dependencies
        buf.setLength(0);
        buf.append("      <source file=\"");
        buf.append(CUtil.xmlAttribEncode(dependInfo.getSource()));
        buf.append("\" lastModified=\"");
        buf.append(Long.toHexString(dependInfo.getSourceLastModified()));
        buf.append("\">\n");
        writer.write(buf.toString());
        for (int i = 0; i < includes.length; i++) {
            buf.setLength(0);
            buf.append("         <include file=\"");
            buf.append(CUtil.xmlAttribEncode(includes[i]));
            buf.append("\"/>\n");
            writer.write(buf.toString());
        }
        for (int i = 0; i < sysIncludes.length; i++) {
            buf.setLength(0);
            buf.append("         <sysinclude file=\"");
            buf.append(CUtil.xmlAttribEncode(sysIncludes[i]));
            buf.append("\"/>\n");
            writer.write(buf.toString());
        }
        writer.write("      </source>\n");
        return;
    }
    /**
     * Writes one &lt;includePath&gt; element containing every recorded
     * dependency that belongs to the given include path identifier.
     */
    private void writeIncludePathDependencies(String includePathIdentifier,
            BufferedWriter writer, StringBuffer buf) throws IOException {
        //
        // include path element
        //
        buf.setLength(0);
        buf.append("   <includePath signature=\"");
        buf.append(CUtil.xmlAttribEncode(includePathIdentifier));
        buf.append("\">\n");
        writer.write(buf.toString());
        Enumeration dependenciesEnum = dependencies.elements();
        while (dependenciesEnum.hasMoreElements()) {
            DependencyInfo[] dependInfos = (DependencyInfo[]) dependenciesEnum
                    .nextElement();
            for (int i = 0; i < dependInfos.length; i++) {
                DependencyInfo dependInfo = dependInfos[i];
                //
                // if this is for the same include path
                // then output the info
                if (dependInfo.getIncludePathIdentifier().equals(
                        includePathIdentifier)) {
                    writeDependencyInfo(writer, buf, dependInfo);
                }
            }
        }
        writer.write("   </includePath>\n");
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.controller.kudu;
import org.apache.kudu.ColumnSchema;
import org.apache.kudu.Schema;
import org.apache.kudu.Type;
import org.apache.kudu.client.AsyncKuduClient;
import org.apache.kudu.client.KuduClient;
import org.apache.kudu.client.KuduException;
import org.apache.kudu.client.KuduPredicate;
import org.apache.kudu.client.KuduScanner;
import org.apache.kudu.client.KuduTable;
import org.apache.kudu.client.ReplicaSelection;
import org.apache.kudu.client.RowResult;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnDisabled;
import org.apache.nifi.annotation.lifecycle.OnEnabled;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.controller.ControllerServiceInitializationContext;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.kerberos.KerberosCredentialsService;
import org.apache.nifi.lookup.RecordLookupService;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.security.krb.KerberosAction;
import org.apache.nifi.security.krb.KerberosKeytabUser;
import org.apache.nifi.security.krb.KerberosUser;
import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.MapRecord;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;
import javax.security.auth.login.LoginException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
/**
 * Controller service that looks up a single row in a Kudu table by exact-match
 * key columns and returns it as a NiFi {@link Record}. Binary columns are
 * base64 encoded; at most one matching row is ever returned.
 */
@CapabilityDescription("Lookup a record from Kudu Server associated with the specified key. Binary columns are base64 encoded. Only one matched row will be returned")
@Tags({"lookup", "enrich", "key", "value", "kudu"})
public class KuduLookupService extends AbstractControllerService implements RecordLookupService {

    public static final PropertyDescriptor KUDU_MASTERS = new PropertyDescriptor.Builder()
            .name("kudu-lu-masters")
            .displayName("Kudu Masters")
            .description("Comma separated addresses of the Kudu masters to connect to.")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .build();

    public static final PropertyDescriptor KERBEROS_CREDENTIALS_SERVICE = new PropertyDescriptor.Builder()
            .name("kudu-lu-kerberos-credentials-service")
            .displayName("Kerberos Credentials Service")
            .description("Specifies the Kerberos Credentials to use for authentication")
            .required(false)
            .identifiesControllerService(KerberosCredentialsService.class)
            .build();

    public static final PropertyDescriptor KUDU_OPERATION_TIMEOUT_MS = new PropertyDescriptor.Builder()
            .name("kudu-lu-operations-timeout-ms")
            .displayName("Kudu Operation Timeout")
            .description("Default timeout used for user operations (using sessions and scanners)")
            .required(false)
            .defaultValue(AsyncKuduClient.DEFAULT_OPERATION_TIMEOUT_MS + "ms")
            .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .build();

    public static final AllowableValue CLOSEST_REPLICA = new AllowableValue(ReplicaSelection.CLOSEST_REPLICA.toString(), ReplicaSelection.CLOSEST_REPLICA.name(),
            "Select the closest replica to the client. Replicas are classified from closest to furthest as follows: "+
            "1) Local replicas 2) Replicas whose tablet server has the same location as the client 3) All other replicas");

    public static final AllowableValue LEADER_ONLY = new AllowableValue(ReplicaSelection.LEADER_ONLY.toString(), ReplicaSelection.LEADER_ONLY.name(),
            "Select the LEADER replica");

    public static final PropertyDescriptor KUDU_REPLICA_SELECTION = new PropertyDescriptor.Builder()
            .name("kudu-lu-replica-selection")
            .displayName("Kudu Replica Selection")
            .description("Policy with which to choose amongst multiple replicas")
            .required(true)
            .defaultValue(CLOSEST_REPLICA.getValue())
            .allowableValues(CLOSEST_REPLICA, LEADER_ONLY)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .build();

    public static final PropertyDescriptor TABLE_NAME = new PropertyDescriptor.Builder()
            .name("kudu-lu-table-name")
            .displayName("Kudu Table Name")
            .description("Name of the table to access.")
            .required(true)
            .defaultValue("default")
            .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .build();

    public static final PropertyDescriptor RETURN_COLUMNS = new PropertyDescriptor.Builder()
            .name("kudu-lu-return-cols")
            .displayName("Kudu Return Columns")
            .description("A comma-separated list of columns to return when scanning. To return all columns set to \"*\"")
            .required(true)
            .defaultValue("*")
            .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .build();

    protected List<PropertyDescriptor> properties;

    protected KerberosCredentialsService credentialsService;
    private volatile KerberosUser kerberosUser;

    protected String kuduMasters;
    protected KuduClient kuduClient;
    protected ReplicaSelection replicaSelection;

    // State derived from the configuration when the service is enabled.
    protected volatile String tableName;
    protected volatile KuduTable table;
    protected volatile List<String> columnNames;
    protected volatile RecordSchema resultSchema;
    protected volatile Schema tableSchema;

    @Override
    protected void init(final ControllerServiceInitializationContext context) {
        final List<PropertyDescriptor> properties = new ArrayList<>();
        properties.add(KUDU_MASTERS);
        properties.add(KERBEROS_CREDENTIALS_SERVICE);
        properties.add(KUDU_OPERATION_TIMEOUT_MS);
        properties.add(KUDU_REPLICA_SELECTION);
        properties.add(TABLE_NAME);
        properties.add(RETURN_COLUMNS);
        // Allow subclasses to contribute descriptors before the list is frozen.
        addProperties(properties);
        this.properties = Collections.unmodifiableList(properties);
    }

    /**
     * Hook for subclasses to register additional property descriptors.
     */
    protected void addProperties(List<PropertyDescriptor> properties) {
    }

    /**
     * Creates the {@link KuduClient}, logging in via Kerberos first when a
     * credentials service is configured.
     */
    protected void createKuduClient(ConfigurationContext context) throws LoginException {
        final String kuduMasters = context.getProperty(KUDU_MASTERS).evaluateAttributeExpressions().getValue();
        final KerberosCredentialsService credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
        if (credentialsService != null) {
            final String keytab = credentialsService.getKeytab();
            final String principal = credentialsService.getPrincipal();
            kerberosUser = loginKerberosUser(principal, keytab);
            // Build the client inside the Kerberos login context so the
            // connection is authenticated as the configured principal.
            final KerberosAction<KuduClient> kerberosAction = new KerberosAction<>(kerberosUser, () -> buildClient(kuduMasters, context), getLogger());
            this.kuduClient = kerberosAction.execute();
        } else {
            this.kuduClient = buildClient(kuduMasters, context);
        }
    }

    /**
     * Logs in a keytab-based Kerberos user for the given principal.
     */
    protected KerberosUser loginKerberosUser(final String principal, final String keytab) throws LoginException {
        final KerberosUser kerberosUser = new KerberosKeytabUser(principal, keytab);
        kerberosUser.login();
        return kerberosUser;
    }

    /**
     * Builds a Kudu client for the given master addresses, applying the
     * configured operation timeout.
     */
    protected KuduClient buildClient(final String masters, final ConfigurationContext context) {
        final Integer operationTimeout = context.getProperty(KUDU_OPERATION_TIMEOUT_MS).asTimePeriod(TimeUnit.MILLISECONDS).intValue();
        return new KuduClient.KuduClientBuilder(masters)
                .defaultOperationTimeoutMs(operationTimeout)
                .build();
    }

    /**
     * Establish a connection to a Kudu cluster.
     * @param context the configuration context
     * @throws InitializationException if unable to connect a Kudu cluster
     */
    @OnEnabled
    public void onEnabled(final ConfigurationContext context) throws InitializationException {
        try {
            kuduMasters = context.getProperty(KUDU_MASTERS).evaluateAttributeExpressions().getValue();
            credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
            if (kuduClient == null) {
                getLogger().debug("Setting up Kudu connection...");
                createKuduClient(context);
                getLogger().debug("Kudu connection successfully initialized");
            }
        } catch(Exception ex){
            getLogger().error("Exception occurred while interacting with Kudu due to " + ex.getMessage(), ex);
            throw new InitializationException(ex);
        }
        replicaSelection = ReplicaSelection.valueOf(context.getProperty(KUDU_REPLICA_SELECTION).getValue());
        tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions().getValue();
        try {
            table = kuduClient.openTable(tableName);
            tableSchema = table.getSchema();
            // RETURN_COLUMNS declares VARIABLE_REGISTRY expression-language
            // support, so its value must be evaluated before use.
            columnNames = getColumns(context.getProperty(RETURN_COLUMNS).evaluateAttributeExpressions().getValue());
            //Result Schema
            resultSchema = kuduSchemaToNiFiSchema(tableSchema, columnNames);
        } catch (KuduException e) {
            throw new IllegalArgumentException(e);
        }
    }

    @Override
    public Set<String> getRequiredKeys() {
        // No key is mandatory: any combination of columns may be used as
        // equality predicates in lookup().
        return new HashSet<>();
    }

    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return properties;
    }

    /**
     * Scans the table for at most one row whose columns equal the supplied
     * coordinates, returning it as a {@link MapRecord}. Binary column values
     * are base64 encoded into strings.
     */
    @Override
    public Optional<Record> lookup(Map<String, Object> coordinates) {
        //Scanner
        KuduScanner.KuduScannerBuilder builder = kuduClient.newScannerBuilder(table);
        builder.setProjectedColumnNames(columnNames);
        builder.replicaSelection(replicaSelection);

        //Only expecting one match
        builder.limit(1);

        // One EQUAL predicate per coordinate; a key that is not a column of
        // the table will fail fast in tableSchema.getColumn().
        coordinates.forEach((key,value)->
            builder.addPredicate(KuduPredicate.newComparisonPredicate(tableSchema.getColumn(key), KuduPredicate.ComparisonOp.EQUAL, value))
        );

        KuduScanner kuduScanner = builder.build();
        try {
            //Run lookup
            for ( RowResult row : kuduScanner){
                final Map<String, Object> values = new HashMap<>();
                for(String columnName : columnNames){
                    Object object;
                    if(row.getColumnType(columnName) == Type.BINARY){
                        object = Base64.getEncoder().encodeToString(row.getBinaryCopy(columnName));
                    } else {
                        object = row.getObject(columnName);
                    }
                    values.put(columnName, object);
                }
                return Optional.of(new MapRecord(resultSchema, values));
            }
            //No match
            return Optional.empty();
        } finally {
            // Always release the scanner's server-side resources.
            try {
                kuduScanner.close();
            } catch (final KuduException e) {
                getLogger().warn("Failed to close Kudu scanner", e);
            }
        }
    }

    /**
     * Resolves the configured return-column list: "*" expands to every column
     * in the table schema; otherwise the comma-separated names are used,
     * with surrounding whitespace trimmed.
     */
    private List<String> getColumns(String columns){
        if(columns.trim().equals("*")){
            return tableSchema
                    .getColumns()
                    .stream().map(ColumnSchema::getName)
                    .collect(Collectors.toList());
        } else {
            // Trim so that "a, b, c" resolves the same columns as "a,b,c".
            return Arrays.stream(columns.split(","))
                    .map(String::trim)
                    .collect(Collectors.toList());
        }
    }

    /**
     * Maps the Kudu column types of the requested columns onto an equivalent
     * NiFi {@link RecordSchema}. BINARY is mapped to STRING because lookup()
     * base64 encodes binary values.
     *
     * @throws IllegalArgumentException if a requested column is not in the table schema
     */
    private RecordSchema kuduSchemaToNiFiSchema(Schema kuduTableSchema, List<String> columnNames){
        final List<RecordField> fields = new ArrayList<>();
        for(String columnName : columnNames) {
            if(!kuduTableSchema.hasColumn(columnName)){
                throw new IllegalArgumentException("Column not found in Kudu table schema " + columnName);
            }
            ColumnSchema cs = kuduTableSchema.getColumn(columnName);
            switch (cs.getType()) {
                case INT8:
                    fields.add(new RecordField(cs.getName(), RecordFieldType.BYTE.getDataType()));
                    break;
                case INT16:
                    fields.add(new RecordField(cs.getName(), RecordFieldType.SHORT.getDataType()));
                    break;
                case INT32:
                    fields.add(new RecordField(cs.getName(), RecordFieldType.INT.getDataType()));
                    break;
                case INT64:
                    fields.add(new RecordField(cs.getName(), RecordFieldType.LONG.getDataType()));
                    break;
                case UNIXTIME_MICROS:
                    fields.add(new RecordField(cs.getName(), RecordFieldType.TIMESTAMP.getDataType()));
                    break;
                case BINARY:
                case STRING:
                case DECIMAL:
                    fields.add(new RecordField(cs.getName(), RecordFieldType.STRING.getDataType()));
                    break;
                case DOUBLE:
                    fields.add(new RecordField(cs.getName(), RecordFieldType.DOUBLE.getDataType()));
                    break;
                case BOOL:
                    fields.add(new RecordField(cs.getName(), RecordFieldType.BOOLEAN.getDataType()));
                    break;
                case FLOAT:
                    fields.add(new RecordField(cs.getName(), RecordFieldType.FLOAT.getDataType()));
                    break;
            }
        }
        return new SimpleRecordSchema(fields);
    }

    /**
     * Disconnect from the Kudu cluster.
     */
    @OnDisabled
    public void onDisabled() throws Exception {
        try {
            if (this.kuduClient != null) {
                getLogger().debug("Closing KuduClient");
                this.kuduClient.close();
                this.kuduClient = null;
            }
        } finally {
            // Log out of Kerberos even if closing the client failed.
            if (kerberosUser != null) {
                kerberosUser.logout();
                kerberosUser = null;
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.shardingjdbc.orchestration.internal.circuit.statement;
import lombok.Getter;
import org.apache.shardingsphere.shardingjdbc.jdbc.unsupported.AbstractUnsupportedOperationPreparedStatement;
import org.apache.shardingsphere.shardingjdbc.orchestration.internal.circuit.connection.CircuitBreakerConnection;
import org.apache.shardingsphere.shardingjdbc.orchestration.internal.circuit.resultset.CircuitBreakerResultSet;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.Date;
import java.sql.ParameterMetaData;
import java.sql.ResultSet;
import java.sql.SQLXML;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
/**
 * Circuit breaker preparedStatement.
 *
 * <p>Returned while the orchestration circuit breaker is open: every parameter
 * setter silently discards its argument and every execution method returns an
 * empty or sentinel result, so callers degrade gracefully instead of touching
 * an unavailable data source.</p>
 */
@Getter
public final class CircuitBreakerPreparedStatement extends AbstractUnsupportedOperationPreparedStatement {

    // All setters below are intentional no-ops: no statement is ever sent to a
    // real data source while the circuit is open.
    @Override
    public void setNull(final int parameterIndex, final int sqlType) {
    }

    @Override
    public void setNull(final int parameterIndex, final int sqlType, final String typeName) {
    }

    @Override
    public void setBoolean(final int parameterIndex, final boolean x) {
    }

    @Override
    public void setByte(final int parameterIndex, final byte x) {
    }

    @Override
    public void setShort(final int parameterIndex, final short x) {
    }

    @Override
    public void setInt(final int parameterIndex, final int x) {
    }

    @Override
    public void setLong(final int parameterIndex, final long x) {
    }

    @Override
    public void setFloat(final int parameterIndex, final float x) {
    }

    @Override
    public void setDouble(final int parameterIndex, final double x) {
    }

    @Override
    public void setBigDecimal(final int parameterIndex, final BigDecimal x) {
    }

    @Override
    public void setString(final int parameterIndex, final String x) {
    }

    @Override
    public void setBytes(final int parameterIndex, final byte[] x) {
    }

    @Override
    public void setDate(final int parameterIndex, final Date x) {
    }

    @Override
    public void setDate(final int parameterIndex, final Date x, final Calendar cal) {
    }

    @Override
    public void setTime(final int parameterIndex, final Time x) {
    }

    @Override
    public void setTime(final int parameterIndex, final Time x, final Calendar cal) {
    }

    @Override
    public void setTimestamp(final int parameterIndex, final Timestamp x) {
    }

    @Override
    public void setTimestamp(final int parameterIndex, final Timestamp x, final Calendar cal) {
    }

    @Override
    public void setAsciiStream(final int parameterIndex, final InputStream x, final int length) {
    }

    @Override
    public void setAsciiStream(final int parameterIndex, final InputStream x) {
    }

    @Override
    public void setAsciiStream(final int parameterIndex, final InputStream x, final long length) {
    }

    // Overridden only for interface completeness; deliberately ignored like
    // the other setters.
    @SuppressWarnings("deprecation")
    @Override
    public void setUnicodeStream(final int parameterIndex, final InputStream x, final int length) {
    }

    @Override
    public void setBinaryStream(final int parameterIndex, final InputStream x, final int length) {
    }

    @Override
    public void setBinaryStream(final int parameterIndex, final InputStream x, final long length) {
    }

    @Override
    public void setBinaryStream(final int parameterIndex, final InputStream x) {
    }

    @Override
    public void clearParameters() {
    }

    @Override
    public void setObject(final int parameterIndex, final Object x) {
    }

    @Override
    public void setObject(final int parameterIndex, final Object x, final int targetSqlType) {
    }

    @Override
    public void setObject(final int parameterIndex, final Object x, final int targetSqlType, final int scaleOrLength) {
    }

    // Pretend the statement produced no result set.
    @Override
    public boolean execute() {
        return false;
    }

    @Override
    public void clearBatch() {
    }

    @Override
    public void addBatch() {
    }

    @Override
    public void setCharacterStream(final int parameterIndex, final Reader reader, final int length) {
    }

    @Override
    public void setCharacterStream(final int parameterIndex, final Reader reader, final long length) {
    }

    @Override
    public void setCharacterStream(final int parameterIndex, final Reader reader) {
    }

    @Override
    public void setBlob(final int parameterIndex, final Blob x) {
    }

    @Override
    public void setBlob(final int parameterIndex, final InputStream inputStream, final long length) {
    }

    @Override
    public void setBlob(final int parameterIndex, final InputStream inputStream) {
    }

    @Override
    public void setClob(final int parameterIndex, final Clob x) {
    }

    @Override
    public void setClob(final int parameterIndex, final Reader reader, final long length) {
    }

    @Override
    public void setClob(final int parameterIndex, final Reader reader) {
    }

    @Override
    public void setURL(final int parameterIndex, final URL x) {
    }

    @Override
    public ParameterMetaData getParameterMetaData() {
        return null;
    }

    @Override
    public void setSQLXML(final int parameterIndex, final SQLXML xmlObject) {
    }

    // Single sentinel element signalling that the batch was not executed.
    @Override
    public int[] executeBatch() {
        return new int[]{-1};
    }

    // Related JDBC objects are also circuit-breaker stand-ins.
    @Override
    public Connection getConnection() {
        return new CircuitBreakerConnection();
    }

    @Override
    public ResultSet getGeneratedKeys() {
        return new CircuitBreakerResultSet();
    }

    @Override
    public int getResultSetHoldability() {
        return 0;
    }

    @Override
    public ResultSet getResultSet() {
        return new CircuitBreakerResultSet();
    }

    @Override
    public int getResultSetConcurrency() {
        return ResultSet.CONCUR_READ_ONLY;
    }

    @Override
    public int getResultSetType() {
        return ResultSet.TYPE_FORWARD_ONLY;
    }

    @Override
    protected boolean isAccumulate() {
        return false;
    }

    // No statements are ever routed while the circuit is open.
    @Override
    protected Collection<? extends Statement> getRoutedStatements() {
        return Collections.emptyList();
    }

    @Override
    public ResultSet executeQuery() {
        return new CircuitBreakerResultSet();
    }

    // Sentinel update count indicating no real update happened.
    @Override
    public int executeUpdate() {
        return -1;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraph.block_app.library;
import org.apache.giraph.block_app.framework.api.BlockWorkerReceiveApi;
import org.apache.giraph.block_app.framework.api.BlockWorkerSendApi;
import org.apache.giraph.block_app.framework.api.local.LocalBlockRunner;
import org.apache.giraph.block_app.framework.block.Block;
import org.apache.giraph.block_app.framework.block.SequenceBlock;
import org.apache.giraph.block_app.framework.piece.Piece;
import org.apache.giraph.block_app.framework.piece.interfaces.VertexReceiver;
import org.apache.giraph.block_app.framework.piece.interfaces.VertexSender;
import org.apache.giraph.block_app.test_setup.NumericTestGraph;
import org.apache.giraph.combiner.MaxMessageCombiner;
import org.apache.giraph.combiner.SumMessageCombiner;
import org.apache.giraph.conf.GiraphConfiguration;
import org.apache.giraph.conf.GiraphConstants;
import org.apache.giraph.edge.Edge;
import org.apache.giraph.function.primitive.PrimitiveRefs.LongRef;
import org.apache.giraph.reducers.impl.SumReduce;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.collect.Iterators;
/**
 * Tests and examples of using SendMessageChain.
 *
 * <p>All tests run on the same 4-vertex graph: vertices 1..4, undirected edges
 * 1-2 and 2-3, vertex 4 isolated. Each test sends vertex IDs along a two-hop
 * message chain and asserts the values that arrive back.</p>
 */
public class TestMessageChain {
  // Configuration with long vertex IDs/values and null edge values.
  private static GiraphConfiguration createConf() {
    GiraphConfiguration conf = new GiraphConfiguration();
    GiraphConstants.VERTEX_ID_CLASS.set(conf, LongWritable.class);
    GiraphConstants.VERTEX_VALUE_CLASS.set(conf, LongWritable.class);
    GiraphConstants.EDGE_VALUE_CLASS.set(conf, NullWritable.class);
    return conf;
  }
  // Path graph 1-2-3 plus the isolated vertex 4.
  private static NumericTestGraph<LongWritable, LongWritable, NullWritable> createTestGraph() {
    NumericTestGraph<LongWritable, LongWritable, NullWritable> graph =
        new NumericTestGraph<LongWritable, LongWritable, NullWritable>(createConf());
    graph.addVertex(1);
    graph.addVertex(2);
    graph.addVertex(3);
    graph.addVertex(4);
    graph.addSymmetricEdge(1, 2);
    graph.addSymmetricEdge(2, 3);
    return graph;
  }
  @Test
  public void testReply() {
    NumericTestGraph<LongWritable, LongWritable, NullWritable> graph = createTestGraph();
    // calculates max ID of FOFs
    // Hop 1: each vertex sends its own ID to all neighbors.
    // Hop 2: each vertex forwards the max ID it received.
    // End: each vertex stores the max it received in its value.
    Block reply = SendMessageChain.<LongWritable, LongWritable, NullWritable, LongWritable>
        startSendToNeighbors(
            "SendMyIdToAllNeighbors",
            LongWritable.class,
            VertexSuppliers.vertexIdSupplier()
        ).thenSendToNeighbors(
            "SendMaxIReceivedToAllNeighbors",
            LongWritable.class,
            (vertex, messages) -> new LongWritable(max(messages))
        ).endConsume(
            (vertex, messages) -> vertex.getValue().set(max(messages))
        );
    LocalBlockRunner.runBlock(graph.getTestGraph(), reply, new Object());
    // Vertex 4 has no neighbors, so it never receives anything and keeps 0.
    Assert.assertEquals(3, graph.getVertex(1).getValue().get());
    Assert.assertEquals(2, graph.getVertex(2).getValue().get());
    Assert.assertEquals(3, graph.getVertex(3).getValue().get());
    Assert.assertEquals(0, graph.getVertex(4).getValue().get());
  }
  @Test
  public void testReplyCombiner() {
    NumericTestGraph<LongWritable, LongWritable, NullWritable> graph = createTestGraph();
    // calculates max ID of FOFs
    // Same chain as testReply, but using a MAX message combiner so each
    // receiver gets a single pre-combined message (possibly null).
    Block reply = SendMessageChain.<LongWritable, LongWritable, NullWritable, LongWritable>
        startSendToNeighbors(
            "SendMyIdToAllNeighbors",
            MaxMessageCombiner.LONG,
            VertexSuppliers.vertexIdSupplier()
        ).thenSendToNeighbors(
            "SendMaxIReceivedToAllNeighbors",
            MaxMessageCombiner.LONG,
            (vertex, message) -> message
        ).endConsume(
            (vertex, message) -> vertex.getValue().set(message != null ? message.get() : 0)
        );
    LocalBlockRunner.runBlock(graph.getTestGraph(), reply, new Object());
    Assert.assertEquals(3, graph.getVertex(1).getValue().get());
    Assert.assertEquals(2, graph.getVertex(2).getValue().get());
    Assert.assertEquals(3, graph.getVertex(3).getValue().get());
    Assert.assertEquals(0, graph.getVertex(4).getValue().get());
  }
  @Test
  public void testReplyCombinerEndReduce() {
    NumericTestGraph<LongWritable, LongWritable, NullWritable> graph = createTestGraph();
    LongRef sumOfAll = new LongRef(0);
    // calculates max ID of FOFs
    // Ends with a global SUM reduction of the per-vertex results
    // (3 + 2 + 3 + 0 = 8), delivered to the master via the consumer.
    Block reply = SendMessageChain.<LongWritable, LongWritable, NullWritable, LongWritable>
        startSendToNeighbors(
            "SendMyIdToAllNeighbors",
            MaxMessageCombiner.LONG,
            VertexSuppliers.vertexIdSupplier()
        ).thenSendToNeighbors(
            "SendMaxIReceivedToAllNeighbors",
            MaxMessageCombiner.LONG,
            (vertex, message) -> message
        ).endReduce(
            "SumAllReceivedValues",
            SumReduce.LONG,
            (vertex, message) -> message != null ? message : new LongWritable(0),
            (value) -> sumOfAll.value = value.get()
        );
    LocalBlockRunner.runBlock(
        graph.getTestGraph(),
        new SequenceBlock(
            reply,
            Pieces.forAllVertices(
                "SetAllValuesToReduced",
                (vertex) -> ((LongWritable) vertex.getValue()).set(sumOfAll.value))),
        new Object());
    Assert.assertEquals(8, graph.getVertex(1).getValue().get());
    Assert.assertEquals(8, graph.getVertex(2).getValue().get());
    Assert.assertEquals(8, graph.getVertex(3).getValue().get());
    Assert.assertEquals(8, graph.getVertex(4).getValue().get());
    // Block execution is happening in the separate environment if SERIALIZE_MASTER is used,
    // so our instance of sumOfAll will be unchanged
    Assert.assertEquals(LocalBlockRunner.SERIALIZE_MASTER.getDefaultValue() ? 0 : 8, sumOfAll.value);
  }
  @Test
  public void testStartCustom() {
    NumericTestGraph<LongWritable, LongWritable, NullWritable> graph = createTestGraph();
    // Starts the chain with a hand-written Piece instead of a library helper.
    Block reply = SendMessageChain.<LongWritable, LongWritable, NullWritable, LongWritable>
        startCustom(
            // Sends ID to it's first neighbor, passing max of received messages to next part of the chain
            (consumer) -> new Piece<LongWritable, LongWritable, NullWritable, LongWritable, Object>() {
              @Override
              public VertexSender<LongWritable, LongWritable, NullWritable> getVertexSender(
                  BlockWorkerSendApi<LongWritable, LongWritable, NullWritable, LongWritable> workerApi,
                  Object executionStage) {
                return (vertex) -> {
                  Edge<LongWritable, NullWritable> edge =
                      Iterators.getNext(vertex.getEdges().iterator(), null);
                  if (edge != null) {
                    workerApi.sendMessage(edge.getTargetVertexId(), vertex.getId());
                  }
                };
              }
              @Override
              public VertexReceiver<LongWritable, LongWritable, NullWritable, LongWritable>
              getVertexReceiver(BlockWorkerReceiveApi<LongWritable> workerApi, Object executionStage) {
                return (vertex, messages) -> {
                  consumer.apply(vertex, new LongWritable(max(messages)));
                };
              }
              @Override
              protected Class<LongWritable> getMessageClass() {
                return LongWritable.class;
              }
            }
        ).thenSendToNeighbors(
            "SendMaxIReceivedToAllNeighbors",
            SumMessageCombiner.LONG,
            (vertex, message) -> message
        ).endConsume(
            (vertex, message) -> vertex.getValue().set(message != null ? message.get() : 0)
        );
    LocalBlockRunner.runBlock(graph.getTestGraph(), reply, new Object());
    Assert.assertEquals(3, graph.getVertex(1).getValue().get());
    Assert.assertEquals(2, graph.getVertex(2).getValue().get());
    Assert.assertEquals(3, graph.getVertex(3).getValue().get());
    Assert.assertEquals(0, graph.getVertex(4).getValue().get());
  }
  // Max of the received messages, or 0 when there are none.
  private static long max(Iterable<LongWritable> messages) {
    long result = 0;
    for (LongWritable message : messages) {
      result = Math.max(result, message.get());
    }
    return result;
  }
}
| |
/*
* Copyright 2007 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.javascript.jscomp.NodeTraversal.Callback;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import java.util.Collection;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Inlines methods that take no arguments and have only a return statement
* returning a property. Because it works on method names rather than type
* inference, a method with multiple definitions will be inlined if each
* definition is identical.
*
* <pre>
* A.prototype.foo = function() { return this.b; }
* B.prototype.foo = function() { return this.b; }
* </pre>
*
* will inline foo, but
*
* <pre>
* A.prototype.foo = function() { return this.b; }
* B.prototype.foo = function() { return this.c; }
* </pre>
*
* will not.
*
* Declarations are not removed because we do not find all possible
* call sites. For examples, calls of the form foo["bar"] are not
* detected.
*
* This pass is not on by default because it is not safe in simple mode.
* If the prototype method is mutated and we don't detect that, inlining it is
* unsafe.
* We enable it whenever function inlining is enabled.
*
*/
class InlineSimpleMethods extends MethodCompilerPass {
private static final Logger logger =
Logger.getLogger(InlineSimpleMethods.class.getName());
  /** Creates the pass; method-definition collection is done by the superclass. */
  InlineSimpleMethods(AbstractCompiler compiler) {
    super(compiler);
  }
  /**
   * For each method call, see if it is a candidate for inlining.
   * TODO(kushal): Cache the results of the checks
   */
  private class InlineTrivialAccessors extends InvocationsCallback {
    @Override
    void visit(NodeTraversal t, Node callNode, Node parent, String callName) {
      // Skip names defined in externs or used as non-method properties:
      // we cannot prove what they resolve to.
      if (externMethods.contains(callName) ||
          nonMethodProperties.contains(callName)) {
        return;
      }

      Collection<Node> definitions = methodDefinitions.get(callName);
      if (definitions == null || definitions.isEmpty()) {
        return;
      }

      // Do check of arity, complexity, and consistency in what we think is
      // the order from least to most complex
      Node firstDefinition = definitions.iterator().next();

      // Check any multiple definitions
      if (definitions.size() == 1 || allDefinitionsEquivalent(definitions)) {
        // Inlining would drop the argument expressions, so they must be
        // side-effect free.
        if (!argsMayHaveSideEffects(callNode)) {
          // Verify this is a trivial return
          Node returned = returnedExpression(firstDefinition);
          if (returned != null) {
            if (isPropertyTree(returned)) {
              if (logger.isLoggable(Level.FINE)) {
                logger.fine("Inlining property accessor: " + callName);
              }
              inlinePropertyReturn(parent, callNode, returned);
            } else if (NodeUtil.isLiteralValue(returned, false) &&
                !NodeUtil.mayHaveSideEffects(
                    callNode.getFirstChild(), compiler)) {
              if (logger.isLoggable(Level.FINE)) {
                logger.fine("Inlining constant accessor: " + callName);
              }
              inlineConstReturn(parent, callNode, returned);
            }
          } else if (isEmptyMethod(firstDefinition) &&
              !NodeUtil.mayHaveSideEffects(
                  callNode.getFirstChild(), compiler)) {
            if (logger.isLoggable(Level.FINE)) {
              logger.fine("Inlining empty method: " + callName);
            }
            inlineEmptyMethod(t, parent, callNode);
          }
        }
      } else {
        // Multiple non-equivalent definitions: inlining any one of them
        // could be wrong for some call sites, so do nothing.
        if (logger.isLoggable(Level.FINE)) {
          logger.fine("Method '" + callName + "' has conflicting definitions.");
        }
      }
    }
  }
  /** Supplies the traversal callback that performs the actual inlining. */
  @Override
  Callback getActingCallback() {
    return new InlineTrivialAccessors();
  }
/**
* Returns true if the provided node is a getprop for
* which the left child is this or a valid property tree
* and for which the right side is a string.
*/
private static boolean isPropertyTree(Node expectedGetprop) {
if (!expectedGetprop.isGetProp()) {
return false;
}
Node leftChild = expectedGetprop.getFirstChild();
if (!leftChild.isThis() &&
!isPropertyTree(leftChild)) {
return false;
}
Node retVal = leftChild.getNext();
return NodeUtil.getStringValue(retVal) != null;
}
/**
* Finds the occurrence of "this" in the provided property tree and replaces
* it with replacement
*/
private static void replaceThis(Node expectedGetprop, Node replacement) {
Node leftChild = expectedGetprop.getFirstChild();
if (leftChild.isThis()) {
expectedGetprop.replaceChild(leftChild, replacement);
} else {
replaceThis(leftChild, replacement);
}
}
/**
* Return the node that represents the expression returned
* by the method, given a FUNCTION node.
*/
private static Node returnedExpression(Node fn) {
Node expectedBlock = getMethodBlock(fn);
if (!expectedBlock.hasOneChild()) {
return null;
}
Node expectedReturn = expectedBlock.getFirstChild();
if (!expectedReturn.isReturn()) {
return null;
}
if (!expectedReturn.hasOneChild()) {
return null;
}
return expectedReturn.getLastChild();
}
/**
* Return whether the given FUNCTION node is an empty method definition.
*
* Must be private, or moved to NodeUtil.
*/
private static boolean isEmptyMethod(Node fn) {
Node expectedBlock = getMethodBlock(fn);
return expectedBlock == null ?
false : NodeUtil.isEmptyBlock(expectedBlock);
}
/**
* Return a BLOCK node if the given FUNCTION node is a valid method
* definition, null otherwise.
*
* Must be private, or moved to NodeUtil.
*/
private static Node getMethodBlock(Node fn) {
if (fn.getChildCount() != 3) {
return null;
}
Node expectedBlock = fn.getLastChild();
return expectedBlock.isNormalBlock() ? expectedBlock : null;
}
/**
 * Given a set of method definitions, verifies they are all equivalent for
 * inlining purposes. An empty or singleton collection is trivially
 * equivalent.
 */
private boolean allDefinitionsEquivalent(Collection<Node> definitions) {
  Node reference = null;
  for (Node candidate : definitions) {
    if (reference == null) {
      // First definition becomes the baseline to compare against.
      reference = candidate;
      continue;
    }
    if (!compiler.areNodesEqualForInlining(reference, candidate)) {
      return false;
    }
  }
  return true;
}
/**
 * Replaces the provided method call with the property tree in
 * {@code returnedValue}, substituting the call's receiver for THIS.
 *
 * Parse tree of a call is
 * name
 * call
 * getprop
 * obj
 * string
 */
private void inlinePropertyReturn(Node parent, Node call,
    Node returnedValue) {
  Node inlined = returnedValue.cloneTree();
  // The receiver object is detached from the call and re-rooted where
  // THIS appeared in the returned property tree.
  Node receiver = call.getFirstChild().removeFirstChild();
  replaceThis(inlined, receiver);
  parent.replaceChild(call, inlined);
  compiler.reportChangeToEnclosingScope(inlined);
}
/**
 * Replaces the provided object and its method call with the constant tree
 * in {@code returnedValue}. Should be called only if the object reference
 * has no side effects.
 */
private void inlineConstReturn(Node parent, Node call,
    Node returnedValue) {
  // Clone so the original definition tree is left untouched.
  Node replacement = returnedValue.cloneTree();
  parent.replaceChild(call, replacement);
  compiler.reportChangeToEnclosingScope(replacement);
}
/**
 * Removes the provided object and its empty method call. If the call's
 * return value is consumed, it is replaced by "void 0" instead.
 */
private void inlineEmptyMethod(NodeTraversal t, Node parent, Node call) {
  if (NodeUtil.isExprCall(parent)) {
    // The whole expression statement is just this call; drop it entirely.
    parent.replaceWith(IR.empty());
  } else {
    // The value is read somewhere; substitute undefined at the call site.
    parent.replaceChild(call, NodeUtil.newUndefinedNode(call));
  }
  t.reportCodeChange();
}
/**
 * Checks whether any of the given method call's arguments may have side
 * effects.
 * @param call The call node of a method invocation.
 */
private boolean argsMayHaveSideEffects(Node call) {
  // The first child is the callee; arguments start at the second child.
  Node arg = call.getSecondChild();
  while (arg != null) {
    if (NodeUtil.mayHaveSideEffects(arg, compiler)) {
      return true;
    }
    arg = arg.getNext();
  }
  return false;
}
/**
 * A do-nothing signature store. This pass does not need signature
 * tracking, so every callback is intentionally a no-op.
 */
static final MethodCompilerPass.SignatureStore DUMMY_SIGNATURE_STORE =
    new MethodCompilerPass.SignatureStore() {
      @Override
      public void addSignature(
          String functionName, Node functionNode, String sourceFile) {
      }
      @Override
      public void removeSignature(String functionName) {
      }
      @Override
      public void reset() {
      }
    };
/** Returns the shared no-op signature store used by this pass. */
@Override
SignatureStore getSignatureStore() {
  return DUMMY_SIGNATURE_STORE;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.util;
import java.util.ArrayList;
import java.util.Date;
import java.util.Vector;
import org.apache.uima.UIMAFramework;
import org.apache.uima.analysis_engine.AnalysisEngine;
import org.apache.uima.analysis_engine.TextAnalysisEngine;
import org.apache.uima.cas.CASException;
import org.apache.uima.jcas.JCas;
import org.apache.uima.resource.ResourceInitializationException;
import org.apache.uima.resource.metadata.ProcessingResourceMetaData;
/**
 * Note: This class is not used by the framework itself.
 */
/**
 * This class represents a simple pool of {@link JCas} instances. This is useful for multithreaded
 * applications, where there is a need for multiple CASes to be processed simultaneously. Because
 * JCas creation is expensive, it is a good idea to create a pool of reusable JCas instances at
 * initialization time, rather than creating a new JCas each time one is needed.
 * <p>
 * Clients check-out JCas instances from the pool using the {@link #getJCas()} method and check-in
 * JCas instances using the {@link #releaseJCas(JCas)} method.
 */
public class JCasPool {

  /** All JCas instances belonging to this pool, whether checked out or free. */
  private Vector<JCas> mAllInstances = new Vector<>();

  /** JCas instances currently available to be checked out. */
  private Vector<JCas> mFreeInstances = new Vector<>();

  /** Total number of instances this pool holds. */
  private int mNumInstances;

  /**
   * resource bundle for log messages
   */
  private static final String LOG_RESOURCE_BUNDLE = "org.apache.uima.impl.log_messages";

  /**
   * current class
   */
  private static final Class<JCasPool> CLASS_NAME = JCasPool.class;

  /**
   * Creates a new JCasPool
   *
   * @param aNumInstances
   *          the number of JCas instances in the pool
   * @param aTextAnalysisEngine
   *          the TAE that will create the JCas instances and which will later be used to process
   *          them
   *
   * @throws ResourceInitializationException
   *           if the JCas instances could not be created
   *
   * @deprecated As of v2.0, TextAnalysisEngine has been deprecated. Use
   *             {@link #JCasPool(int, AnalysisEngine)} instead.
   */
  @Deprecated
  public JCasPool(int aNumInstances, TextAnalysisEngine aTextAnalysisEngine)
          throws ResourceInitializationException {
    mNumInstances = aNumInstances;
    fillPool(aTextAnalysisEngine.getAnalysisEngineMetaData());
  }

  /**
   * Creates a new JCasPool
   *
   * @param aNumInstances
   *          the number of JCas instances in the pool
   * @param aAnalysisEngine
   *          the AE that will create the JCas instances and which will later be used to process
   *          them
   *
   * @throws ResourceInitializationException
   *           if the JCas instances could not be created
   */
  public JCasPool(int aNumInstances, AnalysisEngine aAnalysisEngine)
          throws ResourceInitializationException {
    mNumInstances = aNumInstances;
    fillPool(aAnalysisEngine.getAnalysisEngineMetaData());
  }

  /**
   * Creates a new JCasPool
   *
   * @param aNumInstances
   *          the number of JCas instances in the pool
   * @param aMetaData
   *          metadata that includes the type system for the CAS
   *
   * @throws ResourceInitializationException
   *           if the CAS instances could not be created
   */
  public JCasPool(int aNumInstances, ProcessingResourceMetaData aMetaData)
          throws ResourceInitializationException {
    mNumInstances = aNumInstances;
    fillPool(aMetaData);
  }

  /**
   * Checks out a JCas from the pool.
   *
   * @return a JCas instance. Returns <code>null</code> if none are available (in which case the
   *         client may {@link Object#wait()} on this object in order to be notified when an
   *         instance becomes available).
   */
  public synchronized JCas getJCas() {
    if (!mFreeInstances.isEmpty()) {
      return mFreeInstances.remove(0);
    }
    // no instances available
    return null;
  }

  /**
   * Checks in a JCas to the pool. This automatically calls the {@link JCas#reset()} method, to
   * ensure that when the JCas is later retrieved from the pool it will be ready to use. Also
   * notifies other Threads that may be waiting for an instance to become available.
   *
   * @param aJCas
   *          the JCas to release
   */
  public synchronized void releaseJCas(JCas aJCas) {
    // make sure this CAS actually belongs to this pool and is checked out
    if (!mAllInstances.contains(aJCas) || mFreeInstances.contains(aJCas)) {
      UIMAFramework.getLogger(CLASS_NAME).logrb(Level.WARNING, CLASS_NAME.getName(), "releaseJCas",
              LOG_RESOURCE_BUNDLE, "UIMA_return_jcas_to_pool__WARNING");
    } else {
      // reset CAS so it is ready for reuse
      aJCas.reset();
      // Add the CAS to the end of the free instances List
      mFreeInstances.add(aJCas);
    }
    // Notify any threads waiting on this object
    notifyAll();
  }

  /**
   * Checks out a JCas from the pool. If none is currently available, wait for the specified amount
   * of time for one to be checked in.
   *
   * @param aTimeout
   *          the time to wait in milliseconds. A value of &lt;=0 will wait forever.
   *
   * @return a JCas instance. Returns <code>null</code> if none are available within the specified
   *         timeout period.
   */
  public synchronized JCas getJCas(long aTimeout) {
    long startTime = System.currentTimeMillis();
    boolean interrupted = false;
    try {
      JCas cas;
      while ((cas = getJCas()) == null) {
        // Compute how long we may still wait. Object.wait() throws
        // IllegalArgumentException for negative values and treats 0 as
        // "wait forever", so clamp accordingly (the old code passed the
        // raw aTimeout, which blew up for negative timeouts and re-waited
        // the full period after every wakeup).
        long waitTime = 0; // 0 == wait indefinitely (aTimeout <= 0)
        if (aTimeout > 0) {
          waitTime = aTimeout - (System.currentTimeMillis() - startTime);
          if (waitTime <= 0) {
            // Timeout has expired
            return null;
          }
        }
        try {
          wait(waitTime);
        } catch (InterruptedException e) {
          // Keep waiting (historical behavior), but remember the interrupt
          // so the thread's interrupt status can be restored on exit
          // instead of being silently swallowed.
          interrupted = true;
        }
        if (aTimeout > 0 && (System.currentTimeMillis() - startTime) >= aTimeout) {
          // Timeout has expired
          return null;
        }
      }
      return cas;
    } finally {
      if (interrupted) {
        Thread.currentThread().interrupt();
      }
    }
  }

  /**
   * Gets the size of this pool (the total number of JCas instances that it can hold).
   *
   * @return the size of this pool
   */
  public int getSize() {
    return mNumInstances;
  }

  /**
   * Utility method used in the constructor to fill the pool with CAS instances.
   *
   * @param aMetaData
   *          metadata including the type system for the CASes
   *
   * @throws ResourceInitializationException
   *           if the Resource instances could not be created
   */
  protected void fillPool(ProcessingResourceMetaData aMetaData)
          throws ResourceInitializationException {
    // fill the pool
    ArrayList<ProcessingResourceMetaData> mdList = new ArrayList<>();
    mdList.add(aMetaData);
    for (int i = 0; i < mNumInstances; i++) {
      JCas c;
      try {
        c = CasCreationUtils.createCas(mdList).getJCas();
      } catch (CASException e) {
        throw new ResourceInitializationException(e);
      }
      mAllInstances.add(c);
      mFreeInstances.add(c);
    }
  }

  /** Returns all instances owned by this pool (checked out or not). */
  protected Vector<JCas> getAllInstances() {
    return mAllInstances;
  }

  /** Returns the instances currently available for check-out. */
  protected Vector<JCas> getFreeInstances() {
    return mFreeInstances;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ipc;
import java.net.NoRouteToHostException;
import java.net.PortUnreachableException;
import java.net.Socket;
import java.net.InetSocketAddress;
import java.net.SocketTimeoutException;
import java.net.UnknownHostException;
import java.net.ConnectException;
import java.io.IOException;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.FilterInputStream;
import java.io.InputStream;
import java.io.InterruptedIOException;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Map.Entry;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import javax.net.SocketFactory;
import org.apache.commons.logging.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ReflectionUtils;
/** A client for an IPC service. IPC calls take a single {@link Writable} as a
* parameter, and return a {@link Writable} as their value. A service runs on
* a port and is defined by a parameter class and a value class.
*
* @see Server
*/
public class Client {
public static final Log LOG =
LogFactory.getLog(Client.class);
private Hashtable<ConnectionId, Connection> connections =
new Hashtable<ConnectionId, Connection>();
private Class<? extends Writable> valueClass; // class of call values
private int counter; // counter for call ids
private AtomicBoolean running = new AtomicBoolean(true); // if client runs
final private Configuration conf;
final private int maxIdleTime; //connections will be culled if it was idle for
//maxIdleTime msecs
final private int connectTimeout; // timeout in msecs for each connect
final private int maxRetries; //the max. no. of retries for socket connections
private boolean tcpNoDelay; // if T then disable Nagle's Algorithm
private int pingInterval; // how often sends ping to the server in msecs
private SocketFactory socketFactory; // how to create sockets
private int refCount = 1;
final private static String PING_INTERVAL_NAME = "ipc.ping.interval";
final static int DEFAULT_PING_INTERVAL = 60000; // 1 min
final static int PING_CALL_ID = -1;
/**
 * set the ping interval value in configuration (stored under the
 * "ipc.ping.interval" key)
 *
 * @param conf Configuration
 * @param pingInterval the ping interval in milliseconds
 */
final public static void setPingInterval(Configuration conf, int pingInterval) {
  conf.setInt(PING_INTERVAL_NAME, pingInterval);
}
/**
 * Get the ping interval from configuration;
 * If not set in the configuration, return the default value
 * ({@link #DEFAULT_PING_INTERVAL}, one minute).
 *
 * @param conf Configuration
 * @return the ping interval in milliseconds
 */
final static int getPingInterval(Configuration conf) {
  return conf.getInt(PING_INTERVAL_NAME, DEFAULT_PING_INTERVAL);
}
/**
 * The time after which a RPC will timeout.
 * If ping is not enabled (via ipc.client.ping), then the timeout value is the
 * same as the pingInterval.
 * If ping is enabled, then there is no timeout value.
 *
 * @param conf Configuration
 * @return the timeout period in milliseconds. -1 if no timeout value is set
 */
final public static int getTimeout(Configuration conf) {
  // Ping enabled -> no timeout; ping disabled -> use the ping interval.
  return conf.getBoolean("ipc.client.ping", true) ? -1 : getPingInterval(conf);
}
/**
 * Increment this client's reference count. Guarded by the Client monitor
 * together with {@link #decCount()} and {@link #isZeroReference()}.
 */
synchronized void incCount() {
  refCount++;
}
/**
 * Decrement this client's reference count. The guard prevents the count
 * from ever going negative.
 */
synchronized void decCount() {
  if (refCount > 0)
    refCount--;
}
/**
 * Return if this client has no reference
 *
 * @return true if this client has no reference; false otherwise
 */
synchronized boolean isZeroReference() {
  return refCount==0;
}
/** Returns the current reference count (for testing/diagnostics). */
synchronized int getRefCount() {
  return refCount;
}
/** A call waiting for a value. Callers synchronize on the Call instance
 * and wait until {@code done} is set; the reader thread delivers either a
 * value or an error and notifies. */
private class Call {
  int id; // call id, unique per Client
  Writable param; // parameter
  Writable value; // value, null if error
  IOException error; // exception, null if value
  boolean done; // true when call is done

  protected Call(Writable param) {
    this.param = param;
    // Ids are allocated under the enclosing Client's lock so concurrent
    // callers always receive distinct ids.
    synchronized (Client.this) {
      this.id = counter++;
    }
  }

  /** Indicate when the call is complete and the
   * value or error are available. Notifies by default. */
  protected synchronized void callComplete() {
    this.done = true;
    notify(); // notify caller
  }

  /** Set the exception when there is an error.
   * Notify the caller the call is done.
   *
   * @param error exception thrown by the call; either local or remote
   */
  public synchronized void setException(IOException error) {
    this.error = error;
    callComplete();
  }

  /** Set the return value when there is no error.
   * Notify the caller the call is done.
   *
   * @param value return value of the call.
   */
  public synchronized void setValue(Writable value) {
    this.value = value;
    callComplete();
  }
}
/** Thread that reads responses and notifies callers. Each connection owns a
 * socket connected to a remote address. Calls are multiplexed through this
 * socket: responses may be delivered out of order. */
private class Connection extends Thread {
  private InetSocketAddress server; // server ip:port
  private ConnectionHeader header; // connection header
  private ConnectionId remoteId; // connection id
  private Socket socket = null; // connected socket
  private DataInputStream in;
  private DataOutputStream out;
  private int rpcTimeout; // max waiting time for each RPC
  // currently active calls
  private Hashtable<Integer, Call> calls = new Hashtable<Integer, Call>();
  private AtomicLong lastActivity = new AtomicLong();// last I/O activity time
  private AtomicBoolean shouldCloseConnection = new AtomicBoolean(); // indicate if the connection is closed
  // Non-zero while some thread is inside setupIOstreamsWithInternal(); the
  // AtomicLong object also doubles as the monitor waiting threads block on.
  private AtomicLong currentSetupId = new AtomicLong(0L);
  private IOException closeException; // close reason

  // Produces daemon threads for the single-threaded sendParams executor.
  private final ThreadFactory daemonThreadFactory = new ThreadFactory() {
    private final ThreadFactory defaultThreadFactory =
        Executors.defaultThreadFactory();
    private final AtomicInteger counter = new AtomicInteger(0);
    @Override
    public Thread newThread(Runnable r) {
      Thread thread = defaultThreadFactory.newThread(r);
      thread.setDaemon(true);
      thread.setName("sendParams-" + counter.getAndIncrement());
      return thread;
    }
  };

  // Performs the actual writes in sendParam so the caller can bound how
  // long it waits for a write to finish.
  private final ExecutorService executor =
      Executors.newSingleThreadExecutor(daemonThreadFactory);

  public Connection(ConnectionId remoteId) throws IOException {
    this.remoteId = remoteId;
    this.server = remoteId.getAddress();
    if (server.isUnresolved()) {
      throw new UnknownHostException("unknown host: " +
          remoteId.getAddress().getHostName());
    }
    this.rpcTimeout = remoteId.getRpcTimeout();
    UserGroupInformation ticket = remoteId.getTicket();
    Class<?> protocol = remoteId.getProtocol();
    header =
        new ConnectionHeader(protocol == null ? null : protocol.getName(), ticket);
    this.setName("IPC Client (" + socketFactory.hashCode() +") connection to " +
        remoteId.getAddress().toString() +
        " from " + ((ticket==null)?"an unknown user":ticket.getUserName()));
    this.setDaemon(true);
  }

  /** Update lastActivity with the current time. */
  private void touch() {
    lastActivity.set(System.currentTimeMillis());
  }

  /**
   * Add a call to this connection's call queue and notify
   * a listener; synchronized.
   * Returns false if called during shutdown.
   * @param call to add
   * @return true if the call was added.
   */
  private synchronized boolean addCall(Call call) {
    if (shouldCloseConnection.get())
      return false;
    calls.put(call.id, call);
    notify(); // wakes waitForWork()
    return true;
  }

  /** This class sends a ping to the remote side when timeout on
   * reading. If no failure is detected, it retries until at least
   * a byte is read.
   */
  private class PingInputStream extends FilterInputStream {
    /* constructor */
    protected PingInputStream(InputStream in) {
      super(in);
    }

    /* Process timeout exception
     * if the connection is not going to be closed or
     * is not configured to have a RPC timeout, send a ping.
     * (if rpcTimeout is not set to be 0, then RPC should timeout.)
     * otherwise, throw the timeout exception.
     */
    private void handleTimeout(SocketTimeoutException e) throws IOException {
      if (shouldCloseConnection.get() || !running.get() || rpcTimeout > 0) {
        throw e;
      } else {
        sendPing();
      }
    }

    /** Read a byte from the stream.
     * Send a ping if timeout on read. Retries if no failure is detected
     * until a byte is read.
     * @throws IOException for any IO problem other than socket timeout
     */
    public int read() throws IOException {
      do {
        try {
          return super.read();
        } catch (SocketTimeoutException e) {
          handleTimeout(e);
        }
      } while (true);
    }

    /** Read bytes into a buffer starting from offset <code>off</code>
     * Send a ping if timeout on read. Retries if no failure is detected
     * until a byte is read.
     *
     * @return the total number of bytes read; -1 if the connection is closed.
     */
    public int read(byte[] buf, int off, int len) throws IOException {
      do {
        try {
          return super.read(buf, off, len);
        } catch (SocketTimeoutException e) {
          handleTimeout(e);
        }
      } while (true);
    }
  }

  /** Connect to the server and set up the I/O streams. It then sends
   * a header to the server and starts
   * the connection thread that waits for responses.
   * Only one thread performs the setup; concurrent callers wait on the
   * currentSetupId monitor until that thread finishes, then return.
   */
  private void setupIOstreams() {
    synchronized(currentSetupId) {
      long setupId = currentSetupId.get();
      if (setupId != 0L) {
        // There is a thread setting up the streams. Just wait
        // the thread to finish and exit.
        try {
          do {
            currentSetupId.wait();
          } while (currentSetupId.get() == setupId);
        } catch (InterruptedException ie) {
          // NOTE(review): interrupt is swallowed here; caller proceeds as
          // if setup finished — confirm callers tolerate this.
        }
        return;
      } else {
        // No one is doing the setting. Set the setupID and
        // initialize the streams.
        currentSetupId.set(System.currentTimeMillis());
      }
    }
    try {
      setupIOstreamsWithInternal();
    } finally {
      // Always clear the setup id and wake waiters, even on failure.
      synchronized(currentSetupId) {
        currentSetupId.set(0L);
        currentSetupId.notifyAll();
      }
    }
  }

  /** Connect to the server and set up the I/O streams. It then sends
   * a header to the server and starts
   * the connection thread that waits for responses.
   */
  private synchronized void setupIOstreamsWithInternal() {
    if (socket != null || shouldCloseConnection.get()) {
      return; // already connected, or closing
    }
    short ioFailures = 0;
    short timeoutFailures = 0;
    try {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Connecting to "+server);
      }
      while (true) {
        try {
          this.socket = socketFactory.createSocket();
          this.socket.setTcpNoDelay(tcpNoDelay);
          // connection time out is 20s by default
          NetUtils.connect(this.socket, remoteId.getAddress(), connectTimeout);
          if (rpcTimeout > 0) {
            pingInterval = rpcTimeout; // rpcTimeout overwrites pingInterval
          }
          this.socket.setSoTimeout(pingInterval);
          break;
        } catch (SocketTimeoutException toe) {
          /* The max number of retries is 45,
           * which amounts to 20s*45 = 15 minutes retries.
           */
          handleConnectionFailure(timeoutFailures++, maxRetries, toe);
        } catch (IOException ie) {
          handleConnectionFailure(ioFailures++, maxRetries, ie);
        }
      }
      this.in = new DataInputStream(new BufferedInputStream
          (new PingInputStream(NetUtils.getInputStream(socket))));
      this.out = new DataOutputStream
          (new BufferedOutputStream(NetUtils.getOutputStream(socket)));
      writeHeader();
      // update last activity time
      touch();
      // start the receiver thread after the socket connection has been set up
      start();
    } catch (IOException e) {
      markClosed(e);
      close();
    }
  }

  /* Handle connection failures
   *
   * If the current number of retries is equal to the max number of retries,
   * stop retrying and throw the exception; Otherwise backoff 1 second and
   * try connecting again.
   *
   * This Method is only called from inside setupIOstreams(), which is
   * synchronized. Hence the sleep is synchronized; the locks will be retained.
   *
   * @param curRetries current number of retries
   * @param maxRetries max number of retries allowed
   * @param ioe failure reason
   * @throws IOException if max number of retries is reached
   */
  private void handleConnectionFailure(
      int curRetries, int maxRetries, IOException ioe) throws IOException {
    // close the current connection
    if (socket != null) {
      try {
        socket.close();
      } catch (IOException e) {
        LOG.warn("Not able to close a socket", e);
      }
    }
    // set socket to null so that the next call to setupIOstreams
    // can start the process of connect all over again.
    socket = null;
    // throw the exception if the maximum number of retries is reached
    if (curRetries >= maxRetries) {
      throw ioe;
    }
    // otherwise back off and retry
    try {
      Thread.sleep(1000);
    } catch (InterruptedException ignored) {
      throw new InterruptedIOException();
    }
    LOG.info("Retrying connect to server: " + server +
        ". Already tried " + curRetries + " time(s).");
  }

  /* Write the header for each connection
   * Out is not synchronized because only the first thread does this.
   */
  private void writeHeader() throws IOException {
    // Write out the header and version
    out.write(Server.HEADER.array());
    out.write(Server.CURRENT_VERSION);
    // Write out the ConnectionHeader
    DataOutputBuffer buf = new DataOutputBuffer();
    header.write(buf);
    // Write out the payload length
    int bufLen = buf.getLength();
    out.writeInt(bufLen);
    out.write(buf.getData(), 0, bufLen);
  }

  /* wait till someone signals us to start reading RPC response or
   * it is idle too long, it is marked as to be closed,
   * or the client is marked as not running.
   *
   * Return true if it is time to read a response; false otherwise.
   */
  private synchronized boolean waitForWork() {
    if (calls.isEmpty() && !shouldCloseConnection.get() && running.get()) {
      long timeout = maxIdleTime-
          (System.currentTimeMillis()-lastActivity.get());
      if (timeout>0) {
        try {
          wait(timeout);
        } catch (InterruptedException e) {}
      }
    }
    // Re-check the state after (possibly) waiting.
    if (!calls.isEmpty() && !shouldCloseConnection.get() && running.get()) {
      return true;
    } else if (shouldCloseConnection.get()) {
      return false;
    } else if (calls.isEmpty()) { // idle connection closed or stopped
      markClosed(null);
      return false;
    } else { // get stopped but there are still pending requests
      markClosed((IOException)new IOException().initCause(
          new InterruptedException()));
      return false;
    }
  }

  /** Returns the remote server address this connection talks to. */
  public InetSocketAddress getRemoteAddress() {
    return server;
  }

  /* Send a ping to the server if the time elapsed
   * since last I/O activity is equal to or greater than the ping interval
   */
  private synchronized void sendPing() throws IOException {
    long curTime = System.currentTimeMillis();
    if ( curTime - lastActivity.get() >= pingInterval) {
      lastActivity.set(curTime);
      synchronized (out) {
        out.writeInt(PING_CALL_ID);
        out.flush();
      }
    }
  }

  /** Main loop: read responses until idle-timeout, stop, or close. */
  public void run() {
    if (LOG.isDebugEnabled())
      LOG.debug(getName() + ": starting, having connections "
          + connections.size());
    while (waitForWork()) {//wait here for work - read or close connection
      receiveResponse();
    }
    close();
    if (LOG.isDebugEnabled())
      LOG.debug(getName() + ": stopped, remaining connections "
          + connections.size());
  }

  /** Initiates a call by sending the parameter to the remote server.
   * Note: this is not called from the Connection thread, but by other
   * threads. The write itself happens on the executor; the caller blocks
   * at most pingInterval ms for it to complete.
   */
  public void sendParam(final Call call) throws InterruptedException {
    if (shouldCloseConnection.get()) {
      return;
    }
    final CountDownLatch latch = new CountDownLatch(1);
    executor.submit(new Runnable() {
      @Override
      public void run() {
        DataOutputBuffer d = null;
        try {
          if (shouldCloseConnection.get()) {
            return;
          }
          synchronized (Connection.this.out) {
            if (LOG.isDebugEnabled())
              LOG.debug(getName() + " sending #" + call.id);
            //for serializing the
            //data to be written
            d = new DataOutputBuffer();
            d.writeInt(call.id);
            call.param.write(d);
            byte[] data = d.getData();
            int dataLength = d.getLength();
            out.writeInt(dataLength); //first put the data length
            out.write(data, 0, dataLength);//write the data
            out.flush();
          }
        } catch (IOException e) {
          markClosed(e);
        } finally {
          latch.countDown();
          //the buffer is just an in-memory buffer, but it is still polite to
          // close early
          IOUtils.closeStream(d);
        }
      }
    });
    if (!latch.await(pingInterval, TimeUnit.MILLISECONDS)) {
      markClosed(new IOException(
          String.format("timeout waiting for sendParam, %d ms", pingInterval)
      ));
    }
  }

  /* Receive a response.
   * Because only one receiver, so no synchronization on in.
   */
  private void receiveResponse() {
    if (shouldCloseConnection.get()) {
      return;
    }
    touch();
    try {
      int id = in.readInt(); // try to read an id
      if (LOG.isDebugEnabled())
        LOG.debug(getName() + " got value #" + id);
      // NOTE(review): calls.get(id) is assumed non-null for SUCCESS/ERROR
      // states; an unknown id from the server would NPE here — confirm the
      // server never sends ids that were not registered via addCall().
      Call call = calls.get(id);
      int state = in.readInt(); // read call status
      if (state == Status.SUCCESS.state) {
        Writable value = ReflectionUtils.newInstance(valueClass, conf);
        value.readFields(in); // read value
        call.setValue(value);
        calls.remove(id);
      } else if (state == Status.ERROR.state) {
        call.setException(new RemoteException(WritableUtils.readString(in),
            WritableUtils.readString(in)));
        calls.remove(id);
      } else if (state == Status.FATAL.state) {
        // Close the connection
        markClosed(new RemoteException(WritableUtils.readString(in),
            WritableUtils.readString(in)));
      }
    } catch (IOException e) {
      markClosed(e);
    } catch (Throwable te) {
      markClosed((IOException)new IOException().initCause(te));
    }
  }

  /** Records the close reason (first caller wins) and wakes waiters. */
  private synchronized void markClosed(IOException e) {
    if (shouldCloseConnection.compareAndSet(false, true)) {
      executor.shutdown();
      closeException = e;
      notifyAll();
    }
  }

  /** Close the connection. */
  private synchronized void close() {
    if (!shouldCloseConnection.get()) {
      LOG.error("The connection is not in the closed state");
      return;
    }
    // release the resources
    // first thing to do;take the connection out of the connection list
    synchronized (connections) {
      if (connections.get(remoteId) == this) {
        connections.remove(remoteId);
        connections.notifyAll(); // stop() waits for connections to empty
      }
    }
    // close the streams and therefore the socket
    IOUtils.closeStream(out);
    IOUtils.closeStream(in);
    // clean up all calls
    if (closeException == null) {
      if (!calls.isEmpty()) {
        LOG.warn(
            "A connection is closed for no cause and calls are not empty");
        // clean up calls anyway
        closeException = new IOException("Unexpected closed connection");
        cleanupCalls();
      }
    } else {
      // log the info
      if (LOG.isDebugEnabled()) {
        LOG.debug("closing ipc connection to " + server + ": " +
            closeException.getMessage(),closeException);
      }
      // cleanup calls
      cleanupCalls();
    }
    if (LOG.isDebugEnabled())
      LOG.debug(getName() + ": closed");
  }

  /* Cleanup all calls and mark them as done */
  private void cleanupCalls() {
    Iterator<Entry<Integer, Call>> itor = calls.entrySet().iterator() ;
    while (itor.hasNext()) {
      Call c = itor.next().getValue();
      c.setException(closeException); // local exception
      itor.remove();
    }
  }
}
/** Call implementation used for parallel calls. Instead of notifying a
 * waiter directly, completion is forwarded to a shared result collector. */
private class ParallelCall extends Call {
  private ParallelResults results; // shared collector for the batch
  private int index; // this call's slot in the results array

  public ParallelCall(Writable param, ParallelResults results, int index) {
    super(param);
    this.results = results;
    this.index = index;
  }

  /** Deliver result to result collector. */
  protected void callComplete() {
    results.callComplete(this);
  }
}
/** Result collector for parallel calls. One thread waits on this object;
 * it is notified once all expected results have arrived. */
private static class ParallelResults {
  private Writable[] values; // one slot per parallel call
  private int size; // expected number of results
  private int count; // results received so far

  public ParallelResults(int size) {
    this.values = new Writable[size];
    this.size = size;
  }

  /** Collect a result. */
  public synchronized void callComplete(ParallelCall call) {
    values[call.index] = call.value; // store the value
    count++; // count it
    if (count == size) // if all values are in
      notify(); // then notify waiting caller
  }
}
/** Construct an IPC client whose values are of the given {@link Writable}
 * class.
 *
 * @param valueClass class used to instantiate RPC response values
 * @param conf configuration supplying idle/connect timeouts, retry count,
 *        TCP_NODELAY flag and ping interval
 * @param factory factory used to create sockets for connections
 */
public Client(Class<? extends Writable> valueClass, Configuration conf,
    SocketFactory factory) {
  this.valueClass = valueClass;
  this.maxIdleTime =
      conf.getInt("ipc.client.connection.maxidletime", 10000); //10s
  this.connectTimeout =
      conf.getInt("ipc.client.connect.timeout", 20000); //20s
  this.maxRetries = conf.getInt("ipc.client.connect.max.retries", 10);
  this.tcpNoDelay = conf.getBoolean("ipc.client.tcpnodelay", false);
  this.pingInterval = getPingInterval(conf);
  if (LOG.isDebugEnabled()) {
    // Fixed missing space: previously rendered as "...interval is10000ms."
    LOG.debug("The ping interval is " + this.pingInterval + "ms.");
  }
  this.conf = conf;
  this.socketFactory = factory;
}
/**
 * Construct an IPC client with the default SocketFactory obtained from
 * the configuration.
 * @param valueClass class used to instantiate RPC response values
 * @param conf configuration supplying connection tuning parameters
 */
public Client(Class<? extends Writable> valueClass, Configuration conf) {
  this(valueClass, conf, NetUtils.getDefaultSocketFactory(conf));
}
/** Return the socket factory of this client
 *
 * @return this client's socket factory
 */
SocketFactory getSocketFactory() {
  return socketFactory;
}
/** Stop all threads related to this client. No further calls may be made
 * using this client. Idempotent: only the first caller performs the
 * shutdown; connections are interrupted and this method blocks until each
 * one removes itself from the connection table (see Connection.close()). */
public void stop() {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Stopping client");
  }
  // Flip running exactly once; later callers return immediately.
  if (!running.compareAndSet(true, false)) {
    return;
  }
  synchronized (connections) {
    // wake up all connections
    for (Connection conn : connections.values()) {
      conn.interrupt();
    }
    // wait until all connections are closed
    while (!connections.isEmpty()) {
      try {
        connections.wait();
      } catch (InterruptedException e) {
        // pass
      }
    }
  }
}
/** Make a call, passing <code>param</code>, to the IPC server running at
 * <code>address</code>, returning the value. Throws exceptions if there are
 * network problems or if the remote code threw an exception.
 * Delegates with a null ticket.
 * @deprecated Use {@link #call(Writable, InetSocketAddress, Class, UserGroupInformation)} instead
 */
@Deprecated
public Writable call(Writable param, InetSocketAddress address)
    throws InterruptedException, IOException {
  return call(param, address, null);
}
/** Make a call, passing <code>param</code>, to the IPC server running at
 * <code>address</code> with the <code>ticket</code> credentials, returning
 * the value.
 * Throws exceptions if there are network problems or if the remote code
 * threw an exception.
 * Delegates with a null protocol and no RPC timeout (0).
 * @deprecated Use {@link #call(Writable, InetSocketAddress, Class, UserGroupInformation)} instead
 */
@Deprecated
public Writable call(Writable param, InetSocketAddress addr,
    UserGroupInformation ticket)
    throws InterruptedException, IOException {
  return call(param, addr, null, ticket, 0);
}
/** Make a call, passing <code>param</code>, to the IPC server running at
 * <code>address</code> which is servicing the <code>protocol</code> protocol,
 * with the <code>ticket</code> credentials and <code>rpcTimeout</code>,
 * returning the value.
 * Throws exceptions if there are network problems or if the remote code
 * threw an exception. Blocks the calling thread until the Call completes;
 * interrupts received while waiting are deferred and the interrupt status
 * is restored before returning or throwing. */
public Writable call(Writable param, InetSocketAddress addr,
    Class<?> protocol, UserGroupInformation ticket,
    int rpcTimeout)
    throws InterruptedException, IOException {
  Call call = new Call(param);
  Connection connection = getConnection(addr, protocol, ticket,
      rpcTimeout, call);
  try {
    connection.sendParam(call); // send the parameter
  } catch (RejectedExecutionException e) {
    // sendParam's executor was shut down by markClosed().
    throw new IOException("connection has been closed", e);
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    throw new IOException("interrupted waiting for sendParam to complete", e);
  }
  boolean interrupted = false;
  synchronized (call) {
    while (!call.done) {
      try {
        call.wait(); // wait for the result
      } catch (InterruptedException ie) {
        // save the fact that we were interrupted
        interrupted = true;
      }
    }
    if (interrupted) {
      // set the interrupt flag now that we are done waiting
      Thread.currentThread().interrupt();
    }
    if (call.error != null) {
      if (call.error instanceof RemoteException) {
        // Re-fill the stack trace so it points at this caller, not the
        // reader thread that constructed the exception.
        call.error.fillInStackTrace();
        throw call.error;
      } else { // local exception
        throw wrapException(addr, call.error);
      }
    } else {
      return call.value;
    }
  }
}
/**
 * Take an IOException and the address we were trying to connect to
 * and return an IOException with the input exception as the cause.
 * The new exception provides the stack trace of the place where
 * the exception is thrown and some extra diagnostics information.
 * If the exception is ConnectException, SocketTimeoutException,
 * NoRouteToHostException or PortUnreachableException, a new exception
 * of the same type is returned; otherwise a plain IOException is.
 *
 * @param addr target address
 * @param exception the relevant exception
 * @return an exception to throw
 */
private IOException wrapException(InetSocketAddress addr,
                                  IOException exception) {
  final IOException wrapped;
  if (exception instanceof ConnectException) {
    //connection refused; include the host:port in the error
    wrapped = new ConnectException(
        "Call to " + addr + " failed on connection exception: " + exception);
  } else if (exception instanceof SocketTimeoutException) {
    wrapped = new SocketTimeoutException(
        "Call to " + addr + " failed on socket timeout exception: "
        + exception);
  } else if (exception instanceof NoRouteToHostException) {
    wrapped = new NoRouteToHostException(
        "Call to " + addr + " failed on NoRouteToHostException exception: "
        + exception);
  } else if (exception instanceof PortUnreachableException) {
    wrapped = new PortUnreachableException(
        "Call to " + addr + " failed on PortUnreachableException exception: "
        + exception);
  } else {
    wrapped = new IOException(
        "Call to " + addr + " failed on local exception: " + exception);
  }
  // attach the original as the cause, exactly as each branch did before
  wrapped.initCause(exception);
  return wrapped;
}
/**
 * Makes a set of calls in parallel. Each parameter is sent to the
 * corresponding address. When all values are available, or have timed out
 * or errored, the collected results are returned in an array. The array
 * contains nulls for calls that timed out or errored.
 * @param params the requests to send, one per address
 * @param addresses the target servers, parallel to <code>params</code>
 * @return the results array, with nulls for failed calls
 * @throws IOException on network problems
 * @deprecated Use {@link #call(Writable[], InetSocketAddress[], Class, UserGroupInformation)} instead
 */
@Deprecated
public Writable[] call(Writable[] params, InetSocketAddress[] addresses)
    throws IOException {
  // delegate with no protocol class and no ticket credentials
  return call(params, addresses, null, null);
}
/** Makes a set of calls in parallel. Each parameter is sent to the
 * corresponding address. When all values are available, or have timed out
 * or errored, the collected results are returned in an array. The array
 * contains nulls for calls that timed out or errored.
 * If the calling thread is interrupted while waiting for results, the
 * interrupt status is restored before returning (previously the
 * interrupt was silently swallowed by an empty catch).
 * @param params the requests to send, one per address
 * @param addresses the target servers, parallel to <code>params</code>
 * @param protocol the protocol class being serviced (may be null)
 * @param ticket the user credentials to authenticate with (may be null)
 * @return the results array, with nulls for failed calls
 * @throws IOException on network problems
 */
public Writable[] call(Writable[] params, InetSocketAddress[] addresses,
                       Class<?> protocol, UserGroupInformation ticket)
  throws IOException {
  if (addresses.length == 0) return new Writable[0];

  ParallelResults results = new ParallelResults(params.length);
  synchronized (results) {
    for (int i = 0; i < params.length; i++) {
      ParallelCall call = new ParallelCall(params[i], results, i);
      try {
        Connection connection =
            getConnection(addresses[i], protocol, ticket, 0, call);
        connection.sendParam(call);             // send each parameter
      } catch (RejectedExecutionException e) {
        throw new IOException("connection has been closed", e);
      } catch (IOException e) {
        // log errors
        LOG.info("Calling "+addresses[i]+" caught: " +
                 e.getMessage(),e);
        results.size--;                         //  wait for one fewer result
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        LOG.warn("interrupted waiting to send params to server", e);
        throw new IOException(e);
      }
    }
    boolean interrupted = false;
    while (results.count != results.size) {
      try {
        results.wait();                    // wait for all results
      } catch (InterruptedException e) {
        // remember the interrupt; keep waiting until all results arrive
        interrupted = true;
      }
    }
    if (interrupted) {
      // restore the interrupt status, matching the single-call path
      Thread.currentThread().interrupt();
    }
    return results.values;
  }
}
/** Get a connection from the pool, or create a new one and add it to the
 * pool. Connections to a given host/port are reused.
 * @param addr the server address to connect to
 * @param protocol the protocol class being serviced (may be null)
 * @param ticket the user credentials (may be null; compared by reference)
 * @param rpcTimeout the RPC timeout; 0 means no timeout
 * @param call the call to register on the connection before returning
 * @return a connection with <code>call</code> successfully added
 * @throws IOException if the client has been stopped or the connection
 *         streams cannot be set up
 */
private Connection getConnection(InetSocketAddress addr,
                                 Class<?> protocol,
                                 UserGroupInformation ticket,
                                 int rpcTimeout,
                                 Call call)
                                 throws IOException {
  if (!running.get()) {
    // the client is stopped
    throw new IOException("The client is stopped");
  }
  Connection connection;
  /* we could avoid this allocation for each RPC by having a
   * connectionsId object and with set() method. We need to manage the
   * refs for keys in HashMap properly. For now its ok.
   */
  ConnectionId remoteId = new ConnectionId(
      addr, protocol, ticket, rpcTimeout);
  do {
    synchronized (connections) {
      connection = connections.get(remoteId);
      if (connection == null) {
        connection = new Connection(remoteId);
        connections.put(remoteId, connection);
      }
    }
    // addCall can fail if the pooled connection is concurrently closing;
    // loop to fetch/create a fresh one until the call is accepted
  } while (!connection.addCall(call));

  //we don't invoke the method below inside "synchronized (connections)"
  //block above. The reason for that is if the server happens to be slow,
  //it will take longer to establish a connection and that will slow the
  //entire system down.
  connection.setupIOstreams();
  return connection;
}
/**
 * This class holds the address and the user ticket. The client connections
 * to servers are uniquely identified by <remoteAddress, protocol, ticket>
 * plus the RPC timeout. Used as the key of the connection pool map.
 */
private static class ConnectionId {
  InetSocketAddress address;
  UserGroupInformation ticket;
  Class<?> protocol;
  private static final int PRIME = 16777619;
  private int rpcTimeout;

  ConnectionId(InetSocketAddress address, Class<?> protocol,
               UserGroupInformation ticket, int rpcTimeout) {
    this.protocol = protocol;
    this.address = address;
    this.ticket = ticket;
    this.rpcTimeout = rpcTimeout;
  }

  InetSocketAddress getAddress() {
    return address;
  }

  Class<?> getProtocol() {
    return protocol;
  }

  UserGroupInformation getTicket() {
    return ticket;
  }

  private int getRpcTimeout() {
    return rpcTimeout;
  }

  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof ConnectionId)) {
      return false;
    }
    final ConnectionId that = (ConnectionId) obj;
    // Note: ticket and protocol are compared by reference, which mirrors
    // the System.identityHashCode use in hashCode() below.
    return address.equals(that.address)
        && protocol == that.protocol
        && ticket == that.ticket
        && rpcTimeout == that.rpcTimeout;
  }

  @Override
  public int hashCode() {
    // Equivalent to the single-expression form:
    // address.hashCode() +
    //   PRIME*(PRIME*(PRIME*idHash(protocol) + idHash(ticket)) + rpcTimeout)
    int mixed = PRIME * System.identityHashCode(protocol)
        + System.identityHashCode(ticket);
    mixed = PRIME * mixed + rpcTimeout;
    return address.hashCode() + PRIME * mixed;
  }
}
}
| |
package com.planet_ink.coffee_mud.Abilities.Druid;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2014 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings("rawtypes")
public class Chant_PlantWall extends Chant
{
	@Override public String ID() { return "Chant_PlantWall"; }
	private final static String localizedName = CMLib.lang()._("Plant Wall");
	@Override public String name() { return localizedName; }
	@Override public int classificationCode(){return Ability.ACODE_CHANT|Ability.DOMAIN_PLANTGROWTH;}
	private final static String localizedStaticDisplay = CMLib.lang()._("(Plant Wall)");
	@Override public String displayText() { return localizedStaticDisplay; }
	@Override public int maxRange(){return adjustedMaxInvokerRange(10);}
	@Override public int minRange(){return 1;}
	@Override public int abstractQuality(){ return Ability.QUALITY_INDIFFERENT;}
	@Override protected int canAffectCode(){return CAN_ITEMS;}
	@Override protected int canTargetCode(){return 0;}

	// wall strength remaining; set from the caster's hit points in invoke()
	protected int amountRemaining=0;
	// the wall item placed in the room by invoke()
	protected Item theWall=null;
	// message shown to the room when the wall goes away (see unInvoke())
	protected String deathNotice="";

	/**
	 * Applies damage to the wall, destroying the affected wall item when
	 * its remaining strength drops below zero. Extracted from the two
	 * identical copies formerly inlined in okMessage().
	 * @param damage the amount to subtract from the wall's strength
	 */
	private void damageWall(final int damage)
	{
		amountRemaining-=damage;
		if(amountRemaining<0)
		{
			deathNotice="The plant wall is destroyed!";
			((Item)affected).destroy();
		}
	}

	/**
	 * Intercepts combat messages while the wall stands. Ranged attacks and
	 * melee advances from the invoker's opponent damage the wall instead of
	 * the invoker; ranged spells are absorbed outright.
	 */
	@Override
	public boolean okMessage(final Environmental myHost, final CMMsg msg)
	{
		if((affected==null)||(!(affected instanceof Item)))
			return true;
		final MOB mob=msg.source();
		// only interfere with the invoker's current opponent
		if((invoker!=null)
		&&(mob.isInCombat())
		&&(mob!=invoker)
		&&(mob.getVictim()==invoker))
		{
			// ranged weapon attack: the wall takes the damage
			if((msg.targetMinor()==CMMsg.TYP_WEAPONATTACK)
			&&(mob.rangeToTarget()>0)
			&&(msg.tool()!=null)
			&&(msg.tool() instanceof Weapon)
			&&(((Weapon)msg.tool()).weaponClassification()==Weapon.CLASS_RANGED)
			&&(msg.tool().maxRange()>0))
			{
				final CMMsg msg2=CMClass.getMsg(mob,null,CMMsg.MSG_WEAPONATTACK,_("^F^<FIGHT^><S-NAME> fire(s) at the plant wall with @x1.^</FIGHT^>^?",msg.tool().name()));
				CMLib.color().fixSourceFightColor(msg2);
				if(mob.location().okMessage(mob,msg2))
				{
					mob.location().send(mob,msg2);
					damageWall(mob.phyStats().damage());
				}
				return false;
			}
			else
			// advancing into melee range: the attacker hacks at the wall instead
			if((mob.rangeToTarget()==1)&&(msg.sourceMinor()==CMMsg.TYP_ADVANCE))
			{
				Item w=mob.fetchWieldedItem();
				if(w==null) w=mob.myNaturalWeapon();
				if(w==null) return false;
				final CMMsg msg2=CMClass.getMsg(mob,null,CMMsg.MSG_WEAPONATTACK,_("^F<S-NAME> hack(s) at the plant wall with @x1.^?",w.name()));
				CMLib.color().fixSourceFightColor(msg2);
				if(mob.location().okMessage(mob,msg2))
				{
					mob.location().send(mob,msg2);
					damageWall(mob.phyStats().damage());
				}
				return false;
			}
			else
			// ranged spell: absorbed by the wall with no damage to it
			if((mob.rangeToTarget()>0)
			&&(msg.targetMinor()==CMMsg.TYP_CAST_SPELL)
			&&(msg.tool()!=null)
			&&(msg.tool() instanceof Ability)
			&&(msg.tool().maxRange()>0))
			{
				final CMMsg msg2=CMClass.getMsg(mob,null,msg.tool(),CMMsg.MSG_OK_VISUAL,_("^F^<FIGHT^>The plant wall absorbs <O-NAME> from <S-NAME>.^</FIGHT^>^?"));
				CMLib.color().fixSourceFightColor(msg2);
				if(mob.location().okMessage(mob,msg2))
					mob.location().send(mob,msg2);
				return false;
			}
		}
		return super.okMessage(myHost,msg);
	}

	/**
	 * Removes the wall item from its room (announcing deathNotice) when the
	 * chant's effect ends.
	 */
	@Override
	public void unInvoke()
	{
		super.unInvoke();
		if(canBeUninvoked())
		{
			if((theWall!=null)
			&&(theWall.owner()!=null)
			&&(theWall.owner() instanceof Room)
			&&(((Room)theWall.owner()).isContent(theWall)))
			{
				// fall back to a deity actor if the invoker is gone
				final MOB actorM=(invoker!=null)? invoker : CMLib.map().deity();
				((Room)theWall.owner()).show(actorM,null,CMMsg.MSG_OK_VISUAL,deathNotice);
				final Item wall=theWall;
				theWall=null;
				wall.destroy();
			}
		}
	}

	/**
	 * Ends the effect if the invoker and the wall are no longer in the
	 * same room.
	 */
	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		if(tickID==Tickable.TICKID_MOB)
		{
			if((invoker!=null)
			&&(theWall!=null)
			&&(invoker.location()!=null)
			&&(!invoker.location().isContent(theWall)))
				unInvoke();
		}
		return super.tick(ticking,tickID);
	}

	/**
	 * Casts the chant: requires being outdoors and in ranged combat, and
	 * refuses to stack with an existing wall in the room. On success,
	 * creates the wall item, sizes its strength from the caster's hit
	 * points and expertise levels, and attaches this effect to it.
	 */
	@Override
	public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel)
	{
		if(((mob.location().domainType()&Room.INDOORS)>0)&&(!auto))
		{
			mob.tell(_("You must be outdoors for this chant to work."));
			return false;
		}
		if((!mob.isInCombat())||(mob.rangeToTarget()<1))
		{
			mob.tell(_("You really should be in ranged combat to use this chant."));
			return false;
		}
		// refuse to stack: only one plant wall per room
		for(int i=0;i<mob.location().numItems();i++)
		{
			final Item I=mob.location().getItem(i);
			if((I!=null)&&(I.fetchEffect(ID())!=null))
			{
				mob.tell(_("There is already a plant wall here."));
				return false;
			}
		}

		// the invoke method for spells receives as
		// parameters the invoker, and the REMAINING
		// command line parameters, divided into words,
		// and added as String objects to a vector.
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;

		final Physical target = mob.location();
		final boolean success=proficiencyCheck(mob,0,auto);
		if(success)
		{
			// it worked, so build a copy of this ability,
			// and add it to the affects list of the
			// affected MOB.  Then tell everyone else
			// what happened.
			final CMMsg msg = CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?_("A plant wall appears!"):_("^S<S-NAME> chant(s) for a plant wall!^?"));
			if(mob.location().okMessage(mob,msg))
			{
				mob.location().send(mob,msg);
				// wall strength scales with caster hit points and expertise
				amountRemaining=(mob.baseState().getHitPoints()/6)+(2*(super.getX1Level(invoker())+super.getXLEVELLevel(invoker())));
				final Item I=CMClass.getItem("GenItem");
				I.setName(_("a plant wall"));
				I.setDisplayText(_("a writhing plant wall has grown here"));
				I.setDescription(_("The wall is thick and stringy."));
				I.setMaterial(RawMaterial.RESOURCE_GREENS);
				CMLib.flags().setGettable(I,false);
				I.recoverPhyStats();
				mob.location().addItem(I);
				theWall=I;
				deathNotice="The plant wall withers away!";
				beneficialAffect(mob,I,asLevel,0);
			}
		}
		else
			return beneficialWordsFizzle(mob,null,_("<S-NAME> incant(s), but the magic fizzles."));

		// return whether it worked
		return success;
	}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.